ttm               456 arch/powerpc/perf/power5+-pmu.c 	unsigned int ttm;
ttm               541 arch/powerpc/perf/power5+-pmu.c 		ttm = unit >> 2;
ttm               542 arch/powerpc/perf/power5+-pmu.c 		mmcr1 |= (unsigned long)ttm
ttm               387 arch/powerpc/perf/power5-pmu.c 	unsigned int ttm, grp;
ttm               481 arch/powerpc/perf/power5-pmu.c 		ttm = unit >> 2;
ttm               482 arch/powerpc/perf/power5-pmu.c 		mmcr1 |= (unsigned long)ttm
ttm               260 arch/powerpc/perf/ppc970-pmu.c 	unsigned int ttm, grp;
ttm               317 arch/powerpc/perf/ppc970-pmu.c 		ttm = unitmap[i];
ttm               318 arch/powerpc/perf/ppc970-pmu.c 		++ttmuse[(ttm >> 2) & 1];
ttm               319 arch/powerpc/perf/ppc970-pmu.c 		mmcr1 |= (unsigned long)(ttm & ~4) << MMCR1_TTM1SEL_SH;
ttm               331 arch/powerpc/perf/ppc970-pmu.c 			ttm = (unitmap[unit] >> 2) & 1;
ttm               333 arch/powerpc/perf/ppc970-pmu.c 			ttm = 2;
ttm               335 arch/powerpc/perf/ppc970-pmu.c 			ttm = 3;
ttm               339 arch/powerpc/perf/ppc970-pmu.c 		mmcr1 |= (unsigned long)ttm
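In the three powerpc PMU drivers above, ttm is a small unit-select value: the upper bits of an event's unit code are shifted into a TTMxSEL field of MMCR1 to choose which functional unit feeds an event bus (ppc970 additionally routes the unit through a unitmap first). A minimal sketch of the recurring pattern, with an illustrative shift name:

    unsigned int ttm = unit >> 2;      /* unit code -> TTM select value */
    mmcr1 |= (unsigned long)ttm << MMCR1_TTM0SEL_SH;  /* shift name illustrative */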
ttm               531 arch/x86/platform/uv/tlb_uv.c 	cycles_t ttm;
ttm               550 arch/x86/platform/uv/tlb_uv.c 			ttm = get_cycles();
ttm               558 arch/x86/platform/uv/tlb_uv.c 			if (cycles_2_us(ttm - bcp->send_message) < timeout_us) {
ttm               604 arch/x86/platform/uv/tlb_uv.c 	cycles_t ttm;
ttm               626 arch/x86/platform/uv/tlb_uv.c 			ttm = get_cycles();
ttm               637 arch/x86/platform/uv/tlb_uv.c 			if (cycles_2_us(ttm - bcp->send_message) < timeout_us) {
ttm               652 arch/x86/platform/uv/tlb_uv.c 				ttm = get_cycles();
ttm               653 arch/x86/platform/uv/tlb_uv.c 				if ((ttm - bcp->send_message) > bcp->timeout_interval)
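In the x86 UV Broadcast Assist Unit code, ttm is something entirely different: a cycle-counter sample used to decide whether a broadcast TLB-shootdown message has exceeded its software timeout. The test shared by the call sites above, sketched (the result variable is illustrative):

    cycles_t ttm = get_cycles();
    /* elapsed time since the message was sent, compared in microseconds */
    bool expired = cycles_2_us(ttm - bcp->send_message) >= timeout_us;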
ttm               280 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (WARN(amdgpu_ttm_tt_get_usermm(bo->tbo.ttm),
ttm               494 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, user_addr, 0);
ttm               507 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
ttm               525 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
ttm              1184 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->tbo.ttm->sg = sg;
ttm              1329 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
ttm              1354 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) &&
ttm              1376 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    !amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
ttm              1414 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) && !bo->pin_count)
ttm              1493 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && !mem->bo->pin_count)
ttm              1527 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
ttm              1734 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
ttm              1743 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
ttm              1822 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (bo->tbo.ttm->pages[0]) {
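The amdkfd sites above all follow the same userptr bracket: amdgpu_ttm_tt_get_user_pages() faults the user pages into ttm->pages (backed by an HMM range walk, per the amdgpu_ttm.c lines further down), the driver uses the pages, and amdgpu_ttm_tt_get_user_pages_done() checks that no invalidation raced with that use. A minimal sketch under that assumption:

    ret = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
    if (ret)
            return ret;
    /* ... map or copy using bo->tbo.ttm->pages ... */
    if (!amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm))
            ret = -EAGAIN;  /* illustrative: callers typically retry */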
ttm               107 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 		usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);
ttm                66 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm)) {
ttm               541 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		usermm = amdgpu_ttm_tt_get_usermm(bo->tbo.ttm);
ttm               545 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (amdgpu_ttm_tt_is_userptr(bo->tbo.ttm) &&
ttm               553 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			amdgpu_ttm_tt_set_user_pages(bo->tbo.ttm,
ttm               624 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		e->user_pages = kvmalloc_array(bo->tbo.ttm->num_pages,
ttm               639 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		for (i = 0; i < bo->tbo.ttm->num_pages; i++) {
ttm               640 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			if (bo->tbo.ttm->pages[i] != e->user_pages[i]) {
ttm              1302 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r |= !amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
ttm                54 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
ttm               122 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
ttm               339 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm) ||
ttm               389 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->tbo.ttm->sg = sg;
ttm               132 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	mm = amdgpu_ttm_tt_get_usermm(abo->tbo.ttm);
ttm               321 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
ttm               332 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = amdgpu_ttm_tt_get_user_pages(bo, bo->tbo.ttm->pages);
ttm               355 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		amdgpu_ttm_tt_get_user_pages_done(bo->tbo.ttm);
ttm               375 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) ||
ttm               709 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm)) {
ttm                45 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	struct ttm_dma_tt *ttm;
ttm                49 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm);
ttm                50 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 		*addr = ttm->dma_address[0];
ttm                59 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	*flags = amdgpu_ttm_tt_pde_flags(bo->tbo.ttm, &bo->tbo.mem);
ttm               122 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	struct ttm_dma_tt *ttm;
ttm               124 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (bo->num_pages != 1 || bo->ttm->caching_state == tt_cached)
ttm               127 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	ttm = container_of(bo->ttm, struct ttm_dma_tt, ttm);
ttm               128 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	if (ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size)
ttm               131 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	return adev->gmc.agp_start + ttm->dma_address[0];
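amdgpu_gmc.c recovers the DMA-aware wrapper from the generic TTM pointer with container_of(), since struct ttm_dma_tt embeds struct ttm_tt as a member named ttm; the bus address of page 0 is then read directly. This embedding is also why the amdgpu/radeon code elsewhere can cast struct ttm_tt * straight to its private wrappers. Condensed from the lines above:

    struct ttm_dma_tt *dma_tt =
            container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm);
    dma_addr_t addr = dma_tt->dma_address[0];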
ttm               179 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, end))
ttm               279 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			if (amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm,
ttm               889 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (amdgpu_ttm_tt_get_usermm(bo->tbo.ttm))
ttm               229 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (amdgpu_ttm_tt_get_usermm(bo->ttm))
ttm               514 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
ttm               520 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, ctx);
ttm               633 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
ttm               766 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_dma_tt	ttm;
ttm               790 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_tt *ttm = bo->tbo.ttm;
ttm               791 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm               815 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	pfns = kvmalloc_array(ttm->num_pages, sizeof(*pfns), GFP_KERNEL);
ttm               823 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	range->default_flags |= amdgpu_ttm_tt_is_readonly(ttm) ?
ttm               828 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	range->end = start + ttm->num_pages * PAGE_SIZE;
ttm               857 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	for (i = 0; i < ttm->num_pages; i++) {
ttm               891 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm)
ttm               893 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm               900 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		gtt->userptr, ttm->num_pages);
ttm               925 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages)
ttm               929 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	for (i = 0; i < ttm->num_pages; ++i)
ttm               930 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->pages[i] = pages ? pages[i] : NULL;
ttm               938 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_tt_pin_userptr(struct ttm_tt *ttm)
ttm               940 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(ttm->bdev);
ttm               941 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm               950 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
ttm               951 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				      ttm->num_pages << PAGE_SHIFT,
ttm               958 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
ttm               959 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (nents != ttm->sg->nents)
ttm               963 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
ttm               964 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 					 gtt->ttm.dma_address, ttm->num_pages);
ttm               969 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	kfree(ttm->sg);
ttm               976 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_ttm_tt_unpin_userptr(struct ttm_tt *ttm)
ttm               978 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(ttm->bdev);
ttm               979 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm               986 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!ttm->sg->sgl)
ttm               990 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dma_unmap_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
ttm               992 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	sg_free_table(ttm->sg);
ttm               996 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	    ttm->pages[0] == hmm_device_entry_to_page(gtt->range,
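amdgpu_ttm_tt_pin_userptr()/unpin_userptr() above wrap the already-faulted user pages in a scatter-gather table, DMA-map it, and fan the resulting bus addresses back out into the per-page address array; unpin reverses the mapping and frees the table. Condensed, with error handling elided:

    r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages,
                                  0, ttm->num_pages << PAGE_SHIFT,
                                  GFP_KERNEL);
    nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
    drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
                                     gtt->ttm.dma_address, ttm->num_pages);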
ttm              1007 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_tt *ttm = tbo->ttm;
ttm              1008 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1015 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				ttm->pages, gtt->ttm.dma_address, flags);
ttm              1025 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				ttm->num_pages - page_idx,
ttm              1026 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				&ttm->pages[page_idx],
ttm              1027 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				&(gtt->ttm.dma_address[page_idx]), flags);
ttm              1029 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
ttm              1030 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				     ttm->pages, gtt->ttm.dma_address, flags);
ttm              1036 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			  ttm->num_pages, gtt->offset);
ttm              1047 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_backend_bind(struct ttm_tt *ttm,
ttm              1050 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(ttm->bdev);
ttm              1051 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void*)ttm;
ttm              1056 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_ttm_tt_pin_userptr(ttm);
ttm              1062 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!ttm->num_pages) {
ttm              1064 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		     ttm->num_pages, bo_mem, ttm);
ttm              1078 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags = amdgpu_ttm_tt_pte_flags(adev, ttm, bo_mem);
ttm              1082 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_gart_bind(adev, gtt->offset, ttm->num_pages,
ttm              1083 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->pages, gtt->ttm.dma_address, flags);
ttm              1087 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			  ttm->num_pages, gtt->offset);
ttm              1098 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void*)bo->ttm;
ttm              1130 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		flags = amdgpu_ttm_tt_pte_flags(adev, bo->ttm, &tmp);
ttm              1162 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!tbo->ttm)
ttm              1165 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, &tbo->mem);
ttm              1177 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_backend_unbind(struct ttm_tt *ttm)
ttm              1179 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(ttm->bdev);
ttm              1180 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1185 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		amdgpu_ttm_tt_unpin_userptr(ttm);
ttm              1191 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_gart_unbind(adev, gtt->offset, ttm->num_pages);
ttm              1194 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			  gtt->ttm.ttm.num_pages, gtt->offset);
ttm              1198 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_ttm_backend_destroy(struct ttm_tt *ttm)
ttm              1200 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1205 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	ttm_dma_tt_fini(&gtt->ttm);
ttm              1234 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	gtt->ttm.ttm.func = &amdgpu_backend_func;
ttm              1237 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (ttm_sg_tt_init(&gtt->ttm, bo, page_flags)) {
ttm              1241 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return &gtt->ttm.ttm;
ttm              1250 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_tt_populate(struct ttm_tt *ttm,
ttm              1253 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_device *adev = amdgpu_ttm_adev(ttm->bdev);
ttm              1254 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1255 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm              1259 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
ttm              1260 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (!ttm->sg)
ttm              1263 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->page_flags |= TTM_PAGE_FLAG_SG;
ttm              1264 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->state = tt_unbound;
ttm              1268 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (slave && ttm->sg) {
ttm              1269 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
ttm              1270 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 						 gtt->ttm.dma_address,
ttm              1271 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 						 ttm->num_pages);
ttm              1272 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->state = tt_unbound;
ttm              1278 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		return ttm_dma_populate(&gtt->ttm, adev->dev, ctx);
ttm              1284 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return ttm_populate_and_map_pages(adev->dev, &gtt->ttm, ctx);
ttm              1293 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_ttm_tt_unpopulate(struct ttm_tt *ttm)
ttm              1296 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1297 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm              1300 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		amdgpu_ttm_tt_set_user_pages(ttm, NULL);
ttm              1301 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		kfree(ttm->sg);
ttm              1302 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->page_flags &= ~TTM_PAGE_FLAG_SG;
ttm              1309 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	adev = amdgpu_ttm_adev(ttm->bdev);
ttm              1313 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm_dma_unpopulate(&gtt->ttm, adev->dev);
ttm              1319 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	ttm_unmap_and_unpopulate_pages(adev->dev, &gtt->ttm);
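amdgpu_ttm_tt_populate() above splits on the "slave" case: a TTM flagged TTM_PAGE_FLAG_SG belongs to an imported dma-buf and already carries an sg table, so only the address arrays need filling; everything else goes through the pool allocators. Condensed from the listed lines:

    bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);

    if (slave && ttm->sg) {
            /* imported dma-buf: addresses come from the sg table */
            drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
                                             gtt->ttm.dma_address,
                                             ttm->num_pages);
            ttm->state = tt_unbound;
            return 0;
    }
    /* locally allocated pages: allocate and DMA-map via the pool */
    return ttm_populate_and_map_pages(adev->dev, &gtt->ttm, ctx);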
ttm              1333 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c int amdgpu_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
ttm              1336 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1355 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm)
ttm              1357 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1373 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
ttm              1376 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1385 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	size = (unsigned long)gtt->ttm.ttm.num_pages * PAGE_SIZE;
ttm              1395 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm)
ttm              1397 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1408 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm)
ttm              1410 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)ttm;
ttm              1426 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_mem_reg *mem)
ttm              1436 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (ttm->caching_state == tt_cached)
ttm              1451 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c uint64_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm,
ttm              1454 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem);
ttm              1459 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!amdgpu_ttm_tt_is_readonly(ttm))
ttm              1903 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct amdgpu_ttm_tt *gtt = (void *)bo->ttm;
ttm              1905 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_tt *ttm = bo->ttm;
ttm              1942 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dma_address = &gtt->ttm.dma_address[offset >> PAGE_SHIFT];
ttm              1943 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags = amdgpu_ttm_tt_pte_flags(adev, ttm, mem);
ttm               108 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm);
ttm               115 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h static inline bool amdgpu_ttm_tt_get_user_pages_done(struct ttm_tt *ttm)
ttm               121 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h void amdgpu_ttm_tt_set_user_pages(struct ttm_tt *ttm, struct page **pages);
ttm               122 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h int amdgpu_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
ttm               124 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
ttm               125 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct mm_struct *amdgpu_ttm_tt_get_usermm(struct ttm_tt *ttm);
ttm               126 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
ttm               128 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_userptr_invalidated(struct ttm_tt *ttm,
ttm               130 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_is_userptr(struct ttm_tt *ttm);
ttm               131 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm);
ttm               132 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_mem_reg *mem);
ttm               133 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h uint64_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm,
ttm              1701 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		struct ttm_dma_tt *ttm;
ttm              1706 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			ttm = container_of(bo->tbo.ttm, struct ttm_dma_tt, ttm);
ttm              1707 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 			pages_addr = ttm->dma_address;
ttm              1713 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem);
ttm               136 drivers/gpu/drm/cirrus/cirrus_drv.h 	} ttm;
ttm               215 drivers/gpu/drm/nouveau/nouveau_bo.c 	nvbo->bo.bdev = &drm->ttm.bdev;
ttm               542 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
ttm               552 drivers/gpu/drm/nouveau/nouveau_bo.c 	for (i = 0; i < ttm_dma->ttm.num_pages; i++)
ttm               562 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
ttm               572 drivers/gpu/drm/nouveau/nouveau_bo.c 	for (i = 0; i < ttm_dma->ttm.num_pages; i++)
ttm               678 drivers/gpu/drm/nouveau/nouveau_bo.c 			const u8 type = mmu->type[drm->ttm.type_vram].type;
ttm              1128 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_channel *chan = drm->ttm.chan;
ttm              1146 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = drm->ttm.move(chan, bo, &bo->mem, new_reg);
ttm              1212 drivers/gpu/drm/nouveau/nouveau_bo.c 				       &drm->ttm.copy);
ttm              1214 drivers/gpu/drm/nouveau/nouveau_bo.c 			ret = mthd->init(chan, drm->ttm.copy.handle);
ttm              1216 drivers/gpu/drm/nouveau/nouveau_bo.c 				nvif_object_fini(&drm->ttm.copy);
ttm              1220 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->ttm.move = mthd->exec;
ttm              1221 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->ttm.chan = chan;
ttm              1253 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_tt_bind(bo->ttm, &tmp_reg, &ctx);
ttm              1387 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (old_reg->mem_type == TTM_PL_SYSTEM && !bo->ttm) {
ttm              1395 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (drm->ttm.move) {
ttm              1584 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ttm              1586 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (void *)ttm;
ttm              1591 drivers/gpu/drm/nouveau/nouveau_bo.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm              1593 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (ttm->state != tt_unpopulated)
ttm              1596 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (slave && ttm->sg) {
ttm              1598 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
ttm              1599 drivers/gpu/drm/nouveau/nouveau_bo.c 						 ttm_dma->dma_address, ttm->num_pages);
ttm              1600 drivers/gpu/drm/nouveau/nouveau_bo.c 		ttm->state = tt_unbound;
ttm              1604 drivers/gpu/drm/nouveau/nouveau_bo.c 	drm = nouveau_bdev(ttm->bdev);
ttm              1609 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ttm_agp_tt_populate(ttm, ctx);
ttm              1615 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ttm_dma_populate((void *)ttm, dev, ctx);
ttm              1619 drivers/gpu/drm/nouveau/nouveau_bo.c 	r = ttm_pool_populate(ttm, ctx);
ttm              1624 drivers/gpu/drm/nouveau/nouveau_bo.c 	for (i = 0; i < ttm->num_pages; i++) {
ttm              1627 drivers/gpu/drm/nouveau/nouveau_bo.c 		addr = dma_map_page(dev, ttm->pages[i], 0, PAGE_SIZE,
ttm              1636 drivers/gpu/drm/nouveau/nouveau_bo.c 			ttm_pool_unpopulate(ttm);
ttm              1646 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_ttm_tt_unpopulate(struct ttm_tt *ttm)
ttm              1648 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_dma_tt *ttm_dma = (void *)ttm;
ttm              1652 drivers/gpu/drm/nouveau/nouveau_bo.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm              1657 drivers/gpu/drm/nouveau/nouveau_bo.c 	drm = nouveau_bdev(ttm->bdev);
ttm              1662 drivers/gpu/drm/nouveau/nouveau_bo.c 		ttm_agp_tt_unpopulate(ttm);
ttm              1669 drivers/gpu/drm/nouveau/nouveau_bo.c 		ttm_dma_unpopulate((void *)ttm, dev);
ttm              1674 drivers/gpu/drm/nouveau/nouveau_bo.c 	for (i = 0; i < ttm->num_pages; i++) {
ttm              1681 drivers/gpu/drm/nouveau/nouveau_bo.c 	ttm_pool_unpopulate(ttm);
ttm               475 drivers/gpu/drm/nouveau/nouveau_dmem.c 	switch (drm->ttm.copy.oclass) {
ttm               481 drivers/gpu/drm/nouveau/nouveau_dmem.c 		drm->dmem->migrate.chan = drm->ttm.chan;
ttm               293 drivers/gpu/drm/nouveau/nouveau_drm.c 	nvif_object_fini(&drm->ttm.copy);
ttm               826 drivers/gpu/drm/nouveau/nouveau_drm.c 	ttm_bo_evict_mm(&drm->ttm.bdev, TTM_PL_VRAM);
ttm               163 drivers/gpu/drm/nouveau/nouveau_drv.h 	} ttm;
ttm               228 drivers/gpu/drm/nouveau/nouveau_drv.h 	return !(mmu->type[drm->ttm.type_host[0]].type & NVIF_MEM_UNCACHED);
ttm               107 drivers/gpu/drm/nouveau/nouveau_mem.c 		type = drm->ttm.type_ncoh[!!mem->kind];
ttm               109 drivers/gpu/drm/nouveau/nouveau_mem.c 		type = drm->ttm.type_host[0];
ttm               119 drivers/gpu/drm/nouveau/nouveau_mem.c 	if (tt->ttm.sg) args.sgl = tt->ttm.sg->sgl;
ttm               148 drivers/gpu/drm/nouveau/nouveau_mem.c 					 drm->ttm.type_vram, page, size,
ttm               156 drivers/gpu/drm/nouveau/nouveau_mem.c 					 drm->ttm.type_vram, page, size,
ttm                35 drivers/gpu/drm/nouveau/nouveau_prime.c 	return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages);
ttm                13 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct ttm_dma_tt ttm;
ttm                18 drivers/gpu/drm/nouveau/nouveau_sgdma.c nouveau_sgdma_destroy(struct ttm_tt *ttm)
ttm                20 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
ttm                22 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	if (ttm) {
ttm                23 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		ttm_dma_tt_fini(&nvbe->ttm);
ttm                29 drivers/gpu/drm/nouveau/nouveau_sgdma.c nv04_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *reg)
ttm                31 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
ttm                35 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	ret = nouveau_mem_host(reg, &nvbe->ttm);
ttm                50 drivers/gpu/drm/nouveau/nouveau_sgdma.c nv04_sgdma_unbind(struct ttm_tt *ttm)
ttm                52 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
ttm                64 drivers/gpu/drm/nouveau/nouveau_sgdma.c nv50_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *reg)
ttm                66 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
ttm                70 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	ret = nouveau_mem_host(reg, &nvbe->ttm);
ttm                95 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		nvbe->ttm.ttm.func = &nv04_sgdma_backend;
ttm                97 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		nvbe->ttm.ttm.func = &nv50_sgdma_backend;
ttm                99 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	if (ttm_dma_tt_init(&nvbe->ttm, bo, page_flags))
ttm               106 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	return &nvbe->ttm.ttm;
ttm               167 drivers/gpu/drm/nouveau/nouveau_ttm.c 	return ttm_bo_mmap(filp, vma, &drm->ttm.bdev);
ttm               181 drivers/gpu/drm/nouveau/nouveau_ttm.c 	drm->ttm.type_host[!!kind] = typei;
ttm               187 drivers/gpu/drm/nouveau/nouveau_ttm.c 	drm->ttm.type_ncoh[!!kind] = typei;
ttm               220 drivers/gpu/drm/nouveau/nouveau_ttm.c 		drm->ttm.type_vram = typei;
ttm               222 drivers/gpu/drm/nouveau/nouveau_ttm.c 		drm->ttm.type_vram = -1;
ttm               232 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ret = ttm_bo_device_init(&drm->ttm.bdev,
ttm               247 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ret = ttm_bo_init_mm(&drm->ttm.bdev, TTM_PL_VRAM,
ttm               254 drivers/gpu/drm/nouveau/nouveau_ttm.c 	drm->ttm.mtrr = arch_phys_wc_add(device->func->resource_addr(device, 1),
ttm               264 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ret = ttm_bo_init_mm(&drm->ttm.bdev, TTM_PL_TT,
ttm               281 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ttm_bo_clean_mm(&drm->ttm.bdev, TTM_PL_VRAM);
ttm               282 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ttm_bo_clean_mm(&drm->ttm.bdev, TTM_PL_TT);
ttm               284 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ttm_bo_device_release(&drm->ttm.bdev);
ttm               286 drivers/gpu/drm/nouveau/nouveau_ttm.c 	arch_phys_wc_del(drm->ttm.mtrr);
ttm               287 drivers/gpu/drm/nouveau/nouveau_ttm.c 	drm->ttm.mtrr = 0;
ttm                 8 drivers/gpu/drm/nouveau/nouveau_ttm.h 	return container_of(bd, struct nouveau_drm, ttm.bdev);
ttm               204 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_tt		        ttm;
ttm               209 drivers/gpu/drm/qxl/qxl_ttm.c static int qxl_ttm_backend_bind(struct ttm_tt *ttm,
ttm               212 drivers/gpu/drm/qxl/qxl_ttm.c 	struct qxl_ttm_tt *gtt = (void *)ttm;
ttm               215 drivers/gpu/drm/qxl/qxl_ttm.c 	if (!ttm->num_pages) {
ttm               217 drivers/gpu/drm/qxl/qxl_ttm.c 		     ttm->num_pages, bo_mem, ttm);
ttm               223 drivers/gpu/drm/qxl/qxl_ttm.c static int qxl_ttm_backend_unbind(struct ttm_tt *ttm)
ttm               229 drivers/gpu/drm/qxl/qxl_ttm.c static void qxl_ttm_backend_destroy(struct ttm_tt *ttm)
ttm               231 drivers/gpu/drm/qxl/qxl_ttm.c 	struct qxl_ttm_tt *gtt = (void *)ttm;
ttm               233 drivers/gpu/drm/qxl/qxl_ttm.c 	ttm_tt_fini(&gtt->ttm);
ttm               253 drivers/gpu/drm/qxl/qxl_ttm.c 	gtt->ttm.func = &qxl_backend_func;
ttm               255 drivers/gpu/drm/qxl/qxl_ttm.c 	if (ttm_tt_init(&gtt->ttm, bo, page_flags)) {
ttm               259 drivers/gpu/drm/qxl/qxl_ttm.c 	return &gtt->ttm;
ttm               283 drivers/gpu/drm/qxl/qxl_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
ttm              2813 drivers/gpu/drm/radeon/radeon.h extern int radeon_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
ttm              2815 drivers/gpu/drm/radeon/radeon.h extern bool radeon_ttm_tt_has_userptr(struct ttm_tt *ttm);
ttm              2816 drivers/gpu/drm/radeon/radeon.h extern bool radeon_ttm_tt_is_readonly(struct ttm_tt *ttm);
ttm               163 drivers/gpu/drm/radeon/radeon_cs.c 		if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) {
ttm               333 drivers/gpu/drm/radeon/radeon_gem.c 	r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);
ttm               422 drivers/gpu/drm/radeon/radeon_gem.c 	if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) {
ttm               723 drivers/gpu/drm/radeon/radeon_gem.c 	if (radeon_ttm_tt_has_userptr(robj->tbo.ttm))
ttm                99 drivers/gpu/drm/radeon/radeon_mn.c 			if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound)
ttm               334 drivers/gpu/drm/radeon/radeon_object.c 	if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))
ttm                39 drivers/gpu/drm/radeon/radeon_prime.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
ttm               124 drivers/gpu/drm/radeon/radeon_prime.c 	if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))
ttm               185 drivers/gpu/drm/radeon/radeon_ttm.c 	if (radeon_ttm_tt_has_userptr(bo->ttm))
ttm               282 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
ttm               287 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, &ctx);
ttm               358 drivers/gpu/drm/radeon/radeon_ttm.c 	if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
ttm               475 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_dma_tt		ttm;
ttm               485 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_ttm_tt_pin_userptr(struct ttm_tt *ttm)
ttm               487 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_device *rdev = radeon_get_rdev(ttm->bdev);
ttm               488 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = (void *)ttm;
ttm               502 drivers/gpu/drm/radeon/radeon_ttm.c 		unsigned long end = gtt->userptr + ttm->num_pages * PAGE_SIZE;
ttm               510 drivers/gpu/drm/radeon/radeon_ttm.c 		unsigned num_pages = ttm->num_pages - pinned;
ttm               512 drivers/gpu/drm/radeon/radeon_ttm.c 		struct page **pages = ttm->pages + pinned;
ttm               521 drivers/gpu/drm/radeon/radeon_ttm.c 	} while (pinned < ttm->num_pages);
ttm               523 drivers/gpu/drm/radeon/radeon_ttm.c 	r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
ttm               524 drivers/gpu/drm/radeon/radeon_ttm.c 				      ttm->num_pages << PAGE_SHIFT,
ttm               530 drivers/gpu/drm/radeon/radeon_ttm.c 	nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
ttm               531 drivers/gpu/drm/radeon/radeon_ttm.c 	if (nents != ttm->sg->nents)
ttm               534 drivers/gpu/drm/radeon/radeon_ttm.c 	drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
ttm               535 drivers/gpu/drm/radeon/radeon_ttm.c 					 gtt->ttm.dma_address, ttm->num_pages);
ttm               540 drivers/gpu/drm/radeon/radeon_ttm.c 	kfree(ttm->sg);
ttm               543 drivers/gpu/drm/radeon/radeon_ttm.c 	release_pages(ttm->pages, pinned);
ttm               547 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_ttm_tt_unpin_userptr(struct ttm_tt *ttm)
ttm               549 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_device *rdev = radeon_get_rdev(ttm->bdev);
ttm               550 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = (void *)ttm;
ttm               558 drivers/gpu/drm/radeon/radeon_ttm.c 	if (!ttm->sg->sgl)
ttm               562 drivers/gpu/drm/radeon/radeon_ttm.c 	dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
ttm               564 drivers/gpu/drm/radeon/radeon_ttm.c 	for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) {
ttm               573 drivers/gpu/drm/radeon/radeon_ttm.c 	sg_free_table(ttm->sg);
ttm               576 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_ttm_backend_bind(struct ttm_tt *ttm,
ttm               579 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = (void*)ttm;
ttm               585 drivers/gpu/drm/radeon/radeon_ttm.c 		radeon_ttm_tt_pin_userptr(ttm);
ttm               590 drivers/gpu/drm/radeon/radeon_ttm.c 	if (!ttm->num_pages) {
ttm               592 drivers/gpu/drm/radeon/radeon_ttm.c 		     ttm->num_pages, bo_mem, ttm);
ttm               594 drivers/gpu/drm/radeon/radeon_ttm.c 	if (ttm->caching_state == tt_cached)
ttm               596 drivers/gpu/drm/radeon/radeon_ttm.c 	r = radeon_gart_bind(gtt->rdev, gtt->offset, ttm->num_pages,
ttm               597 drivers/gpu/drm/radeon/radeon_ttm.c 			     ttm->pages, gtt->ttm.dma_address, flags);
ttm               600 drivers/gpu/drm/radeon/radeon_ttm.c 			  ttm->num_pages, (unsigned)gtt->offset);
ttm               606 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_ttm_backend_unbind(struct ttm_tt *ttm)
ttm               608 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = (void *)ttm;
ttm               610 drivers/gpu/drm/radeon/radeon_ttm.c 	radeon_gart_unbind(gtt->rdev, gtt->offset, ttm->num_pages);
ttm               613 drivers/gpu/drm/radeon/radeon_ttm.c 		radeon_ttm_tt_unpin_userptr(ttm);
ttm               618 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_ttm_backend_destroy(struct ttm_tt *ttm)
ttm               620 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = (void *)ttm;
ttm               622 drivers/gpu/drm/radeon/radeon_ttm.c 	ttm_dma_tt_fini(&gtt->ttm);
ttm               650 drivers/gpu/drm/radeon/radeon_ttm.c 	gtt->ttm.ttm.func = &radeon_backend_func;
ttm               652 drivers/gpu/drm/radeon/radeon_ttm.c 	if (ttm_dma_tt_init(&gtt->ttm, bo, page_flags)) {
ttm               656 drivers/gpu/drm/radeon/radeon_ttm.c 	return &gtt->ttm.ttm;
ttm               659 drivers/gpu/drm/radeon/radeon_ttm.c static struct radeon_ttm_tt *radeon_ttm_tt_to_gtt(struct ttm_tt *ttm)
ttm               661 drivers/gpu/drm/radeon/radeon_ttm.c 	if (!ttm || ttm->func != &radeon_backend_func)
ttm               663 drivers/gpu/drm/radeon/radeon_ttm.c 	return (struct radeon_ttm_tt *)ttm;
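Note the contrast with amdgpu: radeon guards its downcast by checking the backend-function pointer before casting, whereas the amdgpu sites above cast (void *)ttm unconditionally. The guarded form, as listed:

    static struct radeon_ttm_tt *radeon_ttm_tt_to_gtt(struct ttm_tt *ttm)
    {
            if (!ttm || ttm->func != &radeon_backend_func)
                    return NULL;  /* not a TTM this driver created */
            return (struct radeon_ttm_tt *)ttm;
    }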
ttm               666 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_ttm_tt_populate(struct ttm_tt *ttm,
ttm               669 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(ttm);
ttm               671 drivers/gpu/drm/radeon/radeon_ttm.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm               674 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
ttm               675 drivers/gpu/drm/radeon/radeon_ttm.c 		if (!ttm->sg)
ttm               678 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->page_flags |= TTM_PAGE_FLAG_SG;
ttm               679 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->state = tt_unbound;
ttm               683 drivers/gpu/drm/radeon/radeon_ttm.c 	if (slave && ttm->sg) {
ttm               684 drivers/gpu/drm/radeon/radeon_ttm.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
ttm               685 drivers/gpu/drm/radeon/radeon_ttm.c 						 gtt->ttm.dma_address, ttm->num_pages);
ttm               686 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->state = tt_unbound;
ttm               690 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(ttm->bdev);
ttm               693 drivers/gpu/drm/radeon/radeon_ttm.c 		return ttm_agp_tt_populate(ttm, ctx);
ttm               699 drivers/gpu/drm/radeon/radeon_ttm.c 		return ttm_dma_populate(&gtt->ttm, rdev->dev, ctx);
ttm               703 drivers/gpu/drm/radeon/radeon_ttm.c 	return ttm_populate_and_map_pages(rdev->dev, &gtt->ttm, ctx);
ttm               706 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_ttm_tt_unpopulate(struct ttm_tt *ttm)
ttm               709 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(ttm);
ttm               710 drivers/gpu/drm/radeon/radeon_ttm.c 	bool slave = !!(ttm->page_flags & TTM_PAGE_FLAG_SG);
ttm               713 drivers/gpu/drm/radeon/radeon_ttm.c 		kfree(ttm->sg);
ttm               714 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->page_flags &= ~TTM_PAGE_FLAG_SG;
ttm               721 drivers/gpu/drm/radeon/radeon_ttm.c 	rdev = radeon_get_rdev(ttm->bdev);
ttm               724 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm_agp_tt_unpopulate(ttm);
ttm               731 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm_dma_unpopulate(&gtt->ttm, rdev->dev);
ttm               736 drivers/gpu/drm/radeon/radeon_ttm.c 	ttm_unmap_and_unpopulate_pages(rdev->dev, &gtt->ttm);
ttm               739 drivers/gpu/drm/radeon/radeon_ttm.c int radeon_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
ttm               742 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(ttm);
ttm               753 drivers/gpu/drm/radeon/radeon_ttm.c bool radeon_ttm_tt_has_userptr(struct ttm_tt *ttm)
ttm               755 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(ttm);
ttm               763 drivers/gpu/drm/radeon/radeon_ttm.c bool radeon_ttm_tt_is_readonly(struct ttm_tt *ttm)
ttm               765 drivers/gpu/drm/radeon/radeon_ttm.c 	struct radeon_ttm_tt *gtt = radeon_ttm_tt_to_gtt(ttm);
ttm               945 drivers/gpu/drm/radeon/radeon_vm.c 	if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm))
ttm                46 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct ttm_tt ttm;
ttm                51 drivers/gpu/drm/ttm/ttm_agp_backend.c static int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem)
ttm                53 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);
ttm                54 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct page *dummy_read_page = ttm->bdev->glob->dummy_read_page;
ttm                60 drivers/gpu/drm/ttm/ttm_agp_backend.c 	mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);
ttm                65 drivers/gpu/drm/ttm/ttm_agp_backend.c 	for (i = 0; i < ttm->num_pages; i++) {
ttm                66 drivers/gpu/drm/ttm/ttm_agp_backend.c 		struct page *page = ttm->pages[i];
ttm                85 drivers/gpu/drm/ttm/ttm_agp_backend.c static int ttm_agp_unbind(struct ttm_tt *ttm)
ttm                87 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);
ttm                98 drivers/gpu/drm/ttm/ttm_agp_backend.c static void ttm_agp_destroy(struct ttm_tt *ttm)
ttm               100 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);
ttm               103 drivers/gpu/drm/ttm/ttm_agp_backend.c 		ttm_agp_unbind(ttm);
ttm               104 drivers/gpu/drm/ttm/ttm_agp_backend.c 	ttm_tt_fini(ttm);
ttm               126 drivers/gpu/drm/ttm/ttm_agp_backend.c 	agp_be->ttm.func = &ttm_agp_func;
ttm               128 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (ttm_tt_init(&agp_be->ttm, bo, page_flags)) {
ttm               133 drivers/gpu/drm/ttm/ttm_agp_backend.c 	return &agp_be->ttm;
ttm               137 drivers/gpu/drm/ttm/ttm_agp_backend.c int ttm_agp_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ttm               139 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (ttm->state != tt_unpopulated)
ttm               142 drivers/gpu/drm/ttm/ttm_agp_backend.c 	return ttm_pool_populate(ttm, ctx);
ttm               146 drivers/gpu/drm/ttm/ttm_agp_backend.c void ttm_agp_tt_unpopulate(struct ttm_tt *ttm)
ttm               148 drivers/gpu/drm/ttm/ttm_agp_backend.c 	ttm_pool_unpopulate(ttm);
ttm               160 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_tt_destroy(bo->ttm);
ttm               188 drivers/gpu/drm/ttm/ttm_bo.c 	if (!(man->flags & TTM_MEMTYPE_FLAG_FIXED) && bo->ttm &&
ttm               189 drivers/gpu/drm/ttm/ttm_bo.c 	    !(bo->ttm->page_flags & (TTM_PAGE_FLAG_SG |
ttm               263 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->ttm && !(bo->ttm->page_flags &
ttm               345 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->ttm == NULL) {
ttm               352 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_tt_set_placement_caching(bo->ttm, mem->placement);
ttm               357 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_tt_bind(bo->ttm, mem, ctx);
ttm               414 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_tt_destroy(bo->ttm);
ttm               415 drivers/gpu/drm/ttm/ttm_bo.c 		bo->ttm = NULL;
ttm               434 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_tt_destroy(bo->ttm);
ttm               435 drivers/gpu/drm/ttm/ttm_bo.c 	bo->ttm = NULL;
ttm              1278 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
ttm              1910 drivers/gpu/drm/ttm/ttm_bo.c 	    bo->ttm->caching_state != tt_cached) {
ttm              1942 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_tt_swapout(bo->ttm, bo->persistent_swap_storage);
ttm                57 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
ttm                70 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_tt_unbind(ttm);
ttm                77 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_tt_set_placement_caching(ttm, new_mem->placement);
ttm                82 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_tt_bind(ttm, new_mem, ctx);
ttm               312 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_copy_io_ttm_page(struct ttm_tt *ttm, void *src,
ttm               316 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct page *d = ttm->pages[page];
ttm               334 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_copy_ttm_io_page(struct ttm_tt *ttm, void *dst,
ttm               338 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct page *s = ttm->pages[page];
ttm               362 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
ttm               394 drivers/gpu/drm/ttm/ttm_bo_util.c 	    (ttm == NULL || (ttm->state == tt_unpopulated &&
ttm               395 drivers/gpu/drm/ttm/ttm_bo_util.c 			     !(ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)))) {
ttm               403 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (ttm) {
ttm               404 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_tt_populate(ttm, ctx);
ttm               423 drivers/gpu/drm/ttm/ttm_bo_util.c 			ret = ttm_copy_ttm_io_page(ttm, new_iomap, page,
ttm               428 drivers/gpu/drm/ttm/ttm_bo_util.c 			ret = ttm_copy_io_ttm_page(ttm, old_iomap, page,
ttm               443 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_tt_destroy(ttm);
ttm               444 drivers/gpu/drm/ttm/ttm_bo_util.c 		bo->ttm = NULL;
ttm               587 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_tt *ttm = bo->ttm;
ttm               591 drivers/gpu/drm/ttm/ttm_bo_util.c 	BUG_ON(!ttm);
ttm               593 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_tt_populate(ttm, &ctx);
ttm               604 drivers/gpu/drm/ttm/ttm_bo_util.c 		map->page = ttm->pages[start_page];
ttm               613 drivers/gpu/drm/ttm/ttm_bo_util.c 		map->virtual = vmap(ttm->pages + start_page, num_pages,
ttm               699 drivers/gpu/drm/ttm/ttm_bo_util.c 			ttm_tt_destroy(bo->ttm);
ttm               700 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
ttm               728 drivers/gpu/drm/ttm/ttm_bo_util.c 			ghost_obj->ttm = NULL;
ttm               730 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
ttm               784 drivers/gpu/drm/ttm/ttm_bo_util.c 			ghost_obj->ttm = NULL;
ttm               786 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
ttm               822 drivers/gpu/drm/ttm/ttm_bo_util.c 			ttm_tt_destroy(bo->ttm);
ttm               823 drivers/gpu/drm/ttm/ttm_bo_util.c 			bo->ttm = NULL;
ttm               851 drivers/gpu/drm/ttm/ttm_bo_util.c 	bo->ttm = NULL;
ttm               118 drivers/gpu/drm/ttm/ttm_bo_vm.c 	struct ttm_tt *ttm = NULL;
ttm               158 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) {
ttm               242 drivers/gpu/drm/ttm/ttm_bo_vm.c 		ttm = bo->ttm;
ttm               247 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (ttm_tt_populate(ttm, &ctx)) {
ttm               263 drivers/gpu/drm/ttm/ttm_bo_vm.c 			page = ttm->pages[page_offset];
ttm               374 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (unlikely(bo->ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)) {
ttm               375 drivers/gpu/drm/ttm/ttm_bo_vm.c 			ret = ttm_tt_swapin(bo->ttm);
ttm              1029 drivers/gpu/drm/ttm/ttm_page_alloc.c ttm_pool_unpopulate_helper(struct ttm_tt *ttm, unsigned mem_count_update)
ttm              1031 drivers/gpu/drm/ttm/ttm_page_alloc.c 	struct ttm_mem_global *mem_glob = ttm->bdev->glob->mem_glob;
ttm              1038 drivers/gpu/drm/ttm/ttm_page_alloc.c 		if (!ttm->pages[i])
ttm              1041 drivers/gpu/drm/ttm/ttm_page_alloc.c 		ttm_mem_global_free_page(mem_glob, ttm->pages[i], PAGE_SIZE);
ttm              1045 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_put_pages(ttm->pages, ttm->num_pages, ttm->page_flags,
ttm              1046 drivers/gpu/drm/ttm/ttm_page_alloc.c 		      ttm->caching_state);
ttm              1047 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm->state = tt_unpopulated;
ttm              1050 drivers/gpu/drm/ttm/ttm_page_alloc.c int ttm_pool_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ttm              1052 drivers/gpu/drm/ttm/ttm_page_alloc.c 	struct ttm_mem_global *mem_glob = ttm->bdev->glob->mem_glob;
ttm              1056 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (ttm->state != tt_unpopulated)
ttm              1059 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (ttm_check_under_lowerlimit(mem_glob, ttm->num_pages, ctx))
ttm              1062 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ret = ttm_get_pages(ttm->pages, ttm->num_pages, ttm->page_flags,
ttm              1063 drivers/gpu/drm/ttm/ttm_page_alloc.c 			    ttm->caching_state);
ttm              1065 drivers/gpu/drm/ttm/ttm_page_alloc.c 		ttm_pool_unpopulate_helper(ttm, 0);
ttm              1069 drivers/gpu/drm/ttm/ttm_page_alloc.c 	for (i = 0; i < ttm->num_pages; ++i) {
ttm              1070 drivers/gpu/drm/ttm/ttm_page_alloc.c 		ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],
ttm              1073 drivers/gpu/drm/ttm/ttm_page_alloc.c 			ttm_pool_unpopulate_helper(ttm, i);
ttm              1078 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (unlikely(ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)) {
ttm              1079 drivers/gpu/drm/ttm/ttm_page_alloc.c 		ret = ttm_tt_swapin(ttm);
ttm              1081 drivers/gpu/drm/ttm/ttm_page_alloc.c 			ttm_pool_unpopulate(ttm);
ttm              1086 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm->state = tt_unbound;
ttm              1091 drivers/gpu/drm/ttm/ttm_page_alloc.c void ttm_pool_unpopulate(struct ttm_tt *ttm)
ttm              1093 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_pool_unpopulate_helper(ttm, ttm->num_pages);
ttm              1103 drivers/gpu/drm/ttm/ttm_page_alloc.c 	r = ttm_pool_populate(&tt->ttm, ctx);
ttm              1107 drivers/gpu/drm/ttm/ttm_page_alloc.c 	for (i = 0; i < tt->ttm.num_pages; ++i) {
ttm              1108 drivers/gpu/drm/ttm/ttm_page_alloc.c 		struct page *p = tt->ttm.pages[i];
ttm              1111 drivers/gpu/drm/ttm/ttm_page_alloc.c 		for (j = i + 1; j < tt->ttm.num_pages; ++j) {
ttm              1112 drivers/gpu/drm/ttm/ttm_page_alloc.c 			if (++p != tt->ttm.pages[j])
ttm              1118 drivers/gpu/drm/ttm/ttm_page_alloc.c 		tt->dma_address[i] = dma_map_page(dev, tt->ttm.pages[i],
ttm              1127 drivers/gpu/drm/ttm/ttm_page_alloc.c 			ttm_pool_unpopulate(&tt->ttm);
ttm              1144 drivers/gpu/drm/ttm/ttm_page_alloc.c 	for (i = 0; i < tt->ttm.num_pages;) {
ttm              1145 drivers/gpu/drm/ttm/ttm_page_alloc.c 		struct page *p = tt->ttm.pages[i];
ttm              1148 drivers/gpu/drm/ttm/ttm_page_alloc.c 		if (!tt->dma_address[i] || !tt->ttm.pages[i]) {
ttm              1153 drivers/gpu/drm/ttm/ttm_page_alloc.c 		for (j = i + 1; j < tt->ttm.num_pages; ++j) {
ttm              1154 drivers/gpu/drm/ttm/ttm_page_alloc.c 			if (++p != tt->ttm.pages[j])
ttm              1165 drivers/gpu/drm/ttm/ttm_page_alloc.c 	ttm_pool_unpopulate(&tt->ttm);
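ttm_populate_and_map_pages() above scans for runs of physically contiguous pages so that one dma_map_page() call can cover a whole run, with the rest of the run taking the previous address plus PAGE_SIZE. A condensed sketch; error paths are elided and the loop bookkeeping past the listed lines is reconstructed:

    for (i = 0; i < tt->ttm.num_pages; ++i) {
            struct page *p = tt->ttm.pages[i];
            size_t num_pages = 1;

            /* extend the run while pages stay physically adjacent */
            for (j = i + 1; j < tt->ttm.num_pages; ++j) {
                    if (++p != tt->ttm.pages[j])
                            break;
                    ++num_pages;
            }

            tt->dma_address[i] = dma_map_page(dev, tt->ttm.pages[i], 0,
                                              num_pages * PAGE_SIZE,
                                              DMA_BIDIRECTIONAL);

            /* the rest of the run reuses the mapping at fixed offsets */
            for (j = 1; j < num_pages; ++j, ++i)
                    tt->dma_address[i + 1] = tt->dma_address[i] + PAGE_SIZE;
    }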
ttm               839 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               847 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ttm->pages[index] = d_page->p;
ttm               859 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               862 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm->page_flags & TTM_PAGE_FLAG_DMA32)
ttm               866 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm->page_flags & TTM_PAGE_FLAG_ZERO_ALLOC)
ttm               876 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm->page_flags & TTM_PAGE_FLAG_NO_RETRY)
ttm               889 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               890 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_mem_global *mem_glob = ttm->bdev->glob->mem_glob;
ttm               891 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	unsigned long num_pages = ttm->num_pages;
ttm               898 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm->state != tt_unpopulated)
ttm               907 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	type = ttm_to_type(ttm->page_flags, ttm->caching_state);
ttm               910 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm->page_flags & TTM_PAGE_FLAG_DMA32)
ttm               929 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],
ttm               938 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 			ttm->pages[j] = ttm->pages[j - 1] + 1;
ttm               966 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],
ttm               978 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (unlikely(ttm->page_flags & TTM_PAGE_FLAG_SWAPPED)) {
ttm               979 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ret = ttm_tt_swapin(ttm);
ttm               986 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	ttm->state = tt_unbound;
ttm               994 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               995 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	struct ttm_mem_global *mem_glob = ttm->bdev->glob->mem_glob;
ttm              1003 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	type = ttm_to_type(ttm->page_flags, ttm->caching_state);
ttm              1035 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		     ttm_to_type(ttm->page_flags, tt_cached)) == pool);
ttm              1041 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ttm->pages[count] = d_page->p;
ttm              1072 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	for (i = 0; i < ttm->num_pages; i++) {
ttm              1073 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 		ttm->pages[i] = NULL;
ttm              1080 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	ttm->state = tt_unpopulated;
ttm                70 drivers/gpu/drm/ttm/ttm_tt.c 		bo->ttm = NULL;
ttm                75 drivers/gpu/drm/ttm/ttm_tt.c 	bo->ttm = bdev->driver->ttm_tt_create(bo, page_flags);
ttm                76 drivers/gpu/drm/ttm/ttm_tt.c 	if (unlikely(bo->ttm == NULL))
ttm                85 drivers/gpu/drm/ttm/ttm_tt.c static int ttm_tt_alloc_page_directory(struct ttm_tt *ttm)
ttm                87 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->pages = kvmalloc_array(ttm->num_pages, sizeof(void*),
ttm                89 drivers/gpu/drm/ttm/ttm_tt.c 	if (!ttm->pages)
ttm                94 drivers/gpu/drm/ttm/ttm_tt.c static int ttm_dma_tt_alloc_page_directory(struct ttm_dma_tt *ttm)
ttm                96 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->ttm.pages = kvmalloc_array(ttm->ttm.num_pages,
ttm                97 drivers/gpu/drm/ttm/ttm_tt.c 					  sizeof(*ttm->ttm.pages) +
ttm                98 drivers/gpu/drm/ttm/ttm_tt.c 					  sizeof(*ttm->dma_address),
ttm               100 drivers/gpu/drm/ttm/ttm_tt.c 	if (!ttm->ttm.pages)
ttm               102 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->dma_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages);
ttm               106 drivers/gpu/drm/ttm/ttm_tt.c static int ttm_sg_tt_alloc_page_directory(struct ttm_dma_tt *ttm)
ttm               108 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->dma_address = kvmalloc_array(ttm->ttm.num_pages,
ttm               109 drivers/gpu/drm/ttm/ttm_tt.c 					  sizeof(*ttm->dma_address),
ttm               111 drivers/gpu/drm/ttm/ttm_tt.c 	if (!ttm->dma_address)
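ttm_tt.c allocates the page directory in three flavors: a plain page-pointer array, a combined allocation that carves the dma_addr_t array out of the tail of the same kvmalloc block (ttm_dma_tt_alloc_page_directory), and an addresses-only variant for SG-backed TTMs. The combined form, reconstructed from the lines above (GFP flags assumed):

    ttm->ttm.pages = kvmalloc_array(ttm->ttm.num_pages,
                                    sizeof(*ttm->ttm.pages) +
                                    sizeof(*ttm->dma_address),
                                    GFP_KERNEL | __GFP_ZERO);
    if (!ttm->ttm.pages)
            return -ENOMEM;
    /* dma_address lives immediately after the page pointers */
    ttm->dma_address = (void *)(ttm->ttm.pages + ttm->ttm.num_pages);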
ttm               147 drivers/gpu/drm/ttm/ttm_tt.c static int ttm_tt_set_caching(struct ttm_tt *ttm,
ttm               154 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->caching_state == c_state)
ttm               157 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state == tt_unpopulated) {
ttm               159 drivers/gpu/drm/ttm/ttm_tt.c 		ttm->caching_state = c_state;
ttm               163 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->caching_state == tt_cached)
ttm               164 drivers/gpu/drm/ttm/ttm_tt.c 		drm_clflush_pages(ttm->pages, ttm->num_pages);
ttm               166 drivers/gpu/drm/ttm/ttm_tt.c 	for (i = 0; i < ttm->num_pages; ++i) {
ttm               167 drivers/gpu/drm/ttm/ttm_tt.c 		cur_page = ttm->pages[i];
ttm               170 drivers/gpu/drm/ttm/ttm_tt.c 						      ttm->caching_state,
ttm               177 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->caching_state = c_state;
ttm               183 drivers/gpu/drm/ttm/ttm_tt.c 		cur_page = ttm->pages[j];
ttm               186 drivers/gpu/drm/ttm/ttm_tt.c 						      ttm->caching_state);
ttm               193 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_set_placement_caching(struct ttm_tt *ttm, uint32_t placement)
ttm               204 drivers/gpu/drm/ttm/ttm_tt.c 	return ttm_tt_set_caching(ttm, state);
ttm               208 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_destroy(struct ttm_tt *ttm)
ttm               210 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm == NULL)
ttm               213 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_unbind(ttm);
ttm               215 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state == tt_unbound)
ttm               216 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_tt_unpopulate(ttm);
ttm               218 drivers/gpu/drm/ttm/ttm_tt.c 	if (!(ttm->page_flags & TTM_PAGE_FLAG_PERSISTENT_SWAP) &&
ttm               219 drivers/gpu/drm/ttm/ttm_tt.c 	    ttm->swap_storage)
ttm               220 drivers/gpu/drm/ttm/ttm_tt.c 		fput(ttm->swap_storage);
ttm               222 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->swap_storage = NULL;
ttm               223 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->func->destroy(ttm);
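
ttm_tt_destroy() (ttm_tt.c:208-223) encodes a strict teardown order: unbind the GPU mapping first, release the system pages only if the tt ended up tt_unbound, drop the shmem swap file unless it was caller-supplied (TTM_PAGE_FLAG_PERSISTENT_SWAP), and finally let the backend hook free the structure. Condensed from the fragments above:

    static void example_destroy(struct ttm_tt *ttm)
    {
            if (ttm == NULL)
                    return;

            ttm_tt_unbind(ttm);                     /* GPU mapping goes first */

            if (ttm->state == tt_unbound)
                    ttm_tt_unpopulate(ttm);         /* system pages back to the pool */

            /* a persistent swap file is owned by the caller, not the tt */
            if (!(ttm->page_flags & TTM_PAGE_FLAG_PERSISTENT_SWAP) &&
                ttm->swap_storage)
                    fput(ttm->swap_storage);
            ttm->swap_storage = NULL;

            ttm->func->destroy(ttm);                /* backend frees the struct itself */
    }
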
ttm               226 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_init_fields(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
ttm               229 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->bdev = bo->bdev;
ttm               230 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->num_pages = bo->num_pages;
ttm               231 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->caching_state = tt_cached;
ttm               232 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->page_flags = page_flags;
ttm               233 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->state = tt_unpopulated;
ttm               234 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->swap_storage = NULL;
ttm               235 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->sg = bo->sg;
ttm               238 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
ttm               241 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
ttm               243 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm_tt_alloc_page_directory(ttm)) {
ttm               244 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_tt_destroy(ttm);
ttm               252 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_fini(struct ttm_tt *ttm)
ttm               254 drivers/gpu/drm/ttm/ttm_tt.c 	kvfree(ttm->pages);
ttm               255 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->pages = NULL;
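
ttm_tt_init()/ttm_tt_fini() (ttm_tt.c:238-255) split responsibilities with the backend: init fills the common fields and allocates the page directory (destroying the half-built tt if that fails), while fini releases only that directory. Allocating and freeing the containing structure is the backend's job, which is why every driver destroy hook further down pairs a *_tt_fini() with its own kfree(). A sketch of that pairing, with my_tt_destroy() as a hypothetical backend hook:

    static void my_tt_destroy(struct ttm_tt *ttm)   /* hypothetical hook */
    {
            ttm_tt_fini(ttm);       /* frees only the page directory */
            kfree(ttm);             /* backend allocated the struct, backend frees it */
    }
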
ttm               262 drivers/gpu/drm/ttm/ttm_tt.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               264 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
ttm               268 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_tt_destroy(ttm);
ttm               279 drivers/gpu/drm/ttm/ttm_tt.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               282 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_init_fields(ttm, bo, page_flags);
ttm               290 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_tt_destroy(ttm);
ttm               300 drivers/gpu/drm/ttm/ttm_tt.c 	struct ttm_tt *ttm = &ttm_dma->ttm;
ttm               302 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->pages)
ttm               303 drivers/gpu/drm/ttm/ttm_tt.c 		kvfree(ttm->pages);
ttm               306 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->pages = NULL;
ttm               311 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_unbind(struct ttm_tt *ttm)
ttm               315 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state == tt_bound) {
ttm               316 drivers/gpu/drm/ttm/ttm_tt.c 		ret = ttm->func->unbind(ttm);
ttm               318 drivers/gpu/drm/ttm/ttm_tt.c 		ttm->state = tt_unbound;
ttm               322 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem,
ttm               327 drivers/gpu/drm/ttm/ttm_tt.c 	if (!ttm)
ttm               330 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state == tt_bound)
ttm               333 drivers/gpu/drm/ttm/ttm_tt.c 	ret = ttm_tt_populate(ttm, ctx);
ttm               337 drivers/gpu/drm/ttm/ttm_tt.c 	ret = ttm->func->bind(ttm, bo_mem);
ttm               341 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->state = tt_bound;
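
ttm_tt_bind() and ttm_tt_unbind() (ttm_tt.c:311-341) are the two edges of the tt state machine. Bind is idempotent for an already-bound tt, populates the pages before handing them to the backend's bind hook, and only then advances the state to tt_bound; unbind reverses that single step back to tt_unbound. A hedged reconstruction from the fragments above (the error code for a NULL tt is truncated in the listing and assumed here):

    static int example_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem,
                            struct ttm_operation_ctx *ctx)
    {
            int ret;

            if (!ttm)
                    return -EINVAL;         /* assumed error code */

            if (ttm->state == tt_bound)
                    return 0;               /* nothing to do */

            /* pages must exist before the backend can map them */
            ret = ttm_tt_populate(ttm, ctx);
            if (ret)
                    return ret;

            ret = ttm->func->bind(ttm, bo_mem);
            if (unlikely(ret != 0))
                    return ret;

            ttm->state = tt_bound;
            return 0;
    }
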
ttm               347 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_swapin(struct ttm_tt *ttm)
ttm               356 drivers/gpu/drm/ttm/ttm_tt.c 	swap_storage = ttm->swap_storage;
ttm               361 drivers/gpu/drm/ttm/ttm_tt.c 	for (i = 0; i < ttm->num_pages; ++i) {
ttm               364 drivers/gpu/drm/ttm/ttm_tt.c 		gfp_mask |= (ttm->page_flags & TTM_PAGE_FLAG_NO_RETRY ? __GFP_RETRY_MAYFAIL : 0);
ttm               371 drivers/gpu/drm/ttm/ttm_tt.c 		to_page = ttm->pages[i];
ttm               379 drivers/gpu/drm/ttm/ttm_tt.c 	if (!(ttm->page_flags & TTM_PAGE_FLAG_PERSISTENT_SWAP))
ttm               381 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->swap_storage = NULL;
ttm               382 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->page_flags &= ~TTM_PAGE_FLAG_SWAPPED;
ttm               389 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_swapout(struct ttm_tt *ttm, struct file *persistent_swap_storage)
ttm               398 drivers/gpu/drm/ttm/ttm_tt.c 	BUG_ON(ttm->state != tt_unbound && ttm->state != tt_unpopulated);
ttm               399 drivers/gpu/drm/ttm/ttm_tt.c 	BUG_ON(ttm->caching_state != tt_cached);
ttm               403 drivers/gpu/drm/ttm/ttm_tt.c 						ttm->num_pages << PAGE_SHIFT,
ttm               415 drivers/gpu/drm/ttm/ttm_tt.c 	for (i = 0; i < ttm->num_pages; ++i) {
ttm               418 drivers/gpu/drm/ttm/ttm_tt.c 		gfp_mask |= (ttm->page_flags & TTM_PAGE_FLAG_NO_RETRY ? __GFP_RETRY_MAYFAIL : 0);
ttm               420 drivers/gpu/drm/ttm/ttm_tt.c 		from_page = ttm->pages[i];
ttm               435 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_unpopulate(ttm);
ttm               436 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->swap_storage = swap_storage;
ttm               437 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->page_flags |= TTM_PAGE_FLAG_SWAPPED;
ttm               439 drivers/gpu/drm/ttm/ttm_tt.c 		ttm->page_flags |= TTM_PAGE_FLAG_PERSISTENT_SWAP;
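
The swapin/swapout fragments (ttm_tt.c:347-439) show that TTM swaps buffer pages through an anonymous shmem file: swapout creates one sized ttm->num_pages << PAGE_SHIFT (unless a persistent file was supplied), copies every page into it, unpopulates the tt, and marks it TTM_PAGE_FLAG_SWAPPED; swapin reads the pages back and drops the file unless persistent. Both sides add __GFP_RETRY_MAYFAIL when TTM_PAGE_FLAG_NO_RETRY is set, as lines 364 and 418 show. A condensed sketch of the swapout side; the page-copy and shmem-lookup calls (copy_highpage(), shmem_read_mapping_page_gfp()) are truncated out of the listing and assumed:

    static int example_swapout(struct ttm_tt *ttm, struct file *persistent)
    {
            struct file *swap_storage = persistent;
            struct address_space *mapping;
            struct page *to_page;
            unsigned long i;

            if (!swap_storage) {
                    /* anonymous shmem file sized to the whole tt */
                    swap_storage = shmem_file_setup("ttm swap",
                                                    ttm->num_pages << PAGE_SHIFT,
                                                    0);
                    if (IS_ERR(swap_storage))
                            return PTR_ERR(swap_storage);
            }

            mapping = swap_storage->f_mapping;
            for (i = 0; i < ttm->num_pages; ++i) {
                    gfp_t gfp_mask = mapping_gfp_mask(mapping);

                    if (ttm->page_flags & TTM_PAGE_FLAG_NO_RETRY)
                            gfp_mask |= __GFP_RETRY_MAYFAIL;

                    if (!ttm->pages[i])
                            continue;
                    to_page = shmem_read_mapping_page_gfp(mapping, i, gfp_mask);
                    if (IS_ERR(to_page)) {
                            if (!persistent)
                                    fput(swap_storage);
                            return PTR_ERR(to_page);
                    }
                    copy_highpage(to_page, ttm->pages[i]);
                    set_page_dirty(to_page);
                    mark_page_accessed(to_page);
                    put_page(to_page);
            }

            ttm_tt_unpopulate(ttm);         /* system pages back to the pool */
            ttm->swap_storage = swap_storage;
            ttm->page_flags |= TTM_PAGE_FLAG_SWAPPED;
            if (persistent)
                    ttm->page_flags |= TTM_PAGE_FLAG_PERSISTENT_SWAP;
            return 0;
    }
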
ttm               449 drivers/gpu/drm/ttm/ttm_tt.c static void ttm_tt_add_mapping(struct ttm_tt *ttm)
ttm               453 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->page_flags & TTM_PAGE_FLAG_SG)
ttm               456 drivers/gpu/drm/ttm/ttm_tt.c 	for (i = 0; i < ttm->num_pages; ++i)
ttm               457 drivers/gpu/drm/ttm/ttm_tt.c 		ttm->pages[i]->mapping = ttm->bdev->dev_mapping;
ttm               460 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ttm               464 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state != tt_unpopulated)
ttm               467 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->bdev->driver->ttm_tt_populate)
ttm               468 drivers/gpu/drm/ttm/ttm_tt.c 		ret = ttm->bdev->driver->ttm_tt_populate(ttm, ctx);
ttm               470 drivers/gpu/drm/ttm/ttm_tt.c 		ret = ttm_pool_populate(ttm, ctx);
ttm               472 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_tt_add_mapping(ttm);
ttm               476 drivers/gpu/drm/ttm/ttm_tt.c static void ttm_tt_clear_mapping(struct ttm_tt *ttm)
ttm               479 drivers/gpu/drm/ttm/ttm_tt.c 	struct page **page = ttm->pages;
ttm               481 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->page_flags & TTM_PAGE_FLAG_SG)
ttm               484 drivers/gpu/drm/ttm/ttm_tt.c 	for (i = 0; i < ttm->num_pages; ++i) {
ttm               490 drivers/gpu/drm/ttm/ttm_tt.c void ttm_tt_unpopulate(struct ttm_tt *ttm)
ttm               492 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->state == tt_unpopulated)
ttm               495 drivers/gpu/drm/ttm/ttm_tt.c 	ttm_tt_clear_mapping(ttm);
ttm               496 drivers/gpu/drm/ttm/ttm_tt.c 	if (ttm->bdev->driver->ttm_tt_unpopulate)
ttm               497 drivers/gpu/drm/ttm/ttm_tt.c 		ttm->bdev->driver->ttm_tt_unpopulate(ttm);
ttm               499 drivers/gpu/drm/ttm/ttm_tt.c 		ttm_pool_unpopulate(ttm);
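
ttm_tt_populate()/ttm_tt_unpopulate() (ttm_tt.c:460-499) dispatch to the driver hook when the bdev provides one and fall back to the global page pool otherwise; on success the populate path also points each page's mapping at the device address space via ttm_tt_add_mapping(), and unpopulate clears those back-pointers first. SG-backed tts (TTM_PAGE_FLAG_SG) skip both mapping walks. The dispatch shape, condensed from the fragments above:

    static int example_populate(struct ttm_tt *ttm,
                                struct ttm_operation_ctx *ctx)
    {
            int ret;

            if (ttm->state != tt_unpopulated)
                    return 0;       /* already backed by pages */

            if (ttm->bdev->driver->ttm_tt_populate)
                    ret = ttm->bdev->driver->ttm_tt_populate(ttm, ctx);
            else
                    ret = ttm_pool_populate(ttm, ctx);      /* global pool fallback */

            if (!ret)
                    ttm_tt_add_mapping(ttm);        /* page->mapping = bdev->dev_mapping */
            return ret;
    }
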
ttm               207 drivers/gpu/drm/virtio/virtgpu_object.c 	struct page **pages = bo->tbo.ttm->pages;
ttm               219 drivers/gpu/drm/virtio/virtgpu_object.c 	if (bo->tbo.ttm->state == tt_unpopulated)
ttm               220 drivers/gpu/drm/virtio/virtgpu_object.c 		bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm, &ctx);
ttm                37 drivers/gpu/drm/virtio/virtgpu_prime.c 	if (!bo->tbo.ttm->pages || !bo->tbo.ttm->num_pages)
ttm                41 drivers/gpu/drm/virtio/virtgpu_prime.c 	return drm_prime_pages_to_sg(bo->tbo.ttm->pages,
ttm                42 drivers/gpu/drm/virtio/virtgpu_prime.c 				     bo->tbo.ttm->num_pages);
ttm               191 drivers/gpu/drm/virtio/virtgpu_ttm.c 	struct ttm_dma_tt		ttm;
ttm               195 drivers/gpu/drm/virtio/virtgpu_ttm.c static int virtio_gpu_ttm_tt_bind(struct ttm_tt *ttm,
ttm               199 drivers/gpu/drm/virtio/virtgpu_ttm.c 		container_of(ttm, struct virtio_gpu_ttm_tt, ttm.ttm);
ttm               207 drivers/gpu/drm/virtio/virtgpu_ttm.c static int virtio_gpu_ttm_tt_unbind(struct ttm_tt *ttm)
ttm               210 drivers/gpu/drm/virtio/virtgpu_ttm.c 		container_of(ttm, struct virtio_gpu_ttm_tt, ttm.ttm);
ttm               218 drivers/gpu/drm/virtio/virtgpu_ttm.c static void virtio_gpu_ttm_tt_destroy(struct ttm_tt *ttm)
ttm               221 drivers/gpu/drm/virtio/virtgpu_ttm.c 		container_of(ttm, struct virtio_gpu_ttm_tt, ttm.ttm);
ttm               223 drivers/gpu/drm/virtio/virtgpu_ttm.c 	ttm_dma_tt_fini(&gtt->ttm);
ttm               243 drivers/gpu/drm/virtio/virtgpu_ttm.c 	gtt->ttm.ttm.func = &virtio_gpu_tt_func;
ttm               245 drivers/gpu/drm/virtio/virtgpu_ttm.c 	if (ttm_dma_tt_init(&gtt->ttm, bo, page_flags)) {
ttm               249 drivers/gpu/drm/virtio/virtgpu_ttm.c 	return &gtt->ttm.ttm;
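
The virtio_gpu fragments show the canonical backend embedding pattern: a driver-private struct wraps a struct ttm_dma_tt, every hook recovers it with container_of() through the nested ttm.ttm member, the func table is installed before ttm_dma_tt_init(), and the create hook returns the innermost &gtt->ttm.ttm. A trimmed reconstruction of the destroy side; the kfree() is assumed, since the listing only shows the fini call:

    struct virtio_gpu_ttm_tt {
            struct ttm_dma_tt ttm;          /* embedded, not pointed-to */
            /* driver-private state follows in the real struct */
    };

    static void example_tt_destroy(struct ttm_tt *ttm)
    {
            struct virtio_gpu_ttm_tt *gtt =
                    container_of(ttm, struct virtio_gpu_ttm_tt, ttm.ttm);

            ttm_dma_tt_fini(&gtt->ttm);     /* releases the page directory */
            kfree(gtt);                     /* assumed: frees the wrapper */
    }
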
ttm               466 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (dst->ttm->state == tt_unpopulated) {
ttm               467 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ret = dst->ttm->bdev->driver->ttm_tt_populate(dst->ttm, &ctx);
ttm               472 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (src->ttm->state == tt_unpopulated) {
ttm               473 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ret = src->ttm->bdev->driver->ttm_tt_populate(src->ttm, &ctx);
ttm               482 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_pages = dst->ttm->pages;
ttm               483 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_pages = src->ttm->pages;
ttm               268 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_driver.ttm_tt_populate(batch->otable_bo->ttm, &ctx);
ttm               453 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_driver.ttm_tt_populate(mob->pt_bo->ttm, &ctx);
ttm               427 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->pages = vmw_tt->dma_ttm.ttm.pages;
ttm               428 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vsgt->num_pages = vmw_tt->dma_ttm.ttm.num_pages;
ttm               536 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               553 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               573 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               579 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem)
ttm               582 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               595 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 				    ttm->num_pages, vmw_be->gmr_id);
ttm               599 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 				vmw_mob_create(ttm->num_pages);
ttm               605 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 				    &vmw_be->vsgt, ttm->num_pages,
ttm               613 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_unbind(struct ttm_tt *ttm)
ttm               616 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               636 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_ttm_destroy(struct ttm_tt *ttm)
ttm               639 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               645 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ttm_tt_fini(ttm);
ttm               654 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ttm               657 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm);
ttm               662 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	if (ttm->state != tt_unpopulated)
ttm               667 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			ttm_round_pot(ttm->num_pages * sizeof(dma_addr_t));
ttm               677 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_pool_populate(ttm, ctx);
ttm               682 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_ttm_unpopulate(struct ttm_tt *ttm)
ttm               684 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct vmw_ttm_tt *vmw_tt = container_of(ttm, struct vmw_ttm_tt,
ttm               685 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 						 dma_ttm.ttm);
ttm               698 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			ttm_round_pot(ttm->num_pages * sizeof(dma_addr_t));
ttm               703 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ttm_pool_unpopulate(ttm);
ttm               722 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_be->dma_ttm.ttm.func = &vmw_ttm_func;
ttm               729 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_tt_init(&vmw_be->dma_ttm.ttm, bo, page_flags);
ttm               733 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	return &vmw_be->dma_ttm.ttm;
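
The vmwgfx backend layers two things on the same embedding pattern: its bind hook picks GMR, MOB, or plain placement (vmw_gmr_bind() at line 595 versus vmw_mob_create()/bind at 599-605), and its populate/unpopulate hooks charge the parallel dma_addr_t array against the TTM memory accounting global, rounded with ttm_round_pot() as lines 667 and 698 show. A sketch of just that accounting arithmetic; ttm_mem_global_alloc() is assumed as the accounting call, since the listing truncates it:

    static int example_account_dma_array(struct ttm_mem_global *glob,
                                         struct ttm_tt *ttm,
                                         struct ttm_operation_ctx *ctx)
    {
            /* cost of one dma_addr_t per page, rounded to a pot size */
            size_t acc_size = ttm_round_pot(ttm->num_pages *
                                            sizeof(dma_addr_t));

            /* charged before populating; ttm_mem_global_free(glob,
             * acc_size) releases it again on the unpopulate side */
            return ttm_mem_global_alloc(glob, acc_size, ctx);
    }
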
ttm               516 drivers/net/ethernet/dec/tulip/de4x5.c     u_int ttm;              /* Transmit Threshold Mode for each media    */
ttm              4635 drivers/net/ethernet/dec/tulip/de4x5.c 	lp->phy[lp->active].ttm = get_unaligned_le16(p);
ttm              4716 drivers/net/ethernet/dec/tulip/de4x5.c 	lp->phy[lp->active].ttm = get_unaligned_le16(p); p += 2;
ttm               198 include/drm/ttm/ttm_bo_api.h 	struct ttm_tt *ttm;
ttm               247 include/drm/ttm/ttm_bo_driver.h 	int (*ttm_tt_populate)(struct ttm_tt *ttm,
ttm               257 include/drm/ttm/ttm_bo_driver.h 	void (*ttm_tt_unpopulate)(struct ttm_tt *ttm);
ttm                50 include/drm/ttm/ttm_page_alloc.h int ttm_pool_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ttm                59 include/drm/ttm/ttm_page_alloc.h void ttm_pool_unpopulate(struct ttm_tt *ttm);
ttm                63 include/drm/ttm/ttm_tt.h 	int (*bind) (struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem);
ttm                73 include/drm/ttm/ttm_tt.h 	int (*unbind) (struct ttm_tt *ttm);
ttm                83 include/drm/ttm/ttm_tt.h 	void (*destroy) (struct ttm_tt *ttm);
ttm               133 include/drm/ttm/ttm_tt.h 	struct ttm_tt ttm;
ttm               161 include/drm/ttm/ttm_tt.h int ttm_tt_init(struct ttm_tt *ttm, struct ttm_buffer_object *bo,
ttm               175 include/drm/ttm/ttm_tt.h void ttm_tt_fini(struct ttm_tt *ttm);
ttm               186 include/drm/ttm/ttm_tt.h int ttm_tt_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem,
ttm               196 include/drm/ttm/ttm_tt.h void ttm_tt_destroy(struct ttm_tt *ttm);
ttm               205 include/drm/ttm/ttm_tt.h void ttm_tt_unbind(struct ttm_tt *ttm);
ttm               214 include/drm/ttm/ttm_tt.h int ttm_tt_swapin(struct ttm_tt *ttm);
ttm               229 include/drm/ttm/ttm_tt.h int ttm_tt_set_placement_caching(struct ttm_tt *ttm, uint32_t placement);
ttm               230 include/drm/ttm/ttm_tt.h int ttm_tt_swapout(struct ttm_tt *ttm, struct file *persistent_swap_storage);
ttm               239 include/drm/ttm/ttm_tt.h int ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ttm               248 include/drm/ttm/ttm_tt.h void ttm_tt_unpopulate(struct ttm_tt *ttm);
ttm               268 include/drm/ttm/ttm_tt.h int ttm_agp_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ttm               269 include/drm/ttm/ttm_tt.h void ttm_agp_tt_unpopulate(struct ttm_tt *ttm);
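
Taken together, the ttm_tt.h declarations above describe the whole lifecycle from a driver's point of view. A hedged end-to-end sketch using only functions declared in the listing; bo, bo_mem, ctx, and page_flags are assumed to come from the surrounding buffer-object code:

    /* create -> bind (which populates) -> unbind -> destroy; swapout
     * may replace the populated pages with a shmem file while unbound. */
    bo->ttm = bo->bdev->driver->ttm_tt_create(bo, page_flags);
    if (!bo->ttm)
            return -ENOMEM;

    ret = ttm_tt_bind(bo->ttm, bo_mem, &ctx);   /* calls ttm_tt_populate() first */
    if (ret) {
            ttm_tt_destroy(bo->ttm);
            bo->ttm = NULL;
            return ret;
    }

    /* ... GPU works on the bound range ... */

    ttm_tt_unbind(bo->ttm);                     /* back to tt_unbound */
    ttm_tt_destroy(bo->ttm);                    /* unpopulates + backend destroy */
    bo->ttm = NULL;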