Lines matching refs: bo_va (all uses of the bo_va identifier; the function names place these in the radeon GPU VM code, drivers/gpu/drm/radeon/radeon_vm.c)

296 struct radeon_bo_va *bo_va; in radeon_vm_bo_find() local
298 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
299 if (bo_va->vm == vm) { in radeon_vm_bo_find()
300 return bo_va; in radeon_vm_bo_find()
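
Lines 296-300 cover the whole lookup path: radeon_vm_bo_find() walks the per-BO list of mappings and returns the one belonging to the requested VM. A minimal sketch reconstructed from those matches; the signature and the NULL fall-through are assumptions based on how the other functions below use the result:

struct radeon_bo_va *radeon_vm_bo_find(struct radeon_vm *vm,
                                       struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        /* each BO keeps a list of its per-VM mappings; pick the one
         * belonging to this VM */
        list_for_each_entry(bo_va, &bo->va, bo_list) {
                if (bo_va->vm == vm)
                        return bo_va;
        }
        return NULL;    /* the BO is not mapped into this VM */
}
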
323 struct radeon_bo_va *bo_va; in radeon_vm_bo_add() local
325 bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL); in radeon_vm_bo_add()
326 if (bo_va == NULL) { in radeon_vm_bo_add()
329 bo_va->vm = vm; in radeon_vm_bo_add()
330 bo_va->bo = bo; in radeon_vm_bo_add()
331 bo_va->it.start = 0; in radeon_vm_bo_add()
332 bo_va->it.last = 0; in radeon_vm_bo_add()
333 bo_va->flags = 0; in radeon_vm_bo_add()
334 bo_va->ref_count = 1; in radeon_vm_bo_add()
335 INIT_LIST_HEAD(&bo_va->bo_list); in radeon_vm_bo_add()
336 INIT_LIST_HEAD(&bo_va->vm_status); in radeon_vm_bo_add()
339 list_add_tail(&bo_va->bo_list, &bo->va); in radeon_vm_bo_add()
342 return bo_va; in radeon_vm_bo_add()
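
Lines 323-342 show radeon_vm_bo_add() allocating a fresh mapping and initializing it to an unmapped state (it.start == it.last == 0). A sketch of that sequence; the locking around the list insertion (the gap between lines 336 and 339) is an assumption:

struct radeon_bo_va *radeon_vm_bo_add(struct radeon_device *rdev,
                                      struct radeon_vm *vm,
                                      struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        bo_va = kzalloc(sizeof(struct radeon_bo_va), GFP_KERNEL);
        if (bo_va == NULL)
                return NULL;

        bo_va->vm = vm;
        bo_va->bo = bo;
        bo_va->it.start = 0;    /* no GPU address assigned yet */
        bo_va->it.last = 0;
        bo_va->flags = 0;
        bo_va->ref_count = 1;
        INIT_LIST_HEAD(&bo_va->bo_list);
        INIT_LIST_HEAD(&bo_va->vm_status);

        mutex_lock(&vm->mutex);         /* assumed; not visible in the matches */
        list_add_tail(&bo_va->bo_list, &bo->va);
        mutex_unlock(&vm->mutex);

        return bo_va;
}
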
446 struct radeon_bo_va *bo_va, in radeon_vm_bo_set_addr() argument
450 uint64_t size = radeon_bo_size(bo_va->bo); in radeon_vm_bo_set_addr()
451 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_set_addr()
482 if (it && it != &bo_va->it) { in radeon_vm_bo_set_addr()
487 "(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo, in radeon_vm_bo_set_addr()
495 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_set_addr()
504 tmp->it.start = bo_va->it.start; in radeon_vm_bo_set_addr()
505 tmp->it.last = bo_va->it.last; in radeon_vm_bo_set_addr()
507 tmp->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_set_addr()
509 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
511 bo_va->it.start = 0; in radeon_vm_bo_set_addr()
512 bo_va->it.last = 0; in radeon_vm_bo_set_addr()
513 list_del_init(&bo_va->vm_status); in radeon_vm_bo_set_addr()
520 bo_va->it.start = soffset; in radeon_vm_bo_set_addr()
521 bo_va->it.last = eoffset; in radeon_vm_bo_set_addr()
522 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_set_addr()
524 interval_tree_insert(&bo_va->it, &vm->va); in radeon_vm_bo_set_addr()
527 bo_va->flags = flags; in radeon_vm_bo_set_addr()
537 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
580 radeon_bo_unreserve(bo_va->bo); in radeon_vm_bo_set_addr()
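
The radeon_vm_bo_set_addr() matches (lines 446-580) show the interval-tree bookkeeping that backs address assignment: reject a requested range that overlaps a different mapping, hand a still-live old range to a temporary clone on vm->freed so its PTEs can be cleared later, then insert the new range and mark it cleared. A condensed sketch of that flow; the page-size conversion, locking, page-table allocation, and most error handling are assumed or elided:

int radeon_vm_bo_set_addr(struct radeon_device *rdev,
                          struct radeon_bo_va *bo_va,
                          uint64_t soffset, uint32_t flags)
{
        uint64_t size = radeon_bo_size(bo_va->bo);
        struct radeon_vm *vm = bo_va->vm;
        uint64_t eoffset = soffset ? soffset + size - 1 : 0;
        struct interval_tree_node *it;

        /* ranges are tracked in GPU pages (assumed conversion) */
        soffset /= RADEON_GPU_PAGE_SIZE;
        eoffset /= RADEON_GPU_PAGE_SIZE;

        /* reject the range if it overlaps another mapping */
        it = interval_tree_iter_first(&vm->va, soffset, eoffset);
        if (it && it != &bo_va->it) {
                struct radeon_bo_va *tmp;
                tmp = container_of(it, struct radeon_bo_va, it);
                dev_err(rdev->dev, "bo %p va 0x%010Lx conflict with "
                        "(bo %p 0x%010lx 0x%010lx)\n", bo_va->bo,
                        soffset, tmp->bo, tmp->it.start, tmp->it.last);
                radeon_bo_unreserve(bo_va->bo);
                return -EINVAL;
        }

        if (bo_va->it.start || bo_va->it.last) {
                /* the old range is still mapped: clone it onto vm->freed
                 * so its page-table entries can be cleared later */
                struct radeon_bo_va *tmp;

                tmp = kzalloc(sizeof(*tmp), GFP_KERNEL);
                /* allocation-failure handling elided */
                tmp->it.start = bo_va->it.start;
                tmp->it.last = bo_va->it.last;
                tmp->vm = vm;
                tmp->bo = radeon_bo_ref(bo_va->bo);

                interval_tree_remove(&bo_va->it, &vm->va);
                bo_va->it.start = 0;
                bo_va->it.last = 0;
                list_del_init(&bo_va->vm_status);
                list_add(&tmp->vm_status, &vm->freed);
        }

        if (soffset || eoffset) {
                bo_va->it.start = soffset;
                bo_va->it.last = eoffset;
                list_add(&bo_va->vm_status, &vm->cleared);
                interval_tree_insert(&bo_va->it, &vm->va);
        }

        bo_va->flags = flags;

        /* page-table allocation for the new range elided; lines 537 and
         * 580 show the BO being unreserved on the way out */
        radeon_bo_unreserve(bo_va->bo);
        return 0;
}
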
911 struct radeon_bo_va *bo_va, in radeon_vm_bo_update() argument
914 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_update()
921 if (!bo_va->it.start) { in radeon_vm_bo_update()
923 bo_va->bo, vm); in radeon_vm_bo_update()
929 if (list_empty(&bo_va->vm_status)) { in radeon_vm_bo_update()
933 list_del_init(&bo_va->vm_status); in radeon_vm_bo_update()
935 list_del(&bo_va->vm_status); in radeon_vm_bo_update()
936 list_add(&bo_va->vm_status, &vm->cleared); in radeon_vm_bo_update()
940 bo_va->flags &= ~RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
941 bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
942 bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
943 if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
944 bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE; in radeon_vm_bo_update()
949 bo_va->flags |= RADEON_VM_PAGE_VALID; in radeon_vm_bo_update()
952 bo_va->flags |= RADEON_VM_PAGE_SYSTEM; in radeon_vm_bo_update()
953 if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC))) in radeon_vm_bo_update()
954 bo_va->flags |= RADEON_VM_PAGE_SNOOPED; in radeon_vm_bo_update()
963 trace_radeon_vm_bo_update(bo_va); in radeon_vm_bo_update()
965 nptes = bo_va->it.last - bo_va->it.start + 1; in radeon_vm_bo_update()
974 flags = radeon_vm_page_flags(bo_va->flags); in radeon_vm_bo_update()
1003 if (!(bo_va->flags & RADEON_VM_PAGE_VALID)) { in radeon_vm_bo_update()
1010 r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start, in radeon_vm_bo_update()
1011 bo_va->it.last + 1, addr, in radeon_vm_bo_update()
1012 radeon_vm_page_flags(bo_va->flags)); in radeon_vm_bo_update()
1027 radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence); in radeon_vm_bo_update()
1028 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_update()
1029 bo_va->last_pt_update = radeon_fence_ref(ib.fence); in radeon_vm_bo_update()
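
The radeon_vm_bo_update() matches (lines 911-1029) show the core of PTE maintenance: bail out if the bo_va has no address, fix up the status lists, recompute the VALID/SYSTEM/SNOOPED flags from the BO's current placement, then rewrite the PTEs for the whole interval and fence the touched page tables. A condensed sketch; IB sizing and submission are elided, and the ttm_mem_reg handling is an assumption based on the flag logic at lines 940-954:

int radeon_vm_bo_update(struct radeon_device *rdev,
                        struct radeon_bo_va *bo_va,
                        struct ttm_mem_reg *mem)
{
        struct radeon_vm *vm = bo_va->vm;
        struct radeon_ib ib;
        unsigned nptes;
        uint64_t addr, flags;
        int r;

        if (!bo_va->it.start) {
                dev_err(rdev->dev, "bo %p has no mapping in vm %p\n",
                        bo_va->bo, vm);
                return -EINVAL;
        }

        spin_lock(&vm->status_lock);
        if (mem) {
                if (list_empty(&bo_va->vm_status)) {
                        spin_unlock(&vm->status_lock);
                        return 0;       /* nothing was invalidated */
                }
                list_del_init(&bo_va->vm_status);
        } else {
                /* unmap: account the range as cleared */
                list_del(&bo_va->vm_status);
                list_add(&bo_va->vm_status, &vm->cleared);
        }
        spin_unlock(&vm->status_lock);

        /* recompute mapping flags from the BO's current placement */
        bo_va->flags &= ~RADEON_VM_PAGE_VALID;
        bo_va->flags &= ~RADEON_VM_PAGE_SYSTEM;
        bo_va->flags &= ~RADEON_VM_PAGE_SNOOPED;
        if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm))
                bo_va->flags &= ~RADEON_VM_PAGE_WRITEABLE;

        if (mem) {
                addr = (u64)mem->start << PAGE_SHIFT;
                if (mem->mem_type != TTM_PL_SYSTEM)
                        bo_va->flags |= RADEON_VM_PAGE_VALID;
                if (mem->mem_type == TTM_PL_TT) {
                        bo_va->flags |= RADEON_VM_PAGE_SYSTEM;
                        if (!(bo_va->bo->flags & (RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC)))
                                bo_va->flags |= RADEON_VM_PAGE_SNOOPED;
                }
        } else {
                addr = 0;       /* write cleared PTEs */
        }

        trace_radeon_vm_bo_update(bo_va);

        nptes = bo_va->it.last - bo_va->it.start + 1;   /* sizes the IB */
        flags = radeon_vm_page_flags(bo_va->flags);     /* ditto, line 974 */

        /* IB allocation elided; per line 1003, updating a mapping that is
         * no longer VALID is synced against prior uses of the VM */
        r = radeon_vm_update_ptes(rdev, vm, &ib, bo_va->it.start,
                                  bo_va->it.last + 1, addr,
                                  radeon_vm_page_flags(bo_va->flags));
        if (r)
                return r;

        /* after submitting the IB, fence the touched page tables and
         * remember the fence as the last PT update */
        radeon_vm_fence_pts(vm, bo_va->it.start, bo_va->it.last + 1, ib.fence);
        radeon_fence_unref(&bo_va->last_pt_update);
        bo_va->last_pt_update = radeon_fence_ref(ib.fence);
        return 0;
}
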
1049 struct radeon_bo_va *bo_va; in radeon_vm_clear_freed() local
1054 bo_va = list_first_entry(&vm->freed, in radeon_vm_clear_freed()
1058 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_freed()
1059 radeon_bo_unref(&bo_va->bo); in radeon_vm_clear_freed()
1060 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_clear_freed()
1062 list_del(&bo_va->vm_status); in radeon_vm_clear_freed()
1063 kfree(bo_va); in radeon_vm_clear_freed()
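
Lines 1049-1063 are radeon_vm_clear_freed(), which drains vm->freed: each entry is a stale mapping whose PTEs still need clearing, so it is updated with NULL memory (writing cleared PTEs), its extra BO reference is dropped, and the clone is freed. A sketch assuming the loop drops and re-takes vm->status_lock around each update:

int radeon_vm_clear_freed(struct radeon_device *rdev,
                          struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va;
        int r = 0;

        spin_lock(&vm->status_lock);
        while (!list_empty(&vm->freed)) {
                bo_va = list_first_entry(&vm->freed,
                                         struct radeon_bo_va, vm_status);
                spin_unlock(&vm->status_lock);

                /* NULL mem means: write cleared PTEs over the old range */
                r = radeon_vm_bo_update(rdev, bo_va, NULL);
                radeon_bo_unref(&bo_va->bo);    /* drop the clone's extra ref */
                radeon_fence_unref(&bo_va->last_pt_update);

                spin_lock(&vm->status_lock);
                list_del(&bo_va->vm_status);
                kfree(bo_va);
                if (r)
                        break;
        }
        spin_unlock(&vm->status_lock);
        return r;
}
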
1087 struct radeon_bo_va *bo_va; in radeon_vm_clear_invalids() local
1092 bo_va = list_first_entry(&vm->invalidated, in radeon_vm_clear_invalids()
1096 r = radeon_vm_bo_update(rdev, bo_va, NULL); in radeon_vm_clear_invalids()
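
Lines 1087-1096 show the same drain pattern for vm->invalidated: BOs that moved since their last update get their stale PTEs cleared until command submission re-validates them. A sketch on the same locking assumption as above:

int radeon_vm_clear_invalids(struct radeon_device *rdev,
                             struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va;
        int r;

        spin_lock(&vm->status_lock);
        while (!list_empty(&vm->invalidated)) {
                bo_va = list_first_entry(&vm->invalidated,
                                         struct radeon_bo_va, vm_status);
                spin_unlock(&vm->status_lock);

                r = radeon_vm_bo_update(rdev, bo_va, NULL);
                if (r)
                        return r;

                spin_lock(&vm->status_lock);
        }
        spin_unlock(&vm->status_lock);
        return 0;
}
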
1118 struct radeon_bo_va *bo_va) in radeon_vm_bo_rmv() argument
1120 struct radeon_vm *vm = bo_va->vm; in radeon_vm_bo_rmv()
1122 list_del(&bo_va->bo_list); in radeon_vm_bo_rmv()
1125 if (bo_va->it.start || bo_va->it.last) in radeon_vm_bo_rmv()
1126 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_bo_rmv()
1129 list_del(&bo_va->vm_status); in radeon_vm_bo_rmv()
1130 if (bo_va->it.start || bo_va->it.last) { in radeon_vm_bo_rmv()
1131 bo_va->bo = radeon_bo_ref(bo_va->bo); in radeon_vm_bo_rmv()
1132 list_add(&bo_va->vm_status, &vm->freed); in radeon_vm_bo_rmv()
1134 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_bo_rmv()
1135 kfree(bo_va); in radeon_vm_bo_rmv()
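
Lines 1118-1135 are the removal path. The key decision sits at lines 1130-1135: if the mapping still owns an address range, it cannot be freed yet because its PTEs must be cleared first, so the code takes an extra BO reference and parks the bo_va on vm->freed for radeon_vm_clear_freed(); only an unmapped bo_va is freed on the spot. A sketch with assumed mutex and spinlock placement:

void radeon_vm_bo_rmv(struct radeon_device *rdev,
                      struct radeon_bo_va *bo_va)
{
        struct radeon_vm *vm = bo_va->vm;

        list_del(&bo_va->bo_list);

        mutex_lock(&vm->mutex);                 /* assumed */
        if (bo_va->it.start || bo_va->it.last)
                interval_tree_remove(&bo_va->it, &vm->va);

        spin_lock(&vm->status_lock);            /* assumed */
        list_del(&bo_va->vm_status);
        if (bo_va->it.start || bo_va->it.last) {
                /* PTEs still live: keep the BO and defer to vm->freed */
                bo_va->bo = radeon_bo_ref(bo_va->bo);
                list_add(&bo_va->vm_status, &vm->freed);
        } else {
                radeon_fence_unref(&bo_va->last_pt_update);
                kfree(bo_va);
        }
        spin_unlock(&vm->status_lock);
        mutex_unlock(&vm->mutex);
}
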
1154 struct radeon_bo_va *bo_va; in radeon_vm_bo_invalidate() local
1156 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_invalidate()
1157 spin_lock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
1158 if (list_empty(&bo_va->vm_status) && in radeon_vm_bo_invalidate()
1159 (bo_va->it.start || bo_va->it.last)) in radeon_vm_bo_invalidate()
1160 list_add(&bo_va->vm_status, &bo_va->vm->invalidated); in radeon_vm_bo_invalidate()
1161 spin_unlock(&bo_va->vm->status_lock); in radeon_vm_bo_invalidate()
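
Lines 1154-1161 show radeon_vm_bo_invalidate(): when a BO moves, every VM that maps it gets the mapping queued on its invalidated list, but only if the mapping actually has an address range and is not already on a status list. Reconstructed from the matches, this is essentially the complete body:

void radeon_vm_bo_invalidate(struct radeon_device *rdev,
                             struct radeon_bo *bo)
{
        struct radeon_bo_va *bo_va;

        list_for_each_entry(bo_va, &bo->va, bo_list) {
                spin_lock(&bo_va->vm->status_lock);
                /* queue only mappings that have an address and are not
                 * already queued for processing */
                if (list_empty(&bo_va->vm_status) &&
                    (bo_va->it.start || bo_va->it.last))
                        list_add(&bo_va->vm_status, &bo_va->vm->invalidated);
                spin_unlock(&bo_va->vm->status_lock);
        }
}
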
1231 struct radeon_bo_va *bo_va, *tmp; in radeon_vm_fini() local
1237 rbtree_postorder_for_each_entry_safe(bo_va, tmp, &vm->va, it.rb) { in radeon_vm_fini()
1238 interval_tree_remove(&bo_va->it, &vm->va); in radeon_vm_fini()
1239 r = radeon_bo_reserve(bo_va->bo, false); in radeon_vm_fini()
1241 list_del_init(&bo_va->bo_list); in radeon_vm_fini()
1242 radeon_bo_unreserve(bo_va->bo); in radeon_vm_fini()
1243 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1244 kfree(bo_va); in radeon_vm_fini()
1247 list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) { in radeon_vm_fini()
1248 radeon_bo_unref(&bo_va->bo); in radeon_vm_fini()
1249 radeon_fence_unref(&bo_va->last_pt_update); in radeon_vm_fini()
1250 kfree(bo_va); in radeon_vm_fini()
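
Finally, lines 1231-1250 show the two teardown loops in radeon_vm_fini(): mappings still in the interval tree are unlinked from their BOs and freed (reserving each BO first so the bo_list manipulation is safe), then whatever is still parked on vm->freed is dropped together with its extra BO reference. A sketch of just these loops; the surrounding page-table and page-directory teardown is elided:

void radeon_vm_fini(struct radeon_device *rdev, struct radeon_vm *vm)
{
        struct radeon_bo_va *bo_va, *tmp;
        int r;

        rbtree_postorder_for_each_entry_safe(bo_va, tmp, &vm->va, it.rb) {
                interval_tree_remove(&bo_va->it, &vm->va);
                r = radeon_bo_reserve(bo_va->bo, false);
                if (!r) {
                        list_del_init(&bo_va->bo_list);
                        radeon_bo_unreserve(bo_va->bo);
                        radeon_fence_unref(&bo_va->last_pt_update);
                        kfree(bo_va);
                }
        }
        list_for_each_entry_safe(bo_va, tmp, &vm->freed, vm_status) {
                radeon_bo_unref(&bo_va->bo);
                radeon_fence_unref(&bo_va->last_pt_update);
                kfree(bo_va);
        }

        /* page-table / page-directory cleanup elided */
}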