Lines matching refs: rmn

70 struct amdgpu_mn *rmn = container_of(work, struct amdgpu_mn, work); in amdgpu_mn_destroy() local
71 struct amdgpu_device *adev = rmn->adev; in amdgpu_mn_destroy()
76 mutex_lock(&rmn->lock); in amdgpu_mn_destroy()
77 hash_del(&rmn->node); in amdgpu_mn_destroy()
78 rbtree_postorder_for_each_entry_safe(node, next_node, &rmn->objects, in amdgpu_mn_destroy()
81 interval_tree_remove(&node->it, &rmn->objects); in amdgpu_mn_destroy()
88 mutex_unlock(&rmn->lock); in amdgpu_mn_destroy()
90 mmu_notifier_unregister(&rmn->mn, rmn->mm); in amdgpu_mn_destroy()
91 kfree(rmn); in amdgpu_mn_destroy()
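
The matches from lines 70-91 above are the deferred teardown worker. A minimal reconstruction of how they fit together, assuming the struct layout the matches imply; the unmatched lines in between (the adev->mn_lock nesting and the per-node bo cleanup) are filled in as assumptions, not verbatim source:

    /* layout implied by the matches -- reconstruction, not verbatim */
    struct amdgpu_mn {
            struct amdgpu_device    *adev;          /* line 71 */
            struct mm_struct        *mm;            /* line 90 */
            struct mmu_notifier     mn;             /* lines 90, 105 */
            struct work_struct      work;           /* lines 70, 106 */
            struct hlist_node       node;           /* lines 77, 193, 213 */
            struct mutex            lock;
            struct rb_root          objects;        /* interval tree */
    };

    struct amdgpu_mn_node {
            struct interval_tree_node       it;
            struct list_head                bos;    /* amdgpu_bo.mn_list */
    };

    static void amdgpu_mn_destroy(struct work_struct *work)
    {
            struct amdgpu_mn *rmn = container_of(work, struct amdgpu_mn, work);
            struct amdgpu_device *adev = rmn->adev;
            struct amdgpu_mn_node *node, *next_node;
            struct amdgpu_bo *bo, *next_bo;

            mutex_lock(&adev->mn_lock);             /* assumed outer lock */
            mutex_lock(&rmn->lock);
            hash_del(&rmn->node);

            /* postorder walk is safe against kfree() of the nodes */
            rbtree_postorder_for_each_entry_safe(node, next_node, &rmn->objects,
                                                 it.rb) {
                    interval_tree_remove(&node->it, &rmn->objects);
                    list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) {
                            bo->mn = NULL;
                            list_del_init(&bo->mn_list);
                    }
                    kfree(node);
            }

            mutex_unlock(&rmn->lock);
            mutex_unlock(&adev->mn_lock);

            mmu_notifier_unregister(&rmn->mn, rmn->mm);
            kfree(rmn);
    }
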
105 struct amdgpu_mn *rmn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_release() local
106 INIT_WORK(&rmn->work, amdgpu_mn_destroy); in amdgpu_mn_release()
107 schedule_work(&rmn->work); in amdgpu_mn_release()
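
Lines 105-107 are the entire ->release() hook: a notifier may not unregister itself from inside its own callback, so the teardown above is pushed to a worker. A sketch, assuming the standard mmu_notifier ->release signature:

    static void amdgpu_mn_release(struct mmu_notifier *mn,
                                  struct mm_struct *mm)
    {
            struct amdgpu_mn *rmn = container_of(mn, struct amdgpu_mn, mn);

            /* defer mmu_notifier_unregister() and kfree() to a worker */
            INIT_WORK(&rmn->work, amdgpu_mn_destroy);
            schedule_work(&rmn->work);
    }
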
126 struct amdgpu_mn *rmn = container_of(mn, struct amdgpu_mn, mn); in amdgpu_mn_invalidate_range_start() local
132 mutex_lock(&rmn->lock); in amdgpu_mn_invalidate_range_start()
134 it = interval_tree_iter_first(&rmn->objects, start, end); in amdgpu_mn_invalidate_range_start()
169 mutex_unlock(&rmn->lock); in amdgpu_mn_invalidate_range_start()
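
Lines 126-169 are the invalidation hook. The matches show only the locking and the interval-tree lookup; the per-node work (lines 135-168, which would wait for pending GPU access and invalidate each bo's userptr mapping) never references rmn and is elided below. A sketch under those assumptions:

    static void amdgpu_mn_invalidate_range_start(struct mmu_notifier *mn,
                                                 struct mm_struct *mm,
                                                 unsigned long start,
                                                 unsigned long end)
    {
            struct amdgpu_mn *rmn = container_of(mn, struct amdgpu_mn, mn);
            struct interval_tree_node *it;

            /* the notification range is exclusive, the tree is inclusive */
            end -= 1;

            mutex_lock(&rmn->lock);

            it = interval_tree_iter_first(&rmn->objects, start, end);
            while (it) {
                    struct amdgpu_mn_node *node =
                            container_of(it, struct amdgpu_mn_node, it);

                    /* advance before the node is touched below */
                    it = interval_tree_iter_next(it, start, end);

                    /* ... per-bo invalidation of node->bos elided ... */
            }

            mutex_unlock(&rmn->lock);
    }
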
187 struct amdgpu_mn *rmn; in amdgpu_mn_get() local
193 hash_for_each_possible(adev->mn_hash, rmn, node, (unsigned long)mm) in amdgpu_mn_get()
194 if (rmn->mm == mm) in amdgpu_mn_get()
197 rmn = kzalloc(sizeof(*rmn), GFP_KERNEL); in amdgpu_mn_get()
198 if (!rmn) { in amdgpu_mn_get()
199 rmn = ERR_PTR(-ENOMEM); in amdgpu_mn_get()
203 rmn->adev = adev; in amdgpu_mn_get()
204 rmn->mm = mm; in amdgpu_mn_get()
205 rmn->mn.ops = &amdgpu_mn_ops; in amdgpu_mn_get()
206 mutex_init(&rmn->lock); in amdgpu_mn_get()
207 rmn->objects = RB_ROOT; in amdgpu_mn_get()
209 r = __mmu_notifier_register(&rmn->mn, mm); in amdgpu_mn_get()
213 hash_add(adev->mn_hash, &rmn->node, (unsigned long)mm); in amdgpu_mn_get()
219 return rmn; in amdgpu_mn_get()
224 kfree(rmn); in amdgpu_mn_get()
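
Lines 187-224 are the get-or-create path: one amdgpu_mn per mm_struct, hashed in adev->mn_hash under the mm pointer. A reconstruction; the locking and error labels sit on unmatched lines and are assumptions, though __mmu_notifier_register() (line 209) is the variant that expects mmap_sem to be held for write, which is why the sketch takes it:

    static struct amdgpu_mn *amdgpu_mn_get(struct amdgpu_device *adev)
    {
            struct mm_struct *mm = current->mm;
            struct amdgpu_mn *rmn;
            int r;

            down_write(&mm->mmap_sem);
            mutex_lock(&adev->mn_lock);

            /* reuse an existing notifier for this mm if there is one */
            hash_for_each_possible(adev->mn_hash, rmn, node, (unsigned long)mm)
                    if (rmn->mm == mm)
                            goto release_locks;

            rmn = kzalloc(sizeof(*rmn), GFP_KERNEL);
            if (!rmn) {
                    rmn = ERR_PTR(-ENOMEM);
                    goto release_locks;
            }

            rmn->adev = adev;
            rmn->mm = mm;
            rmn->mn.ops = &amdgpu_mn_ops;
            mutex_init(&rmn->lock);
            rmn->objects = RB_ROOT;

            r = __mmu_notifier_register(&rmn->mn, mm);
            if (r)
                    goto free_rmn;

            hash_add(adev->mn_hash, &rmn->node, (unsigned long)mm);

    release_locks:
            mutex_unlock(&adev->mn_lock);
            up_write(&mm->mmap_sem);
            return rmn;

    free_rmn:
            mutex_unlock(&adev->mn_lock);
            up_write(&mm->mmap_sem);
            kfree(rmn);
            return ERR_PTR(r);
    }

Note how the ERR_PTR(-ENOMEM) assignment at line 199 lets both the lookup hit and the allocation failure return through the same release_locks path.
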
242 struct amdgpu_mn *rmn; in amdgpu_mn_register() local
247 rmn = amdgpu_mn_get(adev); in amdgpu_mn_register()
248 if (IS_ERR(rmn)) in amdgpu_mn_register()
249 return PTR_ERR(rmn); in amdgpu_mn_register()
253 mutex_lock(&rmn->lock); in amdgpu_mn_register()
255 while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) { in amdgpu_mn_register()
258 interval_tree_remove(&node->it, &rmn->objects); in amdgpu_mn_register()
267 mutex_unlock(&rmn->lock); in amdgpu_mn_register()
272 bo->mn = rmn; in amdgpu_mn_register()
280 interval_tree_insert(&node->it, &rmn->objects); in amdgpu_mn_register()
282 mutex_unlock(&rmn->lock); in amdgpu_mn_register()
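
Lines 242-282 attach a bo's userptr range to the tree. The loop at line 255 is the interesting part: any already-tracked interval overlapping the new [addr, end] range is removed and merged, so overlapping registrations collapse into a single node. A sketch; amdgpu_bo_size() for the range length and bo->adev as the device back-pointer are assumptions filled in from context:

    int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr)
    {
            unsigned long end = addr + amdgpu_bo_size(bo) - 1; /* inclusive */
            struct amdgpu_mn_node *node = NULL;
            struct interval_tree_node *it;
            struct amdgpu_mn *rmn;
            struct list_head bos;

            rmn = amdgpu_mn_get(bo->adev);
            if (IS_ERR(rmn))
                    return PTR_ERR(rmn);

            INIT_LIST_HEAD(&bos);

            mutex_lock(&rmn->lock);

            /* merge all overlapping intervals, keeping the last node and
             * collecting the bos the removed ones carried */
            while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) {
                    kfree(node);
                    node = container_of(it, struct amdgpu_mn_node, it);
                    interval_tree_remove(&node->it, &rmn->objects);
                    addr = min(it->start, addr);
                    end = max(it->last, end);
                    list_splice(&node->bos, &bos);
            }

            if (!node) {
                    node = kmalloc(sizeof(*node), GFP_KERNEL);
                    if (!node) {
                            mutex_unlock(&rmn->lock);
                            return -ENOMEM;
                    }
            }

            bo->mn = rmn;

            node->it.start = addr;
            node->it.last = end;
            INIT_LIST_HEAD(&node->bos);
            list_splice(&bos, &node->bos);
            list_add(&bo->mn_list, &node->bos);

            interval_tree_insert(&node->it, &rmn->objects);

            mutex_unlock(&rmn->lock);

            return 0;
    }
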
297 struct amdgpu_mn *rmn; in amdgpu_mn_unregister() local
301 rmn = bo->mn; in amdgpu_mn_unregister()
302 if (rmn == NULL) { in amdgpu_mn_unregister()
307 mutex_lock(&rmn->lock); in amdgpu_mn_unregister()
317 interval_tree_remove(&node->it, &rmn->objects); in amdgpu_mn_unregister()
321 mutex_unlock(&rmn->lock); in amdgpu_mn_unregister()
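
Lines 297-321 are the inverse: take the bo off its node's list and drop the node once the list runs empty. The gap between lines 302 and 307 suggests the NULL branch releases an outer lock before returning; the adev->mn_lock shown below (serializing against the destroy worker) is that assumption:

    void amdgpu_mn_unregister(struct amdgpu_bo *bo)
    {
            struct amdgpu_device *adev = bo->adev;  /* assumed back-pointer */
            struct amdgpu_mn *rmn;
            struct list_head *head;

            mutex_lock(&adev->mn_lock);
            rmn = bo->mn;
            if (rmn == NULL) {
                    mutex_unlock(&adev->mn_lock);
                    return;
            }

            mutex_lock(&rmn->lock);

            /* save our neighbour: if the list runs empty after list_del(),
             * this points back at node->bos itself */
            head = bo->mn_list.next;

            bo->mn = NULL;
            list_del(&bo->mn_list);

            if (list_empty(head)) {
                    struct amdgpu_mn_node *node =
                            container_of(head, struct amdgpu_mn_node, bos);

                    interval_tree_remove(&node->it, &rmn->objects);
                    kfree(node);
            }

            mutex_unlock(&rmn->lock);
            mutex_unlock(&adev->mn_lock);
    }
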