Lines matching refs:rmn (references to the local variable rmn in drivers/gpu/drm/radeon/radeon_mn.c)
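The listing below touches seven fields of struct radeon_mn: rdev, mm, mn, work, node, lock and objects, plus the it member of the per-range node type. A minimal sketch of the structures those references imply; the comments, the field grouping and the bos list are inferred from context, not taken from the listing:

#include <linux/mmu_notifier.h>
#include <linux/workqueue.h>
#include <linux/hashtable.h>
#include <linux/interval_tree.h>
#include <linux/rbtree.h>
#include <linux/list.h>
#include <linux/mutex.h>

struct radeon_mn {
	struct radeon_device	*rdev;		/* rmn->rdev: owning device */
	struct mm_struct	*mm;		/* rmn->mm: tracked address space */
	struct mmu_notifier	mn;		/* rmn->mn: registered notifier */

	struct work_struct	work;		/* rmn->work: deferred destruction */
	struct hlist_node	node;		/* rmn->node: entry in rdev->mn_hash */

	struct mutex		lock;		/* rmn->lock: protects objects */
	struct rb_root		objects;	/* rmn->objects: interval tree of ranges */
};

struct radeon_mn_node {
	struct interval_tree_node	it;	/* node->it: one tracked userptr range */
	struct list_head		bos;	/* BOs sharing that range (assumed) */
};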

70 	struct radeon_mn *rmn = container_of(work, struct radeon_mn, work);  in radeon_mn_destroy()  local
71 struct radeon_device *rdev = rmn->rdev; in radeon_mn_destroy()
76 mutex_lock(&rmn->lock); in radeon_mn_destroy()
77 hash_del(&rmn->node); in radeon_mn_destroy()
78 rbtree_postorder_for_each_entry_safe(node, next_node, &rmn->objects, in radeon_mn_destroy()
81 interval_tree_remove(&node->it, &rmn->objects); in radeon_mn_destroy()
88 mutex_unlock(&rmn->lock); in radeon_mn_destroy()
90 mmu_notifier_unregister(&rmn->mn, rmn->mm); in radeon_mn_destroy()
91 kfree(rmn); in radeon_mn_destroy()
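Lines 70 to 91 are the teardown path. radeon_mn_destroy() runs as a work item: it drops the rmn from the device hash, empties the interval tree of tracked ranges, unregisters the MMU notifier and frees the structure. A condensed sketch of how the referenced lines fit together; the device-level mn_lock and the per-node buffer-object cleanup (the lines between 81 and 88 that the listing omits) are filled in by assumption:

static void radeon_mn_destroy(struct work_struct *work)
{
	struct radeon_mn *rmn = container_of(work, struct radeon_mn, work);
	struct radeon_device *rdev = rmn->rdev;
	struct radeon_mn_node *node, *next_node;

	mutex_lock(&rdev->mn_lock);			/* assumed device lock */
	mutex_lock(&rmn->lock);
	hash_del(&rmn->node);				/* leave rdev->mn_hash */

	rbtree_postorder_for_each_entry_safe(node, next_node, &rmn->objects,
					     it.rb) {
		interval_tree_remove(&node->it, &rmn->objects);
		/* lines not shown here: detach the BOs under this node (assumed) */
		kfree(node);
	}

	mutex_unlock(&rmn->lock);
	mutex_unlock(&rdev->mn_lock);			/* assumed */

	mmu_notifier_unregister(&rmn->mn, rmn->mm);
	kfree(rmn);
}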
105 struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn); in radeon_mn_release() local
106 INIT_WORK(&rmn->work, radeon_mn_destroy); in radeon_mn_release()
107 schedule_work(&rmn->work); in radeon_mn_release()
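Lines 105 to 107 are the notifier's release callback. Calling mmu_notifier_unregister() on itself from inside the callback is not allowed, so it defers the teardown above to a worker instead. A sketch; the function signature is the standard mmu_notifier_ops release prototype, which the listing does not show:

static void radeon_mn_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
	struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);

	/* Unregistering from inside the callback would deadlock, so defer. */
	INIT_WORK(&rmn->work, radeon_mn_destroy);
	schedule_work(&rmn->work);
}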
126 struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn); in radeon_mn_invalidate_range_start() local
132 mutex_lock(&rmn->lock); in radeon_mn_invalidate_range_start()
134 it = interval_tree_iter_first(&rmn->objects, start, end); in radeon_mn_invalidate_range_start()
168 mutex_unlock(&rmn->lock); in radeon_mn_invalidate_range_start()
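Lines 126 to 168 are the invalidation callback: under rmn->lock it walks every interval-tree node overlapping the invalidated range and quiesces the affected userptr BOs before the kernel unmaps the pages. Only the lock-and-iterate skeleton appears in the listing; the signature (the older mm/start/end prototype), the inclusive-end adjustment and the loop body are assumptions:

static void radeon_mn_invalidate_range_start(struct mmu_notifier *mn,
					     struct mm_struct *mm,
					     unsigned long start,
					     unsigned long end)
{
	struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);
	struct interval_tree_node *it;

	/* The kernel interval tree uses inclusive end points (assumed here). */
	end -= 1;

	mutex_lock(&rmn->lock);

	it = interval_tree_iter_first(&rmn->objects, start, end);
	while (it) {
		/*
		 * Body omitted from the listing: wait for pending GPU work on
		 * the BOs in this range and evict them so the pages can be
		 * unmapped safely (assumed from context).
		 */
		it = interval_tree_iter_next(it, start, end);
	}

	mutex_unlock(&rmn->lock);
}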
186 struct radeon_mn *rmn; in radeon_mn_get() local
192 hash_for_each_possible(rdev->mn_hash, rmn, node, (unsigned long)mm) in radeon_mn_get()
193 if (rmn->mm == mm) in radeon_mn_get()
196 rmn = kzalloc(sizeof(*rmn), GFP_KERNEL); in radeon_mn_get()
197 if (!rmn) { in radeon_mn_get()
198 rmn = ERR_PTR(-ENOMEM); in radeon_mn_get()
202 rmn->rdev = rdev; in radeon_mn_get()
203 rmn->mm = mm; in radeon_mn_get()
204 rmn->mn.ops = &radeon_mn_ops; in radeon_mn_get()
205 mutex_init(&rmn->lock); in radeon_mn_get()
206 rmn->objects = RB_ROOT; in radeon_mn_get()
208 r = __mmu_notifier_register(&rmn->mn, mm); in radeon_mn_get()
212 hash_add(rdev->mn_hash, &rmn->node, (unsigned long)mm); in radeon_mn_get()
218 return rmn; in radeon_mn_get()
223 kfree(rmn); in radeon_mn_get()
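Lines 186 to 223 implement a lookup-or-create keyed on the current mm: radeon_mn_get() first searches rdev->mn_hash for an existing rmn, and only if none is found allocates one, initialises it, registers the notifier and adds it to the hash. A sketch of that flow; the surrounding locking (mmap semaphore and rdev->mn_lock) and the label names are assumptions:

static struct radeon_mn *radeon_mn_get(struct radeon_device *rdev)
{
	struct mm_struct *mm = current->mm;
	struct radeon_mn *rmn;
	int r;

	/* assumed: take mm->mmap_sem and rdev->mn_lock here */

	hash_for_each_possible(rdev->mn_hash, rmn, node, (unsigned long)mm)
		if (rmn->mm == mm)
			goto release_locks;		/* reuse the existing rmn */

	rmn = kzalloc(sizeof(*rmn), GFP_KERNEL);
	if (!rmn) {
		rmn = ERR_PTR(-ENOMEM);
		goto release_locks;
	}

	rmn->rdev = rdev;
	rmn->mm = mm;
	rmn->mn.ops = &radeon_mn_ops;
	mutex_init(&rmn->lock);
	rmn->objects = RB_ROOT;

	r = __mmu_notifier_register(&rmn->mn, mm);
	if (r)
		goto free_rmn;

	hash_add(rdev->mn_hash, &rmn->node, (unsigned long)mm);

release_locks:
	/* assumed: drop the locks taken above */
	return rmn;

free_rmn:
	/* assumed: drop the locks taken above */
	kfree(rmn);
	return ERR_PTR(r);
}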
241 struct radeon_mn *rmn; in radeon_mn_register() local
246 rmn = radeon_mn_get(rdev); in radeon_mn_register()
247 if (IS_ERR(rmn)) in radeon_mn_register()
248 return PTR_ERR(rmn); in radeon_mn_register()
252 mutex_lock(&rmn->lock); in radeon_mn_register()
254 while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) { in radeon_mn_register()
257 interval_tree_remove(&node->it, &rmn->objects); in radeon_mn_register()
266 mutex_unlock(&rmn->lock); in radeon_mn_register()
271 bo->mn = rmn; in radeon_mn_register()
279 interval_tree_insert(&node->it, &rmn->objects); in radeon_mn_register()
281 mutex_unlock(&rmn->lock); in radeon_mn_register()
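Lines 241 to 281 register a userptr BO with its process' rmn. The while loop at line 254 is the interesting part: every existing node whose interval overlaps the new range is removed and merged, and a single node covering the union is inserted back; line 266 is the unlock on the allocation-failure path. A sketch, with the end-of-range computation via radeon_bo_size() and the per-node BO list handling assumed from context:

int radeon_mn_register(struct radeon_bo *bo, unsigned long addr)
{
	unsigned long end = addr + radeon_bo_size(bo) - 1;	/* inclusive end, assumed */
	struct radeon_mn_node *node = NULL;
	struct interval_tree_node *it;
	struct radeon_mn *rmn;

	rmn = radeon_mn_get(bo->rdev);
	if (IS_ERR(rmn))
		return PTR_ERR(rmn);

	mutex_lock(&rmn->lock);

	/* Merge every range that overlaps [addr, end] into one node. */
	while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) {
		kfree(node);
		node = container_of(it, struct radeon_mn_node, it);
		interval_tree_remove(&node->it, &rmn->objects);
		addr = min(it->start, addr);
		end = max(it->last, end);
		/* assumed: collect the BOs of the removed node */
	}

	if (!node) {
		node = kmalloc(sizeof(*node), GFP_KERNEL);
		if (!node) {
			mutex_unlock(&rmn->lock);	/* failure-path unlock, line 266 */
			return -ENOMEM;
		}
	}

	bo->mn = rmn;

	node->it.start = addr;
	node->it.last = end;
	/* assumed: attach bo (and any collected BOs) to node->bos */

	interval_tree_insert(&node->it, &rmn->objects);

	mutex_unlock(&rmn->lock);
	return 0;
}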
296 struct radeon_mn *rmn; in radeon_mn_unregister() local
300 rmn = bo->mn; in radeon_mn_unregister()
301 if (rmn == NULL) { in radeon_mn_unregister()
306 mutex_lock(&rmn->lock); in radeon_mn_unregister()
316 interval_tree_remove(&node->it, &rmn->objects); in radeon_mn_unregister()
320 mutex_unlock(&rmn->lock); in radeon_mn_unregister()
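Lines 296 to 320 are the reverse operation: radeon_mn_unregister() finds the rmn the BO was registered with through bo->mn, bails out if there is none (line 301), and otherwise removes the BO under rmn->lock, dropping the interval-tree node once no other BO shares its range. A sketch; the rdev->mn_lock protection around bo->mn and the shared-node check are assumptions:

void radeon_mn_unregister(struct radeon_bo *bo)
{
	struct radeon_device *rdev = bo->rdev;
	struct radeon_mn *rmn;
	struct list_head *head;

	mutex_lock(&rdev->mn_lock);			/* assumed */
	rmn = bo->mn;
	if (rmn == NULL) {
		mutex_unlock(&rdev->mn_lock);
		return;					/* BO was never registered */
	}

	mutex_lock(&rmn->lock);

	head = bo->mn_list.next;			/* assumed BO list linkage */
	bo->mn = NULL;
	list_del(&bo->mn_list);

	if (list_empty(head)) {				/* last BO in this range */
		struct radeon_mn_node *node =
			container_of(head, struct radeon_mn_node, bos);
		interval_tree_remove(&node->it, &rmn->objects);
		kfree(node);
	}

	mutex_unlock(&rmn->lock);
	mutex_unlock(&rdev->mn_lock);			/* assumed */
}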