Lines referencing the identifier mm in mm/mmu_notifier.c, grouped by function (source line numbers on the left):
In __mmu_notifier_release():
    56  void __mmu_notifier_release(struct mm_struct *mm)
    66          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist)
    74                  mn->ops->release(mn, mm);
    76          spin_lock(&mm->mmu_notifier_mm->lock);
    77          while (unlikely(!hlist_empty(&mm->mmu_notifier_mm->list))) {
    78                  mn = hlist_entry(mm->mmu_notifier_mm->list.first,
    89          spin_unlock(&mm->mmu_notifier_mm->lock);
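__mmu_notifier_release() fires when the address space is torn down at exit_mmap(): each registered notifier's ->release hook runs under SRCU (line 74), and any entries still linked are then unhooked under the mmu_notifier_mm spinlock (lines 76-89). A driver's ->release typically drops all of its secondary-MMU state. A minimal sketch; struct my_mmu and my_invalidate_all() are hypothetical driver names, only the mmu_notifier pieces come from the API shown here:

#include <linux/mmu_notifier.h>
#include <linux/mm.h>

struct my_mmu {
        struct mmu_notifier mn;         /* embedded; container_of() recovers us */
        /* ... driver state: secondary page tables, device handle ... */
};

static void my_release(struct mmu_notifier *mn, struct mm_struct *mm)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        /*
         * The process is exiting: tear down every secondary mapping now.
         * No further callbacks will be delivered for this mm afterwards.
         */
        my_invalidate_all(p);           /* hypothetical helper */
}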
In __mmu_notifier_clear_flush_young():
   109  int __mmu_notifier_clear_flush_young(struct mm_struct *mm,
   117          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   119                  young |= mn->ops->clear_flush_young(mn, mm, start, end);

In __mmu_notifier_test_young():
   126  int __mmu_notifier_test_young(struct mm_struct *mm,
   133          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   135                  young = mn->ops->test_young(mn, mm, address);
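Both aging walks OR the per-notifier results into young under SRCU; the difference is that ->clear_flush_young must clear the accessed state and flush the secondary TLB, while ->test_young is a read-only probe of a single address. A sketch reusing the hypothetical my_mmu above, with a made-up my_test_and_clear_young() helper:

static int my_clear_flush_young(struct mmu_notifier *mn, struct mm_struct *mm,
                                unsigned long start, unsigned long end)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        /* 1 if anything in [start, end) was referenced; clears and flushes. */
        return my_test_and_clear_young(p, start, end, true);
}

static int my_test_young(struct mmu_notifier *mn, struct mm_struct *mm,
                         unsigned long address)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        /* Report the accessed state without disturbing it. */
        return my_test_and_clear_young(p, address, address + PAGE_SIZE, false);
}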
In __mmu_notifier_change_pte():
   145  void __mmu_notifier_change_pte(struct mm_struct *mm, unsigned long address,
   152          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   154                  mn->ops->change_pte(mn, mm, address, pte);
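->change_pte lets a notifier repoint a secondary mapping in place (KSM page merging and COW breaking use it) instead of tearing the mapping down and refaulting. A sketch; my_update_spte() is hypothetical:

static void my_change_pte(struct mmu_notifier *mn, struct mm_struct *mm,
                          unsigned long address, pte_t pte)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        /* Swap the secondary PTE to the new pfn rather than zapping it. */
        my_update_spte(p, address, pte_pfn(pte), pte_write(pte));
}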
In __mmu_notifier_invalidate_page():
   159  void __mmu_notifier_invalidate_page(struct mm_struct *mm,
   166          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   168                  mn->ops->invalidate_page(mn, mm, address);

In __mmu_notifier_invalidate_range_start():
   173  void __mmu_notifier_invalidate_range_start(struct mm_struct *mm,
   180          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   182                  mn->ops->invalidate_range_start(mn, mm, start, end);

In __mmu_notifier_invalidate_range_end():
   188  void __mmu_notifier_invalidate_range_end(struct mm_struct *mm,
   195          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   205                  mn->ops->invalidate_range(mn, mm, start, end);
   207                  mn->ops->invalidate_range_end(mn, mm, start, end);

In __mmu_notifier_invalidate_range():
   213  void __mmu_notifier_invalidate_range(struct mm_struct *mm,
   220          hlist_for_each_entry_rcu(mn, &mm->mmu_notifier_mm->list, hlist) {
   222                  mn->ops->invalidate_range(mn, mm, start, end);
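All four invalidation hooks fan out over the same SRCU-protected list. Note the ordering inside __mmu_notifier_invalidate_range_end(): ->invalidate_range is called (line 205) before ->invalidate_range_end (line 207), so hardware that flushes its TLB purely from the range hook, such as an IOMMU sharing the CPU page tables, still sees every flush. A driver that mirrors mappings in software instead typically zaps the range at _start and blocks new secondary faults until _end. A sketch, assuming struct my_mmu from above gains an atomic_t invalidate_count member; my_zap_range() is hypothetical:

static void my_invalidate_page(struct mmu_notifier *mn, struct mm_struct *mm,
                               unsigned long address)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        my_zap_range(p, address, address + PAGE_SIZE);
}

static void my_invalidate_range_start(struct mmu_notifier *mn,
                                      struct mm_struct *mm,
                                      unsigned long start, unsigned long end)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        /* Hold off new secondary faults on [start, end) until ..._end(). */
        atomic_inc(&p->invalidate_count);
        my_zap_range(p, start, end);
}

static void my_invalidate_range_end(struct mmu_notifier *mn,
                                    struct mm_struct *mm,
                                    unsigned long start, unsigned long end)
{
        struct my_mmu *p = container_of(mn, struct my_mmu, mn);

        atomic_dec(&p->invalidate_count);       /* faults may proceed again */
}

Such a driver leaves ->invalidate_range NULL; that hook is for secondary MMUs that walk the CPU page tables directly and need a flush at exactly the point the primary TLB is flushed.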
In do_mmu_notifier_register():
   229                  struct mm_struct *mm,
   235          BUG_ON(atomic_read(&mm->mm_users) <= 0);
   249          down_write(&mm->mmap_sem);
   250          ret = mm_take_all_locks(mm);
   254          if (!mm_has_notifiers(mm)) {
   258                  mm->mmu_notifier_mm = mmu_notifier_mm;
   261          atomic_inc(&mm->mm_count);
   271          spin_lock(&mm->mmu_notifier_mm->lock);
   272          hlist_add_head(&mn->hlist, &mm->mmu_notifier_mm->list);
   273          spin_unlock(&mm->mmu_notifier_mm->lock);
   275          mm_drop_all_locks(mm);
   278          up_write(&mm->mmap_sem);
   281          BUG_ON(atomic_read(&mm->mm_users) <= 0);
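Read in order, these fragments give the registration protocol: sanity-check mm_users (235), take mmap_sem for write (249, only on the mmu_notifier_register() path), take every mm lock to serialize against running invalidations (250), install mmu_notifier_mm on first use (254-258), pin the mm via mm_count (261) so ->release can run safely at exit, then link the notifier under the lock (271-273). Failure is -ENOMEM from the allocation or -EINTR if mm_take_all_locks() is interrupted by a signal. From a driver this is a single call; a sketch built on the hypothetical callbacks above:

static int my_attach(struct my_mmu *p)
{
        static const struct mmu_notifier_ops my_ops = {
                .release                = my_release,
                .clear_flush_young      = my_clear_flush_young,
                .test_young             = my_test_young,
                .change_pte             = my_change_pte,
                .invalidate_page        = my_invalidate_page,
                .invalidate_range_start = my_invalidate_range_start,
                .invalidate_range_end   = my_invalidate_range_end,
        };

        p->mn.ops = &my_ops;

        /* Takes mmap_sem itself; pins current->mm via mm_count on success. */
        return mmu_notifier_register(&p->mn, current->mm);
}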
In mmu_notifier_register():
   298  int mmu_notifier_register(struct mmu_notifier *mn, struct mm_struct *mm)
   300          return do_mmu_notifier_register(mn, mm, 1);

In __mmu_notifier_register():
   308  int __mmu_notifier_register(struct mmu_notifier *mn, struct mm_struct *mm)
   310          return do_mmu_notifier_register(mn, mm, 0);
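The two entry points differ only in the final flag to do_mmu_notifier_register(): 1 means "take mmap_sem for me", 0 means the caller already holds it for write. So code that is already under mmap_sem uses the double-underscore variant:

        /* We already needed mmap_sem for other setup in this path. */
        down_write(&mm->mmap_sem);
        /* ... other setup requiring mmap_sem ... */
        ret = __mmu_notifier_register(&p->mn, mm);
        up_write(&mm->mmap_sem);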
In __mmu_notifier_mm_destroy():
   315  void __mmu_notifier_mm_destroy(struct mm_struct *mm)
   317          BUG_ON(!hlist_empty(&mm->mmu_notifier_mm->list));
   318          kfree(mm->mmu_notifier_mm);
   319          mm->mmu_notifier_mm = LIST_POISON1; /* debug */
In mmu_notifier_unregister():
   332  void mmu_notifier_unregister(struct mmu_notifier *mn, struct mm_struct *mm)
   334          BUG_ON(atomic_read(&mm->mm_count) <= 0);
   349                  mn->ops->release(mn, mm);
   352          spin_lock(&mm->mmu_notifier_mm->lock);
   358          spin_unlock(&mm->mmu_notifier_mm->lock);
   367          BUG_ON(atomic_read(&mm->mm_count) <= 0);
   369          mmdrop(mm);
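Unregistration mirrors registration: if the entry is still linked (the exit path may already have unhooked it), ->release runs first (349), the entry is removed under the lock (352-358), and the mm_count pin taken at line 261 is dropped with mmdrop() (369). In this era mmu_notifier_unregister() also waits for in-flight SRCU readers before returning, so the notifier may be freed immediately afterwards; a teardown sketch under that assumption:

static void my_detach(struct my_mmu *p, struct mm_struct *mm)
{
        /* May invoke my_release() if exit_mmap() has not already done so. */
        mmu_notifier_unregister(&p->mn, mm);

        /* Safe only because unregister synchronized SRCU internally. */
        kfree(p);
}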
In mmu_notifier_unregister_no_release():
   377                  struct mm_struct *mm)
   379          spin_lock(&mm->mmu_notifier_mm->lock);
   385          spin_unlock(&mm->mmu_notifier_mm->lock);
   387          BUG_ON(atomic_read(&mm->mm_count) <= 0);
   388          mmdrop(mm);
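The _no_release variant only unhooks the entry (379-385) and drops the pin (388): it never calls ->release and does not wait for SRCU readers, so the structure must be freed through a deferred callback. A sketch, assuming this kernel also carries mmu_notifier_call_srcu() (introduced alongside this function) and that struct my_mmu gains a struct rcu_head rcu member:

static void my_free_rcu(struct rcu_head *rcu)
{
        kfree(container_of(rcu, struct my_mmu, rcu));
}

static void my_detach_no_release(struct my_mmu *p, struct mm_struct *mm)
{
        /* Unhook silently: no ->release, no SRCU synchronization. */
        mmu_notifier_unregister_no_release(&p->mn, mm);

        /* Defer the free until every SRCU reader is done with p->mn. */
        mmu_notifier_call_srcu(&p->rcu, my_free_rcu);
}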