Lines Matching refs:dst_vma (each match is shown with its source line number and the enclosing function)
22 struct vm_area_struct *dst_vma, in mcopy_atomic_pte() argument
36 page = alloc_page_vma(GFP_HIGHUSER_MOVABLE, dst_vma, dst_addr); in mcopy_atomic_pte()
69 _dst_pte = mk_pte(page, dst_vma->vm_page_prot); in mcopy_atomic_pte()
70 if (dst_vma->vm_flags & VM_WRITE) in mcopy_atomic_pte()
79 page_add_new_anon_rmap(page, dst_vma, dst_addr); in mcopy_atomic_pte()
81 lru_cache_add_active_or_unevictable(page, dst_vma); in mcopy_atomic_pte()
86 update_mmu_cache(dst_vma, dst_addr, dst_pte); in mcopy_atomic_pte()
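
Read together, the lines above trace how mcopy_atomic_pte() uses dst_vma to install a freshly copied anonymous page. The sketch below condenses that flow; the data copy from src_addr, the PTE locking, set_pte_at() and the pte_mkdirty()/pte_mkwrite() step are filled in as assumptions, since they do not reference dst_vma and therefore do not appear in the listing.

	/*
	 * Sketch only: condensed from the references above, inside
	 * mcopy_atomic_pte(dst_mm, dst_pmd, dst_vma, dst_addr, src_addr, ...).
	 */
	struct page *page;
	pte_t _dst_pte, *dst_pte;
	spinlock_t *ptl;

	/* Allocate the destination page according to the VMA's placement policy. */
	page = alloc_page_vma(GFP_HIGHUSER_MOVABLE, dst_vma, dst_addr);

	/* ... copy the data from src_addr into the new page (assumed, not listed) ... */

	/* Build the PTE from the VMA's protection bits, writable only if the VMA is. */
	_dst_pte = mk_pte(page, dst_vma->vm_page_prot);
	if (dst_vma->vm_flags & VM_WRITE)
		_dst_pte = pte_mkwrite(pte_mkdirty(_dst_pte));		/* assumed */

	/* Take the PTE lock, hook the page into rmap and the LRU for this VMA ... */
	dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);	/* assumed */
	page_add_new_anon_rmap(page, dst_vma, dst_addr);
	lru_cache_add_active_or_unevictable(page, dst_vma);

	/* ... install the PTE and let the architecture refresh its MMU caches. */
	set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);		/* assumed */
	update_mmu_cache(dst_vma, dst_addr, dst_pte);
	pte_unmap_unlock(dst_pte, ptl);					/* assumed */
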
102 struct vm_area_struct *dst_vma, in mfill_zeropage_pte() argument
110 dst_vma->vm_page_prot)); in mfill_zeropage_pte()
117 update_mmu_cache(dst_vma, dst_addr, dst_pte); in mfill_zeropage_pte()
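
mfill_zeropage_pte() needs dst_vma only for the protection bits and the final MMU-cache update: instead of allocating a page it maps the shared zero page. A minimal sketch, assuming the pte_mkspecial()/my_zero_pfn() construction and the PTE locking that the listing does not show:

	/* Sketch only: zeropage variant, condensed from the references above. */
	pte_t _dst_pte, *dst_pte;
	spinlock_t *ptl;

	/* Map the shared zero page read-only with the VMA's protections. */
	_dst_pte = pte_mkspecial(pfn_pte(my_zero_pfn(dst_addr),
					 dst_vma->vm_page_prot));	/* assumed */

	dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);	/* assumed */
	if (pte_none(*dst_pte)) {
		set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);	/* assumed */
		/* No TLB flush needed: the PTE was not present before. */
		update_mmu_cache(dst_vma, dst_addr, dst_pte);
	}
	pte_unmap_unlock(dst_pte, ptl);					/* assumed */
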
148 struct vm_area_struct *dst_vma; in __mcopy_atomic() local
177 dst_vma = find_vma(dst_mm, dst_start); in __mcopy_atomic()
178 if (!dst_vma || (dst_vma->vm_flags & VM_SHARED)) in __mcopy_atomic()
180 if (dst_start < dst_vma->vm_start || in __mcopy_atomic()
181 dst_start + len > dst_vma->vm_end) in __mcopy_atomic()
193 if (!dst_vma->vm_userfaultfd_ctx.ctx) in __mcopy_atomic()
200 if (dst_vma->vm_ops) in __mcopy_atomic()
209 if (unlikely(anon_vma_prepare(dst_vma))) in __mcopy_atomic()
233 unlikely(__pte_alloc(dst_mm, dst_vma, dst_pmd, in __mcopy_atomic()
248 err = mcopy_atomic_pte(dst_mm, dst_pmd, dst_vma, in __mcopy_atomic()
251 err = mfill_zeropage_pte(dst_mm, dst_pmd, dst_vma, in __mcopy_atomic()
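
In __mcopy_atomic() the dst_vma references are the once-per-call sanity checks plus the per-page hand-off to the two helpers above. A sketch of that flow; the goto error paths, dst_pmdval and the zeropage flag are assumptions, as they do not appear in the listing:

	/* Sketch only: dst_vma validation in __mcopy_atomic(), per the lines above. */
	dst_vma = find_vma(dst_mm, dst_start);
	if (!dst_vma || (dst_vma->vm_flags & VM_SHARED))
		goto out_unlock;	/* private anonymous memory only */
	if (dst_start < dst_vma->vm_start ||
	    dst_start + len > dst_vma->vm_end)
		goto out_unlock;	/* the whole range must sit inside one VMA */
	if (!dst_vma->vm_userfaultfd_ctx.ctx)
		goto out_unlock;	/* VMA must be registered with userfaultfd */
	if (dst_vma->vm_ops)
		goto out_unlock;	/* reject file-backed and special mappings */
	if (unlikely(anon_vma_prepare(dst_vma)))
		goto out_unlock;	/* make sure an anon_vma exists up front */

	/* Per page: allocate a page table if needed, then fill a single PTE. */
	if (unlikely(pmd_none(dst_pmdval)) &&				/* dst_pmdval assumed */
	    unlikely(__pte_alloc(dst_mm, dst_vma, dst_pmd, dst_addr)))
		goto out_unlock;
	if (!zeropage)							/* zeropage flag assumed */
		err = mcopy_atomic_pte(dst_mm, dst_pmd, dst_vma,
				       dst_addr, src_addr, &page);
	else
		err = mfill_zeropage_pte(dst_mm, dst_pmd, dst_vma, dst_addr);

The copy and zeropage branches correspond to the two helpers whose references are listed first; which one runs is selected by the caller, i.e. the UFFDIO_COPY and UFFDIO_ZEROPAGE paths of userfaultfd.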