Lines Matching refs:vma
107 struct vm_area_struct *vma; in ia64_init_addr_space() local
116 vma = kmem_cache_zalloc(vm_area_cachep, GFP_KERNEL); in ia64_init_addr_space()
117 if (vma) { in ia64_init_addr_space()
118 INIT_LIST_HEAD(&vma->anon_vma_chain); in ia64_init_addr_space()
119 vma->vm_mm = current->mm; in ia64_init_addr_space()
120 vma->vm_start = current->thread.rbs_bot & PAGE_MASK; in ia64_init_addr_space()
121 vma->vm_end = vma->vm_start + PAGE_SIZE; in ia64_init_addr_space()
122 vma->vm_flags = VM_DATA_DEFAULT_FLAGS|VM_GROWSUP|VM_ACCOUNT; in ia64_init_addr_space()
123 vma->vm_page_prot = vm_get_page_prot(vma->vm_flags); in ia64_init_addr_space()
125 if (insert_vm_struct(current->mm, vma)) { in ia64_init_addr_space()
127 kmem_cache_free(vm_area_cachep, vma); in ia64_init_addr_space()
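Reassembled for readability, matched lines 116-127 form the first half of ia64_init_addr_space(): a zeroed vm_area_struct is allocated from vm_area_cachep and installed as a one-page, upward-growing VMA at the bottom of the register backing store (current->thread.rbs_bot). The sketch below is built only from the matched lines; the mmap_sem locking around insert_vm_struct() and the early return on failure are assumptions about the lines the reference search did not show (124-128).

	/* Sketch of the first VMA set up in ia64_init_addr_space(),
	 * reassembled from the matched lines above.  Locking and the
	 * early return are assumed, not shown by the search. */
	vma = kmem_cache_zalloc(vm_area_cachep, GFP_KERNEL);
	if (vma) {
		INIT_LIST_HEAD(&vma->anon_vma_chain);
		vma->vm_mm = current->mm;
		/* one page at the bottom of the register backing store,
		 * allowed to grow upward and charged to the process */
		vma->vm_start = current->thread.rbs_bot & PAGE_MASK;
		vma->vm_end = vma->vm_start + PAGE_SIZE;
		vma->vm_flags = VM_DATA_DEFAULT_FLAGS | VM_GROWSUP | VM_ACCOUNT;
		vma->vm_page_prot = vm_get_page_prot(vma->vm_flags);

		down_write(&current->mm->mmap_sem);		/* assumed */
		if (insert_vm_struct(current->mm, vma)) {
			up_write(&current->mm->mmap_sem);	/* assumed */
			kmem_cache_free(vm_area_cachep, vma);	/* don't leak on failure */
			return;					/* assumed */
		}
		up_write(&current->mm->mmap_sem);		/* assumed */
	}

Note that allocation failure is tolerated here: if kmem_cache_zalloc() returns NULL the VMA is simply not installed, and the process will fault later when it first touches the register backing store.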
135 vma = kmem_cache_zalloc(vm_area_cachep, GFP_KERNEL); in ia64_init_addr_space()
136 if (vma) { in ia64_init_addr_space()
137 INIT_LIST_HEAD(&vma->anon_vma_chain); in ia64_init_addr_space()
138 vma->vm_mm = current->mm; in ia64_init_addr_space()
139 vma->vm_end = PAGE_SIZE; in ia64_init_addr_space()
140 vma->vm_page_prot = __pgprot(pgprot_val(PAGE_READONLY) | _PAGE_MA_NAT); in ia64_init_addr_space()
141 vma->vm_flags = VM_READ | VM_MAYREAD | VM_IO | in ia64_init_addr_space()
144 if (insert_vm_struct(current->mm, vma)) { in ia64_init_addr_space()
146 kmem_cache_free(vm_area_cachep, vma); in ia64_init_addr_space()
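Matched lines 135-146 are the second VMA in the same function: a read-only, one-page mapping at virtual address 0 whose page protection carries the ia64 NaT memory attribute (_PAGE_MA_NAT), so that speculative loads through a NULL pointer return NaT rather than faulting. The sketch below fills in the surrounding structure; the MMAP_PAGE_ZERO personality guard, the tail of the vm_flags expression (line 141 is cut off at the continuation), and the locking are assumptions about the lines the search did not match.

	/* Sketch of the NaT page mapped at address zero, reassembled from
	 * the matched lines above.  The personality guard, the rest of the
	 * vm_flags expression and the locking are assumed. */
	if (!(current->personality & MMAP_PAGE_ZERO)) {		/* assumed guard */
		vma = kmem_cache_zalloc(vm_area_cachep, GFP_KERNEL);
		if (vma) {
			INIT_LIST_HEAD(&vma->anon_vma_chain);
			vma->vm_mm = current->mm;
			/* vm_start stays 0 because the VMA was zero-allocated */
			vma->vm_end = PAGE_SIZE;
			/* read-only page with the NaT memory attribute */
			vma->vm_page_prot = __pgprot(pgprot_val(PAGE_READONLY) | _PAGE_MA_NAT);
			vma->vm_flags = VM_READ | VM_MAYREAD | VM_IO |
					VM_DONTEXPAND | VM_DONTDUMP;	/* tail assumed */

			down_write(&current->mm->mmap_sem);		/* assumed */
			if (insert_vm_struct(current->mm, vma)) {
				up_write(&current->mm->mmap_sem);	/* assumed */
				kmem_cache_free(vm_area_cachep, vma);
				return;					/* assumed */
			}
			up_write(&current->mm->mmap_sem);		/* assumed */
		}
	}

In both blocks the failure path of insert_vm_struct() frees the VMA back to vm_area_cachep with kmem_cache_free(), mirroring the allocation call, which is why those two matched lines (127 and 146) appear in the search results.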