Lines matching refs: vma
30 nvkm_vm_map_at(struct nvkm_vma *vma, u64 delta, struct nvkm_mem *node) in nvkm_vm_map_at() argument
32 struct nvkm_vm *vm = vma->vm; in nvkm_vm_map_at()
35 int big = vma->node->type != mmu->spg_shift; in nvkm_vm_map_at()
36 u32 offset = vma->node->offset + (delta >> 12); in nvkm_vm_map_at()
37 u32 bits = vma->node->type - 12; in nvkm_vm_map_at()
56 mmu->map(vma, pgt, node, pte, len, phys, delta); in nvkm_vm_map_at()
66 delta += (u64)len << vma->node->type; in nvkm_vm_map_at()
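These nvkm_vm_map_at() fragments pick the page size (big pages whenever the allocation's page shift differs from mmu->spg_shift), turn the byte delta into a 4 KiB-granular position, and then push contiguous runs of PTEs through mmu->map(). A minimal sketch of the index math this implies, assuming mmu->pgt_bits gives the number of 4 KiB-offset bits covered by one page table; sketch_vma_indices() and its out-parameters are illustrative, not kernel symbols:

    /*
     * Sketch only, not the kernel function body: derive page-table
     * coordinates from an nvkm_vma-style (offset, type) pair.
     */
    static void sketch_vma_indices(u32 node_offset, u8 node_type, u64 delta,
                                   u32 pgt_bits, u32 *pde, u32 *pte, u32 *max)
    {
            u32 offset = node_offset + (delta >> 12);        /* position in 4 KiB units */
            u32 bits   = node_type - 12;                     /* extra shift for big pages */

            *pde = offset >> pgt_bits;                       /* which page table */
            *pte = (offset & ((1 << pgt_bits) - 1)) >> bits; /* slot within that table */
            *max = 1 << (pgt_bits - bits);                   /* PTE slots per table at this page size */
    }

Line 66 then advances delta by whole pages of the chosen size, which is what lets the surrounding loop roll over into the next page table by resetting pte once it reaches max.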
74 nvkm_vm_map_sg_table(struct nvkm_vma *vma, u64 delta, u64 length, in nvkm_vm_map_sg_table() argument
77 struct nvkm_vm *vm = vma->vm; in nvkm_vm_map_sg_table()
79 int big = vma->node->type != mmu->spg_shift; in nvkm_vm_map_sg_table()
80 u32 offset = vma->node->offset + (delta >> 12); in nvkm_vm_map_sg_table()
81 u32 bits = vma->node->type - 12; in nvkm_vm_map_sg_table()
82 u32 num = length >> vma->node->type; in nvkm_vm_map_sg_table()
103 mmu->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
118 mmu->map_sg(vma, pgt, mem, pte, 1, &addr); in nvkm_vm_map_sg_table()
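In nvkm_vm_map_sg_table() the backing store is a DMA-mapped scatter-gather table rather than a contiguous region, and both call sites (lines 103 and 118) emit a single PTE per iteration. A sketch of that walk using the standard scatterlist accessors; emit_pte() is a hypothetical stand-in for mmu->map_sg(vma, pgt, mem, pte, 1, &addr), the pte/max bookkeeping matches the earlier index sketch, and small (4 KiB) pages are assumed for brevity:

    #include <linux/scatterlist.h>

    /* Sketch (assumed shape, not the kernel body): walk a DMA-mapped
     * scatterlist and emit one PTE at a time. */
    static void sketch_map_sg_table(struct scatterlist *sgl, unsigned int nents,
                                    u32 pte, u32 max,
                                    void (*emit_pte)(u32 pte, u64 addr))
    {
            struct scatterlist *sg;
            unsigned int i;

            for_each_sg(sgl, sg, nents, i) {
                    u64 addr = sg_dma_address(sg);
                    u32 pages = sg_dma_len(sg) >> 12;   /* 4 KiB pages in this segment */

                    while (pages--) {
                            emit_pte(pte, addr);
                            addr += 0x1000;
                            if (++pte >= max) {
                                    pte = 0;            /* continue in the next page table */
                                    /* advancing to the next pgt is omitted here */
                            }
                    }
            }
    }

Mapping one PTE per call keeps the segment-splitting logic trivial, at the cost of one backend call per page.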
132 nvkm_vm_map_sg(struct nvkm_vma *vma, u64 delta, u64 length, in nvkm_vm_map_sg() argument
135 struct nvkm_vm *vm = vma->vm; in nvkm_vm_map_sg()
138 int big = vma->node->type != mmu->spg_shift; in nvkm_vm_map_sg()
139 u32 offset = vma->node->offset + (delta >> 12); in nvkm_vm_map_sg()
140 u32 bits = vma->node->type - 12; in nvkm_vm_map_sg()
141 u32 num = length >> vma->node->type; in nvkm_vm_map_sg()
155 mmu->map_sg(vma, pgt, mem, pte, len, list); in nvkm_vm_map_sg()
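nvkm_vm_map_sg() is handed a flat array of page addresses instead, so line 155 can map a run of len PTEs per call. A sketch of that batching; emit_run() is a hypothetical stand-in for mmu->map_sg(vma, pgt, mem, pte, len, list), and max is again the per-table PTE count:

    /* Sketch (assumed): map `num` pages from a flat address list in runs
     * that never cross a page-table boundary. */
    static void sketch_map_sg(const u64 *list, u32 num, u32 pte, u32 max,
                              void (*emit_run)(u32 pte, u32 len, const u64 *list))
    {
            while (num) {
                    u32 len = max - pte;        /* room left in the current table */

                    if (len > num)
                            len = num;

                    emit_run(pte, len, list);
                    list += len;
                    num  -= len;
                    pte  += len;
                    if (pte >= max)
                            pte = 0;            /* next run starts in the next page table */
            }
    }

The same run-splitting shape reappears in nvkm_vm_unmap_at() (lines 186-189 derive the identical offsets), only with the PTEs being cleared instead of written.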
170 nvkm_vm_map(struct nvkm_vma *vma, struct nvkm_mem *node) in nvkm_vm_map() argument
173 nvkm_vm_map_sg_table(vma, 0, node->size << 12, node); in nvkm_vm_map()
176 nvkm_vm_map_sg(vma, 0, node->size << 12, node); in nvkm_vm_map()
178 nvkm_vm_map_at(vma, 0, node); in nvkm_vm_map()
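Lines 173-178 are essentially all of nvkm_vm_map(): it only chooses the backend for the given memory node and always maps the node's full size from offset 0 (node->size evidently counts 4 KiB pages, hence the << 12). A sketch of the dispatch; the three calls and their arguments come from the lines above, while the exact conditions tested on the node are an assumption:

    /* Sketch of the dispatch; nvkm_vma/nvkm_mem and the three map helpers are
     * declared in the driver's mmu header. Conditions assumed: an sg table
     * first, then a page list, otherwise contiguous memory. */
    void nvkm_vm_map(struct nvkm_vma *vma, struct nvkm_mem *node)
    {
            if (node->sg)
                    nvkm_vm_map_sg_table(vma, 0, node->size << 12, node);
            else if (node->pages)
                    nvkm_vm_map_sg(vma, 0, node->size << 12, node);
            else
                    nvkm_vm_map_at(vma, 0, node);
    }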
182 nvkm_vm_unmap_at(struct nvkm_vma *vma, u64 delta, u64 length) in nvkm_vm_unmap_at() argument
184 struct nvkm_vm *vm = vma->vm; in nvkm_vm_unmap_at()
186 int big = vma->node->type != mmu->spg_shift; in nvkm_vm_unmap_at()
187 u32 offset = vma->node->offset + (delta >> 12); in nvkm_vm_unmap_at()
188 u32 bits = vma->node->type - 12; in nvkm_vm_unmap_at()
189 u32 num = length >> vma->node->type; in nvkm_vm_unmap_at()
217 nvkm_vm_unmap(struct nvkm_vma *vma) in nvkm_vm_unmap() argument
219 nvkm_vm_unmap_at(vma, 0, (u64)vma->node->length << 12); in nvkm_vm_unmap()
288 struct nvkm_vma *vma) in nvkm_vm_get() argument
298 &vma->node); in nvkm_vm_get()
304 fpde = (vma->node->offset >> mmu->pgt_bits); in nvkm_vm_get()
305 lpde = (vma->node->offset + vma->node->length - 1) >> mmu->pgt_bits; in nvkm_vm_get()
309 int big = (vma->node->type != mmu->spg_shift); in nvkm_vm_get()
316 ret = nvkm_vm_map_pgt(vm, pde, vma->node->type); in nvkm_vm_get()
320 nvkm_mm_free(&vm->mm, &vma->node); in nvkm_vm_get()
327 vma->vm = NULL; in nvkm_vm_get()
328 nvkm_vm_ref(vm, &vma->vm, NULL); in nvkm_vm_get()
329 vma->offset = (u64)vma->node->offset << 12; in nvkm_vm_get()
330 vma->access = access; in nvkm_vm_get()
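nvkm_vm_get() carves an address range out of the per-VM allocator into vma->node (line 298), then makes sure every page table spanned by [fpde, lpde] is instantiated and referenced, backing the allocation out again if that fails (lines 316-320). A sketch of that loop; pgt_get_ref() and pgt_put_range() are hypothetical helpers standing in for the refcount bookkeeping around nvkm_vm_map_pgt()/nvkm_vm_unmap_pgt():

    /* Sketch (assumed control flow, hypothetical helpers): take a reference
     * on every page table covering [fpde, lpde], allocating missing ones,
     * and unwind on failure. */
    static int sketch_get_pgts(u32 fpde, u32 lpde,
                               int (*pgt_get_ref)(u32 pde),
                               void (*pgt_put_range)(u32 first, u32 last))
    {
            u32 pde;
            int ret;

            for (pde = fpde; pde <= lpde; pde++) {
                    ret = pgt_get_ref(pde);    /* refcount++, allocate on first use */
                    if (ret) {
                            if (pde != fpde)
                                    pgt_put_range(fpde, pde - 1); /* drop refs already taken */
                            return ret;        /* caller then frees vma->node (line 320) */
                    }
            }

            return 0;
    }

On success the vma is attached to the VM and its base address recorded in bytes (lines 327-330).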
335 nvkm_vm_put(struct nvkm_vma *vma) in nvkm_vm_put() argument
337 struct nvkm_vm *vm = vma->vm; in nvkm_vm_put()
341 if (unlikely(vma->node == NULL)) in nvkm_vm_put()
343 fpde = (vma->node->offset >> mmu->pgt_bits); in nvkm_vm_put()
344 lpde = (vma->node->offset + vma->node->length - 1) >> mmu->pgt_bits; in nvkm_vm_put()
347 nvkm_vm_unmap_pgt(vm, vma->node->type != mmu->spg_shift, fpde, lpde); in nvkm_vm_put()
348 nvkm_mm_free(&vm->mm, &vma->node); in nvkm_vm_put()
351 nvkm_vm_ref(NULL, &vma->vm, NULL); in nvkm_vm_put()
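Taken together these references trace the vma lifecycle: nvkm_vm_get() reserves address space, nvkm_vm_map() (or one of the _sg variants) fills the PTEs from an nvkm_mem, nvkm_vm_unmap() clears them, and nvkm_vm_put() drops the page-table references, frees the node and unrefs the VM. A hedged caller sketch; the full nvkm_vm_get() parameter list is cut off at line 288, so the size/page_shift/access arguments below are assumptions:

    /* Sketch of a caller, assuming nvkm_vm_get(vm, size, page_shift, access,
     * &vma) in this era of the API; error handling trimmed to the essentials.
     * The nvkm declarations come from the driver's mmu header. */
    static int sketch_map_mem(struct nvkm_vm *vm, struct nvkm_mem *mem,
                              u64 size, u32 page_shift, u32 access)
    {
            struct nvkm_vma vma = {};
            int ret;

            ret = nvkm_vm_get(vm, size, page_shift, access, &vma);
            if (ret)
                    return ret;

            nvkm_vm_map(&vma, mem);    /* picks map_at / map_sg / map_sg_table */

            /* ... use vma.offset as the GPU virtual address ... */

            nvkm_vm_unmap(&vma);       /* clear the PTEs (lines 217-219) */
            nvkm_vm_put(&vma);         /* drop pgt refs, free the node, unref the vm */
            return 0;
    }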