Lines matching refs:vma in the i915 execbuffer code (drivers/gpu/drm/i915/i915_gem_execbuffer.c). Each row gives the source line number, the matching line, and the enclosing function; "local"/"argument" notes how vma is declared there.

132 struct i915_vma *vma; in eb_lookup_vmas() local
146 vma = i915_gem_obj_lookup_or_create_vma(obj, vm); in eb_lookup_vmas()
147 if (IS_ERR(vma)) { in eb_lookup_vmas()
149 ret = PTR_ERR(vma); in eb_lookup_vmas()
154 list_add_tail(&vma->exec_list, &eb->vmas); in eb_lookup_vmas()
157 vma->exec_entry = &exec[i]; in eb_lookup_vmas()
159 eb->lut[i] = vma; in eb_lookup_vmas()
162 vma->exec_handle = handle; in eb_lookup_vmas()
163 hlist_add_head(&vma->exec_node, in eb_lookup_vmas()
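
Context sketch for the eb_lookup_vmas() hits above: each execbuf handle is resolved to a vma, threaded onto eb->vmas, and indexed either through a flat lut array or a handle hash. A hedged reconstruction follows, folded into a hypothetical helper eb_track_vma(); the eb->and test, parameters, and error path are inferred beyond the listed lines:

    /* Hedged reconstruction, not the verbatim source; eb_track_vma() is
     * a hypothetical helper and the flat-lut/hash split is inferred. */
    static int eb_track_vma(struct eb_vmas *eb,
                            struct drm_i915_gem_object *obj,
                            struct i915_address_space *vm,
                            struct drm_i915_gem_exec_object2 *entry,
                            u32 handle, int i)
    {
            struct i915_vma *vma;                                   /* line 132 */

            vma = i915_gem_obj_lookup_or_create_vma(obj, vm);       /* line 146 */
            if (IS_ERR(vma))                                        /* line 147 */
                    return PTR_ERR(vma);                            /* line 149 */

            list_add_tail(&vma->exec_list, &eb->vmas);              /* line 154 */
            vma->exec_entry = entry;                                /* line 157 */

            if (eb->and < 0) {      /* assumed: small lists use a flat table */
                    eb->lut[i] = vma;                               /* line 159 */
            } else {
                    vma->exec_handle = handle;                      /* line 162 */
                    hlist_add_head(&vma->exec_node,                 /* line 163 */
                                   &eb->buckets[handle & eb->and]);
            }
            return 0;
    }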
200 struct i915_vma *vma; in eb_get_vma() local
202 vma = hlist_entry(node, struct i915_vma, exec_node); in eb_get_vma()
203 if (vma->exec_handle == handle) in eb_get_vma()
204 return vma; in eb_get_vma()
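
Context sketch for eb_get_vma(): the reverse lookup from handle to vma walks the matching hash bucket. The bucket-index expression and the NULL fallback are assumptions, mirroring the insert at line 163:

    static struct i915_vma *eb_get_vma(struct eb_vmas *eb, unsigned long handle)
    {
            struct hlist_head *head = &eb->buckets[handle & eb->and];
            struct hlist_node *node;

            hlist_for_each(node, head) {
                    struct i915_vma *vma;                           /* line 200 */

                    vma = hlist_entry(node, struct i915_vma, exec_node); /* line 202 */
                    if (vma->exec_handle == handle)                 /* line 203 */
                            return vma;                             /* line 204 */
            }
            return NULL;
    }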
211 i915_gem_execbuffer_unreserve_vma(struct i915_vma *vma) in i915_gem_execbuffer_unreserve_vma() argument
214 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_unreserve_vma()
216 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_unreserve_vma()
219 entry = vma->exec_entry; in i915_gem_execbuffer_unreserve_vma()
225 vma->pin_count--; in i915_gem_execbuffer_unreserve_vma()
238 struct i915_vma *vma; in eb_destroy() local
240 vma = list_first_entry(&eb->vmas, in eb_destroy()
243 list_del_init(&vma->exec_list); in eb_destroy()
244 i915_gem_execbuffer_unreserve_vma(vma); in eb_destroy()
245 drm_gem_object_unreference(&vma->obj->base); in eb_destroy()
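
Context sketch for the teardown pair above: unreserve drops the execbuf pin (and, presumably, a fence pin) only if the vma is actually bound, and eb_destroy() drains the list, unreserving each vma and dropping the GEM reference taken at lookup. The flag tests and the final kfree() are assumptions:

    static void i915_gem_execbuffer_unreserve_vma(struct i915_vma *vma)
    {
            struct drm_i915_gem_object *obj = vma->obj;             /* line 214 */
            struct drm_i915_gem_exec_object2 *entry;

            if (!drm_mm_node_allocated(&vma->node))                 /* line 216 */
                    return;         /* never bound: nothing to release */

            entry = vma->exec_entry;                                /* line 219 */
            if (entry->flags & __EXEC_OBJECT_HAS_FENCE)     /* assumed flag check */
                    i915_gem_object_unpin_fence(obj);
            if (entry->flags & __EXEC_OBJECT_HAS_PIN)       /* assumed flag check */
                    vma->pin_count--;                               /* line 225 */
            entry->flags &= ~(__EXEC_OBJECT_HAS_FENCE | __EXEC_OBJECT_HAS_PIN);
    }

    static void eb_destroy(struct eb_vmas *eb)
    {
            while (!list_empty(&eb->vmas)) {
                    struct i915_vma *vma;                           /* line 238 */

                    vma = list_first_entry(&eb->vmas,               /* line 240 */
                                           struct i915_vma, exec_list);
                    list_del_init(&vma->exec_list);                 /* line 243 */
                    i915_gem_execbuffer_unreserve_vma(vma);         /* line 244 */
                    drm_gem_object_unreference(&vma->obj->base);    /* line 245 */
            }
            kfree(eb);              /* assumed final free of the tracker */
    }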
492 i915_gem_execbuffer_relocate_vma(struct i915_vma *vma, in i915_gem_execbuffer_relocate_vma() argument
498 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_relocate_vma()
517 ret = i915_gem_execbuffer_relocate_entry(vma->obj, eb, r); in i915_gem_execbuffer_relocate_vma()
538 i915_gem_execbuffer_relocate_vma_slow(struct i915_vma *vma, in i915_gem_execbuffer_relocate_vma_slow() argument
542 const struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_relocate_vma_slow()
546 ret = i915_gem_execbuffer_relocate_entry(vma->obj, eb, &relocs[i]); in i915_gem_execbuffer_relocate_vma_slow()
557 struct i915_vma *vma; in i915_gem_execbuffer_relocate() local
568 list_for_each_entry(vma, &eb->vmas, exec_list) { in i915_gem_execbuffer_relocate()
569 ret = i915_gem_execbuffer_relocate_vma(vma, eb); in i915_gem_execbuffer_relocate()
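
Context sketch for the relocation hits: the fast path (line 517) applies drm_i915_gem_relocation_entry records read from the user pointer, while the _slow variant takes a pre-copied kernel array. A hedged reconstruction of the slow helper and the top-level walk; the pagefault_disable() bracketing is an assumption about why two variants exist (the fast path runs under struct_mutex, so faulting on user relocs is not allowed):

    static int
    i915_gem_execbuffer_relocate_vma_slow(struct i915_vma *vma,     /* line 538 */
                                          struct eb_vmas *eb,
                                          struct drm_i915_gem_relocation_entry *relocs)
    {
            const struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; /* line 542 */
            int i, ret;

            for (i = 0; i < entry->relocation_count; i++) {
                    ret = i915_gem_execbuffer_relocate_entry(vma->obj, eb,
                                                             &relocs[i]); /* line 546 */
                    if (ret)
                            return ret;
            }
            return 0;
    }

    static int i915_gem_execbuffer_relocate(struct eb_vmas *eb)
    {
            struct i915_vma *vma;                                   /* line 557 */
            int ret = 0;

            /* Assumed: faults are forbidden here because the relocs may
             * live in a mmapped bo while we hold struct_mutex. */
            pagefault_disable();
            list_for_each_entry(vma, &eb->vmas, exec_list) {        /* line 568 */
                    ret = i915_gem_execbuffer_relocate_vma(vma, eb); /* line 569 */
                    if (ret)
                            break;
            }
            pagefault_enable();
            return ret;
    }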
585 i915_gem_execbuffer_reserve_vma(struct i915_vma *vma, in i915_gem_execbuffer_reserve_vma() argument
589 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_reserve_vma()
590 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_reserve_vma()
595 if (!drm_mm_node_allocated(&vma->node)) { in i915_gem_execbuffer_reserve_vma()
604 ret = i915_gem_object_pin(obj, vma->vm, entry->alignment, flags); in i915_gem_execbuffer_reserve_vma()
607 ret = i915_gem_object_pin(obj, vma->vm, in i915_gem_execbuffer_reserve_vma()
624 if (entry->offset != vma->node.start) { in i915_gem_execbuffer_reserve_vma()
625 entry->offset = vma->node.start; in i915_gem_execbuffer_reserve_vma()
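
Context sketch for i915_gem_execbuffer_reserve_vma(): pin the object into vma->vm with flags derived from the exec entry, retry with relaxed flags when the first pin fails, then publish the final offset back into the userspace-visible entry and request another relocation pass if the buffer moved. The flag derivation and the retry condition are assumptions; eb_pin_vma_sketch() is a hypothetical condensation, not the real function:

    static int eb_pin_vma_sketch(struct i915_vma *vma, bool *need_reloc)
    {
            struct drm_i915_gem_object *obj = vma->obj;             /* line 589 */
            struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; /* line 590 */
            uint64_t flags = 0;
            int ret;

            if (!drm_mm_node_allocated(&vma->node)) {               /* line 595 */
                    /* assumed: placement flags chosen only when binding
                     * fresh, so already-bound vmas keep their spot */
                    if (entry->flags & EXEC_OBJECT_NEEDS_GTT)
                            flags |= PIN_GLOBAL;
            }

            ret = i915_gem_object_pin(obj, vma->vm,
                                      entry->alignment, flags);     /* line 604 */
            if (ret == -ENOSPC)             /* assumed retry condition */
                    ret = i915_gem_object_pin(obj, vma->vm,         /* line 607 */
                                              entry->alignment,
                                              flags & ~PIN_MAPPABLE);
            if (ret)
                    return ret;

            entry->flags |= __EXEC_OBJECT_HAS_PIN;  /* assumed: pairs with line 225 */

            if (entry->offset != vma->node.start) {                 /* line 624 */
                    entry->offset = vma->node.start;                /* line 625 */
                    *need_reloc = true;     /* buffer moved: relocations are stale */
            }
            return 0;
    }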
638 need_reloc_mappable(struct i915_vma *vma) in need_reloc_mappable() argument
640 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in need_reloc_mappable()
645 if (!i915_is_ggtt(vma->vm)) in need_reloc_mappable()
649 if (HAS_LLC(vma->obj->base.dev)) in need_reloc_mappable()
652 if (vma->obj->base.write_domain == I915_GEM_DOMAIN_CPU) in need_reloc_mappable()
659 eb_vma_misplaced(struct i915_vma *vma) in eb_vma_misplaced() argument
661 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in eb_vma_misplaced()
662 struct drm_i915_gem_object *obj = vma->obj; in eb_vma_misplaced()
665 !i915_is_ggtt(vma->vm)); in eb_vma_misplaced()
668 vma->node.start & (entry->alignment - 1)) in eb_vma_misplaced()
672 vma->node.start < BATCH_OFFSET_BIAS) in eb_vma_misplaced()
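
Context sketch for the two placement predicates: need_reloc_mappable() decides whether relocations force an aperture-mappable GGTT binding (no LLC and not CPU-writable means relocs must go through the GTT), and eb_vma_misplaced() flags bound vmas that violate alignment or the batch bias. The relocation_count guard, the flag names, and the left-hand side of the WARN_ON completing the truncated line 665 are assumptions:

    static bool need_reloc_mappable(struct i915_vma *vma)           /* line 638 */
    {
            struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; /* line 640 */

            if (entry->relocation_count == 0)   /* assumed: nothing to patch */
                    return false;
            if (!i915_is_ggtt(vma->vm))                             /* line 645 */
                    return false;       /* only the GGTT is aperture-mappable */
            if (HAS_LLC(vma->obj->base.dev))                        /* line 649 */
                    return false;       /* LLC: CPU relocs stay coherent */
            if (vma->obj->base.write_domain == I915_GEM_DOMAIN_CPU) /* line 652 */
                    return false;       /* already CPU-writable */
            return true;
    }

    static bool eb_vma_misplaced(struct i915_vma *vma)              /* line 659 */
    {
            struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; /* line 661 */
            struct drm_i915_gem_object *obj = vma->obj;             /* line 662 */

            WARN_ON((entry->flags & EXEC_OBJECT_NEEDS_GTT) &&       /* assumed LHS */
                    !i915_is_ggtt(vma->vm));                        /* line 665 */

            if (entry->alignment &&                                 /* line 668 */
                vma->node.start & (entry->alignment - 1))
                    return true;
            if ((entry->flags & __EXEC_OBJECT_NEEDS_BIAS) &&        /* assumed flag */
                vma->node.start < BATCH_OFFSET_BIAS)                /* line 672 */
                    return true;
            if ((entry->flags & __EXEC_OBJECT_NEEDS_MAP) &&         /* assumed check */
                !obj->map_and_fenceable)
                    return true;
            return false;
    }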
688 struct i915_vma *vma; in i915_gem_execbuffer_reserve() local
703 vma = list_first_entry(vmas, struct i915_vma, exec_list); in i915_gem_execbuffer_reserve()
704 obj = vma->obj; in i915_gem_execbuffer_reserve()
705 entry = vma->exec_entry; in i915_gem_execbuffer_reserve()
712 need_mappable = need_fence || need_reloc_mappable(vma); in i915_gem_execbuffer_reserve()
716 list_move(&vma->exec_list, &ordered_vmas); in i915_gem_execbuffer_reserve()
718 list_move_tail(&vma->exec_list, &ordered_vmas); in i915_gem_execbuffer_reserve()
742 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_reserve()
743 if (!drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_reserve()
746 if (eb_vma_misplaced(vma)) in i915_gem_execbuffer_reserve()
747 ret = i915_vma_unbind(vma); in i915_gem_execbuffer_reserve()
749 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
755 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_reserve()
756 if (drm_mm_node_allocated(&vma->node)) in i915_gem_execbuffer_reserve()
759 ret = i915_gem_execbuffer_reserve_vma(vma, ring, need_relocs); in i915_gem_execbuffer_reserve()
769 list_for_each_entry(vma, vmas, exec_list) in i915_gem_execbuffer_reserve()
770 i915_gem_execbuffer_unreserve_vma(vma); in i915_gem_execbuffer_reserve()
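
Context sketch for the main reserve loop: the list is first reordered so fence- and aperture-hungry buffers bind first (lines 703-718), then a retry loop makes two passes, fixing bound-but-misplaced vmas and binding the rest; on -ENOSPC everything is unreserved, the address space is evicted, and the loop runs once more. The `retry` bookkeeping and the i915_gem_evict_vm() step are inferred, not in the listing:

    int retry = 0;
    do {
            int ret = 0;

            /* Pass 1 (line 742): unbind misplaced vmas, re-reserve the rest. */
            list_for_each_entry(vma, vmas, exec_list) {
                    if (!drm_mm_node_allocated(&vma->node))         /* line 743 */
                            continue;
                    if (eb_vma_misplaced(vma))                      /* line 746 */
                            ret = i915_vma_unbind(vma);             /* line 747 */
                    else
                            ret = i915_gem_execbuffer_reserve_vma(vma, ring,
                                                                  need_relocs); /* line 749 */
                    if (ret)
                            break;
            }

            /* Pass 2 (line 755): bind everything still unbound. */
            if (ret == 0) {
                    list_for_each_entry(vma, vmas, exec_list) {
                            if (drm_mm_node_allocated(&vma->node))  /* line 756 */
                                    continue;
                            ret = i915_gem_execbuffer_reserve_vma(vma, ring,
                                                                  need_relocs); /* line 759 */
                            if (ret)
                                    break;
                    }
            }

            if (ret != -ENOSPC || retry++)  /* assumed: one eviction retry only */
                    return ret;             /* also the success exit, ret == 0 */

            /* Back everything out before evicting (line 769). */
            list_for_each_entry(vma, vmas, exec_list)
                    i915_gem_execbuffer_unreserve_vma(vma);         /* line 770 */

            ret = i915_gem_evict_vm(vm, true);      /* assumed eviction step */
            if (ret)
                    return ret;
    } while (1);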
788 struct i915_vma *vma; in i915_gem_execbuffer_relocate_slow() local
798 vma = list_first_entry(&eb->vmas, struct i915_vma, exec_list); in i915_gem_execbuffer_relocate_slow()
799 list_del_init(&vma->exec_list); in i915_gem_execbuffer_relocate_slow()
800 i915_gem_execbuffer_unreserve_vma(vma); in i915_gem_execbuffer_relocate_slow()
801 drm_gem_object_unreference(&vma->obj->base); in i915_gem_execbuffer_relocate_slow()
874 list_for_each_entry(vma, &eb->vmas, exec_list) { in i915_gem_execbuffer_relocate_slow()
875 int offset = vma->exec_entry - exec; in i915_gem_execbuffer_relocate_slow()
876 ret = i915_gem_execbuffer_relocate_vma_slow(vma, eb, in i915_gem_execbuffer_relocate_slow()
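
Context sketch for the relocation slow path: after a fault aborts the fast path, every vma is released (lines 798-801), the relocation arrays are copied from user memory with the lock dropped, the lookup is rebuilt, and the copied entries are applied per vma. The `reloc` and `reloc_offset` staging arrays below are assumptions about how that copy is indexed:

    /* lines 874-876, hedged fragment: re-apply relocations from the
     * kernel-side copy. */
    list_for_each_entry(vma, &eb->vmas, exec_list) {
            int offset = vma->exec_entry - exec;    /* index of this exec entry */
            ret = i915_gem_execbuffer_relocate_vma_slow(vma, eb,
                                                        reloc + reloc_offset[offset]);
            if (ret)
                    break;
    }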
898 struct i915_vma *vma; in i915_gem_execbuffer_move_to_gpu() local
903 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_move_to_gpu()
904 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_move_to_gpu()
1020 struct i915_vma *vma; in i915_gem_execbuffer_move_to_active() local
1022 list_for_each_entry(vma, vmas, exec_list) { in i915_gem_execbuffer_move_to_active()
1023 struct drm_i915_gem_exec_object2 *entry = vma->exec_entry; in i915_gem_execbuffer_move_to_active()
1024 struct drm_i915_gem_object *obj = vma->obj; in i915_gem_execbuffer_move_to_active()
1034 i915_vma_move_to_active(vma, ring); in i915_gem_execbuffer_move_to_active()
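
Context sketch for the submission bookkeeping: move_to_gpu() walks the list to synchronize each object with the target ring and flush stale CPU writes before the batch runs, and move_to_active() afterwards marks every vma busy on that ring so retirement can release it. The sync, clflush, and dirty-marking details are assumptions beyond the listed loops:

    /* Hedged fragment of i915_gem_execbuffer_move_to_gpu(). */
    list_for_each_entry(vma, vmas, exec_list) {                     /* line 903 */
            struct drm_i915_gem_object *obj = vma->obj;             /* line 904 */

            ret = i915_gem_object_sync(obj, ring);  /* assumed inter-ring sync */
            if (ret)
                    return ret;
            if (obj->base.write_domain & I915_GEM_DOMAIN_CPU)
                    i915_gem_clflush_object(obj, false);    /* assumed flush */
    }

    /* Hedged fragment of i915_gem_execbuffer_move_to_active(),
     * run after the batch is emitted. */
    list_for_each_entry(vma, vmas, exec_list) {                     /* line 1022 */
            struct drm_i915_gem_object *obj = vma->obj;             /* line 1024 */

            i915_vma_move_to_active(vma, ring);                     /* line 1034 */
            if (obj->base.write_domain)
                    obj->dirty = 1;         /* assumed write-back tracking */
    }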
1147 struct i915_vma *vma; in i915_gem_execbuffer_parse() local
1170 vma = i915_gem_obj_to_ggtt(shadow_batch_obj); in i915_gem_execbuffer_parse()
1171 vma->exec_entry = shadow_exec_entry; in i915_gem_execbuffer_parse()
1172 vma->exec_entry->flags = __EXEC_OBJECT_PURGEABLE | __EXEC_OBJECT_HAS_PIN; in i915_gem_execbuffer_parse()
1174 list_add_tail(&vma->exec_list, &eb->vmas); in i915_gem_execbuffer_parse()
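
Context sketch for the command-parser hand-off: the parsed copy of the batch lives in shadow_batch_obj; its GGTT vma is given a synthetic exec entry and appended to eb->vmas so the common unreserve/unreference teardown covers it like any other buffer. The extra GEM reference is an assumption (it would balance the unreference at line 245):

    vma = i915_gem_obj_to_ggtt(shadow_batch_obj);                   /* line 1170 */
    vma->exec_entry = shadow_exec_entry;                            /* line 1171 */
    vma->exec_entry->flags = __EXEC_OBJECT_PURGEABLE |              /* line 1172 */
                             __EXEC_OBJECT_HAS_PIN;
    drm_gem_object_reference(&shadow_batch_obj->base);  /* assumed ref for eb_destroy() */
    list_add_tail(&vma->exec_list, &eb->vmas);                      /* line 1174 */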
1385 struct i915_vma *vma = list_entry(eb->vmas.prev, typeof(*vma), exec_list); in eb_get_batch() local
1396 vma->exec_entry->flags |= __EXEC_OBJECT_NEEDS_BIAS; in eb_get_batch()
1398 return vma->obj; in eb_get_batch()
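
Context sketch for eb_get_batch(): by execbuf convention the batch buffer is the last entry in the exec list, hence the eb->vmas.prev access; flagging it __EXEC_OBJECT_NEEDS_BIAS keeps it at or above BATCH_OFFSET_BIAS, which eb_vma_misplaced() (line 672) then enforces. A hedged reconstruction:

    static struct drm_i915_gem_object *eb_get_batch(struct eb_vmas *eb)
    {
            struct i915_vma *vma =
                    list_entry(eb->vmas.prev, typeof(*vma), exec_list); /* line 1385 */

            /* Keep the batch above BATCH_OFFSET_BIAS so negative
             * relocation deltas cannot underflow the GTT. */
            vma->exec_entry->flags |= __EXEC_OBJECT_NEEDS_BIAS;     /* line 1396 */

            return vma->obj;                                        /* line 1398 */
    }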