vm_rb              74 arch/unicore32/include/asm/mmu_context.h 		rb_erase(&high_vma->vm_rb, &mm->mm_rb); \
vm_rb             282 drivers/gpu/drm/drm_vma_manager.c 		entry = rb_entry(*iter, struct drm_vma_offset_file, vm_rb);
vm_rb             301 drivers/gpu/drm/drm_vma_manager.c 	rb_link_node(&new->vm_rb, parent, iter);
vm_rb             302 drivers/gpu/drm/drm_vma_manager.c 	rb_insert_color(&new->vm_rb, &node->vm_files);
vm_rb             335 drivers/gpu/drm/drm_vma_manager.c 		entry = rb_entry(iter, struct drm_vma_offset_file, vm_rb);
vm_rb             338 drivers/gpu/drm/drm_vma_manager.c 				rb_erase(&entry->vm_rb, &node->vm_files);
vm_rb             376 drivers/gpu/drm/drm_vma_manager.c 		entry = rb_entry(iter, struct drm_vma_offset_file, vm_rb);
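
The six drm_vma_manager.c hits above are the canonical intrusive-rbtree pattern: walk down from the root remembering the parent and the child-slot pointer, then splice with rb_link_node() and rebalance with rb_insert_color(). A minimal sketch of that insert, assuming <linux/rbtree.h>; the struct and key names here are illustrative stand-ins for struct drm_vma_offset_file, and the real code also refcounts duplicate entries:

    struct file_entry {
            struct rb_node vm_rb;
            void *filp;                     /* sort key */
    };

    static void file_entry_insert(struct rb_root *root, struct file_entry *new)
    {
            struct rb_node **iter = &root->rb_node, *parent = NULL;

            while (*iter) {
                    struct file_entry *entry =
                            rb_entry(*iter, struct file_entry, vm_rb);

                    parent = *iter;
                    if (new->filp < entry->filp)
                            iter = &(*iter)->rb_left;
                    else
                            iter = &(*iter)->rb_right;
            }
            rb_link_node(&new->vm_rb, parent, iter);    /* splice in */
            rb_insert_color(&new->vm_rb, root);         /* rebalance */
    }
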
vm_rb              78 fs/proc/nommu.c 	return nommu_region_show(m, rb_entry(p, struct vm_region, vm_rb));
vm_rb              30 fs/proc/task_nommu.c 		vma = rb_entry(p, struct vm_area_struct, vm_rb);
vm_rb              91 fs/proc/task_nommu.c 		vma = rb_entry(p, struct vm_area_struct, vm_rb);
vm_rb             109 fs/proc/task_nommu.c 		vma = rb_entry(p, struct vm_area_struct, vm_rb);
vm_rb             195 fs/proc/task_nommu.c 	return nommu_vma_show(m, rb_entry(p, struct vm_area_struct, vm_rb));
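
The four fs/proc hits all convert a seq_file cursor (an rb_node) back to its container with rb_entry(). On nommu kernels of this generation, VMAs live in the per-mm rbtree mm->mm_rb, so an in-order walk is rb_first()/rb_next(); a minimal sketch:

    /* Visit every VMA in ascending address order, as the nommu
     * /proc/<pid>/maps code does across its seq_file callbacks. */
    static void show_all_vmas(struct mm_struct *mm)
    {
            struct rb_node *p;

            for (p = rb_first(&mm->mm_rb); p; p = rb_next(p)) {
                    struct vm_area_struct *vma =
                            rb_entry(p, struct vm_area_struct, vm_rb);
                    /* ... print vma->vm_start, vma->vm_end, flags ... */
            }
    }
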
vm_rb              47 include/drm/drm_vma_manager.h 	struct rb_node vm_rb;
vm_rb             263 include/linux/mm_types.h 	struct rb_node	vm_rb;		/* link in global region tree */
vm_rb             302 include/linux/mm_types.h 	struct rb_node vm_rb;
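
The three declarations above are the intrusive side of the pattern: the rb_node is embedded in the object (vm_region, vm_area_struct, drm_vma_offset_file) rather than the tree allocating nodes of its own, and rb_entry() is just container_of() recovering the object from its embedded node. A reduced sketch with an illustrative struct:

    #include <linux/rbtree.h>

    struct region_like {
            unsigned long vm_start;         /* sort key */
            struct rb_node vm_rb;           /* link in the tree, embedded */
    };

    /* rb_entry(ptr, type, member) == container_of(): subtract the
     * offset of .vm_rb from the rb_node pointer to get the object. */
    static inline struct region_like *node_to_region(struct rb_node *n)
    {
            return rb_entry(n, struct region_like, vm_rb);
    }
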
vm_rb             596 kernel/fork.c  		rb_link = &tmp->vm_rb.rb_right;
vm_rb             597 kernel/fork.c  		rb_parent = &tmp->vm_rb;
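
The two kernel/fork.c hits are dup_mmap() building the child's tree incrementally: VMAs are copied in ascending order, so the next node always hangs off the previous node's right-child slot and no search is needed. A sketch of the loop shape, with copy_one_vma() as a hypothetical stand-in for the real copying logic:

    struct rb_node **rb_link = &mm->mm_rb.rb_node;
    struct rb_node *rb_parent = NULL;
    struct vm_area_struct *mpnt;

    for (mpnt = oldmm->mmap; mpnt; mpnt = mpnt->vm_next) {
            struct vm_area_struct *tmp = copy_one_vma(mpnt);  /* hypothetical */

            rb_link_node(&tmp->vm_rb, rb_parent, rb_link);
            rb_insert_color(&tmp->vm_rb, &mm->mm_rb);

            /* tmp holds the largest key inserted so far, so it stays the
             * rightmost node even after rebalancing; its empty right
             * slot is the link point for the next, still-larger VMA. */
            rb_link = &tmp->vm_rb.rb_right;
            rb_parent = &tmp->vm_rb;
    }
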
vm_rb             311 mm/mmap.c      	if (vma->vm_rb.rb_left) {
vm_rb             312 mm/mmap.c      		subtree_gap = rb_entry(vma->vm_rb.rb_left,
vm_rb             313 mm/mmap.c      				struct vm_area_struct, vm_rb)->rb_subtree_gap;
vm_rb             317 mm/mmap.c      	if (vma->vm_rb.rb_right) {
vm_rb             318 mm/mmap.c      		subtree_gap = rb_entry(vma->vm_rb.rb_right,
vm_rb             319 mm/mmap.c      				struct vm_area_struct, vm_rb)->rb_subtree_gap;
vm_rb             335 mm/mmap.c      		vma = rb_entry(nd, struct vm_area_struct, vm_rb);
vm_rb             380 mm/mmap.c      		vma = rb_entry(nd, struct vm_area_struct, vm_rb);
vm_rb             432 mm/mmap.c      			 struct vm_area_struct, vm_rb,
vm_rb             446 mm/mmap.c      	vma_gap_callbacks_propagate(&vma->vm_rb, NULL);
vm_rb             455 mm/mmap.c      	rb_insert_augmented(&vma->vm_rb, root, &vma_gap_callbacks);
vm_rb             465 mm/mmap.c      	rb_erase_augmented(&vma->vm_rb, root, &vma_gap_callbacks);
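
Lines 311-465 of mm/mmap.c are the augmented-rbtree machinery: every vm_area_struct caches rb_subtree_gap, the largest free gap anywhere in its subtree, and the generated callbacks keep that cache coherent through rotations on insert and erase. The compute step is a three-way max over the VMA's own gap and the children's cached values; a sketch matching this kernel generation's macro signature, with vma_own_gap() as a hypothetical stand-in for the "gap below this VMA" computation:

    static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma)
    {
            unsigned long max = vma_own_gap(vma);       /* hypothetical */
            unsigned long gap;

            if (vma->vm_rb.rb_left) {
                    gap = rb_entry(vma->vm_rb.rb_left,
                                   struct vm_area_struct, vm_rb)->rb_subtree_gap;
                    if (gap > max)
                            max = gap;
            }
            if (vma->vm_rb.rb_right) {
                    gap = rb_entry(vma->vm_rb.rb_right,
                                   struct vm_area_struct, vm_rb)->rb_subtree_gap;
                    if (gap > max)
                            max = gap;
            }
            return max;
    }

    /* Generates vma_gap_callbacks.{propagate,copy,rotate}, later handed
     * to rb_insert_augmented()/rb_erase_augmented() (lines 455/465). */
    RB_DECLARE_CALLBACKS(static, vma_gap_callbacks,
                         struct vm_area_struct, vm_rb,
                         unsigned long, rb_subtree_gap,
                         vma_compute_subtree_gap)
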
vm_rb             539 mm/mmap.c      		vma_tmp = rb_entry(__rb_parent, struct vm_area_struct, vm_rb);
vm_rb             554 mm/mmap.c      		*pprev = rb_entry(rb_prev, struct vm_area_struct, vm_rb);
vm_rb             606 mm/mmap.c      	rb_link_node(&vma->vm_rb, rb_parent, rb_link);
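
Lines 539-606 are the mmu-side insert path: find_vma_links() walks down from mm->mm_rb.rb_node recording the parent and the NULL child slot where the new VMA belongs, failing if an existing VMA overlaps, and __vma_link_rb() then splices the node in at that slot. A simplified sketch of the descent (the real function also reports the predecessor VMA via an extra out-parameter):

    /* Find where a VMA covering [addr, end) would hang; -ENOMEM on
     * overlap. Assumes <linux/errno.h> and <linux/rbtree.h>. */
    static int find_link(struct mm_struct *mm, unsigned long addr,
                         unsigned long end, struct rb_node ***pp_link,
                         struct rb_node **p_parent)
    {
            struct rb_node **link = &mm->mm_rb.rb_node, *parent = NULL;

            while (*link) {
                    struct vm_area_struct *tmp =
                            rb_entry(*link, struct vm_area_struct, vm_rb);

                    parent = *link;
                    if (tmp->vm_end > addr) {
                            if (tmp->vm_start < end)
                                    return -ENOMEM;     /* overlap */
                            link = &(*link)->rb_left;
                    } else {
                            link = &(*link)->rb_right;
                    }
            }
            *pp_link = link;
            *p_parent = parent;
            return 0;
    }
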
vm_rb            1908 mm/mmap.c      	vma = rb_entry(mm->mm_rb.rb_node, struct vm_area_struct, vm_rb);
vm_rb            1915 mm/mmap.c      		if (gap_end >= low_limit && vma->vm_rb.rb_left) {
vm_rb            1917 mm/mmap.c      				rb_entry(vma->vm_rb.rb_left,
vm_rb            1918 mm/mmap.c      					 struct vm_area_struct, vm_rb);
vm_rb            1935 mm/mmap.c      		if (vma->vm_rb.rb_right) {
vm_rb            1937 mm/mmap.c      				rb_entry(vma->vm_rb.rb_right,
vm_rb            1938 mm/mmap.c      					 struct vm_area_struct, vm_rb);
vm_rb            1947 mm/mmap.c      			struct rb_node *prev = &vma->vm_rb;
vm_rb            1951 mm/mmap.c      				       struct vm_area_struct, vm_rb);
vm_rb            1952 mm/mmap.c      			if (prev == vma->vm_rb.rb_left) {
vm_rb            2012 mm/mmap.c      	vma = rb_entry(mm->mm_rb.rb_node, struct vm_area_struct, vm_rb);
vm_rb            2019 mm/mmap.c      		if (gap_start <= high_limit && vma->vm_rb.rb_right) {
vm_rb            2021 mm/mmap.c      				rb_entry(vma->vm_rb.rb_right,
vm_rb            2022 mm/mmap.c      					 struct vm_area_struct, vm_rb);
vm_rb            2039 mm/mmap.c      		if (vma->vm_rb.rb_left) {
vm_rb            2041 mm/mmap.c      				rb_entry(vma->vm_rb.rb_left,
vm_rb            2042 mm/mmap.c      					 struct vm_area_struct, vm_rb);
vm_rb            2051 mm/mmap.c      			struct rb_node *prev = &vma->vm_rb;
vm_rb            2055 mm/mmap.c      				       struct vm_area_struct, vm_rb);
vm_rb            2056 mm/mmap.c      			if (prev == vma->vm_rb.rb_right) {
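
The block at lines 1908-2056 is where rb_subtree_gap pays off: unmapped_area() (and its topdown twin) descends left while the left subtree's cached gap can still satisfy the request, checks the gap directly below the current VMA, otherwise descends right, and when a subtree is exhausted climbs until it arrives from a left child so the parent's own gap and right subtree are examined next. A condensed sketch of that climb-out step, the non-obvious part (lines 1947-1952), with the gap bookkeeping elided:

    /* Climb out of an exhausted subtree: stop at the first ancestor
     * reached from its left child, whose own gap is still untested. */
    while (true) {
            struct rb_node *prev = &vma->vm_rb;

            if (!rb_parent(prev))
                    return -ENOMEM;             /* whole tree exhausted */
            vma = rb_entry(rb_parent(prev),
                           struct vm_area_struct, vm_rb);
            if (prev == vma->vm_rb.rb_left)
                    break;                      /* test vma's gap next */
    }
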
vm_rb            2250 mm/mmap.c      		tmp = rb_entry(rb_node, struct vm_area_struct, vm_rb);
vm_rb            2283 mm/mmap.c      		*pprev = rb_node ? rb_entry(rb_node, struct vm_area_struct, vm_rb) : NULL;
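
Lines 2250-2283 are find_vma(): a plain binary search for the first VMA whose vm_end lies above the address, keeping the best candidate while walking left. A sketch of the loop, ignoring the per-task vmacache lookup the real function tries first:

    struct vm_area_struct *find_vma_sketch(struct mm_struct *mm,
                                           unsigned long addr)
    {
            struct rb_node *rb_node = mm->mm_rb.rb_node;
            struct vm_area_struct *vma = NULL;

            while (rb_node) {
                    struct vm_area_struct *tmp =
                            rb_entry(rb_node, struct vm_area_struct, vm_rb);

                    if (tmp->vm_end > addr) {
                            vma = tmp;                  /* candidate */
                            if (tmp->vm_start <= addr)
                                    break;              /* addr inside it */
                            rb_node = rb_node->rb_left;
                    } else {
                            rb_node = rb_node->rb_right;
                    }
            }
            return vma;
    }
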
vm_rb             465 mm/nommu.c     	last = rb_entry(lastp, struct vm_region, vm_rb);
vm_rb             470 mm/nommu.c     		region = rb_entry(p, struct vm_region, vm_rb);
vm_rb             471 mm/nommu.c     		last = rb_entry(lastp, struct vm_region, vm_rb);
vm_rb             500 mm/nommu.c     		pregion = rb_entry(parent, struct vm_region, vm_rb);
vm_rb             511 mm/nommu.c     	rb_link_node(&region->vm_rb, parent, p);
vm_rb             512 mm/nommu.c     	rb_insert_color(&region->vm_rb, &nommu_region_tree);
vm_rb             525 mm/nommu.c     	rb_erase(&region->vm_rb, &nommu_region_tree);
vm_rb             613 mm/nommu.c     		pvma = rb_entry(parent, struct vm_area_struct, vm_rb);
vm_rb             636 mm/nommu.c     	rb_link_node(&vma->vm_rb, parent, p);
vm_rb             637 mm/nommu.c     	rb_insert_color(&vma->vm_rb, &mm->mm_rb);
vm_rb             642 mm/nommu.c     		prev = rb_entry(rb_prev, struct vm_area_struct, vm_rb);
vm_rb             678 mm/nommu.c     	rb_erase(&vma->vm_rb, &mm->mm_rb);
vm_rb            1170 mm/nommu.c     			pregion = rb_entry(rb, struct vm_region, vm_rb);
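
The mm/nommu.c hits combine the patterns above for the global nommu_region_tree and the per-mm VMA tree: lines 500-525 and 613-678 are the descend/link/insert and rb_erase paths, line 1170 searches for a shareable region, and lines 465-471 are validate_nommu_regions() asserting in-order consistency during a full walk. A simplified sketch of that validation (the real code also checks each region's vm_top and internal start/end sanity):

    /* An in-order walk must yield non-overlapping regions in
     * ascending address order; anything else is tree corruption. */
    static void validate_regions(struct rb_root *tree)
    {
            struct rb_node *p, *lastp = rb_first(tree);

            for (p = lastp ? rb_next(lastp) : NULL; p;
                 lastp = p, p = rb_next(p)) {
                    struct vm_region *region =
                            rb_entry(p, struct vm_region, vm_rb);
                    struct vm_region *last =
                            rb_entry(lastp, struct vm_region, vm_rb);

                    BUG_ON(region->vm_start < last->vm_end);    /* overlap */
            }
    }
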
vm_rb             286 mm/util.c      					struct vm_area_struct, vm_rb);