vmap_area         446 kernel/crash_core.c 	VMCOREINFO_OFFSET(vmap_area, va_start);
vmap_area         447 kernel/crash_core.c 	VMCOREINFO_OFFSET(vmap_area, list);
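The two kernel/crash_core.c hits export the layout of struct vmap_area into the vmcoreinfo ELF note, so dump tools such as makedumpfile and crash can walk the address-sorted vmap_area list inside a vmcore without debug symbols. For reference, VMCOREINFO_OFFSET() is a one-line macro along these lines (quoted from include/linux/crash_core.h as best recalled, so treat it as a sketch):

	/* Appends e.g. "OFFSET(vmap_area.va_start)=<n>" to the vmcoreinfo note. */
	#define VMCOREINFO_OFFSET(name, field) \
		vmcoreinfo_append_str("OFFSET(%s.%s)=%lu\n", #name, #field, \
				      (unsigned long)offsetof(struct name, field))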
vmap_area         372 mm/vmalloc.c   static DEFINE_PER_CPU(struct vmap_area *, ne_fit_preload_node);
vmap_area         375 mm/vmalloc.c   va_size(struct vmap_area *va)
vmap_area         383 mm/vmalloc.c   	struct vmap_area *va;
vmap_area         385 mm/vmalloc.c   	va = rb_entry_safe(node, struct vmap_area, rb_node);
vmap_area         393 mm/vmalloc.c   compute_subtree_max_size(struct vmap_area *va)
vmap_area         401 mm/vmalloc.c   	struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
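The cluster above is the machinery behind the augmented free-space rb-tree: va_size() measures one area, get_subtree_max_size() reads the cached maximum of a child subtree, compute_subtree_max_size() combines the two, and RB_DECLARE_CALLBACKS() (the fragment at mm/vmalloc.c:401 is its argument list) wires va_size in as the augment callback, so every node of free_vmap_area_root caches the largest free block below it. The per-CPU ne_fit_preload_node at mm/vmalloc.c:372 is a spare node consumed by the NE-type split discussed further down. A minimal sketch of the three helpers, reconstructed from the fragments rather than copied verbatim:

	static __always_inline unsigned long
	va_size(struct vmap_area *va)
	{
		return (va->va_end - va->va_start);
	}

	static __always_inline unsigned long
	get_subtree_max_size(struct rb_node *node)
	{
		struct vmap_area *va;

		/* rb_entry_safe() tolerates a NULL child pointer. */
		va = rb_entry_safe(node, struct vmap_area, rb_node);
		return va ? va->subtree_max_size : 0;
	}

	static __always_inline unsigned long
	compute_subtree_max_size(struct vmap_area *va)
	{
		return max3(va_size(va),
			get_subtree_max_size(va->rb_node.rb_left),
			get_subtree_max_size(va->rb_node.rb_right));
	}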
vmap_area         414 mm/vmalloc.c   static struct vmap_area *__find_vmap_area(unsigned long addr)
vmap_area         419 mm/vmalloc.c   		struct vmap_area *va;
vmap_area         421 mm/vmalloc.c   		va = rb_entry(n, struct vmap_area, rb_node);
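__find_vmap_area() is a plain rb-tree descent over the busy tree, keyed on the half-open range [va_start, va_end). A sketch consistent with the fragments at mm/vmalloc.c:414-421:

	static struct vmap_area *__find_vmap_area(unsigned long addr)
	{
		struct rb_node *n = vmap_area_root.rb_node;

		while (n) {
			struct vmap_area *va;

			va = rb_entry(n, struct vmap_area, rb_node);
			if (addr < va->va_start)
				n = n->rb_left;
			else if (addr >= va->va_end)
				n = n->rb_right;
			else
				return va;	/* addr falls inside this area */
		}

		return NULL;
	}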
vmap_area         438 mm/vmalloc.c   find_va_links(struct vmap_area *va,
vmap_area         442 mm/vmalloc.c   	struct vmap_area *tmp_va;
vmap_area         461 mm/vmalloc.c   		tmp_va = rb_entry(*link, struct vmap_area, rb_node);
vmap_area         496 mm/vmalloc.c   	list = &rb_entry(parent, struct vmap_area, rb_node)->list;
vmap_area         501 mm/vmalloc.c   link_va(struct vmap_area *va, struct rb_root *root,
vmap_area         509 mm/vmalloc.c   		head = &rb_entry(parent, struct vmap_area, rb_node)->list;
vmap_area         540 mm/vmalloc.c   unlink_va(struct vmap_area *va, struct rb_root *root)
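find_va_links() walks the tree to find the parent and link pointer for a new node; link_va() then attaches it and, in the same pass, splices it into the address-sorted list by deriving the list neighbour from the rb-tree parent, avoiding a second walk. unlink_va() undoes both attachments. A simplified sketch of link_va() (the augmented-insert branch taken for the free tree is omitted here):

	static __always_inline void
	link_va(struct vmap_area *va, struct rb_root *root,
		struct rb_node *parent, struct rb_node **link,
		struct list_head *head)
	{
		/* The rb parent tells us who our list neighbour is. */
		if (likely(parent)) {
			head = &rb_entry(parent, struct vmap_area, rb_node)->list;
			if (&parent->rb_right != link)
				head = head->prev;
		}

		rb_link_node(&va->rb_node, parent, link);
		rb_insert_color(&va->rb_node, root);
		list_add(&va->list, head);
	}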
vmap_area         559 mm/vmalloc.c   	struct vmap_area *va;
vmap_area         567 mm/vmalloc.c   	va = rb_entry(n, struct vmap_area, rb_node);
vmap_area         572 mm/vmalloc.c   		va = rb_entry(node, struct vmap_area, rb_node);
vmap_area         587 mm/vmalloc.c   		va = rb_entry(n, struct vmap_area, rb_node);
vmap_area         625 mm/vmalloc.c   augment_tree_propagate_from(struct vmap_area *va)
vmap_area         631 mm/vmalloc.c   		va = rb_entry(node, struct vmap_area, rb_node);
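augment_tree_propagate_from() restores the subtree_max_size invariant after an area grows or shrinks: it walks from the changed node toward the root, recomputing each cached value and stopping as soon as one level is already correct. A sketch matching the fragments at mm/vmalloc.c:625-631:

	static __always_inline void
	augment_tree_propagate_from(struct vmap_area *va)
	{
		struct rb_node *node = &va->rb_node;
		unsigned long new_va_sub_max_size;

		while (node) {
			va = rb_entry(node, struct vmap_area, rb_node);
			new_va_sub_max_size = compute_subtree_max_size(va);

			/* Unchanged here means unchanged all the way up. */
			if (va->subtree_max_size == new_va_sub_max_size)
				break;

			va->subtree_max_size = new_va_sub_max_size;
			node = rb_parent(&va->rb_node);
		}
	}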
vmap_area         653 mm/vmalloc.c   insert_vmap_area(struct vmap_area *va,
vmap_area         664 mm/vmalloc.c   insert_vmap_area_augment(struct vmap_area *va,
vmap_area         687 mm/vmalloc.c   merge_or_add_vmap_area(struct vmap_area *va,
vmap_area         690 mm/vmalloc.c   	struct vmap_area *sibling;
vmap_area         717 mm/vmalloc.c   		sibling = list_entry(next, struct vmap_area, list);
vmap_area         741 mm/vmalloc.c   		sibling = list_entry(next->prev, struct vmap_area, list);
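The two list_entry() hits belong to the coalescing in merge_or_add_vmap_area(): a freed range first tries to extend the area after it, then the area before it, in the address-sorted list, and only inserts a fresh node when neither abuts. A partial sketch of the right-neighbour case (variable names assumed from the fragments; the list head setup and the left-neighbour case are elided):

	if (next != head) {
		sibling = list_entry(next, struct vmap_area, list);
		if (sibling->va_start == va->va_end) {
			/* Grow the right sibling leftwards and drop va. */
			sibling->va_start = va->va_start;
			augment_tree_propagate_from(sibling);

			kmem_cache_free(vmap_area_cachep, va);
			va = sibling;	/* it may still merge to the left */
			merged = true;
		}
	}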
vmap_area         765 mm/vmalloc.c   is_within_this_va(struct vmap_area *va, unsigned long size,
vmap_area         788 mm/vmalloc.c   static __always_inline struct vmap_area *
vmap_area         792 mm/vmalloc.c   	struct vmap_area *va;
vmap_area         803 mm/vmalloc.c   		va = rb_entry(node, struct vmap_area, rb_node);
vmap_area         828 mm/vmalloc.c   				va = rb_entry(node, struct vmap_area, rb_node);
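is_within_this_va() checks whether one free area can hold the request after alignment; find_vmap_lowest_match() uses the cached subtree sizes to steer a descent to the lowest-address free block that fits. A simplified sketch of that descent (the real loop at mm/vmalloc.c:788-828 can also climb back up the tree when a leaf turns out not to fit; that fallback is omitted):

	unsigned long length = size + align - 1;
	struct rb_node *node = free_vmap_area_root.rb_node;

	while (node) {
		va = rb_entry(node, struct vmap_area, rb_node);

		/* Prefer the left subtree while it still has room. */
		if (get_subtree_max_size(node->rb_left) >= length &&
				vstart < va->va_start) {
			node = node->rb_left;
		} else {
			if (is_within_this_va(va, size, align, vstart))
				return va;
			node = node->rb_right;
		}
	}
	return NULL;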
vmap_area         847 mm/vmalloc.c   static struct vmap_area *
vmap_area         851 mm/vmalloc.c   	struct vmap_area *va;
vmap_area         866 mm/vmalloc.c   	struct vmap_area *va_1, *va_2;
vmap_area         891 mm/vmalloc.c   classify_va_fit_type(struct vmap_area *va,
vmap_area         917 mm/vmalloc.c   adjust_va_to_fit_type(struct vmap_area *va,
vmap_area         921 mm/vmalloc.c   	struct vmap_area *lva = NULL;
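classify_va_fit_type() decides how a new allocation sits inside the free block that adjust_va_to_fit_type() is about to carve up: consuming it entirely (FL), hugging its left or right edge (LE/RE), or landing in the middle (NE), the one case that splits the block in two and therefore needs the extra vmap_area ("lva" above) preloaded per CPU in ne_fit_preload_node. A sketch of the classification:

	if (va->va_start == nva_start_addr) {
		if (va->va_end == nva_start_addr + size)
			type = FL_FIT_TYPE;	/* fills the block */
		else
			type = LE_FIT_TYPE;	/* left edge */
	} else if (va->va_end == nva_start_addr + size) {
		type = RE_FIT_TYPE;		/* right edge */
	} else {
		type = NE_FIT_TYPE;		/* no edge: split needed */
	}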
vmap_area        1012 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1050 mm/vmalloc.c   static struct vmap_area *alloc_vmap_area(unsigned long size,
vmap_area        1055 mm/vmalloc.c   	struct vmap_area *va, *pva;
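alloc_vmap_area() is where the preload pays off: before taking vmap_area_lock it makes sure this CPU has a spare node ready for a possible NE-type split, so the split never has to allocate under the lock. A sketch of that dance, assuming the v5.3-era code this listing appears to come from:

	preempt_disable();
	if (!__this_cpu_read(ne_fit_preload_node)) {
		/* Allocate the spare node in non-atomic context. */
		preempt_enable();
		pva = kmem_cache_alloc_node(vmap_area_cachep, GFP_KERNEL, node);
		preempt_disable();

		/* Another task may have preloaded meanwhile. */
		if (__this_cpu_cmpxchg(ne_fit_preload_node, NULL, pva) && pva)
			kmem_cache_free(vmap_area_cachep, pva);
	}

	spin_lock(&vmap_area_lock);
	preempt_enable();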
vmap_area        1165 mm/vmalloc.c   static void __free_vmap_area(struct vmap_area *va)
vmap_area        1182 mm/vmalloc.c   static void free_vmap_area(struct vmap_area *va)
vmap_area        1192 mm/vmalloc.c   static void unmap_vmap_area(struct vmap_area *va)
vmap_area        1250 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1251 mm/vmalloc.c   	struct vmap_area *n_va;
vmap_area        1328 mm/vmalloc.c   static void free_vmap_area_noflush(struct vmap_area *va)
vmap_area        1349 mm/vmalloc.c   static void free_unmap_vmap_area(struct vmap_area *va)
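Freeing is split in two so TLB flushes can be batched: free_unmap_vmap_area() clears the page-table entries right away but only queues the area, and __purge_vmap_area_lazy() (the va/n_va pair at mm/vmalloc.c:1250-1251 is its list walk) later issues one flush_tlb_kernel_range() covering everything queued, then returns each area to the free tree via merge_or_add_vmap_area(). A sketch of the front half (a debug-pagealloc-only immediate flush is omitted):

	static void free_unmap_vmap_area(struct vmap_area *va)
	{
		flush_cache_vunmap(va->va_start, va->va_end);
		unmap_vmap_area(va);		/* clear page tables now */
		free_vmap_area_noflush(va);	/* defer TLB flush and reuse */
	}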
vmap_area        1359 mm/vmalloc.c   static struct vmap_area *find_vmap_area(unsigned long addr)
vmap_area        1361 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1407 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1461 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1741 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1790 mm/vmalloc.c   		struct vmap_area *va;
vmap_area        1864 mm/vmalloc.c   	struct vmap_area *busy, *free;
vmap_area        1903 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        1910 mm/vmalloc.c   	vmap_area_cachep = KMEM_CACHE(vmap_area, SLAB_PANIC);
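vmalloc_init() creates the slab cache that all vmap_area nodes come from; SLAB_PANIC makes boot fail loudly if the cache cannot be created. KMEM_CACHE() is a thin convenience wrapper, roughly (from include/linux/slab.h, as best recalled):

	#define KMEM_CACHE(__struct, __flags)				\
		kmem_cache_create(#__struct, sizeof(struct __struct),	\
				  __alignof__(struct __struct),		\
				  (__flags), NULL)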
vmap_area        2018 mm/vmalloc.c   static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va,
vmap_area        2045 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        2128 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        2149 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        2848 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        2927 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        3135 mm/vmalloc.c   static struct vmap_area *node_to_va(struct rb_node *n)
vmap_area        3137 mm/vmalloc.c   	return rb_entry_safe(n, struct vmap_area, rb_node);
vmap_area        3149 mm/vmalloc.c   static struct vmap_area *
vmap_area        3152 mm/vmalloc.c   	struct vmap_area *va, *tmp;
vmap_area        3159 mm/vmalloc.c   		tmp = rb_entry(n, struct vmap_area, rb_node);
vmap_area        3184 mm/vmalloc.c   pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align)
vmap_area        3231 mm/vmalloc.c   	struct vmap_area **vas, *va;
vmap_area        3477 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        3492 mm/vmalloc.c   	struct vmap_area *va;
vmap_area        3495 mm/vmalloc.c   	va = list_entry(p, struct vmap_area, list);
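The last three hits are the /proc/vmallocinfo seq_file iterator: the cursor handed between s_start()/s_next()/s_show() is a position in the address-sorted vmap_area_list, which s_show() converts back with list_entry(). A sketch of the shape (the printing body is elided):

	static int s_show(struct seq_file *m, void *p)
	{
		struct vmap_area *va;

		va = list_entry(p, struct vmap_area, list);
		/* ... seq_printf() the range, size and caller ... */
		return 0;
	}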