alloc             951 arch/alpha/kernel/pci_iommu.c 	.alloc			= alpha_pci_alloc_coherent,
alloc             173 arch/arc/kernel/unwind.c 			    void *(*alloc) (unsigned long));
alloc             258 arch/arc/kernel/unwind.c 			    void *(*alloc) (unsigned long))
alloc             317 arch/arc/kernel/unwind.c 	header = alloc(hdrSize);
alloc             454 arch/arm/common/dmabounce.c 	.alloc			= arm_dma_alloc,
alloc             185 arch/arm/mach-exynos/suspend.c 	.alloc		= exynos_pmu_domain_alloc,
alloc             224 arch/arm/mach-imx/gpc.c 	.alloc		= imx_gpc_domain_alloc,
alloc             538 arch/arm/mach-omap2/omap-wakeupgen.c 	.alloc		= wakeupgen_domain_alloc,
alloc             177 arch/arm/mm/dma-mapping-nommu.c 	.alloc			= arm_nommu_dma_alloc,
alloc              66 arch/arm/mm/dma-mapping.c 	void *(*alloc)(struct arm_dma_alloc_args *args,
alloc             183 arch/arm/mm/dma-mapping.c 	.alloc			= arm_dma_alloc,
alloc             210 arch/arm/mm/dma-mapping.c 	.alloc			= arm_coherent_dma_alloc,
alloc             626 arch/arm/mm/dma-mapping.c 	.alloc = simple_allocator_alloc,
alloc             646 arch/arm/mm/dma-mapping.c 	.alloc = cma_allocator_alloc,
alloc             662 arch/arm/mm/dma-mapping.c 	.alloc = pool_allocator_alloc,
alloc             683 arch/arm/mm/dma-mapping.c 	.alloc = remap_allocator_alloc,
alloc             749 arch/arm/mm/dma-mapping.c 	addr = buf->allocator->alloc(&args, &page);
alloc            2019 arch/arm/mm/dma-mapping.c 	.alloc		= arm_iommu_alloc_attrs,
alloc            2041 arch/arm/mm/dma-mapping.c 	.alloc		= arm_coherent_iommu_alloc_attrs,
alloc             741 arch/arm/mm/mmu.c 				void *(*alloc)(unsigned long sz))
alloc             744 arch/arm/mm/mmu.c 		pte_t *pte = alloc(PTE_HWTABLE_OFF + PTE_HWTABLE_SIZE);
alloc             760 arch/arm/mm/mmu.c 				  void *(*alloc)(unsigned long sz),
alloc             763 arch/arm/mm/mmu.c 	pte_t *pte = arm_pte_alloc(pmd, addr, type->prot_l1, alloc);
alloc             801 arch/arm/mm/mmu.c 				      void *(*alloc)(unsigned long sz), bool ng)
alloc             822 arch/arm/mm/mmu.c 				       __phys_to_pfn(phys), type, alloc, ng);
alloc             833 arch/arm/mm/mmu.c 				  void *(*alloc)(unsigned long sz), bool ng)
alloc             840 arch/arm/mm/mmu.c 		alloc_init_pmd(pud, addr, next, phys, type, alloc, ng);
alloc             908 arch/arm/mm/mmu.c 				    void *(*alloc)(unsigned long sz),
alloc             943 arch/arm/mm/mmu.c 		alloc_init_pud(pgd, addr, next, phys, type, alloc, ng);
alloc            2065 arch/ia64/hp/common/sba_iommu.c 	.alloc			= sba_alloc_coherent,
alloc             673 arch/mips/jazz/jazzdma.c 	.alloc			= jazz_dma_alloc,
alloc             392 arch/mips/pci/pci-xtalk-bridge.c 	.alloc      = bridge_domain_alloc,
alloc             171 arch/mips/sgi-ip27/ip27-irq.c 	.alloc = hub_domain_alloc,
alloc              62 arch/powerpc/include/asm/iommu.h 	__be64 *(*useraddrptr)(struct iommu_table *tbl, long index, bool alloc);
alloc             198 arch/powerpc/kernel/dma-iommu.c 	.alloc			= dma_iommu_alloc_coherent,
alloc              54 arch/powerpc/kernel/optprobes.c 	.alloc = __ppc_alloc_insn_page,
alloc             283 arch/powerpc/mm/init_64.c 			  altmap->free + altmap->alloc + altmap->align;
alloc              55 arch/powerpc/platforms/powernv/pci-ioda-tce.c static __be64 *pnv_tce(struct iommu_table *tbl, bool user, long idx, bool alloc)
alloc              69 arch/powerpc/platforms/powernv/pci-ioda-tce.c 			if (!alloc)
alloc             121 arch/powerpc/platforms/powernv/pci-ioda-tce.c 		bool alloc)
alloc             139 arch/powerpc/platforms/powernv/pci-ioda-tce.c 		ptce = pnv_tce(tbl, false, idx, alloc);
alloc             141 arch/powerpc/platforms/powernv/pci-ioda-tce.c 			return alloc ? H_HARDWARE : H_TOO_HARD;
alloc             154 arch/powerpc/platforms/powernv/pci-ioda-tce.c __be64 *pnv_tce_useraddrptr(struct iommu_table *tbl, long index, bool alloc)
alloc             159 arch/powerpc/platforms/powernv/pci-ioda-tce.c 	return pnv_tce(tbl, true, index - tbl->it_offset, alloc);
alloc             231 arch/powerpc/platforms/powernv/pci.h 		bool alloc);
alloc             233 arch/powerpc/platforms/powernv/pci.h 		bool alloc);
alloc             690 arch/powerpc/platforms/ps3/system-bus.c 	.alloc = ps3_alloc_coherent,
alloc             702 arch/powerpc/platforms/ps3/system-bus.c 	.alloc = ps3_alloc_coherent,
alloc             140 arch/powerpc/platforms/pseries/ibmebus.c 	.alloc              = ibmebus_alloc_coherent,
alloc             600 arch/powerpc/platforms/pseries/vio.c 	.alloc             = vio_dma_iommu_alloc_coherent,
alloc              68 arch/s390/include/asm/perf_event.h #define SFB_ALLOC_REG(hwc)	((hwc)->extra_reg.alloc)
alloc              51 arch/s390/kernel/kprobes.c 	.alloc = alloc_s390_insn_page,
alloc             392 arch/s390/mm/pgalloc.c 			  unsigned long end, int alloc)
alloc             396 arch/s390/mm/pgalloc.c 	if (!alloc)
alloc             408 arch/s390/mm/pgalloc.c 			     unsigned long end, int alloc)
alloc             418 arch/s390/mm/pgalloc.c 			if (!alloc)
alloc             426 arch/s390/mm/pgalloc.c 		rc = base_page_walk(table, addr, next, alloc);
alloc             429 arch/s390/mm/pgalloc.c 		if (!alloc)
alloc             437 arch/s390/mm/pgalloc.c 			     unsigned long end, int alloc)
alloc             447 arch/s390/mm/pgalloc.c 			if (!alloc)
alloc             455 arch/s390/mm/pgalloc.c 		rc = base_segment_walk(table, addr, next, alloc);
alloc             458 arch/s390/mm/pgalloc.c 		if (!alloc)
alloc             465 arch/s390/mm/pgalloc.c 			     unsigned long end, int alloc)
alloc             475 arch/s390/mm/pgalloc.c 			if (!alloc)
alloc             483 arch/s390/mm/pgalloc.c 		rc = base_region3_walk(table, addr, next, alloc);
alloc             486 arch/s390/mm/pgalloc.c 		if (!alloc)
alloc             493 arch/s390/mm/pgalloc.c 			     unsigned long end, int alloc)
alloc             503 arch/s390/mm/pgalloc.c 			if (!alloc)
alloc             511 arch/s390/mm/pgalloc.c 		rc = base_region2_walk(table, addr, next, alloc);
alloc             514 arch/s390/mm/pgalloc.c 		if (!alloc)
alloc             665 arch/s390/pci/pci_dma.c 	.alloc		= s390_dma_alloc,
alloc             757 arch/sparc/kernel/iommu.c 	.alloc			= dma_4u_alloc_coherent,
alloc              67 arch/sparc/kernel/mdesc.c 	struct mdesc_handle *(*alloc)(unsigned int mdesc_size);
alloc             196 arch/sparc/kernel/mdesc.c 	.alloc = mdesc_memblock_alloc,
alloc             232 arch/sparc/kernel/mdesc.c 	.alloc = mdesc_kmalloc,
alloc             239 arch/sparc/kernel/mdesc.c 	struct mdesc_handle *hp = mops->alloc(mdesc_size);
alloc             688 arch/sparc/kernel/pci_sun4v.c 	.alloc				= dma_4v_alloc_coherent,
alloc             279 arch/sparc/mm/io-unit.c 	.alloc			= iounit_alloc,
alloc             422 arch/sparc/mm/iommu.c 	.alloc			= sbus_iommu_alloc,
alloc             433 arch/sparc/mm/iommu.c 	.alloc			= sbus_iommu_alloc,
alloc            2654 arch/x86/events/intel/core.c 	if (reg->alloc && !cpuc->is_fake)
alloc            2687 arch/x86/events/intel/core.c 			reg->alloc = 1;
alloc            2728 arch/x86/events/intel/core.c 	if (!reg->alloc || cpuc->is_fake)
alloc            2737 arch/x86/events/intel/core.c 	reg->alloc = 0;
alloc              61 arch/x86/events/intel/uncore.c 	struct pci2phy_map *map, *alloc = NULL;
alloc              72 arch/x86/events/intel/uncore.c 	if (!alloc) {
alloc              74 arch/x86/events/intel/uncore.c 		alloc = kmalloc(sizeof(struct pci2phy_map), GFP_KERNEL);
alloc              77 arch/x86/events/intel/uncore.c 		if (!alloc)
alloc              83 arch/x86/events/intel/uncore.c 	map = alloc;
alloc              84 arch/x86/events/intel/uncore.c 	alloc = NULL;
alloc              91 arch/x86/events/intel/uncore.c 	kfree(alloc);
alloc             156 arch/x86/events/intel/uncore.c 	    (!uncore_box_is_fake(box) && reg1->alloc))
alloc             172 arch/x86/events/intel/uncore.c 			reg1->alloc = 1;
alloc             192 arch/x86/events/intel/uncore.c 	if (uncore_box_is_fake(box) || !reg1->alloc)
alloc             197 arch/x86/events/intel/uncore.c 	reg1->alloc = 0;
alloc             673 arch/x86/events/intel/uncore_nhmex.c 	int i, idx[2], alloc = 0;
alloc             680 arch/x86/events/intel/uncore_nhmex.c 		if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
alloc             689 arch/x86/events/intel/uncore_nhmex.c 		alloc |= (0x1 << i);
alloc             694 arch/x86/events/intel/uncore_nhmex.c 	    (uncore_box_is_fake(box) || !reg2->alloc) &&
alloc             707 arch/x86/events/intel/uncore_nhmex.c 		reg1->alloc |= alloc;
alloc             709 arch/x86/events/intel/uncore_nhmex.c 			reg2->alloc = 1;
alloc             713 arch/x86/events/intel/uncore_nhmex.c 	if (idx[0] != 0xff && !(alloc & 0x1) &&
alloc             731 arch/x86/events/intel/uncore_nhmex.c 	if (alloc & 0x1)
alloc             733 arch/x86/events/intel/uncore_nhmex.c 	if (alloc & 0x2)
alloc             746 arch/x86/events/intel/uncore_nhmex.c 	if (reg1->alloc & 0x1)
alloc             748 arch/x86/events/intel/uncore_nhmex.c 	if (reg1->alloc & 0x2)
alloc             750 arch/x86/events/intel/uncore_nhmex.c 	reg1->alloc = 0;
alloc             752 arch/x86/events/intel/uncore_nhmex.c 	if (reg2->alloc) {
alloc             754 arch/x86/events/intel/uncore_nhmex.c 		reg2->alloc = 0;
alloc             989 arch/x86/events/intel/uncore_nhmex.c 	if (!uncore_box_is_fake(box) && reg1->alloc)
alloc            1055 arch/x86/events/intel/uncore_nhmex.c 			reg1->alloc = 1;
alloc            1068 arch/x86/events/intel/uncore_nhmex.c 	if (uncore_box_is_fake(box) || !reg1->alloc)
alloc            1083 arch/x86/events/intel/uncore_nhmex.c 	reg1->alloc = 0;
alloc             851 arch/x86/events/intel/uncore_snbep.c 		if (reg1->alloc & (0x1 << i))
alloc             854 arch/x86/events/intel/uncore_snbep.c 	reg1->alloc = 0;
alloc             863 arch/x86/events/intel/uncore_snbep.c 	int i, alloc = 0;
alloc             874 arch/x86/events/intel/uncore_snbep.c 		if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
alloc             883 arch/x86/events/intel/uncore_snbep.c 			alloc |= (0x1 << i);
alloc             893 arch/x86/events/intel/uncore_snbep.c 		reg1->alloc |= alloc;
alloc             898 arch/x86/events/intel/uncore_snbep.c 		if (alloc & (0x1 << i))
alloc            1000 arch/x86/events/intel/uncore_snbep.c 	    (!uncore_box_is_fake(box) && reg1->alloc))
alloc            1026 arch/x86/events/intel/uncore_snbep.c 		reg1->alloc = 1;
alloc            1036 arch/x86/events/intel/uncore_snbep.c 	if (uncore_box_is_fake(box) || !reg1->alloc)
alloc            1040 arch/x86/events/intel/uncore_snbep.c 	reg1->alloc = 0;
alloc             678 arch/x86/kernel/amd_gart_64.c 	.alloc				= gart_alloc_coherent,
alloc            3075 arch/x86/kernel/apic/io_apic.c 	.alloc		= mp_irqdomain_alloc,
alloc             631 arch/x86/kernel/apic/vector.c 	.alloc		= x86_vector_alloc_irqs,
alloc             238 arch/x86/kernel/devicetree.c 	.alloc		= dt_irqdomain_alloc,
alloc             464 arch/x86/kernel/pci-calgary_64.c 	.alloc = calgary_alloc_coherent,
alloc             149 arch/x86/platform/uv/uv_irq.c 	.alloc		= uv_domain_alloc,
alloc              76 arch/xtensa/variants/csp/include/variant/tie-asm.h     .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             142 arch/xtensa/variants/csp/include/variant/tie-asm.h     .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc              77 arch/xtensa/variants/dc233c/include/variant/tie-asm.h     .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             141 arch/xtensa/variants/dc233c/include/variant/tie-asm.h     .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc              76 arch/xtensa/variants/de212/include/variant/tie-asm.h     .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             130 arch/xtensa/variants/de212/include/variant/tie-asm.h     .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc              76 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h     .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             142 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h     .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             201 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h     .macro	xchal_cp1_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             249 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h     .macro	xchal_cp1_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc              77 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h     .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             143 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h     .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             205 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h     .macro	xchal_cp1_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc             262 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h     .macro	xchal_cp1_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
alloc              74 crypto/algboss.c 		inst = tmpl->alloc(param->tb);
alloc             485 drivers/android/binder.c 	struct binder_alloc alloc;
alloc            2040 drivers/android/binder.c 	    binder_alloc_copy_from_buffer(&proc->alloc, object, buffer,
alloc            2111 drivers/android/binder.c 	if (binder_alloc_copy_from_buffer(&proc->alloc, &object_offset,
alloc            2195 drivers/android/binder.c 		if (binder_alloc_copy_from_buffer(&proc->alloc,
alloc            2289 drivers/android/binder.c 		if (!binder_alloc_copy_from_buffer(&proc->alloc, &object_offset,
alloc            2421 drivers/android/binder.c 						&proc->alloc, &fd, buffer,
alloc            2668 drivers/android/binder.c 		ret = binder_alloc_copy_from_buffer(&target_proc->alloc,
alloc            2726 drivers/android/binder.c 	if (binder_alloc_copy_to_buffer(&target_proc->alloc, b, buffer_offset,
alloc            3135 drivers/android/binder.c 	t->buffer = binder_alloc_new_buf(&target_proc->alloc, tr->data_size,
alloc            3157 drivers/android/binder.c 		err = binder_alloc_copy_to_buffer(&target_proc->alloc,
alloc            3173 drivers/android/binder.c 				&target_proc->alloc,
alloc            3186 drivers/android/binder.c 				&target_proc->alloc,
alloc            3230 drivers/android/binder.c 		if (binder_alloc_copy_from_buffer(&target_proc->alloc,
alloc            3265 drivers/android/binder.c 			    binder_alloc_copy_to_buffer(&target_proc->alloc,
alloc            3282 drivers/android/binder.c 			    binder_alloc_copy_to_buffer(&target_proc->alloc,
alloc            3302 drivers/android/binder.c 			    binder_alloc_copy_to_buffer(&target_proc->alloc,
alloc            3373 drivers/android/binder.c 						&target_proc->alloc,
alloc            3399 drivers/android/binder.c 			    binder_alloc_copy_to_buffer(&target_proc->alloc,
alloc            3494 drivers/android/binder.c 	binder_alloc_free_buf(&target_proc->alloc, t->buffer);
alloc            3594 drivers/android/binder.c 	binder_alloc_free_buf(&proc->alloc, buffer);
alloc            3766 drivers/android/binder.c 			buffer = binder_alloc_prepare_to_free(&proc->alloc,
alloc            4146 drivers/android/binder.c 		if (binder_alloc_copy_to_buffer(&proc->alloc, t->buffer,
alloc            4160 drivers/android/binder.c 			err = binder_alloc_copy_from_buffer(&proc->alloc, &fd,
alloc            4693 drivers/android/binder.c 	binder_alloc_deferred_release(&proc->alloc);
alloc            5010 drivers/android/binder.c 	binder_selftest_alloc(&proc->alloc);
alloc            5153 drivers/android/binder.c 	binder_alloc_vma_close(&proc->alloc);
alloc            5193 drivers/android/binder.c 	ret = binder_alloc_mmap_handler(&proc->alloc, vma);
alloc            5235 drivers/android/binder.c 	binder_alloc_init(&proc->alloc);
alloc            5760 drivers/android/binder.c 	binder_alloc_print_allocated(m, &proc->alloc);
alloc            5877 drivers/android/binder.c 		binder_alloc_get_free_async_space(&proc->alloc);
alloc            5916 drivers/android/binder.c 	count = binder_alloc_get_allocated_count(&proc->alloc);
alloc            5919 drivers/android/binder.c 	binder_alloc_print_pages(m, &proc->alloc);
alloc              60 drivers/android/binder_alloc.c static size_t binder_alloc_buffer_size(struct binder_alloc *alloc,
alloc              63 drivers/android/binder_alloc.c 	if (list_is_last(&buffer->entry, &alloc->buffers))
alloc              64 drivers/android/binder_alloc.c 		return alloc->buffer + alloc->buffer_size - buffer->user_data;
alloc              68 drivers/android/binder_alloc.c static void binder_insert_free_buffer(struct binder_alloc *alloc,
alloc              71 drivers/android/binder_alloc.c 	struct rb_node **p = &alloc->free_buffers.rb_node;
alloc              79 drivers/android/binder_alloc.c 	new_buffer_size = binder_alloc_buffer_size(alloc, new_buffer);
alloc              83 drivers/android/binder_alloc.c 		      alloc->pid, new_buffer_size, new_buffer);
alloc              90 drivers/android/binder_alloc.c 		buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc              98 drivers/android/binder_alloc.c 	rb_insert_color(&new_buffer->rb_node, &alloc->free_buffers);
alloc             102 drivers/android/binder_alloc.c 		struct binder_alloc *alloc, struct binder_buffer *new_buffer)
alloc             104 drivers/android/binder_alloc.c 	struct rb_node **p = &alloc->allocated_buffers.rb_node;
alloc             123 drivers/android/binder_alloc.c 	rb_insert_color(&new_buffer->rb_node, &alloc->allocated_buffers);
alloc             127 drivers/android/binder_alloc.c 		struct binder_alloc *alloc,
alloc             130 drivers/android/binder_alloc.c 	struct rb_node *n = alloc->allocated_buffers.rb_node;
alloc             170 drivers/android/binder_alloc.c struct binder_buffer *binder_alloc_prepare_to_free(struct binder_alloc *alloc,
alloc             175 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             176 drivers/android/binder_alloc.c 	buffer = binder_alloc_prepare_to_free_locked(alloc, user_ptr);
alloc             177 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             181 drivers/android/binder_alloc.c static int binder_update_page_range(struct binder_alloc *alloc, int allocate,
alloc             192 drivers/android/binder_alloc.c 		     "%d: %s pages %pK-%pK\n", alloc->pid,
alloc             198 drivers/android/binder_alloc.c 	trace_binder_update_page_range(alloc, allocate, start, end);
alloc             204 drivers/android/binder_alloc.c 		page = &alloc->pages[(page_addr - alloc->buffer) / PAGE_SIZE];
alloc             211 drivers/android/binder_alloc.c 	if (need_mm && mmget_not_zero(alloc->vma_vm_mm))
alloc             212 drivers/android/binder_alloc.c 		mm = alloc->vma_vm_mm;
alloc             216 drivers/android/binder_alloc.c 		vma = alloc->vma;
alloc             222 drivers/android/binder_alloc.c 				   alloc->pid);
alloc             231 drivers/android/binder_alloc.c 		index = (page_addr - alloc->buffer) / PAGE_SIZE;
alloc             232 drivers/android/binder_alloc.c 		page = &alloc->pages[index];
alloc             235 drivers/android/binder_alloc.c 			trace_binder_alloc_lru_start(alloc, index);
alloc             240 drivers/android/binder_alloc.c 			trace_binder_alloc_lru_end(alloc, index);
alloc             247 drivers/android/binder_alloc.c 		trace_binder_alloc_page_start(alloc, index);
alloc             253 drivers/android/binder_alloc.c 				alloc->pid, page_addr);
alloc             256 drivers/android/binder_alloc.c 		page->alloc = alloc;
alloc             263 drivers/android/binder_alloc.c 			       alloc->pid, user_page_addr);
alloc             267 drivers/android/binder_alloc.c 		if (index + 1 > alloc->pages_high)
alloc             268 drivers/android/binder_alloc.c 			alloc->pages_high = index + 1;
alloc             270 drivers/android/binder_alloc.c 		trace_binder_alloc_page_end(alloc, index);
alloc             284 drivers/android/binder_alloc.c 		index = (page_addr - alloc->buffer) / PAGE_SIZE;
alloc             285 drivers/android/binder_alloc.c 		page = &alloc->pages[index];
alloc             287 drivers/android/binder_alloc.c 		trace_binder_free_lru_start(alloc, index);
alloc             292 drivers/android/binder_alloc.c 		trace_binder_free_lru_end(alloc, index);
alloc             314 drivers/android/binder_alloc.c static inline void binder_alloc_set_vma(struct binder_alloc *alloc,
alloc             318 drivers/android/binder_alloc.c 		alloc->vma_vm_mm = vma->vm_mm;
alloc             326 drivers/android/binder_alloc.c 	alloc->vma = vma;
alloc             330 drivers/android/binder_alloc.c 		struct binder_alloc *alloc)
alloc             334 drivers/android/binder_alloc.c 	if (alloc->vma) {
alloc             337 drivers/android/binder_alloc.c 		vma = alloc->vma;
alloc             343 drivers/android/binder_alloc.c 				struct binder_alloc *alloc,
alloc             349 drivers/android/binder_alloc.c 	struct rb_node *n = alloc->free_buffers.rb_node;
alloc             358 drivers/android/binder_alloc.c 	if (!binder_alloc_get_vma(alloc)) {
alloc             361 drivers/android/binder_alloc.c 				   alloc->pid);
alloc             371 drivers/android/binder_alloc.c 				alloc->pid, data_size, offsets_size);
alloc             378 drivers/android/binder_alloc.c 				alloc->pid, extra_buffers_size);
alloc             382 drivers/android/binder_alloc.c 	    alloc->free_async_space < size + sizeof(struct binder_buffer)) {
alloc             385 drivers/android/binder_alloc.c 			      alloc->pid, size);
alloc             395 drivers/android/binder_alloc.c 		buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc             415 drivers/android/binder_alloc.c 		for (n = rb_first(&alloc->allocated_buffers); n != NULL;
alloc             418 drivers/android/binder_alloc.c 			buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc             424 drivers/android/binder_alloc.c 		for (n = rb_first(&alloc->free_buffers); n != NULL;
alloc             427 drivers/android/binder_alloc.c 			buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc             435 drivers/android/binder_alloc.c 				   alloc->pid, size);
alloc             445 drivers/android/binder_alloc.c 		buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc             450 drivers/android/binder_alloc.c 		      alloc->pid, size, buffer, buffer_size);
alloc             459 drivers/android/binder_alloc.c 	ret = binder_update_page_range(alloc, 1, (void __user *)
alloc             470 drivers/android/binder_alloc.c 			       __func__, alloc->pid);
alloc             476 drivers/android/binder_alloc.c 		binder_insert_free_buffer(alloc, new_buffer);
alloc             479 drivers/android/binder_alloc.c 	rb_erase(best_fit, &alloc->free_buffers);
alloc             482 drivers/android/binder_alloc.c 	binder_insert_allocated_buffer_locked(alloc, buffer);
alloc             485 drivers/android/binder_alloc.c 		      alloc->pid, size, buffer);
alloc             491 drivers/android/binder_alloc.c 		alloc->free_async_space -= size + sizeof(struct binder_buffer);
alloc             494 drivers/android/binder_alloc.c 			      alloc->pid, size, alloc->free_async_space);
alloc             499 drivers/android/binder_alloc.c 	binder_update_page_range(alloc, 0, (void __user *)
alloc             520 drivers/android/binder_alloc.c struct binder_buffer *binder_alloc_new_buf(struct binder_alloc *alloc,
alloc             528 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             529 drivers/android/binder_alloc.c 	buffer = binder_alloc_new_buf_locked(alloc, data_size, offsets_size,
alloc             531 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             546 drivers/android/binder_alloc.c static void binder_delete_free_buffer(struct binder_alloc *alloc,
alloc             551 drivers/android/binder_alloc.c 	BUG_ON(alloc->buffers.next == &buffer->entry);
alloc             558 drivers/android/binder_alloc.c 				   alloc->pid, buffer->user_data,
alloc             562 drivers/android/binder_alloc.c 	if (!list_is_last(&buffer->entry, &alloc->buffers)) {
alloc             568 drivers/android/binder_alloc.c 					   alloc->pid,
alloc             577 drivers/android/binder_alloc.c 				   alloc->pid, buffer->user_data);
alloc             584 drivers/android/binder_alloc.c 				   alloc->pid, buffer->user_data,
alloc             587 drivers/android/binder_alloc.c 		binder_update_page_range(alloc, 0, buffer_start_page(buffer),
alloc             594 drivers/android/binder_alloc.c static void binder_free_buf_locked(struct binder_alloc *alloc,
alloc             599 drivers/android/binder_alloc.c 	buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc             607 drivers/android/binder_alloc.c 		      alloc->pid, buffer, size, buffer_size);
alloc             612 drivers/android/binder_alloc.c 	BUG_ON(buffer->user_data < alloc->buffer);
alloc             613 drivers/android/binder_alloc.c 	BUG_ON(buffer->user_data > alloc->buffer + alloc->buffer_size);
alloc             616 drivers/android/binder_alloc.c 		alloc->free_async_space += size + sizeof(struct binder_buffer);
alloc             620 drivers/android/binder_alloc.c 			      alloc->pid, size, alloc->free_async_space);
alloc             623 drivers/android/binder_alloc.c 	binder_update_page_range(alloc, 0,
alloc             628 drivers/android/binder_alloc.c 	rb_erase(&buffer->rb_node, &alloc->allocated_buffers);
alloc             630 drivers/android/binder_alloc.c 	if (!list_is_last(&buffer->entry, &alloc->buffers)) {
alloc             634 drivers/android/binder_alloc.c 			rb_erase(&next->rb_node, &alloc->free_buffers);
alloc             635 drivers/android/binder_alloc.c 			binder_delete_free_buffer(alloc, next);
alloc             638 drivers/android/binder_alloc.c 	if (alloc->buffers.next != &buffer->entry) {
alloc             642 drivers/android/binder_alloc.c 			binder_delete_free_buffer(alloc, buffer);
alloc             643 drivers/android/binder_alloc.c 			rb_erase(&prev->rb_node, &alloc->free_buffers);
alloc             647 drivers/android/binder_alloc.c 	binder_insert_free_buffer(alloc, buffer);
alloc             657 drivers/android/binder_alloc.c void binder_alloc_free_buf(struct binder_alloc *alloc,
alloc             660 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             661 drivers/android/binder_alloc.c 	binder_free_buf_locked(alloc, buffer);
alloc             662 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             678 drivers/android/binder_alloc.c int binder_alloc_mmap_handler(struct binder_alloc *alloc,
alloc             686 drivers/android/binder_alloc.c 	if (alloc->buffer_size) {
alloc             691 drivers/android/binder_alloc.c 	alloc->buffer_size = min_t(unsigned long, vma->vm_end - vma->vm_start,
alloc             695 drivers/android/binder_alloc.c 	alloc->buffer = (void __user *)vma->vm_start;
alloc             697 drivers/android/binder_alloc.c 	alloc->pages = kcalloc(alloc->buffer_size / PAGE_SIZE,
alloc             698 drivers/android/binder_alloc.c 			       sizeof(alloc->pages[0]),
alloc             700 drivers/android/binder_alloc.c 	if (alloc->pages == NULL) {
alloc             713 drivers/android/binder_alloc.c 	buffer->user_data = alloc->buffer;
alloc             714 drivers/android/binder_alloc.c 	list_add(&buffer->entry, &alloc->buffers);
alloc             716 drivers/android/binder_alloc.c 	binder_insert_free_buffer(alloc, buffer);
alloc             717 drivers/android/binder_alloc.c 	alloc->free_async_space = alloc->buffer_size / 2;
alloc             718 drivers/android/binder_alloc.c 	binder_alloc_set_vma(alloc, vma);
alloc             719 drivers/android/binder_alloc.c 	mmgrab(alloc->vma_vm_mm);
alloc             724 drivers/android/binder_alloc.c 	kfree(alloc->pages);
alloc             725 drivers/android/binder_alloc.c 	alloc->pages = NULL;
alloc             727 drivers/android/binder_alloc.c 	alloc->buffer = NULL;
alloc             729 drivers/android/binder_alloc.c 	alloc->buffer_size = 0;
alloc             734 drivers/android/binder_alloc.c 			   alloc->pid, vma->vm_start, vma->vm_end,
alloc             740 drivers/android/binder_alloc.c void binder_alloc_deferred_release(struct binder_alloc *alloc)
alloc             747 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             748 drivers/android/binder_alloc.c 	BUG_ON(alloc->vma);
alloc             750 drivers/android/binder_alloc.c 	while ((n = rb_first(&alloc->allocated_buffers))) {
alloc             756 drivers/android/binder_alloc.c 		binder_free_buf_locked(alloc, buffer);
alloc             760 drivers/android/binder_alloc.c 	while (!list_empty(&alloc->buffers)) {
alloc             761 drivers/android/binder_alloc.c 		buffer = list_first_entry(&alloc->buffers,
alloc             766 drivers/android/binder_alloc.c 		WARN_ON_ONCE(!list_empty(&alloc->buffers));
alloc             771 drivers/android/binder_alloc.c 	if (alloc->pages) {
alloc             774 drivers/android/binder_alloc.c 		for (i = 0; i < alloc->buffer_size / PAGE_SIZE; i++) {
alloc             778 drivers/android/binder_alloc.c 			if (!alloc->pages[i].page_ptr)
alloc             782 drivers/android/binder_alloc.c 					      &alloc->pages[i].lru);
alloc             783 drivers/android/binder_alloc.c 			page_addr = alloc->buffer + i * PAGE_SIZE;
alloc             786 drivers/android/binder_alloc.c 				     __func__, alloc->pid, i, page_addr,
alloc             788 drivers/android/binder_alloc.c 			__free_page(alloc->pages[i].page_ptr);
alloc             791 drivers/android/binder_alloc.c 		kfree(alloc->pages);
alloc             793 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             794 drivers/android/binder_alloc.c 	if (alloc->vma_vm_mm)
alloc             795 drivers/android/binder_alloc.c 		mmdrop(alloc->vma_vm_mm);
alloc             799 drivers/android/binder_alloc.c 		     __func__, alloc->pid, buffers, page_count);
alloc             821 drivers/android/binder_alloc.c 				  struct binder_alloc *alloc)
alloc             825 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             826 drivers/android/binder_alloc.c 	for (n = rb_first(&alloc->allocated_buffers); n != NULL; n = rb_next(n))
alloc             829 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             838 drivers/android/binder_alloc.c 			      struct binder_alloc *alloc)
alloc             846 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             851 drivers/android/binder_alloc.c 	if (binder_alloc_get_vma(alloc) != NULL) {
alloc             852 drivers/android/binder_alloc.c 		for (i = 0; i < alloc->buffer_size / PAGE_SIZE; i++) {
alloc             853 drivers/android/binder_alloc.c 			page = &alloc->pages[i];
alloc             862 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             864 drivers/android/binder_alloc.c 	seq_printf(m, "  pages high watermark: %zu\n", alloc->pages_high);
alloc             873 drivers/android/binder_alloc.c int binder_alloc_get_allocated_count(struct binder_alloc *alloc)
alloc             878 drivers/android/binder_alloc.c 	mutex_lock(&alloc->mutex);
alloc             879 drivers/android/binder_alloc.c 	for (n = rb_first(&alloc->allocated_buffers); n != NULL; n = rb_next(n))
alloc             881 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             894 drivers/android/binder_alloc.c void binder_alloc_vma_close(struct binder_alloc *alloc)
alloc             896 drivers/android/binder_alloc.c 	binder_alloc_set_vma(alloc, NULL);
alloc             918 drivers/android/binder_alloc.c 	struct binder_alloc *alloc;
alloc             923 drivers/android/binder_alloc.c 	alloc = page->alloc;
alloc             924 drivers/android/binder_alloc.c 	if (!mutex_trylock(&alloc->mutex))
alloc             930 drivers/android/binder_alloc.c 	index = page - alloc->pages;
alloc             931 drivers/android/binder_alloc.c 	page_addr = (uintptr_t)alloc->buffer + index * PAGE_SIZE;
alloc             933 drivers/android/binder_alloc.c 	mm = alloc->vma_vm_mm;
alloc             938 drivers/android/binder_alloc.c 	vma = binder_alloc_get_vma(alloc);
alloc             944 drivers/android/binder_alloc.c 		trace_binder_unmap_user_start(alloc, index);
alloc             948 drivers/android/binder_alloc.c 		trace_binder_unmap_user_end(alloc, index);
alloc             953 drivers/android/binder_alloc.c 	trace_binder_unmap_kernel_start(alloc, index);
alloc             958 drivers/android/binder_alloc.c 	trace_binder_unmap_kernel_end(alloc, index);
alloc             961 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc             968 drivers/android/binder_alloc.c 	mutex_unlock(&alloc->mutex);
alloc            1003 drivers/android/binder_alloc.c void binder_alloc_init(struct binder_alloc *alloc)
alloc            1005 drivers/android/binder_alloc.c 	alloc->pid = current->group_leader->pid;
alloc            1006 drivers/android/binder_alloc.c 	mutex_init(&alloc->mutex);
alloc            1007 drivers/android/binder_alloc.c 	INIT_LIST_HEAD(&alloc->buffers);
alloc            1041 drivers/android/binder_alloc.c static inline bool check_buffer(struct binder_alloc *alloc,
alloc            1045 drivers/android/binder_alloc.c 	size_t buffer_size = binder_alloc_buffer_size(alloc, buffer);
alloc            1073 drivers/android/binder_alloc.c static struct page *binder_alloc_get_page(struct binder_alloc *alloc,
alloc            1079 drivers/android/binder_alloc.c 		(buffer->user_data - alloc->buffer);
alloc            1084 drivers/android/binder_alloc.c 	lru_page = &alloc->pages[index];
alloc            1102 drivers/android/binder_alloc.c binder_alloc_copy_user_to_buffer(struct binder_alloc *alloc,
alloc            1108 drivers/android/binder_alloc.c 	if (!check_buffer(alloc, buffer, buffer_offset, bytes))
alloc            1118 drivers/android/binder_alloc.c 		page = binder_alloc_get_page(alloc, buffer,
alloc            1133 drivers/android/binder_alloc.c static int binder_alloc_do_buffer_copy(struct binder_alloc *alloc,
alloc            1141 drivers/android/binder_alloc.c 	if (!check_buffer(alloc, buffer, buffer_offset, bytes))
alloc            1151 drivers/android/binder_alloc.c 		page = binder_alloc_get_page(alloc, buffer,
alloc            1173 drivers/android/binder_alloc.c int binder_alloc_copy_to_buffer(struct binder_alloc *alloc,
alloc            1179 drivers/android/binder_alloc.c 	return binder_alloc_do_buffer_copy(alloc, true, buffer, buffer_offset,
alloc            1183 drivers/android/binder_alloc.c int binder_alloc_copy_from_buffer(struct binder_alloc *alloc,
alloc            1189 drivers/android/binder_alloc.c 	return binder_alloc_do_buffer_copy(alloc, false, buffer, buffer_offset,
alloc              65 drivers/android/binder_alloc.h 	struct binder_alloc *alloc;
alloc             109 drivers/android/binder_alloc.h void binder_selftest_alloc(struct binder_alloc *alloc);
alloc             111 drivers/android/binder_alloc.h static inline void binder_selftest_alloc(struct binder_alloc *alloc) {}
alloc             116 drivers/android/binder_alloc.h extern struct binder_buffer *binder_alloc_new_buf(struct binder_alloc *alloc,
alloc             121 drivers/android/binder_alloc.h extern void binder_alloc_init(struct binder_alloc *alloc);
alloc             123 drivers/android/binder_alloc.h extern void binder_alloc_vma_close(struct binder_alloc *alloc);
alloc             125 drivers/android/binder_alloc.h binder_alloc_prepare_to_free(struct binder_alloc *alloc,
alloc             127 drivers/android/binder_alloc.h extern void binder_alloc_free_buf(struct binder_alloc *alloc,
alloc             129 drivers/android/binder_alloc.h extern int binder_alloc_mmap_handler(struct binder_alloc *alloc,
alloc             131 drivers/android/binder_alloc.h extern void binder_alloc_deferred_release(struct binder_alloc *alloc);
alloc             132 drivers/android/binder_alloc.h extern int binder_alloc_get_allocated_count(struct binder_alloc *alloc);
alloc             134 drivers/android/binder_alloc.h 					 struct binder_alloc *alloc);
alloc             136 drivers/android/binder_alloc.h 			      struct binder_alloc *alloc);
alloc             145 drivers/android/binder_alloc.h binder_alloc_get_free_async_space(struct binder_alloc *alloc)
alloc             149 drivers/android/binder_alloc.h 	mutex_lock(&alloc->mutex);
alloc             150 drivers/android/binder_alloc.h 	free_async_space = alloc->free_async_space;
alloc             151 drivers/android/binder_alloc.h 	mutex_unlock(&alloc->mutex);
alloc             156 drivers/android/binder_alloc.h binder_alloc_copy_user_to_buffer(struct binder_alloc *alloc,
alloc             162 drivers/android/binder_alloc.h int binder_alloc_copy_to_buffer(struct binder_alloc *alloc,
alloc             168 drivers/android/binder_alloc.h int binder_alloc_copy_from_buffer(struct binder_alloc *alloc,
alloc              92 drivers/android/binder_alloc_selftest.c static bool check_buffer_pages_allocated(struct binder_alloc *alloc,
alloc             103 drivers/android/binder_alloc_selftest.c 		page_index = (page_addr - alloc->buffer) / PAGE_SIZE;
alloc             104 drivers/android/binder_alloc_selftest.c 		if (!alloc->pages[page_index].page_ptr ||
alloc             105 drivers/android/binder_alloc_selftest.c 		    !list_empty(&alloc->pages[page_index].lru)) {
alloc             107 drivers/android/binder_alloc_selftest.c 			       alloc->pages[page_index].page_ptr ?
alloc             115 drivers/android/binder_alloc_selftest.c static void binder_selftest_alloc_buf(struct binder_alloc *alloc,
alloc             122 drivers/android/binder_alloc_selftest.c 		buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0);
alloc             124 drivers/android/binder_alloc_selftest.c 		    !check_buffer_pages_allocated(alloc, buffers[i],
alloc             132 drivers/android/binder_alloc_selftest.c static void binder_selftest_free_buf(struct binder_alloc *alloc,
alloc             139 drivers/android/binder_alloc_selftest.c 		binder_alloc_free_buf(alloc, buffers[seq[i]]);
alloc             147 drivers/android/binder_alloc_selftest.c 		if (list_empty(&alloc->pages[i].lru)) {
alloc             150 drivers/android/binder_alloc_selftest.c 			       alloc->pages[i].page_ptr ? "alloc" : "free", i);
alloc             156 drivers/android/binder_alloc_selftest.c static void binder_selftest_free_page(struct binder_alloc *alloc)
alloc             166 drivers/android/binder_alloc_selftest.c 	for (i = 0; i < (alloc->buffer_size / PAGE_SIZE); i++) {
alloc             167 drivers/android/binder_alloc_selftest.c 		if (alloc->pages[i].page_ptr) {
alloc             169 drivers/android/binder_alloc_selftest.c 			       list_empty(&alloc->pages[i].lru) ?
alloc             176 drivers/android/binder_alloc_selftest.c static void binder_selftest_alloc_free(struct binder_alloc *alloc,
alloc             181 drivers/android/binder_alloc_selftest.c 	binder_selftest_alloc_buf(alloc, buffers, sizes, seq);
alloc             182 drivers/android/binder_alloc_selftest.c 	binder_selftest_free_buf(alloc, buffers, sizes, seq, end);
alloc             185 drivers/android/binder_alloc_selftest.c 	binder_selftest_alloc_buf(alloc, buffers, sizes, seq);
alloc             189 drivers/android/binder_alloc_selftest.c 	binder_selftest_free_buf(alloc, buffers, sizes, seq, end);
alloc             190 drivers/android/binder_alloc_selftest.c 	binder_selftest_free_page(alloc);
alloc             205 drivers/android/binder_alloc_selftest.c static void binder_selftest_free_seq(struct binder_alloc *alloc,
alloc             212 drivers/android/binder_alloc_selftest.c 		binder_selftest_alloc_free(alloc, sizes, seq, end);
alloc             219 drivers/android/binder_alloc_selftest.c 		binder_selftest_free_seq(alloc, sizes, seq, index + 1, end);
alloc             223 drivers/android/binder_alloc_selftest.c static void binder_selftest_alloc_size(struct binder_alloc *alloc,
alloc             243 drivers/android/binder_alloc_selftest.c 	back_sizes[0] += alloc->buffer_size - end_offset[BUFFER_NUM - 1];
alloc             244 drivers/android/binder_alloc_selftest.c 	binder_selftest_free_seq(alloc, front_sizes, seq, 0,
alloc             246 drivers/android/binder_alloc_selftest.c 	binder_selftest_free_seq(alloc, back_sizes, seq, 0, alloc->buffer_size);
alloc             249 drivers/android/binder_alloc_selftest.c static void binder_selftest_alloc_offset(struct binder_alloc *alloc,
alloc             256 drivers/android/binder_alloc_selftest.c 		binder_selftest_alloc_size(alloc, end_offset);
alloc             270 drivers/android/binder_alloc_selftest.c 		binder_selftest_alloc_offset(alloc, end_offset, index + 1);
alloc             283 drivers/android/binder_alloc_selftest.c void binder_selftest_alloc(struct binder_alloc *alloc)
alloc             290 drivers/android/binder_alloc_selftest.c 	if (!binder_selftest_run || !alloc->vma)
alloc             293 drivers/android/binder_alloc_selftest.c 	binder_selftest_alloc_offset(alloc, end_offset, 0);
alloc             286 drivers/android/binder_trace.h 	TP_PROTO(struct binder_alloc *alloc, bool allocate,
alloc             288 drivers/android/binder_trace.h 	TP_ARGS(alloc, allocate, start, end),
alloc             296 drivers/android/binder_trace.h 		__entry->proc = alloc->pid;
alloc             298 drivers/android/binder_trace.h 		__entry->offset = start - alloc->buffer;
alloc             307 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             308 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index),
alloc             314 drivers/android/binder_trace.h 		__entry->proc = alloc->pid;
alloc             322 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             323 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             326 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             327 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             330 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             331 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             334 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             335 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             338 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             339 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             342 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             343 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             346 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             347 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             350 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             351 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             354 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             355 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc             358 drivers/android/binder_trace.h 	TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
alloc             359 drivers/android/binder_trace.h 	TP_ARGS(alloc, page_index));
alloc              57 drivers/base/component.c 	size_t alloc;
alloc             319 drivers/base/component.c 	if (match->alloc == num)
alloc             332 drivers/base/component.c 	match->alloc = num;
alloc             362 drivers/base/component.c 	if (match->num == match->alloc) {
alloc             363 drivers/base/component.c 		size_t new_size = match->alloc + 16;
alloc             155 drivers/char/agp/compat_ioctl.c 	struct agp_allocate32 alloc;
alloc             158 drivers/char/agp/compat_ioctl.c 	if (copy_from_user(&alloc, arg, sizeof(alloc)))
alloc             161 drivers/char/agp/compat_ioctl.c 	memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type);
alloc             166 drivers/char/agp/compat_ioctl.c 	alloc.key = memory->key;
alloc             167 drivers/char/agp/compat_ioctl.c 	alloc.physical = memory->physical;
alloc             169 drivers/char/agp/compat_ioctl.c 	if (copy_to_user(arg, &alloc, sizeof(alloc))) {
alloc             877 drivers/char/agp/frontend.c 	struct agp_allocate alloc;
alloc             880 drivers/char/agp/frontend.c 	if (copy_from_user(&alloc, arg, sizeof(struct agp_allocate)))
alloc             883 drivers/char/agp/frontend.c 	if (alloc.type >= AGP_USER_TYPES)
alloc             886 drivers/char/agp/frontend.c 	memory = agp_allocate_memory_wrap(alloc.pg_count, alloc.type);
alloc             891 drivers/char/agp/frontend.c 	alloc.key = memory->key;
alloc             892 drivers/char/agp/frontend.c 	alloc.physical = memory->physical;
alloc             894 drivers/char/agp/frontend.c 	if (copy_to_user(arg, &alloc, sizeof(struct agp_allocate))) {
alloc             349 drivers/char/tpm/tpm2-space.c static u32 tpm2_map_to_vhandle(struct tpm_space *space, u32 phandle, bool alloc)
alloc             354 drivers/char/tpm/tpm2-space.c 		if (alloc) {
alloc             318 drivers/cpufreq/cpufreq_conservative.c 	.alloc = cs_alloc,
alloc             356 drivers/cpufreq/cpufreq_governor.c 	policy_dbs = gov->alloc();
alloc             137 drivers/cpufreq/cpufreq_governor.h 	struct policy_dbs_info *(*alloc)(void);
alloc             404 drivers/cpufreq/cpufreq_ondemand.c 	.alloc = od_alloc,
alloc             519 drivers/crypto/hisilicon/sec/sec_drv.c static void sec_queue_ar_alloc(struct sec_queue *queue, u32 alloc)
alloc             525 drivers/crypto/hisilicon/sec/sec_drv.c 	if (alloc == SEC_QUEUE_AR_FROCE_ALLOC) {
alloc             536 drivers/crypto/hisilicon/sec/sec_drv.c static void sec_queue_aw_alloc(struct sec_queue *queue, u32 alloc)
alloc             542 drivers/crypto/hisilicon/sec/sec_drv.c 	if (alloc == SEC_QUEUE_AW_FROCE_ALLOC) {
alloc             634 drivers/dma/dw-edma/dw-edma-core.c 	u32 alloc, off_alloc;
alloc             652 drivers/dma/dw-edma/dw-edma-core.c 		alloc = wr_alloc;
alloc             658 drivers/dma/dw-edma/dw-edma-core.c 		alloc = rd_alloc;
alloc             663 drivers/dma/dw-edma/dw-edma-core.c 	for (j = 0; (alloc || dw->nr_irqs == 1) && j < cnt; j++, i++) {
alloc             691 drivers/dma/dw-edma/dw-edma-core.c 			pos = off_alloc + (j % alloc);
alloc             753 drivers/dma/dw-edma/dw-edma-core.c static inline void dw_edma_dec_irq_alloc(int *nr_irqs, u32 *alloc, u16 cnt)
alloc             755 drivers/dma/dw-edma/dw-edma-core.c 	if (*nr_irqs && *alloc < cnt) {
alloc             756 drivers/dma/dw-edma/dw-edma-core.c 		(*alloc)++;
alloc             761 drivers/dma/dw-edma/dw-edma-core.c static inline void dw_edma_add_irq_mask(u32 *mask, u32 alloc, u16 cnt)
alloc             763 drivers/dma/dw-edma/dw-edma-core.c 	while (*mask * alloc < cnt)
alloc             199 drivers/gpio/gpio-lpc18xx.c 	.alloc	= lpc18xx_gpio_pin_ic_domain_alloc,
alloc             438 drivers/gpio/gpio-thunderx.c 	.alloc		= thunderx_gpio_irq_alloc,
alloc             317 drivers/gpio/gpio-uniphier.c 	.alloc = uniphier_gpio_irq_domain_alloc,
alloc             211 drivers/gpio/gpio-xgene-sb.c 	.alloc          = xgene_gpio_sb_domain_alloc,
alloc            1947 drivers/gpio/gpiolib.c 	ops->alloc = gpiochip_hierarchy_irq_domain_alloc;
alloc            1183 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct _SET_VOLTAGE_PS_ALLOCATION alloc;
alloc             421 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		args->out.alloc.ctx_id = id;
alloc             957 drivers/gpu/drm/drm_atomic.c 		int alloc = max(index + 1, config->num_connector);
alloc             959 drivers/gpu/drm/drm_atomic.c 		c = krealloc(state->connectors, alloc * sizeof(*state->connectors), GFP_KERNEL);
alloc             965 drivers/gpu/drm/drm_atomic.c 		       sizeof(*state->connectors) * (alloc - state->num_connector));
alloc             967 drivers/gpu/drm/drm_atomic.c 		state->num_connector = alloc;
alloc              57 drivers/gpu/drm/i915/gt/intel_context.c 			err = ce->ops->alloc(ce);
alloc              26 drivers/gpu/drm/i915/gt/intel_context_types.h 	int (*alloc)(struct intel_context *ce);
alloc            1858 drivers/gpu/drm/i915/gt/intel_lrc.c 	.alloc = execlists_context_alloc,
alloc            1518 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	.alloc = ring_context_alloc,
alloc             155 drivers/gpu/drm/i915/gt/mock_engine.c 	.alloc = mock_context_alloc,
alloc            1042 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct i915_page_table *alloc = NULL;
alloc            1065 drivers/gpu/drm/i915/i915_gem_gtt.c 			pt = fetch_and_zero(&alloc);
alloc            1094 drivers/gpu/drm/i915/i915_gem_gtt.c 				alloc = pt, pt = pd->entry[idx];
alloc            1128 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (alloc)
alloc            1129 drivers/gpu/drm/i915/i915_gem_gtt.c 		free_px(vm, alloc);
alloc            1690 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct i915_page_table *pt, *alloc = NULL;
alloc            1706 drivers/gpu/drm/i915/i915_gem_gtt.c 			pt = fetch_and_zero(&alloc);
alloc            1725 drivers/gpu/drm/i915/i915_gem_gtt.c 				alloc = pt;
alloc            1744 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (alloc)
alloc            1745 drivers/gpu/drm/i915/i915_gem_gtt.c 		free_px(vm, alloc);
alloc            3863 drivers/gpu/drm/i915/intel_pm.c 				   struct skl_ddb_entry *alloc, /* out */
alloc            3876 drivers/gpu/drm/i915/intel_pm.c 		alloc->start = 0;
alloc            3877 drivers/gpu/drm/i915/intel_pm.c 		alloc->end = 0;
alloc            3903 drivers/gpu/drm/i915/intel_pm.c 		*alloc = to_intel_crtc_state(for_crtc->state)->wm.skl.ddb;
alloc            3930 drivers/gpu/drm/i915/intel_pm.c 	alloc->start = ddb_size * width_before_pipe / total_width;
alloc            3931 drivers/gpu/drm/i915/intel_pm.c 	alloc->end = ddb_size * (width_before_pipe + pipe_width) / total_width;
alloc            4341 drivers/gpu/drm/i915/intel_pm.c 	struct skl_ddb_entry *alloc = &crtc_state->wm.skl.ddb;
alloc            4361 drivers/gpu/drm/i915/intel_pm.c 		alloc->start = alloc->end = 0;
alloc            4377 drivers/gpu/drm/i915/intel_pm.c 					   ddb, alloc, &num_active);
alloc            4378 drivers/gpu/drm/i915/intel_pm.c 	alloc_size = skl_ddb_entry_size(alloc);
alloc            4386 drivers/gpu/drm/i915/intel_pm.c 		alloc->end - total[PLANE_CURSOR];
alloc            4387 drivers/gpu/drm/i915/intel_pm.c 	crtc_state->wm.skl.plane_ddb_y[PLANE_CURSOR].end = alloc->end;
alloc            4471 drivers/gpu/drm/i915/intel_pm.c 	start = alloc->start;
alloc             270 drivers/gpu/drm/i915/intel_runtime_pm.c 		unsigned long alloc = dbg.count;
alloc             275 drivers/gpu/drm/i915/intel_runtime_pm.c 		if (dbg.count <= alloc) {
alloc             283 drivers/gpu/drm/i915/intel_runtime_pm.c 		if (dbg.count <= alloc)
alloc            3101 drivers/gpu/drm/radeon/radeon_atombios.c 	struct _SET_VOLTAGE_PS_ALLOCATION alloc;
alloc            1127 drivers/gpu/drm/tegra/drm.c 	struct iova *alloc;
alloc            1161 drivers/gpu/drm/tegra/drm.c 	alloc = alloc_iova(&tegra->carveout.domain,
alloc            1164 drivers/gpu/drm/tegra/drm.c 	if (!alloc) {
alloc            1169 drivers/gpu/drm/tegra/drm.c 	*dma = iova_dma_addr(&tegra->carveout.domain, alloc);
alloc            1178 drivers/gpu/drm/tegra/drm.c 	__free_iova(&tegra->carveout.domain, alloc);
alloc             139 drivers/gpu/drm/tegra/falcon.c 	falcon->firmware.vaddr = falcon->ops->alloc(falcon, firmware->size,
alloc             171 drivers/gpu/drm/tegra/falcon.c 	if (!falcon->ops || !falcon->ops->alloc || !falcon->ops->free)
alloc              80 drivers/gpu/drm/tegra/falcon.h 	void *(*alloc)(struct falcon *falcon, size_t size,
alloc             177 drivers/gpu/drm/tegra/vic.c 	.alloc = vic_falcon_alloc,
alloc              74 drivers/gpu/host1x/cdma.c 	struct iova *alloc;
alloc              99 drivers/gpu/host1x/cdma.c 		alloc = alloc_iova(&host1x->iova, size >> shift,
alloc             101 drivers/gpu/host1x/cdma.c 		if (!alloc) {
alloc             106 drivers/gpu/host1x/cdma.c 		pb->dma = iova_dma_addr(&host1x->iova, alloc);
alloc             127 drivers/gpu/host1x/cdma.c 	__free_iova(&host1x->iova, alloc);
alloc             133 drivers/gpu/host1x/job.c 		struct iova *alloc;
alloc             150 drivers/gpu/host1x/job.c 			alloc = alloc_iova(&host->iova, gather_size >> shift,
alloc             152 drivers/gpu/host1x/job.c 			if (!alloc) {
alloc             158 drivers/gpu/host1x/job.c 					iova_dma_addr(&host->iova, alloc),
alloc             161 drivers/gpu/host1x/job.c 				__free_iova(&host->iova, alloc);
alloc             167 drivers/gpu/host1x/job.c 				iova_dma_addr(&host->iova, alloc);
alloc             362 drivers/hwtracing/coresight/coresight-catu.c 	.alloc = catu_alloc_etr_buf,
alloc             660 drivers/hwtracing/coresight/coresight-tmc-etr.c 	.alloc = tmc_etr_alloc_flat_buf,
alloc             738 drivers/hwtracing/coresight/coresight-tmc-etr.c 	.alloc = tmc_etr_alloc_sg_buf,
alloc             806 drivers/hwtracing/coresight/coresight-tmc-etr.c 		if (etr_buf_ops[mode] && etr_buf_ops[mode]->alloc)
alloc             807 drivers/hwtracing/coresight/coresight-tmc-etr.c 			rc = etr_buf_ops[mode]->alloc(drvdata, etr_buf,
alloc             213 drivers/hwtracing/coresight/coresight-tmc.h 	int (*alloc)(struct tmc_drvdata *drvdata, struct etr_buf *etr_buf,
alloc              44 drivers/infiniband/hw/cxgb4/id_table.c u32 c4iw_id_alloc(struct c4iw_id_table *alloc)
alloc              49 drivers/infiniband/hw/cxgb4/id_table.c 	spin_lock_irqsave(&alloc->lock, flags);
alloc              51 drivers/infiniband/hw/cxgb4/id_table.c 	obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);
alloc              52 drivers/infiniband/hw/cxgb4/id_table.c 	if (obj >= alloc->max)
alloc              53 drivers/infiniband/hw/cxgb4/id_table.c 		obj = find_first_zero_bit(alloc->table, alloc->max);
alloc              55 drivers/infiniband/hw/cxgb4/id_table.c 	if (obj < alloc->max) {
alloc              56 drivers/infiniband/hw/cxgb4/id_table.c 		if (alloc->flags & C4IW_ID_TABLE_F_RANDOM)
alloc              57 drivers/infiniband/hw/cxgb4/id_table.c 			alloc->last += prandom_u32() % RANDOM_SKIP;
alloc              59 drivers/infiniband/hw/cxgb4/id_table.c 			alloc->last = obj + 1;
alloc              60 drivers/infiniband/hw/cxgb4/id_table.c 		if (alloc->last >= alloc->max)
alloc              61 drivers/infiniband/hw/cxgb4/id_table.c 			alloc->last = 0;
alloc              62 drivers/infiniband/hw/cxgb4/id_table.c 		set_bit(obj, alloc->table);
alloc              63 drivers/infiniband/hw/cxgb4/id_table.c 		obj += alloc->start;
alloc              67 drivers/infiniband/hw/cxgb4/id_table.c 	spin_unlock_irqrestore(&alloc->lock, flags);
alloc              71 drivers/infiniband/hw/cxgb4/id_table.c void c4iw_id_free(struct c4iw_id_table *alloc, u32 obj)
alloc              75 drivers/infiniband/hw/cxgb4/id_table.c 	obj -= alloc->start;
alloc              77 drivers/infiniband/hw/cxgb4/id_table.c 	spin_lock_irqsave(&alloc->lock, flags);
alloc              78 drivers/infiniband/hw/cxgb4/id_table.c 	clear_bit(obj, alloc->table);
alloc              79 drivers/infiniband/hw/cxgb4/id_table.c 	spin_unlock_irqrestore(&alloc->lock, flags);
alloc              82 drivers/infiniband/hw/cxgb4/id_table.c int c4iw_id_table_alloc(struct c4iw_id_table *alloc, u32 start, u32 num,
alloc              87 drivers/infiniband/hw/cxgb4/id_table.c 	alloc->start = start;
alloc              88 drivers/infiniband/hw/cxgb4/id_table.c 	alloc->flags = flags;
alloc              90 drivers/infiniband/hw/cxgb4/id_table.c 		alloc->last = prandom_u32() % RANDOM_SKIP;
alloc              92 drivers/infiniband/hw/cxgb4/id_table.c 		alloc->last = 0;
alloc              93 drivers/infiniband/hw/cxgb4/id_table.c 	alloc->max  = num;
alloc              94 drivers/infiniband/hw/cxgb4/id_table.c 	spin_lock_init(&alloc->lock);
alloc              95 drivers/infiniband/hw/cxgb4/id_table.c 	alloc->table = kmalloc_array(BITS_TO_LONGS(num), sizeof(long),
alloc              97 drivers/infiniband/hw/cxgb4/id_table.c 	if (!alloc->table)
alloc             100 drivers/infiniband/hw/cxgb4/id_table.c 	bitmap_zero(alloc->table, num);
alloc             101 drivers/infiniband/hw/cxgb4/id_table.c 	if (!(alloc->flags & C4IW_ID_TABLE_F_EMPTY))
alloc             103 drivers/infiniband/hw/cxgb4/id_table.c 			set_bit(i, alloc->table);
alloc             108 drivers/infiniband/hw/cxgb4/id_table.c void c4iw_id_table_free(struct c4iw_id_table *alloc)
alloc             110 drivers/infiniband/hw/cxgb4/id_table.c 	kfree(alloc->table);
alloc             937 drivers/infiniband/hw/cxgb4/iw_cxgb4.h u32 c4iw_id_alloc(struct c4iw_id_table *alloc);
alloc             938 drivers/infiniband/hw/cxgb4/iw_cxgb4.h void c4iw_id_free(struct c4iw_id_table *alloc, u32 obj);
alloc             939 drivers/infiniband/hw/cxgb4/iw_cxgb4.h int c4iw_id_table_alloc(struct c4iw_id_table *alloc, u32 start, u32 num,
alloc             941 drivers/infiniband/hw/cxgb4/iw_cxgb4.h void c4iw_id_table_free(struct c4iw_id_table *alloc);
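The c4iw_id_table fragments above (drivers/infiniband/hw/cxgb4/id_table.c) show a bitmap-backed ID allocator: search for a clear bit starting at a next-fit hint, wrap once to the start of the bitmap, mark the bit, and offset the result by the table base; freeing simply clears the bit. Below is a minimal userspace sketch of that pattern. It deliberately drops the spinlock and the C4IW_ID_TABLE_F_RANDOM skip seen in the fragments, and the names (id_table, id_alloc, id_free, id_table_init) are illustrative only, not kernel API.

/*
 * Illustrative userspace analogue of the bitmap-backed ID allocator pattern
 * visible in the id_table.c fragments above. Not kernel code; locking and
 * the optional random skip are omitted for brevity.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

struct id_table {
	uint32_t start;		/* first ID handed out */
	uint32_t max;		/* number of IDs managed */
	uint32_t last;		/* next-fit search hint */
	unsigned char *bits;	/* one bit per ID: set = in use */
};

static int test_bit_at(const struct id_table *t, uint32_t i)
{
	return t->bits[i / 8] & (1u << (i % 8));
}

static void set_bit_at(struct id_table *t, uint32_t i)
{
	t->bits[i / 8] |= 1u << (i % 8);
}

static void clear_bit_at(struct id_table *t, uint32_t i)
{
	t->bits[i / 8] &= ~(1u << (i % 8));
}

/* Search from the hint, wrapping once, then mark and return the ID. */
static int id_alloc(struct id_table *t)
{
	uint32_t obj = t->last;
	uint32_t scanned;

	for (scanned = 0; scanned < t->max; scanned++, obj = (obj + 1) % t->max) {
		if (!test_bit_at(t, obj)) {
			set_bit_at(t, obj);
			t->last = (obj + 1) % t->max;
			return (int)(obj + t->start);
		}
	}
	return -1;	/* table full */
}

static void id_free(struct id_table *t, uint32_t id)
{
	clear_bit_at(t, id - t->start);
}

static int id_table_init(struct id_table *t, uint32_t start, uint32_t num)
{
	t->start = start;
	t->max = num;
	t->last = 0;
	t->bits = calloc((num + 7) / 8, 1);
	return t->bits ? 0 : -1;
}

int main(void)
{
	struct id_table t;
	int a, b;

	if (id_table_init(&t, 100, 8))
		return 1;
	a = id_alloc(&t);
	b = id_alloc(&t);
	printf("%d %d\n", a, b);		/* 100 101 */
	id_free(&t, 100);
	printf("%d\n", id_alloc(&t));		/* 102: next-fit hint skips the freed slot */
	free(t.bits);
	return 0;
}

The next-fit hint (last) is what makes recently freed IDs come back later rather than immediately, which is also why the kernel variant can optionally randomize the hint.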
alloc              40 drivers/infiniband/hw/mthca/mthca_allocator.c u32 mthca_alloc(struct mthca_alloc *alloc)
alloc              45 drivers/infiniband/hw/mthca/mthca_allocator.c 	spin_lock_irqsave(&alloc->lock, flags);
alloc              47 drivers/infiniband/hw/mthca/mthca_allocator.c 	obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);
alloc              48 drivers/infiniband/hw/mthca/mthca_allocator.c 	if (obj >= alloc->max) {
alloc              49 drivers/infiniband/hw/mthca/mthca_allocator.c 		alloc->top = (alloc->top + alloc->max) & alloc->mask;
alloc              50 drivers/infiniband/hw/mthca/mthca_allocator.c 		obj = find_first_zero_bit(alloc->table, alloc->max);
alloc              53 drivers/infiniband/hw/mthca/mthca_allocator.c 	if (obj < alloc->max) {
alloc              54 drivers/infiniband/hw/mthca/mthca_allocator.c 		set_bit(obj, alloc->table);
alloc              55 drivers/infiniband/hw/mthca/mthca_allocator.c 		obj |= alloc->top;
alloc              59 drivers/infiniband/hw/mthca/mthca_allocator.c 	spin_unlock_irqrestore(&alloc->lock, flags);
alloc              64 drivers/infiniband/hw/mthca/mthca_allocator.c void mthca_free(struct mthca_alloc *alloc, u32 obj)
alloc              68 drivers/infiniband/hw/mthca/mthca_allocator.c 	obj &= alloc->max - 1;
alloc              70 drivers/infiniband/hw/mthca/mthca_allocator.c 	spin_lock_irqsave(&alloc->lock, flags);
alloc              72 drivers/infiniband/hw/mthca/mthca_allocator.c 	clear_bit(obj, alloc->table);
alloc              73 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->last = min(alloc->last, obj);
alloc              74 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->top = (alloc->top + alloc->max) & alloc->mask;
alloc              76 drivers/infiniband/hw/mthca/mthca_allocator.c 	spin_unlock_irqrestore(&alloc->lock, flags);
alloc              79 drivers/infiniband/hw/mthca/mthca_allocator.c int mthca_alloc_init(struct mthca_alloc *alloc, u32 num, u32 mask,
alloc              88 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->last = 0;
alloc              89 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->top  = 0;
alloc              90 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->max  = num;
alloc              91 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->mask = mask;
alloc              92 drivers/infiniband/hw/mthca/mthca_allocator.c 	spin_lock_init(&alloc->lock);
alloc              93 drivers/infiniband/hw/mthca/mthca_allocator.c 	alloc->table = kmalloc_array(BITS_TO_LONGS(num), sizeof(long),
alloc              95 drivers/infiniband/hw/mthca/mthca_allocator.c 	if (!alloc->table)
alloc              98 drivers/infiniband/hw/mthca/mthca_allocator.c 	bitmap_zero(alloc->table, num);
alloc             100 drivers/infiniband/hw/mthca/mthca_allocator.c 		set_bit(i, alloc->table);
alloc             105 drivers/infiniband/hw/mthca/mthca_allocator.c void mthca_alloc_cleanup(struct mthca_alloc *alloc)
alloc             107 drivers/infiniband/hw/mthca/mthca_allocator.c 	kfree(alloc->table);
alloc             172 drivers/infiniband/hw/mthca/mthca_av.c 		index = mthca_alloc(&dev->av_table.alloc);
alloc             247 drivers/infiniband/hw/mthca/mthca_av.c 		mthca_free(&dev->av_table.alloc,
alloc             333 drivers/infiniband/hw/mthca/mthca_av.c 	err = mthca_alloc_init(&dev->av_table.alloc,
alloc             363 drivers/infiniband/hw/mthca/mthca_av.c 	mthca_alloc_cleanup(&dev->av_table.alloc);
alloc             375 drivers/infiniband/hw/mthca/mthca_av.c 	mthca_alloc_cleanup(&dev->av_table.alloc);
alloc             779 drivers/infiniband/hw/mthca/mthca_cq.c 	cq->cqn = mthca_alloc(&dev->cq_table.alloc);
alloc             882 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_free(&dev->cq_table.alloc, cq->cqn);
alloc             947 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_free(&dev->cq_table.alloc, cq->cqn);
alloc             957 drivers/infiniband/hw/mthca/mthca_cq.c 	err = mthca_alloc_init(&dev->cq_table.alloc,
alloc             967 drivers/infiniband/hw/mthca/mthca_cq.c 		mthca_alloc_cleanup(&dev->cq_table.alloc);
alloc             975 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_alloc_cleanup(&dev->cq_table.alloc);
alloc             195 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc alloc;
alloc             201 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc alloc;
alloc             227 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc alloc;
alloc             240 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc 	alloc;
alloc             247 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc 	alloc;
alloc             254 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc     	alloc;
alloc             270 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc alloc;
alloc             275 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_alloc 	alloc;
alloc             414 drivers/infiniband/hw/mthca/mthca_dev.h u32 mthca_alloc(struct mthca_alloc *alloc);
alloc             415 drivers/infiniband/hw/mthca/mthca_dev.h void mthca_free(struct mthca_alloc *alloc, u32 obj);
alloc             416 drivers/infiniband/hw/mthca/mthca_dev.h int mthca_alloc_init(struct mthca_alloc *alloc, u32 num, u32 mask,
alloc             418 drivers/infiniband/hw/mthca/mthca_dev.h void mthca_alloc_cleanup(struct mthca_alloc *alloc);
alloc             514 drivers/infiniband/hw/mthca/mthca_eq.c 	eq->eqn = mthca_alloc(&dev->eq_table.alloc);
alloc             568 drivers/infiniband/hw/mthca/mthca_eq.c 	mthca_free(&dev->eq_table.alloc, eq->eqn);
alloc             773 drivers/infiniband/hw/mthca/mthca_eq.c 	err = mthca_alloc_init(&dev->eq_table.alloc,
alloc             884 drivers/infiniband/hw/mthca/mthca_eq.c 	mthca_alloc_cleanup(&dev->eq_table.alloc);
alloc             904 drivers/infiniband/hw/mthca/mthca_eq.c 	mthca_alloc_cleanup(&dev->eq_table.alloc);
alloc             148 drivers/infiniband/hw/mthca/mthca_mcg.c 		index = mthca_alloc(&dev->mcg_table.alloc);
alloc             206 drivers/infiniband/hw/mthca/mthca_mcg.c 		mthca_free(&dev->mcg_table.alloc, index);
alloc             286 drivers/infiniband/hw/mthca/mthca_mcg.c 			mthca_free(&dev->mcg_table.alloc, amgm_index_to_free);
alloc             305 drivers/infiniband/hw/mthca/mthca_mcg.c 		mthca_free(&dev->mcg_table.alloc, index);
alloc             320 drivers/infiniband/hw/mthca/mthca_mcg.c 	err = mthca_alloc_init(&dev->mcg_table.alloc,
alloc             334 drivers/infiniband/hw/mthca/mthca_mcg.c 	mthca_alloc_cleanup(&dev->mcg_table.alloc);
alloc             611 drivers/infiniband/hw/mthca/mthca_memfree.c 			goto alloc;
alloc             626 drivers/infiniband/hw/mthca/mthca_memfree.c alloc:
alloc              46 drivers/infiniband/hw/mthca/mthca_pd.c 	pd->pd_num = mthca_alloc(&dev->pd_table.alloc);
alloc              56 drivers/infiniband/hw/mthca/mthca_pd.c 			mthca_free(&dev->pd_table.alloc, pd->pd_num);
alloc              66 drivers/infiniband/hw/mthca/mthca_pd.c 	mthca_free(&dev->pd_table.alloc, pd->pd_num);
alloc              71 drivers/infiniband/hw/mthca/mthca_pd.c 	return mthca_alloc_init(&dev->pd_table.alloc,
alloc              80 drivers/infiniband/hw/mthca/mthca_pd.c 	mthca_alloc_cleanup(&dev->pd_table.alloc);
alloc            1311 drivers/infiniband/hw/mthca/mthca_qp.c 	qp->qpn = mthca_alloc(&dev->qp_table.alloc);
alloc            1321 drivers/infiniband/hw/mthca/mthca_qp.c 		mthca_free(&dev->qp_table.alloc, qp->qpn);
alloc            1495 drivers/infiniband/hw/mthca/mthca_qp.c 		mthca_free(&dev->qp_table.alloc, qp->qpn);
alloc            2270 drivers/infiniband/hw/mthca/mthca_qp.c 	err = mthca_alloc_init(&dev->qp_table.alloc,
alloc            2281 drivers/infiniband/hw/mthca/mthca_qp.c 		mthca_alloc_cleanup(&dev->qp_table.alloc);
alloc            2301 drivers/infiniband/hw/mthca/mthca_qp.c 	mthca_alloc_cleanup(&dev->qp_table.alloc);
alloc            2314 drivers/infiniband/hw/mthca/mthca_qp.c 	mthca_alloc_cleanup(&dev->qp_table.alloc);
alloc             237 drivers/infiniband/hw/mthca/mthca_srq.c 	srq->srqn = mthca_alloc(&dev->srq_table.alloc);
alloc             322 drivers/infiniband/hw/mthca/mthca_srq.c 	mthca_free(&dev->srq_table.alloc, srq->srqn);
alloc             368 drivers/infiniband/hw/mthca/mthca_srq.c 	mthca_free(&dev->srq_table.alloc, srq->srqn);
alloc             676 drivers/infiniband/hw/mthca/mthca_srq.c 	err = mthca_alloc_init(&dev->srq_table.alloc,
alloc             686 drivers/infiniband/hw/mthca/mthca_srq.c 		mthca_alloc_cleanup(&dev->srq_table.alloc);
alloc             697 drivers/infiniband/hw/mthca/mthca_srq.c 	mthca_alloc_cleanup(&dev->srq_table.alloc);
alloc              40 drivers/infiniband/hw/mthca/mthca_uar.c 	uar->index = mthca_alloc(&dev->uar_table.alloc);
alloc              51 drivers/infiniband/hw/mthca/mthca_uar.c 	mthca_free(&dev->uar_table.alloc, uar->index);
alloc              58 drivers/infiniband/hw/mthca/mthca_uar.c 	ret = mthca_alloc_init(&dev->uar_table.alloc,
alloc              67 drivers/infiniband/hw/mthca/mthca_uar.c 		mthca_alloc_cleanup(&dev->uar_table.alloc);
alloc              77 drivers/infiniband/hw/mthca/mthca_uar.c 	mthca_alloc_cleanup(&dev->uar_table.alloc);
alloc              82 drivers/input/touchscreen/usbtouchscreen.c 	int  (*alloc)       (struct usbtouch_usb *usbtouch);
alloc            1202 drivers/input/touchscreen/usbtouchscreen.c 		.alloc		= mtouch_alloc,
alloc            1362 drivers/input/touchscreen/usbtouchscreen.c 		.alloc		= nexio_alloc,
alloc            1756 drivers/input/touchscreen/usbtouchscreen.c 	if (type->alloc) {
alloc            1757 drivers/input/touchscreen/usbtouchscreen.c 		err = type->alloc(usbtouch);
alloc            2781 drivers/iommu/amd_iommu.c 	.alloc		= alloc_coherent,
alloc            3486 drivers/iommu/amd_iommu.c static u64 *__get_gcr3_pte(u64 *root, int level, int pasid, bool alloc)
alloc            3500 drivers/iommu/amd_iommu.c 			if (!alloc)
alloc            4400 drivers/iommu/amd_iommu.c 	.alloc = irq_remapping_alloc,
alloc             155 drivers/iommu/amd_iommu_v2.c 						  int pasid, bool alloc)
alloc             172 drivers/iommu/amd_iommu_v2.c 			if (!alloc)
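Several of the hits above pass an alloc flag into a page-table or context-table walk (for example __get_gcr3_pte() in drivers/iommu/amd_iommu.c and the amd_iommu_v2.c and intel-iommu.c lookups): when an intermediate level is missing, the flag decides between creating it and bailing out with NULL. The two-level userspace sketch below illustrates that lookup-or-allocate convention; the structure and names (two_level, get_slot, L1_ENTRIES, L2_ENTRIES) are hypothetical and do not reflect the kernel's table layouts.

/*
 * Minimal sketch (not kernel code) of the "lookup or allocate" walk selected
 * by the bool/int alloc parameter in the fragments above: a missing
 * intermediate level is either created or treated as a failed lookup.
 */
#include <stdlib.h>
#include <stdio.h>

#define L1_ENTRIES 16
#define L2_ENTRIES 16

struct two_level {
	long *l1[L1_ENTRIES];	/* each slot points to an array of L2_ENTRIES longs */
};

static long *get_slot(struct two_level *t, unsigned int idx, int alloc)
{
	unsigned int hi = idx / L2_ENTRIES;
	unsigned int lo = idx % L2_ENTRIES;

	if (hi >= L1_ENTRIES)
		return NULL;

	if (!t->l1[hi]) {
		if (!alloc)		/* read-only lookup: never create levels */
			return NULL;
		t->l1[hi] = calloc(L2_ENTRIES, sizeof(long));
		if (!t->l1[hi])
			return NULL;
	}
	return &t->l1[hi][lo];
}

int main(void)
{
	struct two_level t = { { NULL } };
	long *p;

	p = get_slot(&t, 20, 0);		/* lookup only: level missing, NULL */
	printf("%p\n", (void *)p);
	p = get_slot(&t, 20, 1);		/* allocate the missing level */
	if (p)
		*p = 42;
	printf("%ld\n", *get_slot(&t, 20, 0));	/* now resolvable without allocating: 42 */
	for (int i = 0; i < L1_ENTRIES; i++)
		free(t.l1[i]);
	return 0;
}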
alloc            1094 drivers/iommu/dma-iommu.c 	.alloc			= iommu_dma_alloc,
alloc             135 drivers/iommu/hyperv-iommu.c 	.alloc = hyperv_irq_remapping_alloc,
alloc             697 drivers/iommu/intel-iommu.c 					 u8 devfn, int alloc)
alloc             715 drivers/iommu/intel-iommu.c 		if (!alloc)
alloc            3776 drivers/iommu/intel-iommu.c 	.alloc = intel_alloc_coherent,
alloc            4021 drivers/iommu/intel-iommu.c 	.alloc			= intel_alloc_coherent,
alloc            1444 drivers/iommu/intel_irq_remapping.c 	.alloc = intel_irq_remapping_alloc,
alloc             843 drivers/iommu/io-pgtable-arm-v7s.c 	.alloc	= arm_v7s_alloc_pgtable,
alloc            1074 drivers/iommu/io-pgtable-arm.c 	.alloc	= arm_64_lpae_alloc_pgtable_s1,
alloc            1079 drivers/iommu/io-pgtable-arm.c 	.alloc	= arm_64_lpae_alloc_pgtable_s2,
alloc            1084 drivers/iommu/io-pgtable-arm.c 	.alloc	= arm_32_lpae_alloc_pgtable_s1,
alloc            1089 drivers/iommu/io-pgtable-arm.c 	.alloc	= arm_32_lpae_alloc_pgtable_s2,
alloc            1094 drivers/iommu/io-pgtable-arm.c 	.alloc	= arm_mali_lpae_alloc_pgtable,
alloc              43 drivers/iommu/io-pgtable.c 	iop = fns->alloc(cfg, cookie);
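A large share of the hits in this listing are ".alloc = ..." designated initializers in ops structures, paired with call sites such as fns->alloc(cfg, cookie) here or the etr_buf_ops[mode]->alloc presence check earlier. The standalone sketch below illustrates that callback-table pattern, including checking that the hook exists before dispatching; buf_ops, buf_ops_table and buf_alloc are made-up names for illustration, not any kernel interface.

/*
 * Standalone sketch of the ops-table pattern behind most ".alloc = ..." hits:
 * a struct of function pointers selected at runtime, with the caller checking
 * that the hook is present before calling it. Names are illustrative only.
 */
#include <stdlib.h>
#include <stdio.h>

struct buf_ops {
	const char *name;
	void *(*alloc)(size_t size);
	void (*free)(void *p);
};

static void *plain_alloc(size_t size)  { return malloc(size); }
static void *zeroed_alloc(size_t size) { return calloc(1, size); }

static const struct buf_ops buf_ops_table[] = {
	{ .name = "plain",  .alloc = plain_alloc,  .free = free },
	{ .name = "zeroed", .alloc = zeroed_alloc, .free = free },
	{ .name = "stub" },	/* no .alloc hook: callers must skip it */
};

static void *buf_alloc(unsigned int mode, size_t size)
{
	if (mode >= sizeof(buf_ops_table) / sizeof(buf_ops_table[0]))
		return NULL;
	if (!buf_ops_table[mode].alloc)		/* mirror the ->alloc presence check */
		return NULL;
	return buf_ops_table[mode].alloc(size);
}

int main(void)
{
	void *p = buf_alloc(1, 64);

	printf("zeroed alloc %s\n", p ? "ok" : "failed");
	if (p)
		buf_ops_table[1].free(p);
	printf("stub alloc -> %p\n", buf_alloc(2, 64));	/* NULL: hook absent */
	return 0;
}

The designated-initializer form keeps each backend's table readable even when most hooks are left NULL, which is why the callers in the listing test ->alloc before using it.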
alloc             186 drivers/irqchip/irq-alpine-msi.c 	.alloc	= alpine_msix_middle_domain_alloc,
alloc             267 drivers/irqchip/irq-armada-370-xp.c 	.alloc	= armada_370_xp_msi_alloc,
alloc             190 drivers/irqchip/irq-crossbar.c 	.alloc		= crossbar_domain_alloc,
alloc             232 drivers/irqchip/irq-gic-v2m.c 	.alloc			= gicv2m_irq_domain_alloc,
alloc            2695 drivers/irqchip/irq-gic-v3-its.c 	.alloc			= its_irq_domain_alloc,
alloc            3181 drivers/irqchip/irq-gic-v3-its.c 	.alloc			= its_vpe_irq_domain_alloc,
alloc             141 drivers/irqchip/irq-gic-v3-mbi.c 	.alloc			= mbi_irq_domain_alloc,
alloc            1399 drivers/irqchip/irq-gic-v3.c 	.alloc = gic_irq_domain_alloc,
alloc            1068 drivers/irqchip/irq-gic.c 	.alloc = gic_irq_domain_alloc,
alloc             192 drivers/irqchip/irq-imx-gpcv2.c 	.alloc		= imx_gpcv2_domain_alloc,
alloc             194 drivers/irqchip/irq-ixp4xx.c 	.alloc = ixp4xx_irq_domain_alloc,
alloc             188 drivers/irqchip/irq-ls-scfg-msi.c 	.alloc	= ls_scfg_msi_domain_irq_alloc,
alloc             231 drivers/irqchip/irq-mbigen.c 	.alloc		= mbigen_irq_domain_alloc,
alloc             347 drivers/irqchip/irq-meson-gpio.c 	.alloc		= meson_gpio_irq_domain_alloc,
alloc             223 drivers/irqchip/irq-mips-cpu.c 	.alloc	= mips_cpu_ipi_alloc,
alloc             548 drivers/irqchip/irq-mips-gic.c 	.alloc = gic_irq_domain_alloc,
alloc             651 drivers/irqchip/irq-mips-gic.c 	.alloc = gic_ipi_domain_alloc,
alloc             156 drivers/irqchip/irq-mtk-cirq.c 	.alloc		= mtk_cirq_domain_alloc,
alloc             118 drivers/irqchip/irq-mtk-sysirq.c 	.alloc		= mtk_sysirq_domain_alloc,
alloc             148 drivers/irqchip/irq-mvebu-gicp.c 	.alloc	= gicp_irq_domain_alloc,
alloc             268 drivers/irqchip/irq-mvebu-icu.c 	.alloc     = mvebu_icu_irq_domain_alloc,
alloc             144 drivers/irqchip/irq-mvebu-odmi.c 	.alloc	= odmi_irq_domain_alloc,
alloc             193 drivers/irqchip/irq-mvebu-sei.c 	.alloc	= mvebu_sei_domain_alloc,
alloc             237 drivers/irqchip/irq-mvebu-sei.c 	.alloc		= mvebu_sei_ap_alloc,
alloc             306 drivers/irqchip/irq-mvebu-sei.c 	.alloc	= mvebu_sei_cp_domain_alloc,
alloc              79 drivers/irqchip/irq-nvic.c 	.alloc = nvic_irq_domain_alloc,
alloc             214 drivers/irqchip/irq-partition-percpu.c 	desc->ops.alloc = partition_domain_alloc;
alloc             138 drivers/irqchip/irq-renesas-rza1.c 	.alloc = rza1_irqc_alloc,
alloc             173 drivers/irqchip/irq-sni-exiu.c 	.alloc		= exiu_domain_alloc,
alloc             415 drivers/irqchip/irq-stm32-exti.c 	.alloc  = stm32_exti_alloc,
alloc             788 drivers/irqchip/irq-stm32-exti.c 	.alloc	= stm32_exti_h_domain_alloc,
alloc             271 drivers/irqchip/irq-tegra.c 	.alloc		= tegra_ictlr_domain_alloc,
alloc             489 drivers/irqchip/irq-ti-sci-inta.c 	.alloc		= ti_sci_inta_irq_domain_alloc,
alloc             189 drivers/irqchip/irq-ti-sci-intr.c 	.alloc		= ti_sci_intr_irq_domain_alloc,
alloc             158 drivers/irqchip/irq-uniphier-aidet.c 	.alloc = uniphier_aidet_domain_alloc,
alloc             174 drivers/irqchip/irq-vf610-mscm-ir.c 	.alloc = vf610_mscm_ir_domain_alloc,
alloc             231 drivers/irqchip/qcom-pdc.c 	.alloc		= qcom_pdc_alloc,
alloc             575 drivers/md/bcache/alloc.c 					    struct bkey *alloc)
alloc             591 drivers/md/bcache/alloc.c 	if (!ret->sectors_free && KEY_PTRS(alloc)) {
alloc             593 drivers/md/bcache/alloc.c 		bkey_copy(&ret->key, alloc);
alloc             594 drivers/md/bcache/alloc.c 		bkey_init(alloc);
alloc             621 drivers/md/bcache/alloc.c 	BKEY_PADDED(key) alloc;
alloc             631 drivers/md/bcache/alloc.c 	bkey_init(&alloc.key);
alloc             634 drivers/md/bcache/alloc.c 	while (!(b = pick_data_bucket(c, k, write_point, &alloc.key))) {
alloc             641 drivers/md/bcache/alloc.c 		if (bch_bucket_alloc_set(c, watermark, &alloc.key, 1, wait))
alloc             652 drivers/md/bcache/alloc.c 	if (KEY_PTRS(&alloc.key))
alloc             653 drivers/md/bcache/alloc.c 		bkey_put(c, &alloc.key);
alloc             215 drivers/media/common/videobuf2/videobuf2-core.c 		mem_priv = call_ptr_memop(vb, alloc,
alloc             600 drivers/media/common/videobuf2/videobuf2-core.c 	if (!(q->io_modes & VB2_MMAP) || !q->mem_ops->alloc ||
alloc             692 drivers/media/common/videobuf2/videobuf2-dma-contig.c 	.alloc		= vb2_dc_alloc,
alloc             637 drivers/media/common/videobuf2/videobuf2-dma-sg.c 	.alloc		= vb2_dma_sg_alloc,
alloc             431 drivers/media/common/videobuf2/videobuf2-vmalloc.c 	.alloc		= vb2_vmalloc_alloc,
alloc             146 drivers/media/pci/tw686x/tw686x-video.c 	.alloc		= tw686x_memcpy_dma_alloc,
alloc             332 drivers/media/pci/tw686x/tw686x-video.c 	.alloc		= tw686x_sg_dma_alloc,
alloc             684 drivers/media/pci/tw686x/tw686x-video.c 	if (dev->dma_ops->alloc && realloc) {
alloc             689 drivers/media/pci/tw686x/tw686x-video.c 			err = dev->dma_ops->alloc(vc, pb);
alloc             106 drivers/media/pci/tw686x/tw686x.h 	int (*alloc)(struct tw686x_video_channel *vc, unsigned int pb);
alloc             155 drivers/media/radio/radio-aimslab.c 	.alloc = rtrack_alloc,
alloc             121 drivers/media/radio/radio-aztech.c 	.alloc = aztech_alloc,
alloc             275 drivers/media/radio/radio-gemtek.c 	.alloc = gemtek_alloc,
alloc             183 drivers/media/radio/radio-isa.c 	struct radio_isa_card *isa = drv->ops->alloc();
alloc              45 drivers/media/radio/radio-isa.h 	struct radio_isa_card *(*alloc)(void);
alloc             104 drivers/media/radio/radio-rtrack2.c 	.alloc = rtrack2_alloc,
alloc             131 drivers/media/radio/radio-terratec.c 	.alloc = terratec_alloc,
alloc             203 drivers/media/radio/radio-trust.c 	.alloc = trust_alloc,
alloc             134 drivers/media/radio/radio-typhoon.c 	.alloc = typhoon_alloc,
alloc             207 drivers/media/radio/radio-zoltrix.c 	.alloc = zoltrix_alloc,
alloc             186 drivers/media/rc/rc-main.c 	rc_map->alloc = roundup_pow_of_two(size * sizeof(struct rc_map_table));
alloc             187 drivers/media/rc/rc-main.c 	rc_map->size = rc_map->alloc / sizeof(struct rc_map_table);
alloc             188 drivers/media/rc/rc-main.c 	rc_map->scan = kmalloc(rc_map->alloc, GFP_KERNEL);
alloc             196 drivers/media/rc/rc-main.c 		rc_map->size, rc_map->alloc);
alloc             230 drivers/media/rc/rc-main.c 	unsigned int oldalloc = rc_map->alloc;
alloc             237 drivers/media/rc/rc-main.c 		if (rc_map->alloc >= IR_TAB_MAX_SIZE)
alloc             259 drivers/media/rc/rc-main.c 	rc_map->alloc = newalloc;
alloc             260 drivers/media/rc/rc-main.c 	rc_map->size = rc_map->alloc / sizeof(struct rc_map_table);
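The rc-main.c fragments above keep the table's byte allocation (rc_map->alloc) rounded up to a power of two, derive the usable entry count from it, and refuse to grow past IR_TAB_MAX_SIZE. The sketch below mirrors that sizing scheme in plain C; the entry layout, the TAB_MAX_SIZE value and the doubling growth step are assumptions for illustration, not the kernel's actual numbers or resize policy.

/*
 * Sketch of the sizing scheme suggested by the rc-main.c fragments above (not
 * the kernel code): byte allocation kept as a power of two, entry count
 * derived from it, growth capped at a fixed ceiling.
 */
#include <stdlib.h>
#include <stdio.h>

struct entry { unsigned int scancode; unsigned int keycode; };

#define TAB_MAX_SIZE	8192		/* byte ceiling: assumed value */

struct table {
	struct entry *scan;
	size_t alloc;	/* bytes allocated, always a power of two */
	size_t size;	/* entries that fit in alloc */
};

static size_t roundup_pow_of_two(size_t n)
{
	size_t p = 1;

	while (p < n)
		p <<= 1;
	return p;
}

static int table_init(struct table *t, size_t entries)
{
	t->alloc = roundup_pow_of_two(entries * sizeof(struct entry));
	t->size = t->alloc / sizeof(struct entry);
	t->scan = malloc(t->alloc);
	return t->scan ? 0 : -1;
}

static int table_grow(struct table *t)
{
	size_t newalloc = t->alloc * 2;		/* doubling step: an assumption */
	struct entry *bigger;

	if (t->alloc >= TAB_MAX_SIZE)
		return -1;			/* refuse to grow past the cap */
	bigger = realloc(t->scan, newalloc);
	if (!bigger)
		return -1;
	t->scan = bigger;
	t->alloc = newalloc;
	t->size = t->alloc / sizeof(struct entry);
	return 0;
}

int main(void)
{
	struct table t;

	if (table_init(&t, 100))
		return 1;
	printf("alloc=%zu bytes, size=%zu entries\n", t.alloc, t.size);
	table_grow(&t);
	printf("alloc=%zu bytes, size=%zu entries\n", t.alloc, t.size);
	free(t.scan);
	return 0;
}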
alloc            1017 drivers/media/usb/em28xx/em28xx-core.c 	int alloc;
alloc            1026 drivers/media/usb/em28xx/em28xx-core.c 		alloc = 0;
alloc            1029 drivers/media/usb/em28xx/em28xx-core.c 		alloc = 1;
alloc            1032 drivers/media/usb/em28xx/em28xx-core.c 	if (alloc) {
alloc            2740 drivers/message/fusion/mptbase.c 	if (ioc->alloc != NULL) {
alloc            2743 drivers/message/fusion/mptbase.c 		    ioc->name, ioc->alloc, ioc->alloc_sz));
alloc            2745 drivers/message/fusion/mptbase.c 				ioc->alloc, ioc->alloc_dma);
alloc            2748 drivers/message/fusion/mptbase.c 		ioc->alloc = NULL;
alloc            4512 drivers/message/fusion/mptbase.c 		ioc->alloc = mem;
alloc            4614 drivers/message/fusion/mptbase.c 	if (ioc->alloc != NULL) {
alloc            4618 drivers/message/fusion/mptbase.c 				ioc->alloc, ioc->alloc_dma);
alloc            6744 drivers/message/fusion/mptbase.c 					(void *)ioc->alloc, (void *)(ulong)ioc->alloc_dma);
alloc             632 drivers/message/fusion/mptbase.h 	u8			*alloc;		/* frames alloc ptr */
alloc             400 drivers/mfd/qcom-pm8xxx.c 	.alloc = pm8xxx_irq_domain_alloc,
alloc              67 drivers/misc/habanalabs/memory.c 	num_pgs = (args->alloc.mem_size + (page_size - 1)) >> page_shift;
alloc            1103 drivers/misc/habanalabs/memory.c 		if (args->in.alloc.mem_size == 0) {
alloc            1178 drivers/misc/habanalabs/memory.c 		if (args->in.alloc.mem_size == 0) {
alloc             237 drivers/misc/mic/host/mic_boot.c 	.alloc = __mic_dma_alloc,
alloc             189 drivers/misc/mic/scif/scif_rma.c 	struct scif_allocmsg *alloc = &window->alloc_handle;
alloc             194 drivers/misc/mic/scif/scif_rma.c 	err = wait_event_timeout(alloc->allocwq,
alloc             195 drivers/misc/mic/scif/scif_rma.c 				 alloc->state != OP_IN_PROGRESS,
alloc             201 drivers/misc/mic/scif/scif_rma.c 	if (alloc->state == OP_COMPLETED) {
alloc             744 drivers/misc/mic/scif/scif_rma.c 	struct scif_allocmsg *alloc = &window->alloc_handle;
alloc             747 drivers/misc/mic/scif/scif_rma.c 	alloc->state = OP_IN_PROGRESS;
alloc             748 drivers/misc/mic/scif/scif_rma.c 	init_waitqueue_head(&alloc->allocwq);
alloc             770 drivers/misc/mic/scif/scif_rma.c 	struct scif_allocmsg *alloc = &window->alloc_handle;
alloc             785 drivers/misc/mic/scif/scif_rma.c 	err = wait_event_timeout(alloc->allocwq,
alloc             786 drivers/misc/mic/scif/scif_rma.c 				 alloc->state != OP_IN_PROGRESS,
alloc             803 drivers/misc/mic/scif/scif_rma.c 	if (alloc->state == OP_FAILED)
alloc             824 drivers/misc/mic/scif/scif_rma.c 	remote_window = scif_ioremap(alloc->phys_addr, sizeof(*window),
alloc             929 drivers/misc/mic/scif/scif_rma.c 	window->peer_window = alloc->vaddr;
alloc            1038 drivers/net/ethernet/atheros/ag71xx.c 			       void *(*alloc)(unsigned int size))
alloc            1046 drivers/net/ethernet/atheros/ag71xx.c 	data = alloc(ag71xx_buffer_size(ag));
alloc             133 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	u32 lo, hi, used, alloc;
alloc             377 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			alloc = T5_ALLOC_G(lo);
alloc             380 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			alloc = ALLOC_G(lo);
alloc             383 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		meminfo_buff->port_alloc[i] = alloc;
alloc             394 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			alloc = T5_ALLOC_G(lo);
alloc             397 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			alloc = ALLOC_G(lo);
alloc             400 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		meminfo_buff->loopback_alloc[i] = alloc;
alloc             117 drivers/net/ethernet/ec_bhf.c 	u8 *alloc;
alloc             341 drivers/net/ethernet/ec_bhf.c 	buf->alloc = dma_alloc_coherent(dev, buf->alloc_len, &buf->alloc_phys,
alloc             343 drivers/net/ethernet/ec_bhf.c 	if (buf->alloc == NULL) {
alloc             349 drivers/net/ethernet/ec_bhf.c 	buf->buf = buf->alloc + (buf->buf_phys - buf->alloc_phys);
alloc             426 drivers/net/ethernet/ec_bhf.c 	dma_free_coherent(dev, priv->rx_buf.alloc_len, priv->rx_buf.alloc,
alloc             444 drivers/net/ethernet/ec_bhf.c 			  priv->tx_buf.alloc, priv->tx_buf.alloc_phys);
alloc             446 drivers/net/ethernet/ec_bhf.c 			  priv->rx_buf.alloc, priv->rx_buf.alloc_phys);
alloc            3939 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c 	int alloc = 0;
alloc            3958 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c 				alloc++;
alloc            3964 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c 	hdev->num_msi_left -= alloc;
alloc            3965 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c 	hdev->num_msi_used += alloc;
alloc            3967 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c 	return alloc;
alloc             509 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_main.c 	int alloc = 0;
alloc             526 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_main.c 				alloc++;
alloc             532 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_main.c 	hdev->num_msi_left -= alloc;
alloc             533 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_main.c 	hdev->num_msi_used += alloc;
alloc             535 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_main.c 	return alloc;
alloc             315 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			   bool alloc(struct i40e_ring *rx_ring,
alloc             326 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (!alloc(rx_ring, bi)) {
alloc             447 drivers/net/ethernet/intel/i40e/i40e_xsk.c void i40e_zca_free(struct zero_copy_allocator *alloc, unsigned long handle)
alloc             454 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	rx_ring = container_of(alloc, struct i40e_ring, zca);
alloc              15 drivers/net/ethernet/intel/i40e/i40e_xsk.h void i40e_zca_free(struct zero_copy_allocator *alloc, unsigned long handle);
alloc              36 drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h void ixgbe_zca_free(struct zero_copy_allocator *alloc, unsigned long handle);
alloc             226 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c void ixgbe_zca_free(struct zero_copy_allocator *alloc, unsigned long handle)
alloc             233 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	rx_ring = container_of(alloc, struct ixgbe_ring, zca);
alloc             313 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			    bool alloc(struct ixgbe_ring *rx_ring,
alloc             330 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (!alloc(rx_ring, bi)) {
alloc             560 drivers/net/ethernet/jme.c 	txring->alloc = dma_alloc_coherent(&(jme->pdev->dev),
alloc             565 drivers/net/ethernet/jme.c 	if (!txring->alloc)
alloc             571 drivers/net/ethernet/jme.c 	txring->desc		= (void *)ALIGN((unsigned long)(txring->alloc),
alloc             589 drivers/net/ethernet/jme.c 			  txring->alloc,
alloc             608 drivers/net/ethernet/jme.c 	if (txring->alloc) {
alloc             626 drivers/net/ethernet/jme.c 				  txring->alloc,
alloc             629 drivers/net/ethernet/jme.c 		txring->alloc		= NULL;
alloc             780 drivers/net/ethernet/jme.c 	if (rxring->alloc) {
alloc             789 drivers/net/ethernet/jme.c 				  rxring->alloc,
alloc             791 drivers/net/ethernet/jme.c 		rxring->alloc    = NULL;
alloc             807 drivers/net/ethernet/jme.c 	rxring->alloc = dma_alloc_coherent(&(jme->pdev->dev),
alloc             811 drivers/net/ethernet/jme.c 	if (!rxring->alloc)
alloc             817 drivers/net/ethernet/jme.c 	rxring->desc		= (void *)ALIGN((unsigned long)(rxring->alloc),
alloc             846 drivers/net/ethernet/jme.c 			  rxring->alloc,
alloc             364 drivers/net/ethernet/jme.h 	void *alloc;		/* pointer to allocated memory */
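The jme.c, ec_bhf.c and rocker_main.c fragments all keep the allocator's raw return value in an ->alloc field solely so it can be freed later, while the rest of the code uses a second pointer derived from it (ALIGN()ed in jme, offset in ec_bhf and rocker). Below is a small userspace sketch of that over-allocate-and-adjust pattern; ring, ring_setup and RING_ALIGN are illustrative names and values, and malloc stands in for the coherent DMA allocations in the fragments.

/*
 * Userspace sketch of the over-allocate-and-adjust pattern shared by the
 * jme/ec_bhf/rocker fragments: the raw pointer is stored only so it can be
 * freed; callers use the adjusted (here aligned) pointer. Not kernel code.
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

struct ring {
	void *alloc;	/* raw pointer to allocated memory, used only for free */
	void *desc;	/* aligned pointer actually used by the rest of the code */
};

#define RING_ALIGN	64u

static int ring_setup(struct ring *r, size_t size)
{
	uintptr_t raw;

	r->alloc = malloc(size + RING_ALIGN - 1);	/* over-allocate for alignment slack */
	if (!r->alloc)
		return -1;
	raw = (uintptr_t)r->alloc;
	r->desc = (void *)((raw + RING_ALIGN - 1) & ~(uintptr_t)(RING_ALIGN - 1));
	return 0;
}

static void ring_teardown(struct ring *r)
{
	free(r->alloc);		/* always free the raw pointer, never the aligned one */
	r->alloc = NULL;
	r->desc = NULL;
}

int main(void)
{
	struct ring r;

	if (ring_setup(&r, 4096))
		return 1;
	printf("raw=%p aligned=%p\n", r.alloc, r.desc);
	ring_teardown(&r);
	return 0;
}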
alloc            1497 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 		goto alloc;
alloc            1530 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c alloc:
alloc            1582 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 		goto alloc;
alloc            1594 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 			goto alloc;
alloc            1600 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 			goto alloc;
alloc            1506 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 				int alloc)
alloc            1528 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 			if (r->com.state == RES_QP_MAPPED && !alloc)
alloc            1536 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 			if ((r->com.state == RES_QP_RESERVED && alloc) ||
alloc            6650 drivers/net/ethernet/mellanox/mlxsw/reg.h static inline void mlxsw_reg_ralta_pack(char *payload, bool alloc,
alloc            6655 drivers/net/ethernet/mellanox/mlxsw/reg.h 	mlxsw_reg_ralta_op_set(payload, !alloc);
alloc             600 drivers/net/ethernet/mellanox/mlxsw/spectrum.h 	int (*alloc)(struct mlxsw_sp *mlxsw_sp, void *priv,
alloc             381 drivers/net/ethernet/mellanox/mlxsw/spectrum1_kvdl.c 	.alloc = mlxsw_sp1_kvdl_alloc,
alloc             269 drivers/net/ethernet/mellanox/mlxsw/spectrum2_kvdl.c 	.alloc = mlxsw_sp2_kvdl_alloc,
alloc              52 drivers/net/ethernet/mellanox/mlxsw/spectrum_kvdl.c 	return kvdl->kvdl_ops->alloc(mlxsw_sp, kvdl->priv, type,
alloc            1005 drivers/net/ethernet/micrel/ksz884x.c 	int alloc;
alloc            3804 drivers/net/ethernet/micrel/ksz884x.c 	int alloc = info->alloc;
alloc            3808 drivers/net/ethernet/micrel/ksz884x.c 	while (!(alloc & 1)) {
alloc            3810 drivers/net/ethernet/micrel/ksz884x.c 		alloc >>= 1;
alloc            3812 drivers/net/ethernet/micrel/ksz884x.c 	if (alloc != 1 || shift < MIN_DESC_SHIFT) {
alloc            3814 drivers/net/ethernet/micrel/ksz884x.c 		while (alloc) {
alloc            3816 drivers/net/ethernet/micrel/ksz884x.c 			alloc >>= 1;
alloc            3820 drivers/net/ethernet/micrel/ksz884x.c 		alloc = 1 << shift;
alloc            3821 drivers/net/ethernet/micrel/ksz884x.c 		info->alloc = alloc;
alloc            3823 drivers/net/ethernet/micrel/ksz884x.c 	info->mask = info->alloc - 1;
alloc            3834 drivers/net/ethernet/micrel/ksz884x.c 	for (i = 0; i < desc_info->alloc; i++) {
alloc            3844 drivers/net/ethernet/micrel/ksz884x.c 	desc_info->avail = desc_info->alloc;
alloc            3868 drivers/net/ethernet/micrel/ksz884x.c 	info->avail = info->alloc;
alloc            4364 drivers/net/ethernet/micrel/ksz884x.c 	desc_info->ring = kcalloc(desc_info->alloc, sizeof(struct ksz_desc),
alloc            4388 drivers/net/ethernet/micrel/ksz884x.c 		hw->rx_desc_info.size * hw->rx_desc_info.alloc +
alloc            4389 drivers/net/ethernet/micrel/ksz884x.c 		hw->tx_desc_info.size * hw->tx_desc_info.alloc +
alloc            4412 drivers/net/ethernet/micrel/ksz884x.c 	offset = hw->rx_desc_info.alloc * hw->rx_desc_info.size;
alloc            4454 drivers/net/ethernet/micrel/ksz884x.c 	for (i = 0; i < hw->rx_desc_info.alloc; i++) {
alloc            4491 drivers/net/ethernet/micrel/ksz884x.c 	hw->rx_desc_info.alloc = NUM_OF_RX_DESC;
alloc            4492 drivers/net/ethernet/micrel/ksz884x.c 	hw->tx_desc_info.alloc = NUM_OF_TX_DESC;
alloc            4576 drivers/net/ethernet/micrel/ksz884x.c 	for (i = 0; i < desc_info->alloc; i++) {
alloc            4747 drivers/net/ethernet/micrel/ksz884x.c 	while (info->avail < info->alloc) {
alloc            5037 drivers/net/ethernet/micrel/ksz884x.c 	int left = info->alloc;
alloc            5073 drivers/net/ethernet/micrel/ksz884x.c 	int left = info->alloc;
alloc            5118 drivers/net/ethernet/micrel/ksz884x.c 	int left = info->alloc;
alloc            6411 drivers/net/ethernet/micrel/ksz884x.c 	ring->tx_pending = hw->tx_desc_info.alloc;
alloc            6413 drivers/net/ethernet/micrel/ksz884x.c 	ring->rx_pending = hw->rx_desc_info.alloc;
alloc             189 drivers/net/ethernet/rocker/rocker_main.c 	unsigned char *alloc;
alloc             196 drivers/net/ethernet/rocker/rocker_main.c 	alloc = kzalloc(ROCKER_TEST_DMA_BUF_SIZE * 2 + offset,
alloc             198 drivers/net/ethernet/rocker/rocker_main.c 	if (!alloc)
alloc             200 drivers/net/ethernet/rocker/rocker_main.c 	buf = alloc + offset;
alloc             240 drivers/net/ethernet/rocker/rocker_main.c 	kfree(alloc);
alloc             804 drivers/net/fddi/skfp/h/smt.h 	struct smt_p_001d	alloc ;		/* SBA Allocatable */
alloc             868 drivers/net/wireless/intel/iwlegacy/3945-rs.c 	.alloc = il3945_rs_alloc,
alloc            2780 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	.alloc = il4965_rs_alloc,
alloc            3288 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	.alloc = rs_alloc,
alloc            4146 drivers/net/wireless/intel/iwlwifi/mvm/rs.c 	.alloc = rs_alloc,
alloc            1020 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 	bool alloc = false;
alloc            1026 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 		alloc = true;
alloc            1076 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 	if (alloc)
alloc            2063 drivers/net/wireless/intel/iwlwifi/pcie/tx.c 		goto alloc;
alloc            2072 drivers/net/wireless/intel/iwlwifi/pcie/tx.c alloc:
alloc             573 drivers/net/wireless/mediatek/mt76/dma.c 	.alloc = mt76_dma_alloc_queue,
alloc             151 drivers/net/wireless/mediatek/mt76/mt76.h 	int (*alloc)(struct mt76_dev *dev, struct mt76_queue *q,
alloc             598 drivers/net/wireless/mediatek/mt76/mt76.h #define mt76_queue_alloc(dev, ...)	(dev)->mt76.queue_ops->alloc(&((dev)->mt76), __VA_ARGS__)
alloc             300 drivers/net/wireless/realtek/rtlwifi/rc.c 	.alloc = rtl_rate_alloc,
alloc             901 drivers/nvdimm/label.c 	int i, alloc, victims, nfree, old_num_resources, nlabel, rc = -ENXIO;
alloc             931 drivers/nvdimm/label.c 	alloc = 0;
alloc             936 drivers/nvdimm/label.c 			alloc++;
alloc             963 drivers/nvdimm/label.c 	if (nfree - alloc < 0 || nfree - alloc + victims < 1) {
alloc             699 drivers/nvdimm/pfn_devs.c 		altmap->alloc = 0;
alloc            1021 drivers/parisc/ccio-dma.c 	.alloc =		ccio_alloc,
alloc            1073 drivers/parisc/sba_iommu.c 	.alloc =		sba_alloc,
alloc             250 drivers/pci/controller/dwc/pcie-designware-host.c 	.alloc	= dw_pcie_irq_domain_alloc,
alloc             724 drivers/pci/controller/pci-aardvark.c 	.alloc = advk_msi_irq_domain_alloc,
alloc             239 drivers/pci/controller/pci-xgene-msi.c 	.alloc  = xgene_irq_domain_alloc,
alloc             170 drivers/pci/controller/pcie-altera-msi.c 	.alloc	= altera_irq_domain_alloc,
alloc             290 drivers/pci/controller/pcie-iproc-msi.c 	.alloc = iproc_msi_irq_domain_alloc,
alloc             481 drivers/pci/controller/pcie-mediatek.c 	.alloc	= mtk_pcie_irq_domain_alloc,
alloc             803 drivers/pci/controller/pcie-mobiveil.c 	.alloc	= mobiveil_irq_msi_domain_alloc,
alloc             169 drivers/pci/controller/pcie-tango.c 	.alloc	= tango_irq_domain_alloc,
alloc             515 drivers/pci/controller/pcie-xilinx-nwl.c 	.alloc  = nwl_irq_domain_alloc,
alloc             425 drivers/pci/controller/vmd.c 	ASSIGN_VMD_DMA_OPS(source, dest, alloc);
alloc             436 drivers/perf/xgene_pmu.c 	XGENE_PMU_EVENT_ATTR(pa-req-buf-alloc-all,		0x01),
alloc             437 drivers/perf/xgene_pmu.c 	XGENE_PMU_EVENT_ATTR(pa-req-buf-alloc-rd,		0x02),
alloc             438 drivers/perf/xgene_pmu.c 	XGENE_PMU_EVENT_ATTR(pa-req-buf-alloc-wr,		0x03),
alloc             746 drivers/pinctrl/qcom/pinctrl-ssbi-gpio.c 	.alloc = pm8xxx_domain_alloc,
alloc             436 drivers/pinctrl/stm32/pinctrl-stm32.c 	.alloc          = stm32_gpio_domain_alloc,
alloc             962 drivers/remoteproc/remoteproc_core.c 		     int (*alloc)(struct rproc *, struct rproc_mem_entry *),
alloc             977 drivers/remoteproc/remoteproc_core.c 	mem->alloc = alloc;
alloc            1181 drivers/remoteproc/remoteproc_core.c 		if (entry->alloc) {
alloc            1182 drivers/remoteproc/remoteproc_core.c 			ret = entry->alloc(rproc, entry);
alloc             106 drivers/s390/cio/cmf.c 	int  (*alloc)  (struct ccw_device *);
alloc             715 drivers/s390/cio/cmf.c 	.alloc	= alloc_cmb,
alloc             968 drivers/s390/cio/cmf.c 	.alloc	    = alloc_cmbe,
alloc            1138 drivers/s390/cio/cmf.c 	ret = cmbops->alloc(cdev);
alloc              70 drivers/scsi/arm/queue.c 	queue->alloc = q = kmalloc_array(nqueues, sizeof(QE_t), GFP_KERNEL);
alloc              79 drivers/scsi/arm/queue.c 	return queue->alloc != NULL;
alloc              91 drivers/scsi/arm/queue.c 	kfree(queue->alloc);
alloc              14 drivers/scsi/arm/queue.h 	void *alloc;			/* start of allocated mem */
alloc            2608 drivers/scsi/bnx2fc/bnx2fc_fcoe.c 	.alloc = bnx2fc_ctlr_alloc,
alloc            2459 drivers/scsi/fcoe/fcoe.c 	.alloc = fcoe_ctlr_alloc,
alloc             780 drivers/scsi/fcoe/fcoe_transport.c 	err = ft->alloc ? ft->alloc(netdev) : -ENODEV;
alloc             192 drivers/scsi/qedf/qedf.h 	unsigned int alloc;
alloc             350 drivers/scsi/qedf/qedf_io.c 		if (!io_req->alloc)
alloc             366 drivers/scsi/qedf/qedf_io.c 	io_req->alloc = 1;
alloc             474 drivers/scsi/qedf/qedf_io.c 	io_req->alloc = 0;
alloc            1637 drivers/scsi/qedf/qedf_io.c 		if (io_req->alloc) {
alloc            1946 drivers/soc/tegra/pmc.c 	.alloc = tegra_pmc_irq_alloc,
alloc            1151 drivers/spmi/spmi-pmic-arb.c 	.alloc = qpnpint_irq_domain_alloc,
alloc             286 drivers/staging/uwb/drp-ie.c 	const struct uwb_drp_alloc *alloc;
alloc             295 drivers/staging/uwb/drp-ie.c 		alloc = &drp_ie->allocs[cnt];
alloc             296 drivers/staging/uwb/drp-ie.c 		zone_bm = le16_to_cpu(alloc->zone_bm);
alloc             297 drivers/staging/uwb/drp-ie.c 		mas_bm = le16_to_cpu(alloc->mas_bm);
alloc              68 drivers/tee/optee/shm_pool.c 	.alloc = pool_op_alloc,
alloc             148 drivers/tee/tee_shm.c 	rc = poolm->ops->alloc(poolm, shm, size);
alloc              45 drivers/tee/tee_shm_pool.c 	.alloc = pool_op_gen_alloc,
alloc             148 drivers/tee/tee_shm_pool.c 	return mgr && mgr->ops && mgr->ops->alloc && mgr->ops->free &&
alloc            2927 drivers/tty/n_gsm.c 	bool alloc = false;
alloc            2953 drivers/tty/n_gsm.c 		alloc = true;
alloc            2962 drivers/tty/n_gsm.c 		if (alloc)
alloc            1406 drivers/tty/vt/vt_ioctl.c int vt_move_to_console(unsigned int vt, int alloc)
alloc            1418 drivers/tty/vt/vt_ioctl.c 	if (alloc && vc_allocate(vt)) {
alloc             207 drivers/usb/host/imx21-dbg.c 		if (etd->alloc)
alloc             304 drivers/usb/host/imx21-dbg.c 			etd->alloc,
alloc             154 drivers/usb/host/imx21-hcd.c 		if (etd->alloc == 0) {
alloc             156 drivers/usb/host/imx21-hcd.c 			etd->alloc = 1;
alloc             203 drivers/usb/host/imx21-hcd.c 	if (imx21->etd[num].alloc == 0) {
alloc            1446 drivers/usb/host/imx21-hcd.c 		if (imx21->etd[i].alloc && imx21->etd[i].ep == ep) {
alloc             357 drivers/usb/host/imx21-hcd.h 	int alloc;
alloc             137 drivers/usb/mon/mon_bin.c 	size_t alloc;		/* Length of data (can be zero) */
alloc            1072 drivers/usb/mon/mon_bin.c 		if (getb.alloc > 0x10000000)	/* Want to cast to u32 */
alloc            1076 drivers/usb/mon/mon_bin.c 		    getb.data, (unsigned int)getb.alloc);
alloc            1848 drivers/video/fbdev/amifb.c 	int size, alloc;
alloc            1851 drivers/video/fbdev/amifb.c 	alloc = var->height * var->width;
alloc             641 drivers/video/fbdev/omap/lcdc.c 	region->alloc = 1;
alloc              49 drivers/video/fbdev/omap/omapfb.h 	unsigned	alloc:1;	/* allocated by the driver */
alloc            1330 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	rg->alloc = 0;
alloc            1374 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	rg->alloc = false;
alloc            1414 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	rg->alloc = 1;
alloc              50 drivers/video/fbdev/omap2/omapfb/omapfb.h 	bool		alloc;		/* allocated by the driver */
alloc            1006 drivers/vme/bridges/vme_fake.c 	void *alloc = kmalloc(size, GFP_KERNEL);
alloc            1008 drivers/vme/bridges/vme_fake.c 	if (alloc)
alloc            1009 drivers/vme/bridges/vme_fake.c 		*dma = fake_ptr_to_pci(alloc);
alloc            1011 drivers/vme/bridges/vme_fake.c 	return alloc;
alloc             543 drivers/xen/swiotlb-xen.c 	.alloc = xen_swiotlb_alloc_coherent,
alloc             219 drivers/xen/xenbus/xenbus_comms.c 			void *alloc;
alloc             272 drivers/xen/xenbus/xenbus_comms.c 		state.alloc = kmalloc(len, GFP_NOIO | __GFP_HIGH);
alloc             273 drivers/xen/xenbus/xenbus_comms.c 		if (!state.alloc)
alloc             279 drivers/xen/xenbus/xenbus_comms.c 			state.body = state.alloc;
alloc             327 drivers/xen/xenbus/xenbus_comms.c 	state.alloc = NULL;
alloc             333 drivers/xen/xenbus/xenbus_comms.c 	kfree(state.alloc);
alloc             334 drivers/xen/xenbus/xenbus_comms.c 	state.alloc = NULL;
alloc            1596 fs/binfmt_elf.c  alloc:
alloc            1620 fs/binfmt_elf.c 				goto alloc;
alloc            2636 fs/btrfs/block-group.c 			     u64 bytenr, u64 num_bytes, int alloc)
alloc            2649 fs/btrfs/block-group.c 	if (alloc)
alloc            2670 fs/btrfs/block-group.c 		if (!alloc && !btrfs_block_group_cache_done(cache))
alloc            2685 fs/btrfs/block-group.c 		if (alloc) {
alloc            2729 fs/btrfs/block-group.c 		if (!alloc && old_val == 0)
alloc             214 fs/btrfs/block-group.h 			     u64 bytenr, u64 num_bytes, int alloc);
alloc             206 fs/ceph/caps.c 	int alloc = 0;
alloc             232 fs/ceph/caps.c 			alloc++;
alloc             274 fs/ceph/caps.c 			ctx, need, have + alloc);
alloc             280 fs/ceph/caps.c 		BUG_ON(have + alloc != need);
alloc             286 fs/ceph/caps.c 	mdsc->caps_total_count += alloc;
alloc             287 fs/ceph/caps.c 	mdsc->caps_reserve_count += alloc;
alloc             295 fs/ceph/caps.c 		__ceph_unreserve_caps(mdsc, have + alloc);
alloc             199 fs/dlm/lowcomms.c static struct connection *__nodeid2con(int nodeid, gfp_t alloc)
alloc             205 fs/dlm/lowcomms.c 	if (con || !alloc)
alloc             208 fs/dlm/lowcomms.c 	con = kmem_cache_zalloc(con_cache, alloc);
alloc            1047 fs/f2fs/data.c 		goto alloc;
alloc            1052 fs/f2fs/data.c alloc:
alloc            3146 fs/f2fs/f2fs.h 			unsigned int val, int alloc);
alloc            3692 fs/f2fs/segment.c 					unsigned int val, int alloc)
alloc            3701 fs/f2fs/segment.c 		if (alloc && __has_cursum_space(journal, 1, NAT_JOURNAL))
alloc            3707 fs/f2fs/segment.c 		if (alloc && __has_cursum_space(journal, 1, SIT_JOURNAL))
alloc              70 fs/jfs/jfs_dmap.c static int dbAdjCtl(struct bmap * bmp, s64 blkno, int newval, int alloc,
alloc            2495 fs/jfs/jfs_dmap.c dbAdjCtl(struct bmap * bmp, s64 blkno, int newval, int alloc, int level)
alloc            2541 fs/jfs/jfs_dmap.c 	if (alloc) {
alloc            2579 fs/jfs/jfs_dmap.c 			     dbAdjCtl(bmp, blkno, dcp->stree[ROOT], alloc,
alloc            2585 fs/jfs/jfs_dmap.c 				if (alloc) {
alloc              33 fs/kernfs/inode.c static struct kernfs_iattrs *__kernfs_iattrs(struct kernfs_node *kn, int alloc)
alloc              40 fs/kernfs/inode.c 	if (kn->iattr || !alloc)
alloc             379 fs/nfs/dns_resolve.c 	.alloc		= nfs_dns_ent_alloc,
alloc             256 fs/nfs/nfs3acl.c 	struct posix_acl *orig = acl, *dfacl = NULL, *alloc;
alloc             262 fs/nfs/nfs3acl.c 			alloc = get_acl(inode, ACL_TYPE_DEFAULT);
alloc             263 fs/nfs/nfs3acl.c 			if (IS_ERR(alloc))
alloc             265 fs/nfs/nfs3acl.c 			dfacl = alloc;
alloc             269 fs/nfs/nfs3acl.c 			alloc = get_acl(inode, ACL_TYPE_ACCESS);
alloc             270 fs/nfs/nfs3acl.c 			if (IS_ERR(alloc))
alloc             273 fs/nfs/nfs3acl.c 			acl = alloc;
alloc             279 fs/nfs/nfs3acl.c 		alloc = posix_acl_from_mode(inode->i_mode, GFP_KERNEL);
alloc             280 fs/nfs/nfs3acl.c 		if (IS_ERR(alloc))
alloc             282 fs/nfs/nfs3acl.c 		acl = alloc;
alloc             293 fs/nfs/nfs3acl.c 	status = PTR_ERR(alloc);
alloc             258 fs/nfsd/export.c 	.alloc		= expkey_alloc,
alloc             776 fs/nfsd/export.c 	.alloc		= svc_export_alloc,
alloc             457 fs/nfsd/nfs4acl.c 	int alloc;
alloc             466 fs/nfsd/nfs4acl.c 	alloc = sizeof(struct posix_ace_state_array)
alloc             468 fs/nfsd/nfs4acl.c 	state->users = kzalloc(alloc, GFP_KERNEL);
alloc             471 fs/nfsd/nfs4acl.c 	state->groups = kzalloc(alloc, GFP_KERNEL);
alloc             194 fs/nfsd/nfs4idmap.c 	.alloc		= ent_alloc,
alloc             357 fs/nfsd/nfs4idmap.c 	.alloc		= ent_alloc,
alloc            1278 fs/nilfs2/segment.c 	int err, alloc = 0;
alloc            1297 fs/nilfs2/segment.c 			alloc++;
alloc            1308 fs/nilfs2/segment.c 			alloc++;
alloc            1316 fs/nilfs2/segment.c 	if (alloc) {
alloc            1282 fs/ocfs2/aops.c 					loff_t pos, unsigned len, int alloc)
alloc            1289 fs/ocfs2/aops.c 	if (alloc == 0)
alloc            2100 fs/ocfs2/aops.c ocfs2_dio_alloc_write_ctx(struct buffer_head *bh, int *alloc)
alloc            2115 fs/ocfs2/aops.c 	*alloc = 1;
alloc            2771 fs/ocfs2/dir.c 	u32 alloc, dx_alloc, bit_off, len, num_dx_entries = 0;
alloc            2792 fs/ocfs2/dir.c 	alloc = ocfs2_clusters_for_bytes(sb, bytes);
alloc            2829 fs/ocfs2/dir.c 	BUG_ON(alloc > 2);
alloc            2831 fs/ocfs2/dir.c 	ret = ocfs2_reserve_clusters(osb, alloc + dx_alloc, &data_ac);
alloc            2841 fs/ocfs2/dir.c 	if (alloc == 2)
alloc            2852 fs/ocfs2/dir.c 		ocfs2_clusters_to_bytes(osb->sb, alloc + dx_alloc));
alloc            3020 fs/ocfs2/dir.c 	if (alloc > len) {
alloc              37 fs/ocfs2/localalloc.c static u32 ocfs2_local_alloc_count_bits(struct ocfs2_dinode *alloc);
alloc              40 fs/ocfs2/localalloc.c 					     struct ocfs2_dinode *alloc,
alloc              44 fs/ocfs2/localalloc.c static void ocfs2_clear_local_alloc(struct ocfs2_dinode *alloc);
alloc              48 fs/ocfs2/localalloc.c 				    struct ocfs2_dinode *alloc,
alloc             273 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc = NULL;
alloc             307 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) alloc_bh->b_data;
alloc             308 fs/ocfs2/localalloc.c 	la = OCFS2_LOCAL_ALLOC(alloc);
alloc             310 fs/ocfs2/localalloc.c 	if (!(le32_to_cpu(alloc->i_flags) &
alloc             327 fs/ocfs2/localalloc.c 	num_used = ocfs2_local_alloc_count_bits(alloc);
alloc             332 fs/ocfs2/localalloc.c 	    || alloc->id1.bitmap1.i_used
alloc             333 fs/ocfs2/localalloc.c 	    || alloc->id1.bitmap1.i_total
alloc             338 fs/ocfs2/localalloc.c 		     num_used, le32_to_cpu(alloc->id1.bitmap1.i_used),
alloc             339 fs/ocfs2/localalloc.c 		     le32_to_cpu(alloc->id1.bitmap1.i_total),
alloc             340 fs/ocfs2/localalloc.c 		     OCFS2_LOCAL_ALLOC(alloc)->la_bm_off);
alloc             377 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc = NULL;
alloc             426 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) bh->b_data;
alloc             428 fs/ocfs2/localalloc.c 	alloc_copy = kmemdup(alloc, bh->b_size, GFP_NOFS);
alloc             441 fs/ocfs2/localalloc.c 	ocfs2_clear_local_alloc(alloc);
alloc             485 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc;
alloc             516 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) alloc_bh->b_data;
alloc             517 fs/ocfs2/localalloc.c 	ocfs2_clear_local_alloc(alloc);
alloc             519 fs/ocfs2/localalloc.c 	ocfs2_compute_meta_ecc(osb->sb, alloc_bh->b_data, &alloc->i_check);
alloc             549 fs/ocfs2/localalloc.c 					struct ocfs2_dinode *alloc)
alloc             584 fs/ocfs2/localalloc.c 	status = ocfs2_sync_local_to_main(osb, handle, alloc,
alloc             621 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc;
alloc             652 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) osb->local_alloc_bh->b_data;
alloc             655 fs/ocfs2/localalloc.c 	if (le32_to_cpu(alloc->id1.bitmap1.i_used) !=
alloc             656 fs/ocfs2/localalloc.c 	    ocfs2_local_alloc_count_bits(alloc)) {
alloc             658 fs/ocfs2/localalloc.c 				(unsigned long long)le64_to_cpu(alloc->i_blkno),
alloc             659 fs/ocfs2/localalloc.c 				le32_to_cpu(alloc->id1.bitmap1.i_used),
alloc             660 fs/ocfs2/localalloc.c 				ocfs2_local_alloc_count_bits(alloc));
alloc             665 fs/ocfs2/localalloc.c 	free_bits = le32_to_cpu(alloc->id1.bitmap1.i_total) -
alloc             666 fs/ocfs2/localalloc.c 		le32_to_cpu(alloc->id1.bitmap1.i_used);
alloc             687 fs/ocfs2/localalloc.c 		free_bits = le32_to_cpu(alloc->id1.bitmap1.i_total) -
alloc             688 fs/ocfs2/localalloc.c 			le32_to_cpu(alloc->id1.bitmap1.i_used);
alloc             725 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc;
alloc             731 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) osb->local_alloc_bh->b_data;
alloc             732 fs/ocfs2/localalloc.c 	la = OCFS2_LOCAL_ALLOC(alloc);
alloc             734 fs/ocfs2/localalloc.c 	start = ocfs2_local_alloc_find_clear_bits(osb, alloc, &bits_wanted,
alloc             762 fs/ocfs2/localalloc.c 	le32_add_cpu(&alloc->id1.bitmap1.i_used, *num_bits);
alloc             781 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc;
alloc             787 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) osb->local_alloc_bh->b_data;
alloc             788 fs/ocfs2/localalloc.c 	la = OCFS2_LOCAL_ALLOC(alloc);
alloc             806 fs/ocfs2/localalloc.c 	le32_add_cpu(&alloc->id1.bitmap1.i_used, -num_bits);
alloc             813 fs/ocfs2/localalloc.c static u32 ocfs2_local_alloc_count_bits(struct ocfs2_dinode *alloc)
alloc             816 fs/ocfs2/localalloc.c 	struct ocfs2_local_alloc *la = OCFS2_LOCAL_ALLOC(alloc);
alloc             825 fs/ocfs2/localalloc.c 				     struct ocfs2_dinode *alloc,
alloc             835 fs/ocfs2/localalloc.c 	if (!alloc->id1.bitmap1.i_total) {
alloc             864 fs/ocfs2/localalloc.c 	bitmap = OCFS2_LOCAL_ALLOC(alloc)->la_bitmap;
alloc             867 fs/ocfs2/localalloc.c 	left = le32_to_cpu(alloc->id1.bitmap1.i_total);
alloc             906 fs/ocfs2/localalloc.c 		le32_to_cpu(alloc->id1.bitmap1.i_total),
alloc             912 fs/ocfs2/localalloc.c static void ocfs2_clear_local_alloc(struct ocfs2_dinode *alloc)
alloc             914 fs/ocfs2/localalloc.c 	struct ocfs2_local_alloc *la = OCFS2_LOCAL_ALLOC(alloc);
alloc             917 fs/ocfs2/localalloc.c 	alloc->id1.bitmap1.i_total = 0;
alloc             918 fs/ocfs2/localalloc.c 	alloc->id1.bitmap1.i_used = 0;
alloc             951 fs/ocfs2/localalloc.c 				    struct ocfs2_dinode *alloc,
alloc             960 fs/ocfs2/localalloc.c 	struct ocfs2_local_alloc *la = OCFS2_LOCAL_ALLOC(alloc);
alloc             963 fs/ocfs2/localalloc.c 	     le32_to_cpu(alloc->id1.bitmap1.i_total),
alloc             964 fs/ocfs2/localalloc.c 	     le32_to_cpu(alloc->id1.bitmap1.i_used));
alloc             966 fs/ocfs2/localalloc.c 	if (!alloc->id1.bitmap1.i_total) {
alloc             970 fs/ocfs2/localalloc.c 	if (le32_to_cpu(alloc->id1.bitmap1.i_used) ==
alloc             971 fs/ocfs2/localalloc.c 	    le32_to_cpu(alloc->id1.bitmap1.i_total)) {
alloc             979 fs/ocfs2/localalloc.c 	left = le32_to_cpu(alloc->id1.bitmap1.i_total);
alloc            1154 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc = NULL;
alloc            1157 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) osb->local_alloc_bh->b_data;
alloc            1158 fs/ocfs2/localalloc.c 	la = OCFS2_LOCAL_ALLOC(alloc);
alloc            1161 fs/ocfs2/localalloc.c 		le32_to_cpu(alloc->id1.bitmap1.i_total),
alloc            1212 fs/ocfs2/localalloc.c 	alloc->id1.bitmap1.i_total = cpu_to_le32(cluster_count);
alloc            1217 fs/ocfs2/localalloc.c 	alloc->id1.bitmap1.i_used = 0;
alloc            1218 fs/ocfs2/localalloc.c 	memset(OCFS2_LOCAL_ALLOC(alloc)->la_bitmap, 0,
alloc            1222 fs/ocfs2/localalloc.c 			     OCFS2_LOCAL_ALLOC(alloc)->la_bitmap);
alloc            1225 fs/ocfs2/localalloc.c 		OCFS2_LOCAL_ALLOC(alloc)->la_bm_off,
alloc            1226 fs/ocfs2/localalloc.c 		le32_to_cpu(alloc->id1.bitmap1.i_total));
alloc            1243 fs/ocfs2/localalloc.c 	struct ocfs2_dinode *alloc;
alloc            1268 fs/ocfs2/localalloc.c 	alloc = (struct ocfs2_dinode *) osb->local_alloc_bh->b_data;
alloc            1275 fs/ocfs2/localalloc.c 	alloc_copy = kmemdup(alloc, osb->local_alloc_bh->b_size, GFP_NOFS);
alloc            1291 fs/ocfs2/localalloc.c 	ocfs2_clear_local_alloc(alloc);
alloc              27 fs/ocfs2/localalloc.h 					struct ocfs2_dinode *alloc);
alloc             321 fs/quota/quota_v2.c 	bool alloc = false;
alloc             330 fs/quota/quota_v2.c 		alloc = true;
alloc             338 fs/quota/quota_v2.c 	if (alloc)
alloc             199 fs/xfs/libxfs/xfs_alloc.c 	rec.alloc.ar_startblock = cpu_to_be32(bno);
alloc             200 fs/xfs/libxfs/xfs_alloc.c 	rec.alloc.ar_blockcount = cpu_to_be32(len);
alloc             223 fs/xfs/libxfs/xfs_alloc.c 	*bno = be32_to_cpu(rec->alloc.ar_startblock);
alloc             224 fs/xfs/libxfs/xfs_alloc.c 	*len = be32_to_cpu(rec->alloc.ar_blockcount);
alloc            3077 fs/xfs/libxfs/xfs_alloc.c 	irec.ar_startblock = be32_to_cpu(rec->alloc.ar_startblock);
alloc            3078 fs/xfs/libxfs/xfs_alloc.c 	irec.ar_blockcount = be32_to_cpu(rec->alloc.ar_blockcount);
alloc             132 fs/xfs/libxfs/xfs_alloc_btree.c 		len = rec->alloc.ar_blockcount;
alloc             135 fs/xfs/libxfs/xfs_alloc_btree.c 		if (be32_to_cpu(rec->alloc.ar_blockcount) <=
alloc             138 fs/xfs/libxfs/xfs_alloc_btree.c 		len = rec->alloc.ar_blockcount;
alloc             189 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_startblock = rec->alloc.ar_startblock;
alloc             190 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_blockcount = rec->alloc.ar_blockcount;
alloc             200 fs/xfs/libxfs/xfs_alloc_btree.c 	x = be32_to_cpu(rec->alloc.ar_startblock);
alloc             201 fs/xfs/libxfs/xfs_alloc_btree.c 	x += be32_to_cpu(rec->alloc.ar_blockcount) - 1;
alloc             202 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_startblock = cpu_to_be32(x);
alloc             203 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_blockcount = 0;
alloc             211 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_blockcount = rec->alloc.ar_blockcount;
alloc             212 fs/xfs/libxfs/xfs_alloc_btree.c 	key->alloc.ar_startblock = 0;
alloc             220 fs/xfs/libxfs/xfs_alloc_btree.c 	rec->alloc.ar_startblock = cpu_to_be32(cur->bc_rec.a.ar_startblock);
alloc             221 fs/xfs/libxfs/xfs_alloc_btree.c 	rec->alloc.ar_blockcount = cpu_to_be32(cur->bc_rec.a.ar_blockcount);
alloc             242 fs/xfs/libxfs/xfs_alloc_btree.c 	xfs_alloc_key_t		*kp = &key->alloc;
alloc             253 fs/xfs/libxfs/xfs_alloc_btree.c 	xfs_alloc_key_t		*kp = &key->alloc;
alloc             269 fs/xfs/libxfs/xfs_alloc_btree.c 	return (int64_t)be32_to_cpu(k1->alloc.ar_startblock) -
alloc             270 fs/xfs/libxfs/xfs_alloc_btree.c 			  be32_to_cpu(k2->alloc.ar_startblock);
alloc             281 fs/xfs/libxfs/xfs_alloc_btree.c 	diff =  be32_to_cpu(k1->alloc.ar_blockcount) -
alloc             282 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(k2->alloc.ar_blockcount);
alloc             286 fs/xfs/libxfs/xfs_alloc_btree.c 	return  be32_to_cpu(k1->alloc.ar_startblock) -
alloc             287 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(k2->alloc.ar_startblock);
alloc             389 fs/xfs/libxfs/xfs_alloc_btree.c 	return be32_to_cpu(k1->alloc.ar_startblock) <
alloc             390 fs/xfs/libxfs/xfs_alloc_btree.c 	       be32_to_cpu(k2->alloc.ar_startblock);
alloc             399 fs/xfs/libxfs/xfs_alloc_btree.c 	return be32_to_cpu(r1->alloc.ar_startblock) +
alloc             400 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(r1->alloc.ar_blockcount) <=
alloc             401 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(r2->alloc.ar_startblock);
alloc             410 fs/xfs/libxfs/xfs_alloc_btree.c 	return be32_to_cpu(k1->alloc.ar_blockcount) <
alloc             411 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(k2->alloc.ar_blockcount) ||
alloc             412 fs/xfs/libxfs/xfs_alloc_btree.c 		(k1->alloc.ar_blockcount == k2->alloc.ar_blockcount &&
alloc             413 fs/xfs/libxfs/xfs_alloc_btree.c 		 be32_to_cpu(k1->alloc.ar_startblock) <
alloc             414 fs/xfs/libxfs/xfs_alloc_btree.c 		 be32_to_cpu(k2->alloc.ar_startblock));
alloc             423 fs/xfs/libxfs/xfs_alloc_btree.c 	return be32_to_cpu(r1->alloc.ar_blockcount) <
alloc             424 fs/xfs/libxfs/xfs_alloc_btree.c 		be32_to_cpu(r2->alloc.ar_blockcount) ||
alloc             425 fs/xfs/libxfs/xfs_alloc_btree.c 		(r1->alloc.ar_blockcount == r2->alloc.ar_blockcount &&
alloc             426 fs/xfs/libxfs/xfs_alloc_btree.c 		 be32_to_cpu(r1->alloc.ar_startblock) <
alloc             427 fs/xfs/libxfs/xfs_alloc_btree.c 		 be32_to_cpu(r2->alloc.ar_startblock));
alloc            2695 fs/xfs/libxfs/xfs_btree.c 	XFS_BTREE_STATS_INC(cur, alloc);
alloc            2950 fs/xfs/libxfs/xfs_btree.c 	XFS_BTREE_STATS_INC(cur, alloc);
alloc            3047 fs/xfs/libxfs/xfs_btree.c 	XFS_BTREE_STATS_INC(cur, alloc);
alloc              35 fs/xfs/libxfs/xfs_btree.h 	xfs_alloc_key_t			alloc;
alloc              45 fs/xfs/libxfs/xfs_btree.h 	struct xfs_alloc_rec		alloc;
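
The "by size" btree comparisons above order records by big-endian ar_blockcount first and fall back to ar_startblock on a tie; equality can be tested on the raw big-endian values, but ordering needs a byte swap. A minimal userspace sketch of that ordering, assuming a made-up struct rec and using ntohl() in place of be32_to_cpu() (illustration only, not the XFS code):

    #include <arpa/inet.h>  /* ntohl()/htonl(): big-endian <-> host order */
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical on-disk record: both fields stored big-endian. */
    struct rec {
        uint32_t start;     /* cpu_to_be32(start block) */
        uint32_t count;     /* cpu_to_be32(block count) */
    };

    /* Order by length first, then by start block. */
    static int rec_before(const struct rec *a, const struct rec *b)
    {
        return ntohl(a->count) < ntohl(b->count) ||
               (a->count == b->count &&
                ntohl(a->start) < ntohl(b->start));
    }

    int main(void)
    {
        struct rec a = { htonl(100), htonl(8) };
        struct rec b = { htonl(50),  htonl(16) };

        printf("a before b: %d\n", rec_before(&a, &b));  /* 1: 8 < 16 blocks */
        return 0;
    }
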
alloc             607 fs/xfs/libxfs/xfs_ialloc.c 	int			*alloc)
alloc             790 fs/xfs/libxfs/xfs_ialloc.c 		*alloc = 0;
alloc             898 fs/xfs/libxfs/xfs_ialloc.c 	*alloc = 1;
alloc            1966 fs/xfs/libxfs/xfs_ialloc.c 		xic->alloc = xfs_inobt_irec_to_allocmask(&rec);
alloc              22 fs/xfs/libxfs/xfs_ialloc.h 	uint64_t	alloc;		/* inode phys. allocation bitmap for
alloc             182 fs/xfs/libxfs/xfs_trans_resv.c 	bool			alloc)
alloc             188 fs/xfs/libxfs/xfs_trans_resv.c 	if (alloc) {
alloc             101 fs/xfs/scrub/alloc.c 	bno = be32_to_cpu(rec->alloc.ar_startblock);
alloc             102 fs/xfs/scrub/alloc.c 	len = be32_to_cpu(rec->alloc.ar_blockcount);
alloc            2548 fs/xfs/xfs_inode.c 		if ((xic->alloc & XFS_INOBT_MASK(ioffset)) == 0) {
alloc              60 include/crypto/algapi.h 	struct crypto_instance *(*alloc)(struct rtattr **tb);
alloc              80 include/linux/dma-mapping.h 	void* (*alloc)(struct device *dev, size_t size,
alloc             702 include/linux/intel-iommu.h 					 u8 devfn, int alloc);
alloc             228 include/linux/io-pgtable.h 	struct io_pgtable *(*alloc)(struct io_pgtable_cfg *cfg, void *cookie);
alloc             115 include/linux/irqdomain.h 	int (*alloc)(struct irq_domain *d, unsigned int virq,
alloc             243 include/linux/kprobes.h 	void *(*alloc)(void);	/* allocate insn page */
alloc            1587 include/linux/lsm_hooks.h 					void **buffer, bool alloc);
alloc              23 include/linux/mempool.h 	mempool_alloc_t *alloc;
alloc              24 include/linux/memremap.h 	unsigned long alloc;
alloc             114 include/linux/perf_event.h 	int		alloc;	/* extra register already allocated */
alloc             340 include/linux/remoteproc.h 	int (*alloc)(struct rproc *rproc, struct rproc_mem_entry *mem);
alloc             603 include/linux/remoteproc.h 		     int (*alloc)(struct rproc *, struct rproc_mem_entry *),
alloc             149 include/linux/security.h 				 void **buffer, bool alloc);
alloc             351 include/linux/security.h int security_inode_getsecurity(struct inode *inode, const char *name, void **buffer, bool alloc);
alloc             853 include/linux/security.h static inline int security_inode_getsecurity(struct inode *inode, const char *name, void **buffer, bool alloc)
alloc              89 include/linux/sunrpc/cache.h 	struct cache_head *	(*alloc)(void);
alloc             115 include/linux/sunrpc/rpc_pipe_fs.h 		struct rpc_pipe_dir_object *(*alloc)(void *),
alloc             219 include/linux/tee_drv.h 	int (*alloc)(struct tee_shm_pool_mgr *poolmgr, struct tee_shm *shm,
alloc             149 include/linux/vt_kern.h extern int vt_move_to_console(unsigned int vt, int alloc);
alloc             112 include/media/rc-map.h 	unsigned int		alloc;
alloc             119 include/media/videobuf2-core.h 	void		*(*alloc)(struct device *dev, unsigned long attrs,
alloc            5936 include/net/mac80211.h 	void *(*alloc)(struct ieee80211_hw *hw);
alloc              93 include/net/page_pool.h 	struct pp_alloc_cache alloc ____cacheline_aligned_in_smp;
alloc              45 include/net/tc_act/tc_ife.h 	int	(*alloc)(struct tcf_meta_info *, void *, gfp_t);
alloc             307 include/scsi/libfcoe.h 	int (*alloc) (struct net_device *device);
alloc             534 include/sound/gus.h void snd_gf1_mem_lock(struct snd_gf1_mem * alloc, int xup);
alloc             535 include/sound/gus.h int snd_gf1_mem_xfree(struct snd_gf1_mem * alloc, struct snd_gf1_mem_block * block);
alloc             536 include/sound/gus.h struct snd_gf1_mem_block *snd_gf1_mem_alloc(struct snd_gf1_mem * alloc, int owner,
alloc             539 include/sound/gus.h int snd_gf1_mem_free(struct snd_gf1_mem * alloc, unsigned int address);
alloc             540 include/sound/gus.h int snd_gf1_mem_free_owner(struct snd_gf1_mem * alloc, int owner);
alloc             247 include/uapi/drm/amdgpu_drm.h 		} alloc;
alloc             320 include/uapi/misc/habanalabs.h 		} alloc;
alloc             313 kernel/dma/mapping.c 	else if (ops->alloc)
alloc             314 kernel/dma/mapping.c 		cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs);
alloc              54 kernel/dma/virt.c 	.alloc			= dma_virt_alloc,
alloc            1296 kernel/irq/irqdomain.c 	if (!domain->ops->alloc) {
alloc            1301 kernel/irq/irqdomain.c 	return domain->ops->alloc(domain, irq_base, nr_irqs, arg);
alloc            1679 kernel/irq/irqdomain.c 	if (domain->ops->alloc)
alloc             184 kernel/irq/msi.c 	.alloc		= msi_domain_alloc,
alloc             119 kernel/kprobes.c 	.alloc = alloc_insn_page,
alloc             173 kernel/kprobes.c 	kip->insns = c->alloc();
alloc             297 kernel/kprobes.c 	.alloc = alloc_insn_page,
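
A pattern that recurs through the headers and call sites above is an ops structure whose .alloc member is a hook, with the caller checking for a NULL hook before dispatching (as kernel/dma/mapping.c and kernel/irq/irqdomain.c do). A small userspace sketch of that shape, with invented names (my_ops, my_backend_alloc, do_alloc):

    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical ops table: a backend may leave .alloc NULL. */
    struct my_ops {
        void *(*alloc)(size_t size);
        void  (*free)(void *p);
    };

    static void *my_backend_alloc(size_t size)
    {
        printf("backend alloc of %zu bytes\n", size);
        return malloc(size);
    }

    static const struct my_ops backend_ops = {
        .alloc = my_backend_alloc,
        .free  = free,
    };

    /* Dispatch through the hook, falling back when no .alloc is provided. */
    static void *do_alloc(const struct my_ops *ops, size_t size)
    {
        if (ops && ops->alloc)
            return ops->alloc(size);
        return malloc(size);    /* generic fallback path */
    }

    int main(void)
    {
        void *p = do_alloc(&backend_ops, 64);

        backend_ops.free(p);
        return 0;
    }
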
alloc            1546 kernel/power/snapshot.c 	unsigned long alloc;
alloc            1551 kernel/power/snapshot.c 	alloc = avail_normal - alloc_normal;
alloc            1552 kernel/power/snapshot.c 	if (nr_pages < alloc)
alloc            1553 kernel/power/snapshot.c 		alloc = nr_pages;
alloc            1555 kernel/power/snapshot.c 	return preallocate_image_pages(alloc, GFP_IMAGE);
alloc            1578 kernel/power/snapshot.c 	unsigned long alloc = __fraction(nr_pages, highmem, total);
alloc            1580 kernel/power/snapshot.c 	return preallocate_image_pages(alloc, GFP_IMAGE | __GFP_HIGHMEM);
alloc            1704 kernel/power/snapshot.c 	unsigned long alloc, save_highmem, pages_highmem, avail_normal;
alloc            1796 kernel/power/snapshot.c 	alloc = count - max_size;
alloc            1797 kernel/power/snapshot.c 	if (alloc > pages_highmem)
alloc            1798 kernel/power/snapshot.c 		alloc -= pages_highmem;
alloc            1800 kernel/power/snapshot.c 		alloc = 0;
alloc            1801 kernel/power/snapshot.c 	pages = preallocate_image_memory(alloc, avail_normal);
alloc            1802 kernel/power/snapshot.c 	if (pages < alloc) {
alloc            1804 kernel/power/snapshot.c 		alloc -= pages;
alloc            1806 kernel/power/snapshot.c 		pages_highmem = preallocate_image_highmem(alloc);
alloc            1807 kernel/power/snapshot.c 		if (pages_highmem < alloc)
alloc            1814 kernel/power/snapshot.c 		alloc = (count - pages) - size;
alloc            1815 kernel/power/snapshot.c 		pages += preallocate_image_highmem(alloc);
alloc            1821 kernel/power/snapshot.c 		alloc = max_size - size;
alloc            1822 kernel/power/snapshot.c 		size = preallocate_highmem_fraction(alloc, highmem, count);
alloc            1824 kernel/power/snapshot.c 		alloc -= size;
alloc            1825 kernel/power/snapshot.c 		size = preallocate_image_memory(alloc, avail_normal);
alloc            1826 kernel/power/snapshot.c 		pages_highmem += preallocate_image_highmem(alloc - size);
alloc             598 kernel/resource.c 	struct resource tmp = *new, avail, alloc;
alloc             626 kernel/resource.c 			alloc.flags = avail.flags;
alloc             627 kernel/resource.c 			alloc.start = constraint->alignf(constraint->alignf_data, &avail,
alloc             629 kernel/resource.c 			alloc.end = alloc.start + size - 1;
alloc             630 kernel/resource.c 			if (alloc.start <= alloc.end &&
alloc             631 kernel/resource.c 			    resource_contains(&avail, &alloc)) {
alloc             632 kernel/resource.c 				new->start = alloc.start;
alloc             633 kernel/resource.c 				new->end = alloc.end;
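
kernel/resource.c above picks an aligned start inside an available window and only accepts the candidate if [start, start + size - 1] stays inside that window (the resource_contains() test). A userspace sketch of the same fit check, with hypothetical align_up()/fit_in_window() helpers and an inclusive-range struct:

    #include <stdint.h>
    #include <stdio.h>

    struct range { uint64_t start, end; };  /* inclusive, like struct resource */

    static uint64_t align_up(uint64_t x, uint64_t a)
    {
        return (x + a - 1) & ~(a - 1);      /* a must be a power of two */
    }

    /* Return 1 and fill *out if an aligned block of `size` fits in `avail`. */
    static int fit_in_window(const struct range *avail, uint64_t size,
                             uint64_t align, struct range *out)
    {
        uint64_t start = align_up(avail->start, align);
        uint64_t end = start + size - 1;

        if (start <= end && start >= avail->start && end <= avail->end) {
            out->start = start;
            out->end = end;
            return 1;
        }
        return 0;
    }

    int main(void)
    {
        struct range avail = { 0x1003, 0x1fff }, got;

        if (fit_in_window(&avail, 0x100, 0x100, &got))
            printf("allocated [0x%llx, 0x%llx]\n",
                   (unsigned long long)got.start,
                   (unsigned long long)got.end);
        return 0;
    }
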
alloc             839 kernel/trace/trace.c 	int alloc;
alloc             850 kernel/trace/trace.c 	alloc = sizeof(*entry) + size + 2; /* possible \n added */
alloc             854 kernel/trace/trace.c 	event = __trace_buffer_lock_reserve(buffer, TRACE_PRINT, alloc, 
alloc             385 lib/idr.c      	struct ida_bitmap *bitmap, *alloc = NULL;
alloc             415 lib/idr.c      		bitmap = alloc;
alloc             419 lib/idr.c      			goto alloc;
alloc             442 lib/idr.c      			bitmap = alloc;
alloc             446 lib/idr.c      				goto alloc;
alloc             458 lib/idr.c      	if (bitmap != alloc)
alloc             459 lib/idr.c      		kfree(alloc);
alloc             463 lib/idr.c      alloc:
alloc             465 lib/idr.c      	alloc = kzalloc(sizeof(*bitmap), gfp);
alloc             466 lib/idr.c      	if (!alloc)
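
lib/idr.c above follows a preallocate-and-retry shape: when the locked fast path finds it needs a bitmap, it jumps to the alloc: label, drops the lock to kzalloc() a spare, and retries; a spare that ends up unused is kfree()d at the end. A compressed userspace sketch of that retry loop (try_insert() and the toy table are invented, and the toy always consumes its spare):

    #include <stdio.h>
    #include <stdlib.h>
    #include <pthread.h>

    #define SLOTS 8

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static void *table[SLOTS];

    /*
     * Locked fast path of a toy table: storing an id needs one node.
     * Returns 0 on success (spare consumed), 1 if a spare must be allocated.
     */
    static int try_insert(int id, void *spare)
    {
        if (!spare)
            return 1;
        table[id % SLOTS] = spare;
        return 0;
    }

    static int insert_with_prealloc(int id)
    {
        void *spare = NULL;

        for (;;) {
            int need;

            pthread_mutex_lock(&lock);
            need = try_insert(id, spare);
            pthread_mutex_unlock(&lock);
            if (!need)
                return 0;       /* spare (if any) was consumed */

            /* Allocate outside the lock, then take it again and retry. */
            spare = calloc(1, 64);
            if (!spare)
                return -1;
        }
    }

    int main(void)
    {
        printf("insert: %d\n", insert_with_prealloc(3));
        free(table[3]);
        return 0;
    }
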
alloc             490 lib/test_overflow.c #define alloc010(alloc, arg, sz) alloc(sz, alloc_GFP)
alloc             491 lib/test_overflow.c #define alloc011(alloc, arg, sz) alloc(sz, alloc_GFP, NUMA_NO_NODE)
alloc             492 lib/test_overflow.c #define alloc000(alloc, arg, sz) alloc(sz)
alloc             493 lib/test_overflow.c #define alloc001(alloc, arg, sz) alloc(sz, NUMA_NO_NODE)
alloc             494 lib/test_overflow.c #define alloc110(alloc, arg, sz) alloc(arg, sz, alloc_GFP)
alloc             509 lib/test_overflow.c 	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg, 1);\
alloc             517 lib/test_overflow.c 	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg,	\
alloc             526 lib/test_overflow.c 	ptr = alloc ## want_arg ## want_gfp ## want_node (func, arg,	\
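
The test_overflow macros above pick a calling convention by token-pasting flag digits onto the name alloc, so the right wrapper (size-only, size plus gfp, and so on) is selected at preprocessing time. A stripped-down illustration of the same ## trick, with made-up wrappers and a FAKE_GFP stand-in:

    #include <stdio.h>
    #include <stdlib.h>

    #define FAKE_GFP 0      /* stand-in for a gfp_t argument */

    /* Wrapper name encodes whether the allocator wants a gfp argument. */
    #define alloc0(fn, sz)      fn(sz)              /* fn(size) */
    #define alloc1(fn, sz)      fn(sz, FAKE_GFP)    /* fn(size, gfp) */

    /* Paste the flag digit onto "alloc" to pick the right wrapper. */
    #define DO_ALLOC(want_gfp, fn, sz)  alloc ## want_gfp(fn, sz)

    static void *plain_alloc(size_t size)        { return malloc(size); }
    static void *gfp_alloc(size_t size, int gfp) { (void)gfp; return malloc(size); }

    int main(void)
    {
        void *a = DO_ALLOC(0, plain_alloc, 32); /* expands to plain_alloc(32) */
        void *b = DO_ALLOC(1, gfp_alloc, 32);   /* expands to gfp_alloc(32, FAKE_GFP) */

        printf("%p %p\n", a, b);
        free(a);
        free(b);
        return 0;
    }
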
alloc            1328 mm/huge_memory.c 		goto alloc;
alloc            1364 mm/huge_memory.c alloc:
alloc            1459 mm/memblock.c  	phys_addr_t alloc;
alloc            1472 mm/memblock.c  	alloc = memblock_alloc_range_nid(size, align, min_addr, max_addr, nid);
alloc            1475 mm/memblock.c  	if (!alloc && min_addr)
alloc            1476 mm/memblock.c  		alloc = memblock_alloc_range_nid(size, align, 0, max_addr, nid);
alloc            1478 mm/memblock.c  	if (!alloc)
alloc            1481 mm/memblock.c  	return phys_to_virt(alloc);
alloc             303 mm/memory_hotplug.c 		altmap->alloc = 0;
alloc              85 mm/mempool.c   	if (pool->alloc == mempool_alloc_slab || pool->alloc == mempool_kmalloc)
alloc              89 mm/mempool.c   	if (pool->alloc == mempool_alloc_pages) {
alloc             108 mm/mempool.c   	if (pool->alloc == mempool_alloc_slab || pool->alloc == mempool_kmalloc)
alloc             110 mm/mempool.c   	if (pool->alloc == mempool_alloc_pages)
alloc             116 mm/mempool.c   	if (pool->alloc == mempool_alloc_slab || pool->alloc == mempool_kmalloc)
alloc             118 mm/mempool.c   	if (pool->alloc == mempool_alloc_pages)
alloc             187 mm/mempool.c   	pool->alloc	= alloc_fn;
alloc             202 mm/mempool.c   		element = pool->alloc(gfp_mask, pool->pool_data);
alloc             342 mm/mempool.c   		element = pool->alloc(GFP_KERNEL, pool->pool_data);
alloc             393 mm/mempool.c   	element = pool->alloc(gfp_temp, pool->pool_data);
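
mm/mempool.c above pairs an alloc callback and opaque pool_data with a reserve of preallocated elements; mempool_alloc() tries the callback first and only dips into the reserve when it fails. A toy userspace sketch of that shape (toy_pool, size_alloc and friends are invented; no gfp flags, waiting, or locking):

    #include <stdio.h>
    #include <stdlib.h>

    struct toy_pool {
        void *(*alloc)(void *pool_data);    /* element constructor */
        void *pool_data;                    /* e.g. an element size */
        void *reserve[4];                   /* prefilled emergency elements */
        int nr;                             /* elements left in reserve */
    };

    static void *size_alloc(void *pool_data)
    {
        return malloc(*(size_t *)pool_data);
    }

    static int toy_pool_init(struct toy_pool *p, size_t *elem_size)
    {
        p->alloc = size_alloc;
        p->pool_data = elem_size;
        for (p->nr = 0; p->nr < 4; p->nr++) {
            p->reserve[p->nr] = p->alloc(p->pool_data);
            if (!p->reserve[p->nr])
                return -1;
        }
        return 0;
    }

    static void *toy_pool_alloc(struct toy_pool *p)
    {
        void *e = p->alloc(p->pool_data);   /* normal path */

        if (!e && p->nr > 0)
            e = p->reserve[--p->nr];        /* fall back to the reserve */
        return e;
    }

    int main(void)
    {
        size_t elem_size = 64;
        struct toy_pool pool;
        void *e;

        if (toy_pool_init(&pool, &elem_size))
            return 1;
        e = toy_pool_alloc(&pool);
        printf("got element: %p (reserve left: %d)\n", e, pool.nr);
        return 0;   /* leaks are ignored in this illustration */
    }
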
alloc             134 mm/memremap.c  	WARN_ONCE(pgmap->altmap.alloc, "failed to free all reserved pages\n");
alloc             376 mm/memremap.c  	altmap->alloc -= nr_pfns;
alloc            2172 mm/percpu.c    	int alloc = 0, alloc_end = 0;
alloc            2199 mm/percpu.c    		     alloc < alloc_end; alloc++) {
alloc            2200 mm/percpu.c    			if (!(alloc % apl)) {
alloc             537 mm/slub.c      	enum track_item alloc)
alloc             546 mm/slub.c      	return p + alloc;
alloc             550 mm/slub.c      			enum track_item alloc, unsigned long addr)
alloc             552 mm/slub.c      	struct track *p = get_track(s, object, alloc);
alloc             991 mm/slub.c      								int alloc)
alloc             996 mm/slub.c      			alloc ? "alloc" : "free",
alloc            1000 mm/slub.c      		if (!alloc)
alloc            4616 mm/slub.c      		struct page *page, enum track_item alloc,
alloc            4627 mm/slub.c      			add_location(t, s, get_track(s, p, alloc));
alloc            4631 mm/slub.c      					enum track_item alloc)
alloc            4657 mm/slub.c      			process_slab(&t, s, page, alloc, map);
alloc            4659 mm/slub.c      			process_slab(&t, s, page, alloc, map);
alloc              85 mm/sparse-vmemmap.c 	return altmap->base_pfn + altmap->reserve + altmap->alloc
alloc              91 mm/sparse-vmemmap.c 	unsigned long allocated = altmap->alloc + altmap->align;
alloc             123 mm/sparse-vmemmap.c 	altmap->alloc += nr_pfns;
alloc             128 mm/sparse-vmemmap.c 			__func__, pfn, altmap->alloc, altmap->align, nr_pfns);
alloc             218 net/core/net_namespace.c static int __peernet2id_alloc(struct net *net, struct net *peer, bool *alloc)
alloc             221 net/core/net_namespace.c 	bool alloc_it = *alloc;
alloc             223 net/core/net_namespace.c 	*alloc = false;
alloc             233 net/core/net_namespace.c 		*alloc = true;
alloc             255 net/core/net_namespace.c 	bool alloc = false, alive = false;
alloc             268 net/core/net_namespace.c 		alive = alloc = true;
alloc             269 net/core/net_namespace.c 	id = __peernet2id_alloc(net, peer, &alloc);
alloc             271 net/core/net_namespace.c 	if (alloc && id >= 0)
alloc              93 net/core/page_pool.c 		if (likely(pool->alloc.count)) {
alloc              95 net/core/page_pool.c 			page = pool->alloc.cache[--pool->alloc.count];
alloc             111 net/core/page_pool.c 		pool->alloc.count = __ptr_ring_consume_batched(r,
alloc             112 net/core/page_pool.c 							pool->alloc.cache,
alloc             276 net/core/page_pool.c 	if (unlikely(pool->alloc.count == PP_ALLOC_CACHE_SIZE))
alloc             280 net/core/page_pool.c 	pool->alloc.cache[pool->alloc.count++] = page;
alloc             360 net/core/page_pool.c 	while (pool->alloc.count) {
alloc             361 net/core/page_pool.c 		page = pool->alloc.cache[--pool->alloc.count];
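
page_pool above keeps a small per-pool array cache (alloc.cache plus alloc.count) in front of its ptr_ring: the hot path pops from the array, refills it in a batch when empty, and recycling pushes back while there is room. A tiny userspace sketch of that array-cache layer (invented names and sizes; the ring is replaced by plain malloc() and there is no concurrency handling):

    #include <stdio.h>
    #include <stdlib.h>

    #define CACHE_SIZE 8

    struct toy_pool {
        void *cache[CACHE_SIZE];
        int count;
    };

    /* Slow path stand-in: the kernel consumes a batch from a ring here. */
    static int refill_cache(struct toy_pool *p)
    {
        while (p->count < CACHE_SIZE / 2) {
            void *obj = malloc(64);

            if (!obj)
                break;
            p->cache[p->count++] = obj;
        }
        return p->count;
    }

    static void *toy_pool_get(struct toy_pool *p)
    {
        if (!p->count && !refill_cache(p))
            return NULL;
        return p->cache[--p->count];        /* fast path: pop from the array */
    }

    static void toy_pool_put(struct toy_pool *p, void *obj)
    {
        if (p->count < CACHE_SIZE)
            p->cache[p->count++] = obj;     /* recycle into the cache */
        else
            free(obj);                      /* cache full: release */
    }

    int main(void)
    {
        struct toy_pool pool = { .count = 0 };
        void *obj = toy_pool_get(&pool);

        printf("got %p, cached: %d\n", obj, pool.count);
        toy_pool_put(&pool, obj);
        return 0;
    }
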
alloc            2541 net/core/sock.c 		u64 alloc;
alloc            2545 net/core/sock.c 		alloc = sk_sockets_allocated_read_positive(sk);
alloc            2546 net/core/sock.c 		if (sk_prot_mem_limits(sk, 2) > alloc *
alloc             236 net/mac80211/rate.c 	ref->priv = ref->ops->alloc(&local->hw);
alloc            1724 net/mac80211/rc80211_minstrel_ht.c 	.alloc = minstrel_ht_alloc,
alloc              53 net/netfilter/core.c 	size_t alloc = sizeof(*e) +
alloc              61 net/netfilter/core.c 	e = kvzalloc(alloc, GFP_KERNEL);
alloc              42 net/netfilter/nf_conntrack_extend.c 	unsigned int newlen, newoff, oldlen, alloc;
alloc              70 net/netfilter/nf_conntrack_extend.c 	alloc = max(newlen, NF_CT_EXT_PREALLOC);
alloc              72 net/netfilter/nf_conntrack_extend.c 	new = __krealloc(old, alloc, gfp);
alloc            1635 net/netfilter/nf_tables_api.c 						     unsigned int alloc)
alloc            1637 net/netfilter/nf_tables_api.c 	if (alloc > INT_MAX)
alloc            1640 net/netfilter/nf_tables_api.c 	alloc += 1;	/* NULL, ends rules */
alloc            1641 net/netfilter/nf_tables_api.c 	if (sizeof(struct nft_rule *) > INT_MAX / alloc)
alloc            1644 net/netfilter/nf_tables_api.c 	alloc *= sizeof(struct nft_rule *);
alloc            1645 net/netfilter/nf_tables_api.c 	alloc += sizeof(struct nft_rules_old);
alloc            1647 net/netfilter/nf_tables_api.c 	return kvmalloc(alloc, GFP_KERNEL);
alloc            6611 net/netfilter/nf_tables_api.c 	unsigned int alloc = 0;
alloc            6623 net/netfilter/nf_tables_api.c 			alloc++;
alloc            6626 net/netfilter/nf_tables_api.c 	chain->rules_next = nf_tables_chain_alloc_rules(chain, alloc);
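
nf_tables_chain_alloc_rules() above sizes its rule array defensively: it rejects counts above INT_MAX, adds one slot for the NULL terminator, checks the multiplication against INT_MAX before performing it, and then adds a fixed trailer. A userspace sketch of the same overflow-guarded sizing, with hypothetical elem/trailer types and plain malloc() in place of kvmalloc():

    #include <limits.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct elem;                            /* opaque element type */
    struct trailer { unsigned int n; };     /* fixed bookkeeping at the end */

    static void *alloc_elem_array(unsigned int count)
    {
        size_t bytes;

        if (count > INT_MAX)
            return NULL;

        count += 1;                         /* room for a NULL terminator */
        if (sizeof(struct elem *) > INT_MAX / count)
            return NULL;                    /* count * sizeof(ptr) would overflow */

        bytes = (size_t)count * sizeof(struct elem *);
        bytes += sizeof(struct trailer);
        return malloc(bytes);
    }

    int main(void)
    {
        void *ok = alloc_elem_array(16);
        void *bad = alloc_elem_array(UINT_MAX);

        printf("ok=%p bad=%p\n", ok, bad);
        free(ok);
        return 0;
    }
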
alloc             186 net/sched/act_ife.c 	    !mops->get || !mops->alloc)
alloc             309 net/sched/act_ife.c 		ret = ops->alloc(mi, metaval, atomic ? GFP_ATOMIC : GFP_KERNEL);
alloc              51 net/sched/act_meta_mark.c 	.alloc = ife_alloc_meta_u32,
alloc              51 net/sched/act_meta_skbprio.c 	.alloc = ife_alloc_meta_u32,
alloc              51 net/sched/act_meta_skbtcindex.c 	.alloc = ife_alloc_meta_u16,
alloc             287 net/sunrpc/auth_gss/svcauth_gss.c 	.alloc		= rsi_alloc,
alloc             558 net/sunrpc/auth_gss/svcauth_gss.c 	.alloc		= rsc_alloc,
alloc              87 net/sunrpc/cache.c 	new = detail->alloc();
alloc             188 net/sunrpc/cache.c 	tmp = detail->alloc();
alloc             970 net/sunrpc/rpc_pipe.c 		struct rpc_pipe_dir_object *(*alloc)(void *),
alloc             982 net/sunrpc/rpc_pipe.c 	pdo = alloc(data);
alloc             593 net/sunrpc/svcauth_unix.c 	.alloc		= unix_gid_alloc,
alloc             890 net/sunrpc/svcauth_unix.c 	.alloc		= ip_map_alloc,
alloc             370 security/commoncap.c 			  bool alloc)
alloc             401 security/commoncap.c 		if (alloc)
alloc             419 security/commoncap.c 		if (alloc) {
alloc             434 security/commoncap.c 	if (alloc) {
alloc              97 security/integrity/evm/evm_crypto.c 		goto alloc;
alloc             120 security/integrity/evm/evm_crypto.c alloc:
alloc            1303 security/security.c int security_inode_getsecurity(struct inode *inode, const char *name, void **buffer, bool alloc)
alloc            1314 security/security.c 		rc = hp->hook.inode_getsecurity(inode, name, buffer, alloc);
alloc            3341 security/selinux/hooks.c static int selinux_inode_getsecurity(struct inode *inode, const char *name, void **buffer, bool alloc)
alloc            3371 security/selinux/hooks.c 	if (alloc) {
alloc              91 security/selinux/ss/sidtab.c static struct context *sidtab_do_lookup(struct sidtab *s, u32 index, int alloc)
alloc             101 security/selinux/ss/sidtab.c 	if (alloc && sidtab_alloc_roots(s, level) != 0)
alloc             114 security/selinux/ss/sidtab.c 			if (alloc)
alloc             122 security/selinux/ss/sidtab.c 		if (alloc)
alloc            1435 security/smack/smack_lsm.c 				   bool alloc)
alloc            1467 security/smack/smack_lsm.c 	if (alloc) {
alloc              18 sound/isa/gus/gus_mem.c void snd_gf1_mem_lock(struct snd_gf1_mem * alloc, int xup)
alloc              21 sound/isa/gus/gus_mem.c 		mutex_lock(&alloc->memory_mutex);
alloc              23 sound/isa/gus/gus_mem.c 		mutex_unlock(&alloc->memory_mutex);
alloc              27 sound/isa/gus/gus_mem.c static struct snd_gf1_mem_block *snd_gf1_mem_xalloc(struct snd_gf1_mem * alloc,
alloc              36 sound/isa/gus/gus_mem.c 	pblock = alloc->first;
alloc              42 sound/isa/gus/gus_mem.c 			if (pblock == alloc->first)
alloc              43 sound/isa/gus/gus_mem.c 				alloc->first = nblock;
alloc              46 sound/isa/gus/gus_mem.c 			mutex_unlock(&alloc->memory_mutex);
alloc              52 sound/isa/gus/gus_mem.c 	if (alloc->last == NULL) {
alloc              54 sound/isa/gus/gus_mem.c 		alloc->first = alloc->last = nblock;
alloc              56 sound/isa/gus/gus_mem.c 		nblock->prev = alloc->last;
alloc              57 sound/isa/gus/gus_mem.c 		alloc->last->next = nblock;
alloc              58 sound/isa/gus/gus_mem.c 		alloc->last = nblock;
alloc              63 sound/isa/gus/gus_mem.c int snd_gf1_mem_xfree(struct snd_gf1_mem * alloc, struct snd_gf1_mem_block * block)
alloc              67 sound/isa/gus/gus_mem.c 		mutex_unlock(&alloc->memory_mutex);
alloc              70 sound/isa/gus/gus_mem.c 	if (alloc->first == block) {
alloc              71 sound/isa/gus/gus_mem.c 		alloc->first = block->next;
alloc              79 sound/isa/gus/gus_mem.c 	if (alloc->last == block) {
alloc              80 sound/isa/gus/gus_mem.c 		alloc->last = block->prev;
alloc              93 sound/isa/gus/gus_mem.c static struct snd_gf1_mem_block *snd_gf1_mem_look(struct snd_gf1_mem * alloc,
alloc              98 sound/isa/gus/gus_mem.c 	for (block = alloc->first; block; block = block->next) {
alloc             106 sound/isa/gus/gus_mem.c static struct snd_gf1_mem_block *snd_gf1_mem_share(struct snd_gf1_mem * alloc,
alloc             114 sound/isa/gus/gus_mem.c 	for (block = alloc->first; block; block = block->next)
alloc             121 sound/isa/gus/gus_mem.c static int snd_gf1_mem_find(struct snd_gf1_mem * alloc,
alloc             125 sound/isa/gus/gus_mem.c 	struct snd_gf1_bank_info *info = w_16 ? alloc->banks_16 : alloc->banks_8;
alloc             140 sound/isa/gus/gus_mem.c 	for (pblock = alloc->first, idx = 0; pblock; pblock = pblock->next) {
alloc             173 sound/isa/gus/gus_mem.c struct snd_gf1_mem_block *snd_gf1_mem_alloc(struct snd_gf1_mem * alloc, int owner,
alloc             179 sound/isa/gus/gus_mem.c 	snd_gf1_mem_lock(alloc, 0);
alloc             181 sound/isa/gus/gus_mem.c 		nblock = snd_gf1_mem_share(alloc, share_id);
alloc             189 sound/isa/gus/gus_mem.c 			snd_gf1_mem_lock(alloc, 1);
alloc             194 sound/isa/gus/gus_mem.c 	if (snd_gf1_mem_find(alloc, &block, size, w_16, align) < 0) {
alloc             195 sound/isa/gus/gus_mem.c 		snd_gf1_mem_lock(alloc, 1);
alloc             202 sound/isa/gus/gus_mem.c 	nblock = snd_gf1_mem_xalloc(alloc, &block);
alloc             203 sound/isa/gus/gus_mem.c 	snd_gf1_mem_lock(alloc, 1);
alloc             207 sound/isa/gus/gus_mem.c int snd_gf1_mem_free(struct snd_gf1_mem * alloc, unsigned int address)
alloc             212 sound/isa/gus/gus_mem.c 	snd_gf1_mem_lock(alloc, 0);
alloc             213 sound/isa/gus/gus_mem.c 	if ((block = snd_gf1_mem_look(alloc, address)) != NULL) {
alloc             214 sound/isa/gus/gus_mem.c 		result = snd_gf1_mem_xfree(alloc, block);
alloc             215 sound/isa/gus/gus_mem.c 		snd_gf1_mem_lock(alloc, 1);
alloc             218 sound/isa/gus/gus_mem.c 	snd_gf1_mem_lock(alloc, 1);
alloc             224 sound/isa/gus/gus_mem.c 	struct snd_gf1_mem *alloc;
alloc             227 sound/isa/gus/gus_mem.c 	alloc = &gus->gf1.mem_alloc;
alloc             228 sound/isa/gus/gus_mem.c 	mutex_init(&alloc->memory_mutex);
alloc             229 sound/isa/gus/gus_mem.c 	alloc->first = alloc->last = NULL;
alloc             239 sound/isa/gus/gus_mem.c 		if (snd_gf1_mem_xalloc(alloc, &block) == NULL)
alloc             245 sound/isa/gus/gus_mem.c 	if (snd_gf1_mem_xalloc(alloc, &block) == NULL)
alloc             255 sound/isa/gus/gus_mem.c 	struct snd_gf1_mem *alloc;
alloc             258 sound/isa/gus/gus_mem.c 	alloc = &gus->gf1.mem_alloc;
alloc             259 sound/isa/gus/gus_mem.c 	block = alloc->first;
alloc             262 sound/isa/gus/gus_mem.c 		snd_gf1_mem_xfree(alloc, block);
alloc             273 sound/isa/gus/gus_mem.c 	struct snd_gf1_mem *alloc;
alloc             279 sound/isa/gus/gus_mem.c 	alloc = &gus->gf1.mem_alloc;
alloc             280 sound/isa/gus/gus_mem.c 	mutex_lock(&alloc->memory_mutex);
alloc             283 sound/isa/gus/gus_mem.c 		snd_iprintf(buffer, "0x%06x (%04ik)%s", alloc->banks_8[i].address, alloc->banks_8[i].size >> 10, i + 1 < 4 ? "," : "");
alloc             287 sound/isa/gus/gus_mem.c 		snd_iprintf(buffer, "0x%06x (%04ik)%s", alloc->banks_16[i].address, alloc->banks_16[i].size >> 10, i + 1 < 4 ? "," : "");
alloc             288 sound/isa/gus/gus_mem.c 		total += alloc->banks_16[i].size;
alloc             292 sound/isa/gus/gus_mem.c 	for (block = alloc->first, i = 0; block; block = block->next, i++) {
alloc             324 sound/isa/gus/gus_mem.c 	mutex_unlock(&alloc->memory_mutex);
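
The GUS allocator above keeps its blocks in an address-ordered doubly linked list with first/last pointers; snd_gf1_mem_xalloc() walks the list to find the insertion point and patches prev/next (and first/last). A compact userspace sketch of that sorted insertion, with an invented block layout and none of the locking or bank bookkeeping:

    #include <stdio.h>
    #include <stdlib.h>

    struct blk {
        unsigned int addr, size;
        struct blk *prev, *next;
    };

    struct mem { struct blk *first, *last; };

    /* Insert a copy of *tmpl, keeping the list sorted by address. */
    static struct blk *mem_xalloc(struct mem *m, const struct blk *tmpl)
    {
        struct blk *n = malloc(sizeof(*n)), *p;

        if (!n)
            return NULL;
        *n = *tmpl;
        for (p = m->first; p; p = p->next) {
            if (p->addr > n->addr) {        /* insert before p */
                n->prev = p->prev;
                n->next = p;
                if (p->prev)
                    p->prev->next = n;
                else
                    m->first = n;
                p->prev = n;
                return n;
            }
        }
        /* Append at the tail (also handles an empty list). */
        n->prev = m->last;
        n->next = NULL;
        if (m->last)
            m->last->next = n;
        else
            m->first = n;
        m->last = n;
        return n;
    }

    int main(void)
    {
        struct mem m = { NULL, NULL };
        struct blk a = { 0x200, 0x100 }, b = { 0x000, 0x100 };

        mem_xalloc(&m, &a);
        mem_xalloc(&m, &b);
        printf("first block at 0x%x\n", m.first->addr); /* 0x0 */
        return 0;
    }
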
alloc              24 sound/soc/intel/skylake/skl-sst-utils.c 		u32 alloc    : 1;
alloc              24 tools/lib/subcmd/help.c 	ALLOC_GROW(cmds->names, cmds->cnt + 1, cmds->alloc);
alloc              36 tools/lib/subcmd/help.c 	cmds->alloc = 0;
alloc               9 tools/lib/subcmd/help.h 	size_t alloc;
alloc              11 tools/lib/subcmd/sigchain.c 	int alloc;
alloc              26 tools/lib/subcmd/sigchain.c 	ALLOC_GROW(s->old, s->n + 1, s->alloc);
alloc              39 tools/lib/subcmd/subcmd-util.h #define ALLOC_GROW(x, nr, alloc) \
alloc              41 tools/lib/subcmd/subcmd-util.h 		if ((nr) > alloc) { \
alloc              42 tools/lib/subcmd/subcmd-util.h 			if (alloc_nr(alloc) < (nr)) \
alloc              43 tools/lib/subcmd/subcmd-util.h 				alloc = (nr); \
alloc              45 tools/lib/subcmd/subcmd-util.h 				alloc = alloc_nr(alloc); \
alloc              46 tools/lib/subcmd/subcmd-util.h 			x = xrealloc((x), alloc * sizeof(*(x))); \
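
ALLOC_GROW above is the amortized-growth helper used by the subcmd library (originally from git): when the needed count exceeds the current capacity, capacity jumps to at least alloc_nr(alloc) and the array is reallocated. A self-contained userspace version of the same idea, written as a function rather than a macro (the (x + 16) * 3 / 2 growth mirrors alloc_nr(), but treat the exact constants as illustrative):

    #include <stdio.h>
    #include <stdlib.h>

    static size_t grow_nr(size_t x)
    {
        return (x + 16) * 3 / 2;    /* ~1.5x growth with a small constant floor */
    }

    /*
     * Grow an array to hold at least nr elements.  Returns the (possibly
     * moved) array, or NULL on failure; *alloc is updated on success.
     */
    static void *alloc_grow(void *arr, size_t nr, size_t *alloc, size_t elem_size)
    {
        if (nr <= *alloc)
            return arr;
        if (grow_nr(*alloc) > nr)
            nr = grow_nr(*alloc);
        arr = realloc(arr, nr * elem_size);
        if (arr)
            *alloc = nr;
        return arr;
    }

    int main(void)
    {
        int *v = NULL, *tmp;
        size_t n = 0, cap = 0;

        for (int i = 0; i < 100; i++) {
            tmp = alloc_grow(v, n + 1, &cap, sizeof(*v));
            if (!tmp) {
                free(v);
                return 1;
            }
            v = tmp;
            v[n++] = i;
        }
        printf("n=%zu cap=%zu last=%d\n", n, cap, v[n - 1]);
        free(v);
        return 0;
    }
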
alloc             265 tools/perf/util/build-id.c 	bool alloc = (bf == NULL);
alloc             283 tools/perf/util/build-id.c 	if (ret < 0 || (!alloc && size < (unsigned int)ret))
alloc              37 tools/perf/util/help-unknown-cmd.c 	if (nr > cmds->alloc) {
alloc              39 tools/perf/util/help-unknown-cmd.c 		if (alloc_nr(cmds->alloc) < nr)
alloc              40 tools/perf/util/help-unknown-cmd.c 			cmds->alloc = nr;
alloc              42 tools/perf/util/help-unknown-cmd.c 			cmds->alloc = alloc_nr(cmds->alloc);
alloc              43 tools/perf/util/help-unknown-cmd.c 		tmp = realloc(cmds->names, cmds->alloc * sizeof(*cmds->names));
alloc              22 tools/perf/util/strbuf.c 	sb->alloc = sb->len = 0;
alloc              31 tools/perf/util/strbuf.c 	if (sb->alloc) {
alloc              39 tools/perf/util/strbuf.c 	char *res = sb->alloc ? sb->buf : NULL;
alloc              51 tools/perf/util/strbuf.c 	if (nr < sb->alloc)
alloc              57 tools/perf/util/strbuf.c 	if (alloc_nr(sb->alloc) > nr)
alloc              58 tools/perf/util/strbuf.c 		nr = alloc_nr(sb->alloc);
alloc              64 tools/perf/util/strbuf.c 	buf = realloc(sb->alloc ? sb->buf : NULL, nr * sizeof(*buf));
alloc              69 tools/perf/util/strbuf.c 	sb->alloc = nr;
alloc             106 tools/perf/util/strbuf.c 	len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap);
alloc             117 tools/perf/util/strbuf.c 		len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap_saved);
alloc             142 tools/perf/util/strbuf.c 	size_t oldalloc = sb->alloc;
alloc             152 tools/perf/util/strbuf.c 		cnt = read(fd, sb->buf + sb->len, sb->alloc - sb->len - 1);
alloc              51 tools/perf/util/strbuf.h 	size_t alloc;
alloc              65 tools/perf/util/strbuf.h 	return sb->alloc ? sb->alloc - sb->len - 1 : 0;
alloc              71 tools/perf/util/strbuf.h 	if (!sb->alloc) {
alloc              76 tools/perf/util/strbuf.h 	assert(len < sb->alloc);
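
The strbuf lines above show the usual grow-and-retry printf: format into whatever space is left (sb->alloc - sb->len), and if vsnprintf() reports truncation, grow the buffer and rerun the format from a va_copy of the argument list. A compact userspace sketch of that retry (sbuf, sbuf_grow and sbuf_addf are invented; error handling is minimal):

    #include <stdarg.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct sbuf { char *buf; size_t len, alloc; };

    static int sbuf_grow(struct sbuf *sb, size_t extra)
    {
        size_t need = sb->len + extra + 1;  /* +1 for the NUL */
        char *tmp;

        if (need <= sb->alloc)
            return 0;
        tmp = realloc(sb->buf, need);
        if (!tmp)
            return -1;
        sb->buf = tmp;
        sb->alloc = need;
        return 0;
    }

    static int sbuf_addf(struct sbuf *sb, const char *fmt, ...)
    {
        va_list ap, ap_saved;
        int len;

        if (!sb->buf && sbuf_grow(sb, 0))   /* ensure at least 1 byte */
            return -1;

        va_start(ap, fmt);
        va_copy(ap_saved, ap);              /* keep a copy in case we retry */

        len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap);
        if (len < 0)
            goto out;
        if ((size_t)len >= sb->alloc - sb->len) {
            /* Truncated: grow, then format again from the saved copy. */
            if (sbuf_grow(sb, (size_t)len)) {
                len = -1;
                goto out;
            }
            len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len,
                            fmt, ap_saved);
        }
        if (len >= 0)
            sb->len += (size_t)len;
    out:
        va_end(ap_saved);
        va_end(ap);
        return len < 0 ? -1 : 0;
    }

    int main(void)
    {
        struct sbuf sb = { NULL, 0, 0 };

        /* First call detects truncation, grows, and retries. */
        sbuf_addf(&sb, "alloc=%d", 42);
        sbuf_addf(&sb, " hits in %s", "the kernel");
        puts(sb.buf);
        free(sb.buf);
        return 0;
    }
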
alloc             401 tools/perf/util/time-utils.c 		goto alloc;
alloc             413 tools/perf/util/time-utils.c alloc:
alloc             199 tools/testing/nvdimm/test/nfit.c 	int (*alloc)(struct nfit_test *t);
alloc            2977 tools/testing/nvdimm/test/nfit.c 	if (nfit_test->alloc(nfit_test))
alloc            3210 tools/testing/nvdimm/test/nfit.c 			nfit_test->alloc = nfit_test0_alloc;
alloc            3217 tools/testing/nvdimm/test/nfit.c 			nfit_test->alloc = nfit_test1_alloc;