Lines Matching refs:ai (references to the struct pcpu_alloc_info pointer "ai")
824 static int __init pcpu_verify_alloc_info(const struct pcpu_alloc_info *ai);
1378 struct pcpu_alloc_info *ai; in pcpu_alloc_alloc_info() local
1383 base_size = ALIGN(sizeof(*ai) + nr_groups * sizeof(ai->groups[0]), in pcpu_alloc_alloc_info()
1384 __alignof__(ai->groups[0].cpu_map[0])); in pcpu_alloc_alloc_info()
1385 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); in pcpu_alloc_alloc_info()
1390 ai = ptr; in pcpu_alloc_alloc_info()
1393 ai->groups[0].cpu_map = ptr; in pcpu_alloc_alloc_info()
1396 ai->groups[0].cpu_map[unit] = NR_CPUS; in pcpu_alloc_alloc_info()
1398 ai->nr_groups = nr_groups; in pcpu_alloc_alloc_info()
1399 ai->__ai_size = PFN_ALIGN(ai_size); in pcpu_alloc_alloc_info()
1401 return ai; in pcpu_alloc_alloc_info()
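The block above (source lines 1378-1401) builds the whole allocation-info structure in a single allocation: the header and its group array come first and, after alignment, one shared cpu_map array that the groups point into. A minimal user-space sketch of that layout, using simplified stand-in structs (not the kernel's definitions) and the same alignment arithmetic as lines 1383-1385:

#include <stdio.h>
#include <stdlib.h>

#define NR_CPUS 64				/* stand-in for the kernel's build-time limit */
#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((size_t)(a) - 1))

struct group_info {				/* simplified pcpu_group_info */
	int nr_units;
	unsigned int *cpu_map;
};

struct alloc_info {				/* simplified pcpu_alloc_info */
	int nr_groups;
	size_t ai_size;
	struct group_info groups[];
};

int main(void)
{
	int nr_groups = 2, nr_units = 8, unit;
	/* mirrors lines 1383-1385: header plus group array, padded so the
	 * trailing cpu_map[] entries are properly aligned */
	size_t base_size = ALIGN_UP(sizeof(struct alloc_info) +
				    nr_groups * sizeof(struct group_info),
				    __alignof__(unsigned int));
	size_t ai_size = base_size + nr_units * sizeof(unsigned int);
	struct alloc_info *ai = calloc(1, ai_size);

	if (!ai)
		return 1;
	/* group 0's cpu_map starts right after the aligned header block
	 * (line 1393); every slot is marked unused first (line 1396) */
	ai->groups[0].cpu_map = (unsigned int *)((char *)ai + base_size);
	for (unit = 0; unit < nr_units; unit++)
		ai->groups[0].cpu_map[unit] = NR_CPUS;
	ai->nr_groups = nr_groups;
	ai->ai_size = ai_size;			/* the kernel page-aligns this value (line 1399) */

	printf("base_size=%zu ai_size=%zu\n", base_size, ai_size);
	free(ai);
	return 0;
}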
1410 void __init pcpu_free_alloc_info(struct pcpu_alloc_info *ai) in pcpu_free_alloc_info() argument
1412 memblock_free_early(__pa(ai), ai->__ai_size); in pcpu_free_alloc_info()
1423 const struct pcpu_alloc_info *ai) in pcpu_dump_alloc_info() argument
1431 v = ai->nr_groups; in pcpu_dump_alloc_info()
1440 upa = ai->alloc_size / ai->unit_size; in pcpu_dump_alloc_info()
1445 lvl, ai->static_size, ai->reserved_size, ai->dyn_size, in pcpu_dump_alloc_info()
1446 ai->unit_size, ai->alloc_size / ai->atom_size, ai->atom_size); in pcpu_dump_alloc_info()
1448 for (group = 0; group < ai->nr_groups; group++) { in pcpu_dump_alloc_info()
1449 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_dump_alloc_info()
1528 int __init pcpu_setup_first_chunk(const struct pcpu_alloc_info *ai, in pcpu_setup_first_chunk() argument
1533 size_t dyn_size = ai->dyn_size; in pcpu_setup_first_chunk()
1534 size_t size_sum = ai->static_size + ai->reserved_size + dyn_size; in pcpu_setup_first_chunk()
1548 pcpu_dump_alloc_info(KERN_EMERG, ai); \ in pcpu_setup_first_chunk()
1554 PCPU_SETUP_BUG_ON(ai->nr_groups <= 0); in pcpu_setup_first_chunk()
1556 PCPU_SETUP_BUG_ON(!ai->static_size); in pcpu_setup_first_chunk()
1561 PCPU_SETUP_BUG_ON(ai->unit_size < size_sum); in pcpu_setup_first_chunk()
1562 PCPU_SETUP_BUG_ON(offset_in_page(ai->unit_size)); in pcpu_setup_first_chunk()
1563 PCPU_SETUP_BUG_ON(ai->unit_size < PCPU_MIN_UNIT_SIZE); in pcpu_setup_first_chunk()
1564 PCPU_SETUP_BUG_ON(ai->dyn_size < PERCPU_DYNAMIC_EARLY_SIZE); in pcpu_setup_first_chunk()
1565 PCPU_SETUP_BUG_ON(pcpu_verify_alloc_info(ai) < 0); in pcpu_setup_first_chunk()
1568 group_offsets = memblock_virt_alloc(ai->nr_groups * in pcpu_setup_first_chunk()
1570 group_sizes = memblock_virt_alloc(ai->nr_groups * in pcpu_setup_first_chunk()
1581 for (group = 0, unit = 0; group < ai->nr_groups; group++, unit += i) { in pcpu_setup_first_chunk()
1582 const struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_setup_first_chunk()
1585 group_sizes[group] = gi->nr_units * ai->unit_size; in pcpu_setup_first_chunk()
1597 unit_off[cpu] = gi->base_offset + i * ai->unit_size; in pcpu_setup_first_chunk()
1615 pcpu_dump_alloc_info(KERN_DEBUG, ai); in pcpu_setup_first_chunk()
1617 pcpu_nr_groups = ai->nr_groups; in pcpu_setup_first_chunk()
1624 pcpu_unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_setup_first_chunk()
1626 pcpu_atom_size = ai->atom_size; in pcpu_setup_first_chunk()
1657 if (ai->reserved_size) { in pcpu_setup_first_chunk()
1658 schunk->free_size = ai->reserved_size; in pcpu_setup_first_chunk()
1660 pcpu_reserved_chunk_limit = ai->static_size + ai->reserved_size; in pcpu_setup_first_chunk()
1668 schunk->map[1] = ai->static_size; in pcpu_setup_first_chunk()
1671 schunk->map[++schunk->map_used] = ai->static_size + schunk->free_size; in pcpu_setup_first_chunk()
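pcpu_setup_first_chunk() takes the geometry it is handed on trust and only sanity-checks it: every unit must hold the static, reserved and dynamic regions back to back, unit_size must be page aligned and at least the minimum unit size, and when a reserved region exists the static chunk is capped at static_size + reserved_size (line 1660). A worked numeric example of those relations, with made-up sizes rather than values from any real configuration:

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define PAGE_SIZE		4096
#define PCPU_MIN_UNIT_SIZE	(32 << 10)	/* assumed value for this sketch */

int main(void)
{
	size_t static_size   = 45056;	/* .data..percpu payload (made up) */
	size_t reserved_size = 8192;	/* reserved region for static module allocs */
	size_t dyn_size      = 28672;	/* early dynamic region */
	size_t size_sum = static_size + reserved_size + dyn_size;
	/* round the per-CPU unit up to a whole number of pages */
	size_t unit_size = (size_sum + PAGE_SIZE - 1) & ~((size_t)PAGE_SIZE - 1);

	assert(unit_size >= size_sum);			/* line 1561 */
	assert((unit_size & (PAGE_SIZE - 1)) == 0);	/* line 1562 */
	assert(unit_size >= PCPU_MIN_UNIT_SIZE);	/* line 1563 */

	printf("size_sum=%zu unit_size=%zu reserved_chunk_limit=%zu\n",
	       size_sum, unit_size, static_size + reserved_size);
	return 0;
}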
1787 struct pcpu_alloc_info *ai; in pcpu_build_alloc_info() local
1870 ai = pcpu_alloc_alloc_info(nr_groups, nr_units); in pcpu_build_alloc_info()
1871 if (!ai) in pcpu_build_alloc_info()
1873 cpu_map = ai->groups[0].cpu_map; in pcpu_build_alloc_info()
1876 ai->groups[group].cpu_map = cpu_map; in pcpu_build_alloc_info()
1880 ai->static_size = static_size; in pcpu_build_alloc_info()
1881 ai->reserved_size = reserved_size; in pcpu_build_alloc_info()
1882 ai->dyn_size = dyn_size; in pcpu_build_alloc_info()
1883 ai->unit_size = alloc_size / upa; in pcpu_build_alloc_info()
1884 ai->atom_size = atom_size; in pcpu_build_alloc_info()
1885 ai->alloc_size = alloc_size; in pcpu_build_alloc_info()
1888 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_build_alloc_info()
1895 gi->base_offset = unit * ai->unit_size; in pcpu_build_alloc_info()
1905 return ai; in pcpu_build_alloc_info()
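pcpu_build_alloc_info() splits each atom-sized allocation into units of alloc_size / upa bytes (line 1883) and gives every group a base_offset of running-unit-count * unit_size (line 1895), as if all groups sat back to back. A small numeric sketch of that bookkeeping with invented group sizes, chosen to already be multiples of upa so no rounding comes into play:

#include <stddef.h>
#include <stdio.h>

int main(void)
{
	size_t alloc_size = 1 << 20;		/* one 1M atom per allocation (made up) */
	int upa = 2;				/* units packed per allocation */
	size_t unit_size = alloc_size / upa;	/* line 1883 */
	int nr_units_per_group[] = { 4, 2, 2 };	/* e.g. CPUs per NUMA node */
	int nr_groups = 3, group, unit = 0;

	for (group = 0; group < nr_groups; group++) {
		size_t base_offset = unit * unit_size;	/* line 1895 */

		printf("group %d: nr_units=%d base_offset=%zu\n",
		       group, nr_units_per_group[group], base_offset);
		unit += nr_units_per_group[group];
	}
	printf("total units=%d, chunk spans %zu bytes\n", unit, unit * unit_size);
	return 0;
}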
1950 struct pcpu_alloc_info *ai; in pcpu_embed_first_chunk() local
1954 ai = pcpu_build_alloc_info(reserved_size, dyn_size, atom_size, in pcpu_embed_first_chunk()
1956 if (IS_ERR(ai)) in pcpu_embed_first_chunk()
1957 return PTR_ERR(ai); in pcpu_embed_first_chunk()
1959 size_sum = ai->static_size + ai->reserved_size + ai->dyn_size; in pcpu_embed_first_chunk()
1960 areas_size = PFN_ALIGN(ai->nr_groups * sizeof(void *)); in pcpu_embed_first_chunk()
1969 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
1970 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
1979 ptr = alloc_fn(cpu, gi->nr_units * ai->unit_size, atom_size); in pcpu_embed_first_chunk()
1996 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
1997 struct pcpu_group_info *gi = &ai->groups[group]; in pcpu_embed_first_chunk()
2000 for (i = 0; i < gi->nr_units; i++, ptr += ai->unit_size) { in pcpu_embed_first_chunk()
2003 free_fn(ptr, ai->unit_size); in pcpu_embed_first_chunk()
2007 memcpy(ptr, __per_cpu_load, ai->static_size); in pcpu_embed_first_chunk()
2008 free_fn(ptr + size_sum, ai->unit_size - size_sum); in pcpu_embed_first_chunk()
2014 for (group = 0; group < ai->nr_groups; group++) { in pcpu_embed_first_chunk()
2015 ai->groups[group].base_offset = areas[group] - base; in pcpu_embed_first_chunk()
2017 ai->groups[group].base_offset); in pcpu_embed_first_chunk()
2019 max_distance += ai->unit_size; in pcpu_embed_first_chunk()
2034 PFN_DOWN(size_sum), base, ai->static_size, ai->reserved_size, in pcpu_embed_first_chunk()
2035 ai->dyn_size, ai->unit_size); in pcpu_embed_first_chunk()
2037 rc = pcpu_setup_first_chunk(ai, base); in pcpu_embed_first_chunk()
2041 for (group = 0; group < ai->nr_groups; group++) in pcpu_embed_first_chunk()
2044 ai->groups[group].nr_units * ai->unit_size); in pcpu_embed_first_chunk()
2046 pcpu_free_alloc_info(ai); in pcpu_embed_first_chunk()
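In the embed path every group gets its own allocation; the lowest allocation becomes the chunk base, each group's base_offset is its distance from that base (line 2015), and the largest offset plus one unit_size (line 2019) is the span later checked against the available address space. A sketch of that bookkeeping with hypothetical addresses:

#include <stdio.h>

int main(void)
{
	/* hypothetical per-group allocation addresses, one per NUMA node */
	unsigned long areas[] = { 0x48000000UL, 0x40000000UL, 0x50000000UL };
	unsigned long unit_size = 0x8000;	/* 32K units, made up */
	int nr_groups = 3, group;
	unsigned long base = areas[0], max_distance = 0;

	for (group = 1; group < nr_groups; group++)
		if (areas[group] < base)
			base = areas[group];	/* lowest area anchors the chunk */

	for (group = 0; group < nr_groups; group++) {
		unsigned long base_offset = areas[group] - base;	/* line 2015 */

		if (base_offset > max_distance)
			max_distance = base_offset;
		printf("group %d: base_offset=%#lx\n", group, base_offset);
	}
	max_distance += unit_size;		/* line 2019 */
	printf("base=%#lx max_distance=%#lx\n", base, max_distance);
	return 0;
}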
2076 struct pcpu_alloc_info *ai; in pcpu_page_first_chunk() local
2085 ai = pcpu_build_alloc_info(reserved_size, 0, PAGE_SIZE, NULL); in pcpu_page_first_chunk()
2086 if (IS_ERR(ai)) in pcpu_page_first_chunk()
2087 return PTR_ERR(ai); in pcpu_page_first_chunk()
2088 BUG_ON(ai->nr_groups != 1); in pcpu_page_first_chunk()
2089 BUG_ON(ai->groups[0].nr_units != num_possible_cpus()); in pcpu_page_first_chunk()
2091 unit_pages = ai->unit_size >> PAGE_SHIFT; in pcpu_page_first_chunk()
2102 unsigned int cpu = ai->groups[0].cpu_map[unit]; in pcpu_page_first_chunk()
2118 vm.size = num_possible_cpus() * ai->unit_size; in pcpu_page_first_chunk()
2123 (unsigned long)vm.addr + unit * ai->unit_size; in pcpu_page_first_chunk()
2143 memcpy((void *)unit_addr, __per_cpu_load, ai->static_size); in pcpu_page_first_chunk()
2148 unit_pages, psize_str, vm.addr, ai->static_size, in pcpu_page_first_chunk()
2149 ai->reserved_size, ai->dyn_size); in pcpu_page_first_chunk()
2151 rc = pcpu_setup_first_chunk(ai, vm.addr); in pcpu_page_first_chunk()
2160 pcpu_free_alloc_info(ai); in pcpu_page_first_chunk()
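The page-mapped path uses a single group covering all possible CPUs and reserves one vmalloc area of num_possible_cpus() * unit_size bytes (line 2118); each CPU's unit then starts unit * unit_size bytes into that area (line 2123). A short sketch of the address arithmetic with placeholder numbers:

#include <stdio.h>

#define PAGE_SHIFT	12
#define PAGE_SIZE	(1UL << PAGE_SHIFT)

int main(void)
{
	unsigned long vm_addr = 0xf0000000UL;		/* pretend vmalloc area start */
	unsigned long unit_size = 8 * PAGE_SIZE;	/* made-up per-CPU unit */
	unsigned long unit_pages = unit_size >> PAGE_SHIFT;	/* line 2091 */
	int nr_cpus = 4, unit;

	/* the vmalloc area covers every possible CPU (line 2118) */
	printf("vm.size=%#lx unit_pages=%lu\n", nr_cpus * unit_size, unit_pages);

	for (unit = 0; unit < nr_cpus; unit++) {
		unsigned long unit_addr = vm_addr + unit * unit_size;	/* line 2123 */

		printf("cpu/unit %d: unit_addr=%#lx\n", unit, unit_addr);
	}
	return 0;
}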
2229 struct pcpu_alloc_info *ai; in setup_per_cpu_areas() local
2232 ai = pcpu_alloc_alloc_info(1, 1); in setup_per_cpu_areas()
2236 if (!ai || !fc) in setup_per_cpu_areas()
2241 ai->dyn_size = unit_size; in setup_per_cpu_areas()
2242 ai->unit_size = unit_size; in setup_per_cpu_areas()
2243 ai->atom_size = unit_size; in setup_per_cpu_areas()
2244 ai->alloc_size = unit_size; in setup_per_cpu_areas()
2245 ai->groups[0].nr_units = 1; in setup_per_cpu_areas()
2246 ai->groups[0].cpu_map[0] = 0; in setup_per_cpu_areas()
2248 if (pcpu_setup_first_chunk(ai, fc) < 0) in setup_per_cpu_areas()
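On this fallback path the whole first chunk is one dynamic unit for CPU 0, so every size field of the alloc_info collapses to the same unit_size (lines 2241-2246). A tiny illustration; the unit_size derivation below (power-of-two round-up of the larger of a minimum unit size and a dynamic reserve) is an assumption made for the sketch, not something shown in the listing:

#include <stdio.h>

static unsigned long roundup_pow_of_two(unsigned long x)
{
	unsigned long r = 1;

	while (r < x)
		r <<= 1;
	return r;
}

int main(void)
{
	unsigned long min_unit_size = 32 << 10;	/* assumed minimum unit size */
	unsigned long dyn_reserve = 28 << 10;	/* assumed early dynamic reserve */
	unsigned long unit_size = roundup_pow_of_two(
		min_unit_size > dyn_reserve ? min_unit_size : dyn_reserve);

	/* mirrors lines 2241-2246: dyn_size, unit_size, atom_size and
	 * alloc_size all take the same value; one group, one unit, CPU 0 */
	printf("dyn=atom=alloc=unit_size=%lu, nr_units=1, cpu_map[0]=0\n",
	       unit_size);
	return 0;
}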