Searched refs:cpu_map (Results 1 - 39 of 39) sorted by relevance

/linux-4.1.27/tools/perf/util/
cpumap.h
10 struct cpu_map { struct
15 struct cpu_map *cpu_map__new(const char *cpu_list);
16 struct cpu_map *cpu_map__dummy_new(void);
17 void cpu_map__delete(struct cpu_map *map);
18 struct cpu_map *cpu_map__read(FILE *file);
19 size_t cpu_map__fprintf(struct cpu_map *map, FILE *fp);
20 int cpu_map__get_socket(struct cpu_map *map, int idx);
21 int cpu_map__get_core(struct cpu_map *map, int idx);
22 int cpu_map__build_socket_map(struct cpu_map *cpus, struct cpu_map **sockp);
23 int cpu_map__build_core_map(struct cpu_map *cpus, struct cpu_map **corep);
25 static inline int cpu_map__socket(struct cpu_map *sock, int s) cpu_map__socket()
42 static inline int cpu_map__nr(const struct cpu_map *map) cpu_map__nr()
47 static inline bool cpu_map__empty(const struct cpu_map *map) cpu_map__empty()
61 pr_debug("cpu_map not initialized\n"); cpu__max_node()
69 pr_debug("cpu_map not initialized\n"); cpu__max_cpu()
77 pr_debug("cpu_map not initialized\n"); cpu__get_node()
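The cpumap.h hits above cover the whole lifecycle of a perf cpu_map: parse, query, print, free. As a rough sketch of how those entry points fit together, using only the functions listed above (the helper name and its use are illustrative, not code from the tree; passing NULL to cpu_map__new() falls back to the online CPUs, see cpu_map__read_all_cpu_map() below):

#include <stdio.h>
#include "cpumap.h"	/* tools/perf/util/cpumap.h */

/* Hypothetical helper: parse a CPU list string, print the map, report the
 * socket of each entry, then free the map. */
static int dump_cpu_list(const char *cpu_list)
{
	struct cpu_map *map = cpu_map__new(cpu_list);
	int idx;

	if (!map)
		return -1;

	cpu_map__fprintf(map, stdout);
	for (idx = 0; idx < cpu_map__nr(map); idx++)
		printf("idx %d -> socket %d\n", idx,
		       cpu_map__get_socket(map, idx));

	cpu_map__delete(map);
	return 0;
}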
cpumap.c
9 static struct cpu_map *cpu_map__default_new(void) cpu_map__default_new()
11 struct cpu_map *cpus; cpu_map__default_new()
30 static struct cpu_map *cpu_map__trim_new(int nr_cpus, int *tmp_cpus) cpu_map__trim_new()
33 struct cpu_map *cpus = malloc(sizeof(*cpus) + payload_size); cpu_map__trim_new()
43 struct cpu_map *cpu_map__read(FILE *file) cpu_map__read()
45 struct cpu_map *cpus = NULL; cpu_map__read()
98 static struct cpu_map *cpu_map__read_all_cpu_map(void) cpu_map__read_all_cpu_map()
100 struct cpu_map *cpus = NULL; cpu_map__read_all_cpu_map()
112 struct cpu_map *cpu_map__new(const char *cpu_list) cpu_map__new()
114 struct cpu_map *cpus = NULL; cpu_map__new()
179 size_t cpu_map__fprintf(struct cpu_map *map, FILE *fp) cpu_map__fprintf()
190 struct cpu_map *cpu_map__dummy_new(void) cpu_map__dummy_new()
192 struct cpu_map *cpus = malloc(sizeof(*cpus) + sizeof(int)); cpu_map__dummy_new()
202 void cpu_map__delete(struct cpu_map *map) cpu_map__delete()
207 int cpu_map__get_socket(struct cpu_map *map, int idx) cpu_map__get_socket()
240 static int cpu_map__build_map(struct cpu_map *cpus, struct cpu_map **res, cpu_map__build_map()
241 int (*f)(struct cpu_map *map, int cpu)) cpu_map__build_map()
243 struct cpu_map *c; cpu_map__build_map()
270 int cpu_map__get_core(struct cpu_map *map, int idx) cpu_map__get_core()
311 int cpu_map__build_socket_map(struct cpu_map *cpus, struct cpu_map **sockp) cpu_map__build_socket_map()
316 int cpu_map__build_core_map(struct cpu_map *cpus, struct cpu_map **corep) cpu_map__build_core_map()
evlist.h
15 struct cpu_map;
52 struct cpu_map *cpus;
64 void perf_evlist__init(struct perf_evlist *evlist, struct cpu_map *cpus,
147 struct cpu_map *cpus, perf_evlist__set_maps()
pmu.h
23 struct cpu_map *cpus;
record.c
60 struct cpu_map *cpus; perf_probe_api()
217 struct cpu_map *cpus = cpu_map__new(NULL); perf_evlist__can_select_event()
evsel.h
85 struct cpu_map *cpus;
110 struct cpu_map;
197 struct cpu_map *cpus);
200 int perf_evsel__open(struct perf_evsel *evsel, struct cpu_map *cpus,
python.c
366 struct cpu_map *cpus;
417 .tp_name = "perf.cpu_map",
622 struct cpu_map *cpus = NULL; pyrf_evsel__open()
691 struct cpu_map *cpus; pyrf_evlist__init()
1048 PyModule_AddObject(module, "cpu_map", (PyObject*)&pyrf_cpu_map__type); initperf()
pmu.c
407 static struct cpu_map *pmu_cpumask(const char *name) pmu_cpumask()
412 struct cpu_map *cpus; pmu_cpumask()
evlist.c
35 void perf_evlist__init(struct perf_evlist *evlist, struct cpu_map *cpus, perf_evlist__init()
999 const struct cpu_map *cpus = evlist->cpus; perf_evlist__mmap()
1264 * FIXME: -ENOMEM is the best we can do here, the cpu_map perf_evlist__create_syswide_maps()
evsel.c
1134 static int __perf_evsel__open(struct perf_evsel *evsel, struct cpu_map *cpus, __perf_evsel__open()
1294 struct cpu_map map;
1309 int perf_evsel__open(struct perf_evsel *evsel, struct cpu_map *cpus, perf_evsel__open()
1324 struct cpu_map *cpus) perf_evsel__open_per_cpu()
svghelper.c
734 struct cpu_map *m; str_to_bitmap()
parse-events.c
277 char *name, struct cpu_map *cpus) __add_event()
session.c
1621 struct cpu_map *map; perf_session__cpu_bitmap()
header.c
703 struct cpu_map *node_map = NULL; write_numa_topology()
/linux-4.1.27/tools/perf/python/
twatch.py
19 cpus = perf.cpu_map()
/linux-4.1.27/drivers/soc/ti/
knav_qmss_acc.c
217 unsigned long cpu_map; knav_range_setup_acc_irq() local
224 cpu_map = range->irqs[0].cpu_map; knav_range_setup_acc_irq()
228 cpu_map = range->irqs[queue].cpu_map; knav_range_setup_acc_irq()
251 if (!ret && cpu_map) { knav_range_setup_acc_irq()
252 ret = irq_set_affinity_hint(irq, to_cpumask(&cpu_map)); knav_range_setup_acc_irq()
knav_qmss_queue.c
108 unsigned long cpu_map; knav_queue_setup_irq() local
113 cpu_map = range->irqs[queue].cpu_map; knav_queue_setup_irq()
119 if (cpu_map) { knav_queue_setup_irq()
120 ret = irq_set_affinity_hint(irq, to_cpumask(&cpu_map)); knav_queue_setup_irq()
1226 range->irqs[i].cpu_map = knav_setup_queue_range()
knav_qmss.h
324 u32 cpu_map; member in struct:knav_irq_info
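In the knav_qmss hits, cpu_map is a plain bitmask kept per range IRQ (a u32 member in knav_qmss.h, widened to an unsigned long locally) and handed to the IRQ core as an affinity hint. A minimal sketch of that pattern follows; the struct and function names are hypothetical, only to_cpumask() and irq_set_affinity_hint() are real kernel APIs. The bitmap is kept in long-lived storage here because irq_set_affinity_hint() stores the pointer it is given, and a single word is assumed to be enough to cover the CPUs, as in the driver.

#include <linux/interrupt.h>
#include <linux/cpumask.h>

/* Hypothetical per-IRQ state; in the driver this lives in range->irqs[]. */
struct example_irq_info {
	unsigned int	irq;
	unsigned long	cpu_map;	/* bitmask of CPUs to hint */
};

/* Apply the stored CPU bitmask as an IRQ affinity hint, mirroring
 * knav_range_setup_acc_irq() and knav_queue_setup_irq(). */
static int example_setup_irq_affinity(struct example_irq_info *info)
{
	int ret = 0;

	if (info->cpu_map)
		/* to_cpumask() reinterprets the unsigned long bitmap
		 * as a struct cpumask */
		ret = irq_set_affinity_hint(info->irq,
					    to_cpumask(&info->cpu_map));
	return ret;
}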
/linux-4.1.27/arch/ia64/mm/
discontig.c
188 unsigned int *cpu_map; setup_per_cpu_areas() local
198 cpu_map = ai->groups[0].cpu_map; setup_per_cpu_areas()
207 /* build cpu_map, units are grouped by node */ setup_per_cpu_areas()
212 cpu_map[unit++] = cpu; setup_per_cpu_areas()
231 * CPUs are put into groups according to node. Walk cpu_map setup_per_cpu_areas()
237 cpu = cpu_map[unit]; setup_per_cpu_areas()
249 gi->cpu_map = &cpu_map[unit]; setup_per_cpu_areas()
contig.c
163 gi->cpu_map[gi->nr_units++] = cpu; setup_per_cpu_areas()
/linux-4.1.27/tools/perf/tests/
keep-tracking.c
64 struct cpu_map *cpus = NULL; test__keep_tracking()
open-syscall-all-cpus.c
10 struct cpu_map *cpus; test__open_syscall_event_on_all_cpus()
mmap-basic.c
23 struct cpu_map *cpus; test__basic_mmap()
perf-time-to-tsc.c
49 struct cpu_map *cpus = NULL; test__perf_time_to_tsc()
code-reading.c
403 struct cpu_map *cpus = NULL; do_test_code_reading()
switch-tracking.c
322 struct cpu_map *cpus = NULL; test__switch_tracking()
/linux-4.1.27/mm/
percpu.c
1366 * @nr_units units. The returned ai's groups[0].cpu_map points to the
1367 * cpu_map array which is long enough for @nr_units and filled with
1368 * NR_CPUS. It's the caller's responsibility to initialize cpu_map
1384 __alignof__(ai->groups[0].cpu_map[0])); pcpu_alloc_alloc_info()
1385 ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]); pcpu_alloc_alloc_info()
1393 ai->groups[0].cpu_map = ptr; pcpu_alloc_alloc_info()
1396 ai->groups[0].cpu_map[unit] = NR_CPUS; pcpu_alloc_alloc_info()
1462 if (gi->cpu_map[unit] != NR_CPUS) pcpu_dump_alloc_info()
1464 gi->cpu_map[unit]); pcpu_dump_alloc_info()
1588 cpu = gi->cpu_map[i]; pcpu_setup_first_chunk()
1789 unsigned int *cpu_map; pcpu_build_alloc_info() local
1874 cpu_map = ai->groups[0].cpu_map;
1877 ai->groups[group].cpu_map = cpu_map;
1878 cpu_map += roundup(group_cnt[group], upa);
1900 gi->cpu_map[gi->nr_units++] = cpu;
1976 cpu = gi->cpu_map[i]; pcpu_embed_first_chunk()
2002 if (gi->cpu_map[i] == NR_CPUS) { pcpu_embed_first_chunk()
2103 unsigned int cpu = ai->groups[0].cpu_map[unit]; pcpu_page_first_chunk()
2247 ai->groups[0].cpu_map[0] = 0; setup_per_cpu_areas()
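The percpu.c comment above (around line 1366) states the contract: pcpu_alloc_alloc_info() returns an alloc_info whose groups[0].cpu_map is sized for @nr_units and pre-filled with NR_CPUS, and the caller must fill in the real unit->cpu mapping. A stripped-down sketch of a caller honouring that contract, assuming a single group of all possible CPUs; real callers such as pcpu_build_alloc_info() and the ia64 setup_per_cpu_areas() group units by NUMA node and also fill in the unit and chunk size fields, which this sketch omits.

#include <linux/init.h>
#include <linux/percpu.h>
#include <linux/cpumask.h>

/* Hypothetical: build a one-group alloc_info covering every possible CPU.
 * pcpu_alloc_alloc_info() pre-fills cpu_map[] with NR_CPUS; each slot we
 * claim is overwritten with a real CPU number and nr_units is bumped. */
static struct pcpu_alloc_info * __init example_build_alloc_info(void)
{
	struct pcpu_alloc_info *ai;
	struct pcpu_group_info *gi;
	unsigned int cpu;

	ai = pcpu_alloc_alloc_info(1, num_possible_cpus());
	if (!ai)
		return NULL;

	gi = &ai->groups[0];
	for_each_possible_cpu(cpu)
		gi->cpu_map[gi->nr_units++] = cpu;	/* unit -> cpu */

	return ai;
}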
/linux-4.1.27/include/linux/
percpu.h
63 unsigned int *cpu_map; /* unit->cpu map, empty entries contain NR_CPUS */ member in struct:pcpu_group_info
netdevice.h
728 struct xps_map __rcu *cpu_map[0]; member in struct:xps_dev_maps
/linux-4.1.27/kernel/sched/
core.c
6090 static void __sdt_free(const struct cpumask *cpu_map);
6091 static int __sdt_alloc(const struct cpumask *cpu_map);
6094 const struct cpumask *cpu_map) __free_domain_allocs()
6103 __sdt_free(cpu_map); /* fall through */ __free_domain_allocs()
6110 const struct cpumask *cpu_map) __visit_domain_allocation_hell()
6114 if (__sdt_alloc(cpu_map)) __visit_domain_allocation_hell()
6587 static int __sdt_alloc(const struct cpumask *cpu_map) __sdt_alloc() argument
6607 for_each_cpu(j, cpu_map) { for_each_cpu()
6640 static void __sdt_free(const struct cpumask *cpu_map) __sdt_free() argument
6648 for_each_cpu(j, cpu_map) { for_each_cpu()
6673 const struct cpumask *cpu_map, struct sched_domain_attr *attr, build_sched_domain()
6680 cpumask_and(sched_domain_span(sd), cpu_map, tl->mask(cpu)); build_sched_domain()
6710 static int build_sched_domains(const struct cpumask *cpu_map, build_sched_domains() argument
6718 alloc_state = __visit_domain_allocation_hell(&d, cpu_map); build_sched_domains()
6722 /* Set up domains for cpus specified by the cpu_map. */ for_each_cpu()
6723 for_each_cpu(i, cpu_map) { for_each_cpu()
6728 sd = build_sched_domain(tl, cpu_map, attr, sd, i); for_each_sd_topology()
6733 if (cpumask_equal(cpu_map, sched_domain_span(sd))) for_each_sd_topology()
6739 for_each_cpu(i, cpu_map) { for_each_cpu()
6754 if (!cpumask_test_cpu(i, cpu_map))
6765 for_each_cpu(i, cpu_map) { for_each_cpu()
6773 __free_domain_allocs(&d, alloc_state, cpu_map);
6829 static int init_sched_domains(const struct cpumask *cpu_map) init_sched_domains() argument
6838 cpumask_andnot(doms_cur[0], cpu_map, cpu_isolated_map); init_sched_domains()
6846 * Detach sched domains from a group of cpus specified in cpu_map
6849 static void detach_destroy_domains(const struct cpumask *cpu_map) detach_destroy_domains() argument
6854 for_each_cpu(i, cpu_map) detach_destroy_domains()
6093 __free_domain_allocs(struct s_data *d, enum s_alloc what, const struct cpumask *cpu_map) __free_domain_allocs() argument
6109 __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map) __visit_domain_allocation_hell() argument
6672 build_sched_domain(struct sched_domain_topology_level *tl, const struct cpumask *cpu_map, struct sched_domain_attr *attr, struct sched_domain *child, int cpu) build_sched_domain() argument
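In the scheduler hits cpu_map is not the perf struct but a plain struct cpumask naming the CPUs to build domains over, and the interesting calls are the generic cpumask operations. A small, detached illustration of those primitives; the function and its arguments are made up, only the cpumask_* helpers and for_each_cpu() are real.

#include <linux/cpumask.h>
#include <linux/printk.h>

/* Illustrative only: the cpumask operations that build_sched_domains(),
 * build_sched_domain() and init_sched_domains() apply to cpu_map. */
static void example_cpumask_ops(const struct cpumask *cpu_map,
				const struct cpumask *level_mask,
				struct cpumask *span)
{
	int i;

	/* span = CPUs of this topology level that are also in cpu_map
	 * (cf. the cpumask_and() in build_sched_domain()). */
	cpumask_and(span, cpu_map, level_mask);

	/* The top-most level is reached once the span covers all of
	 * cpu_map (cf. the cpumask_equal() check). */
	if (cpumask_equal(cpu_map, span))
		pr_debug("span covers the whole cpu_map\n");

	/* Per-CPU setup and teardown walk only the requested CPUs. */
	for_each_cpu(i, cpu_map)
		pr_debug("cpu %d is in cpu_map\n", i);
}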
/linux-4.1.27/tools/perf/
builtin-stat.c
145 static struct cpu_map *aggr_map;
146 static int (*aggr_get_id)(struct cpu_map *m, int cpu);
166 static inline struct cpu_map *perf_evsel__cpus(struct perf_evsel *evsel) perf_evsel__cpus()
401 struct cpu_map *cpus = perf_evsel__cpus(counter); check_per_pkg()
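builtin-stat keeps its aggregation state in two statics: aggr_map holds the collapsed cpu_map and aggr_get_id resolves a CPU index to its aggregate id. A minimal sketch of wiring those up for per-socket aggregation, using only functions declared in cpumap.h above; the setup function itself is hypothetical.

#include "cpumap.h"	/* tools/perf/util/cpumap.h */

/* Hypothetical per-socket wiring for the builtin-stat hooks shown above. */
static struct cpu_map *aggr_map;
static int (*aggr_get_id)(struct cpu_map *m, int cpu);

static int example_set_socket_aggr(struct cpu_map *evsel_cpus)
{
	/* Collapse the event's CPU map into one entry per socket ... */
	if (cpu_map__build_socket_map(evsel_cpus, &aggr_map))
		return -1;
	/* ... and resolve each CPU index to its socket id when reading. */
	aggr_get_id = cpu_map__get_socket;
	return 0;
}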
/linux-4.1.27/net/core/
flow_dissector.c
432 dev_maps->cpu_map[skb->sender_cpu - 1]); get_xps_queue()
dev.c
1911 map = xmap_dereference(dev_maps->cpu_map[cpu]); remove_xps_queue()
1918 RCU_INIT_POINTER(dev_maps->cpu_map[cpu], NULL); remove_xps_queue()
2023 map = dev_maps ? xmap_dereference(dev_maps->cpu_map[cpu]) : for_each_online_cpu()
2030 RCU_INIT_POINTER(new_dev_maps->cpu_map[cpu], map); for_each_online_cpu()
2041 map = xmap_dereference(new_dev_maps->cpu_map[cpu]); for_each_possible_cpu()
2055 map = xmap_dereference(dev_maps->cpu_map[cpu]); for_each_possible_cpu()
2056 RCU_INIT_POINTER(new_dev_maps->cpu_map[cpu], map); for_each_possible_cpu()
2066 new_map = xmap_dereference(new_dev_maps->cpu_map[cpu]); for_each_possible_cpu()
2067 map = xmap_dereference(dev_maps->cpu_map[cpu]); for_each_possible_cpu()
2109 new_map = xmap_dereference(new_dev_maps->cpu_map[cpu]); for_each_possible_cpu()
2110 map = dev_maps ? xmap_dereference(dev_maps->cpu_map[cpu]) : for_each_possible_cpu()
net-sysfs.c
1172 rcu_dereference(dev_maps->cpu_map[i]); for_each_possible_cpu()
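In the networking hits cpu_map is the per-CPU slot array inside struct xps_dev_maps (see the netdevice.h hit above), each slot an RCU-protected pointer to an xps_map listing transmit queues. A sketch of the read side under RCU, loosely mirroring get_xps_queue() and the net-sysfs show path; the helper name and return convention are made up, and CONFIG_XPS is assumed.

#include <linux/netdevice.h>
#include <linux/rcupdate.h>

/* Hypothetical reader: return the first XPS transmit queue configured for
 * the given CPU, or -1 if none. Requires CONFIG_XPS. */
static int example_xps_first_queue(struct net_device *dev, int cpu)
{
	struct xps_dev_maps *dev_maps;
	struct xps_map *map;
	int queue = -1;

	rcu_read_lock();
	dev_maps = rcu_dereference(dev->xps_maps);
	if (dev_maps) {
		/* Each cpu_map[] slot is itself RCU-protected. */
		map = rcu_dereference(dev_maps->cpu_map[cpu]);
		if (map && map->len)
			queue = map->queues[0];
	}
	rcu_read_unlock();

	return queue;
}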
/linux-4.1.27/drivers/scsi/lpfc/
lpfc_sli4.h
607 struct lpfc_vector_map_info *cpu_map; member in struct:lpfc_sli4_hba
lpfc_init.c
5438 phba->sli4_hba.cpu_map = kzalloc((sizeof(struct lpfc_vector_map_info) * lpfc_sli4_driver_resource_setup()
5441 if (!phba->sli4_hba.cpu_map) { lpfc_sli4_driver_resource_setup()
5455 kfree(phba->sli4_hba.cpu_map); lpfc_sli4_driver_resource_setup()
5464 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_driver_resource_setup()
5524 kfree(phba->sli4_hba.cpu_map); lpfc_sli4_driver_resource_unset()
8615 cpup = phba->sli4_hba.cpu_map; lpfc_find_next_cpu()
8639 cpup = phba->sli4_hba.cpu_map; lpfc_find_next_cpu()
8680 /* Init cpu_map array */ lpfc_sli4_set_affinity()
8681 memset(phba->sli4_hba.cpu_map, 0xff, lpfc_sli4_set_affinity()
8692 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
8718 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
8736 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
8809 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
8846 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
8855 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_set_affinity()
lpfc_attr.c
4338 cpup = &phba->sli4_hba.cpu_map[phba->sli4_hba.curr_disp_cpu]; lpfc_fcp_cpu_map_show()
lpfc_sli.c
8160 cpup = phba->sli4_hba.cpu_map; lpfc_sli4_scmd_to_wqidx_distr()
/linux-4.1.27/drivers/s390/char/
zcore.c
51 int cpu_map[NR_CPUS]; member in struct:sys_info

Completed in 960 milliseconds