Lines matching refs:cpu. Each entry gives the source line number, the matching line, and the enclosing function; lines that declare cpu are tagged argument or local.
81 static inline void set_cpu_sibling_map(int cpu) in set_cpu_sibling_map() argument
85 cpumask_set_cpu(cpu, &cpu_sibling_setup_map); in set_cpu_sibling_map()
89 if (cpu_data[cpu].package == cpu_data[i].package && in set_cpu_sibling_map()
90 cpu_data[cpu].core == cpu_data[i].core) { in set_cpu_sibling_map()
91 cpumask_set_cpu(i, &cpu_sibling_map[cpu]); in set_cpu_sibling_map()
92 cpumask_set_cpu(cpu, &cpu_sibling_map[i]); in set_cpu_sibling_map()
96 cpumask_set_cpu(cpu, &cpu_sibling_map[cpu]); in set_cpu_sibling_map()
99 static inline void set_cpu_core_map(int cpu) in set_cpu_core_map() argument
103 cpumask_set_cpu(cpu, &cpu_core_setup_map); in set_cpu_core_map()
106 if (cpu_data[cpu].package == cpu_data[i].package) { in set_cpu_core_map()
107 cpumask_set_cpu(i, &cpu_core_map[cpu]); in set_cpu_core_map()
108 cpumask_set_cpu(cpu, &cpu_core_map[i]); in set_cpu_core_map()
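set_cpu_sibling_map() and set_cpu_core_map() above follow the same pattern: walk the CPUs that have already been set up and, for every pair sharing a package (and, for siblings, also a core), set each CPU's bit in the other's mask. Below is a minimal standalone sketch of that grouping logic; cpu_set_t stands in for the kernel's cpumask_t, and the topo[] table with its package/core fields is made up for illustration rather than taken from cpu_data[].

/* Standalone sketch of the sibling/core map grouping above. cpu_set_t
 * stands in for the kernel's cpumask_t; topo[] and its package/core
 * fields are illustrative, not the kernel's cpu_data[]. */
#define _GNU_SOURCE
#include <sched.h>
#include <stdio.h>

#define NCPUS 8

static struct { int package; int core; } topo[NCPUS] = {
        {0, 0}, {0, 0}, {0, 1}, {0, 1},
        {1, 0}, {1, 0}, {1, 1}, {1, 1},
};

static cpu_set_t sibling_map[NCPUS];    /* same package and same core */
static cpu_set_t core_map[NCPUS];       /* same package */

static void set_maps(int cpu)
{
        for (int i = 0; i < NCPUS; i++) {
                if (topo[cpu].package != topo[i].package)
                        continue;
                CPU_SET(i, &core_map[cpu]);
                CPU_SET(cpu, &core_map[i]);
                if (topo[cpu].core == topo[i].core) {
                        CPU_SET(i, &sibling_map[cpu]);
                        CPU_SET(cpu, &sibling_map[i]);
                }
        }
}

int main(void)
{
        for (int cpu = 0; cpu < NCPUS; cpu++) {
                CPU_ZERO(&sibling_map[cpu]);
                CPU_ZERO(&core_map[cpu]);
        }
        for (int cpu = 0; cpu < NCPUS; cpu++)
                set_maps(cpu);
        for (int cpu = 0; cpu < NCPUS; cpu++)
                printf("cpu%d: %d sibling(s), %d core peer(s)\n", cpu,
                       CPU_COUNT(&sibling_map[cpu]), CPU_COUNT(&core_map[cpu]));
        return 0;
}

The kernel helpers only walk CPUs already recorded in cpu_sibling_setup_map / cpu_core_setup_map, and line 96 appears to be the fallback that marks a CPU as its own sibling when no SMT peers are detected; the sketch glosses over both details.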
154 unsigned int cpu; in start_secondary() local
169 cpu = smp_processor_id(); in start_secondary()
170 cpu_data[cpu].udelay_val = loops_per_jiffy; in start_secondary()
172 cpumask_set_cpu(cpu, &cpu_coherent_mask); in start_secondary()
173 notify_cpu_starting(cpu); in start_secondary()
175 set_cpu_online(cpu, true); in start_secondary()
177 set_cpu_sibling_map(cpu); in start_secondary()
178 set_cpu_core_map(cpu); in start_secondary()
182 cpumask_set_cpu(cpu, &cpu_callin_map); in start_secondary()
184 synchronise_count_slave(cpu); in start_secondary()
238 current_thread_info()->cpu = 0; in smp_prepare_cpus()
257 int __cpu_up(unsigned int cpu, struct task_struct *tidle) in __cpu_up() argument
259 mp_ops->boot_secondary(cpu, tidle); in __cpu_up()
264 while (!cpumask_test_cpu(cpu, &cpu_callin_map)) { in __cpu_up()
269 synchronise_count_master(cpu); in __cpu_up()
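start_secondary() and __cpu_up() above are the two halves of a handshake: the boot CPU kicks the new one via mp_ops->boot_secondary() and then spins until the secondary sets its bit in cpu_callin_map, after which the two synchronise their cycle counters. A rough userspace analogue of that rendezvous, with a pthread standing in for the secondary CPU and an atomic flag standing in for the callin mask, is sketched below (compile with -pthread); the names are illustrative only.

/* Userspace analogue of the __cpu_up()/start_secondary() rendezvous: the
 * "boot CPU" (main thread) launches a "secondary" (worker thread) and spins
 * until it has called in, mirroring the wait on cpu_callin_map above. */
#include <pthread.h>
#include <sched.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static atomic_bool callin;              /* stands in for cpu_callin_map */

static void *secondary(void *arg)
{
        (void)arg;
        /* ...per-CPU setup would go here (delay calibration, maps, ...)... */
        atomic_store(&callin, true);    /* "set our bit in cpu_callin_map" */
        /* ...then counter synchronisation and the idle loop... */
        return NULL;
}

int main(void)
{
        pthread_t tid;

        if (pthread_create(&tid, NULL, secondary, NULL) != 0) {  /* "boot_secondary" */
                perror("pthread_create");
                return 1;
        }

        while (!atomic_load(&callin))   /* boot side: wait for the callin */
                sched_yield();

        puts("secondary checked in");
        pthread_join(tid, NULL);
        return 0;
}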
337 unsigned int cpu; in flush_tlb_mm() local
339 for_each_online_cpu(cpu) { in flush_tlb_mm()
340 if (cpu != smp_processor_id() && cpu_context(cpu, mm)) in flush_tlb_mm()
341 cpu_context(cpu, mm) = 0; in flush_tlb_mm()
376 unsigned int cpu; in flush_tlb_range() local
378 for_each_online_cpu(cpu) { in flush_tlb_range()
379 if (cpu != smp_processor_id() && cpu_context(cpu, mm)) in flush_tlb_range()
380 cpu_context(cpu, mm) = 0; in flush_tlb_range()
422 unsigned int cpu; in flush_tlb_page() local
424 for_each_online_cpu(cpu) { in flush_tlb_page()
425 if (cpu != smp_processor_id() && cpu_context(cpu, vma->vm_mm)) in flush_tlb_page()
426 cpu_context(cpu, vma->vm_mm) = 0; in flush_tlb_page()
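flush_tlb_mm(), flush_tlb_range() and flush_tlb_page() all share the same lazy shootdown: instead of interrupting every CPU, they zero the mm's context (its ASID) on every other online CPU, so each of those CPUs is forced to allocate a fresh ASID, and therefore stops matching its stale TLB entries, the next time it schedules that mm. A toy model of that bookkeeping is sketched below; the asid[] array and next_asid counter are invented for illustration and are not the kernel's data structures.

/* Toy model of the lazy flush above: zeroing a remote CPU's context (ASID)
 * for an mm forces that CPU to take a fresh ASID, and hence stop using its
 * old TLB entries for the mm, the next time it switches to it. */
#include <stdio.h>

#define NCPUS 4

static unsigned int asid[NCPUS];        /* per-CPU context for one mm */
static unsigned int next_asid = 1;

static void flush_mm(int self)          /* called on `self` after a change */
{
        for (int cpu = 0; cpu < NCPUS; cpu++)
                if (cpu != self)
                        asid[cpu] = 0;  /* "cpu_context(cpu, mm) = 0" */
        /* the local CPU flushes its own TLB directly instead */
}

static void switch_to_mm(int cpu)       /* called when `cpu` runs the mm */
{
        if (asid[cpu] == 0)             /* zero context == stale: new ASID */
                asid[cpu] = next_asid++;
        printf("cpu%d runs the mm with asid %u\n", cpu, asid[cpu]);
}

int main(void)
{
        for (int cpu = 0; cpu < NCPUS; cpu++)
                switch_to_mm(cpu);
        flush_mm(0);                    /* cpu0 changes the mm's mappings */
        for (int cpu = 0; cpu < NCPUS; cpu++)
                switch_to_mm(cpu);      /* cpu1..3 are forced onto fresh ASIDs */
        return 0;
}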
453 int cpu = smp_processor_id(); in dump_send_ipi() local
458 if (i != cpu) in dump_send_ipi()
474 int cpu; in tick_broadcast() local
476 for_each_cpu(cpu, mask) { in tick_broadcast()
477 count = &per_cpu(tick_broadcast_count, cpu); in tick_broadcast()
478 csd = &per_cpu(tick_broadcast_csd, cpu); in tick_broadcast()
481 smp_call_function_single_async(cpu, csd); in tick_broadcast()
487 int cpu = smp_processor_id(); in tick_broadcast_callee() local
489 atomic_set(&per_cpu(tick_broadcast_count, cpu), 0); in tick_broadcast_callee()
495 int cpu; in tick_broadcast_init() local
497 for (cpu = 0; cpu < NR_CPUS; cpu++) { in tick_broadcast_init()
498 csd = &per_cpu(tick_broadcast_csd, cpu); in tick_broadcast_init()
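The tick_broadcast() fragments pair each target CPU with its own call_single_data and an atomic counter. A csd must not be handed to smp_call_function_single_async() again while a previous request is still in flight, so (in the full source, not visible in the matched lines above) the counter gates the async call, and tick_broadcast_callee() clears it once the broadcast has been handled. That gate is sketched below as a standalone program; pending[], queue_ipi() and broadcast_callee() are illustrative stand-ins, not kernel APIs.

/* Sketch of the "at most one async request in flight per target" gate used
 * around tick_broadcast(). pending[] plays the role of the per-CPU
 * tick_broadcast_count; the rest is illustrative only. */
#include <stdatomic.h>
#include <stdio.h>

#define NCPUS 4

static atomic_int pending[NCPUS];

static void queue_ipi(int cpu)          /* "smp_call_function_single_async()" */
{
        printf("queue broadcast IPI to cpu%d\n", cpu);
}

static void broadcast(unsigned int mask)        /* sender side */
{
        for (int cpu = 0; cpu < NCPUS; cpu++) {
                if (!(mask & (1u << cpu)))
                        continue;
                /* only queue a request if none is already pending */
                if (atomic_fetch_add(&pending[cpu], 1) == 0)
                        queue_ipi(cpu);
        }
}

static void broadcast_callee(int cpu)   /* runs on the target CPU */
{
        printf("cpu%d handles the broadcast tick\n", cpu);
        atomic_store(&pending[cpu], 0); /* allow the next async request */
}

int main(void)
{
        broadcast(0x6);         /* cpu1 and cpu2: one IPI each */
        broadcast(0x6);         /* still pending: no duplicate IPIs queued */
        broadcast_callee(1);
        broadcast_callee(2);
        broadcast(0x2);         /* cpu1 has finished: a new IPI can be queued */
        return 0;
}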