last_cpu           48 arch/alpha/kernel/irq.c 	static int last_cpu;
last_cpu           49 arch/alpha/kernel/irq.c 	int cpu = last_cpu + 1;
last_cpu           61 arch/alpha/kernel/irq.c 	last_cpu = cpu;
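The arch/alpha/kernel/irq.c hits above are a round-robin handout: a static last_cpu remembers the previous pick and the next search starts one past it. A minimal userspace sketch of that pattern, assuming a simple affinity bitmask; pick_next_cpu(), cpu_allowed() and NUM_CPUS are illustrative names, not the kernel code.

#include <stdbool.h>

#define NUM_CPUS 4

/* hypothetical affinity check: is 'cpu' set in the bitmask? */
static bool cpu_allowed(int cpu, unsigned long mask)
{
        return mask & (1UL << cpu);
}

static int pick_next_cpu(unsigned long affinity_mask)
{
        static int last_cpu;                    /* persists across calls */

        for (int i = 1; i <= NUM_CPUS; i++) {
                int cpu = (last_cpu + i) % NUM_CPUS;

                if (cpu_allowed(cpu, affinity_mask)) {
                        last_cpu = cpu;         /* remember for next time */
                        return cpu;
                }
        }
        return -1;                              /* nothing allowed by the mask */
}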
last_cpu          101 arch/arm/mach-tegra/cpuidle-tegra30.c 	bool last_cpu;
last_cpu          105 arch/arm/mach-tegra/cpuidle-tegra30.c 	last_cpu = tegra_set_cpu_in_lp2();
last_cpu          109 arch/arm/mach-tegra/cpuidle-tegra30.c 		if (last_cpu)
last_cpu          129 arch/arm/mach-tegra/pm.c 	bool last_cpu = false;
last_cpu          139 arch/arm/mach-tegra/pm.c 		last_cpu = true;
last_cpu          144 arch/arm/mach-tegra/pm.c 	return last_cpu;
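The arch/arm/mach-tegra hits track whether the calling CPU is the last one to enter the LP2 low-power state. A sketch of that check using a C11 atomic counter in place of the kernel's cpumask bookkeeping; enter_lp2(), leave_lp2() and NUM_CPUS are illustrative only.

#include <stdatomic.h>
#include <stdbool.h>

#define NUM_CPUS 4

static atomic_int cpus_in_lp2;

static bool enter_lp2(void)
{
        /* fetch_add returns the old count, so +1 is the count after us */
        bool last_cpu = (atomic_fetch_add(&cpus_in_lp2, 1) + 1 == NUM_CPUS);

        return last_cpu;        /* true only for the final CPU to arrive */
}

static void leave_lp2(void)
{
        atomic_fetch_sub(&cpus_in_lp2, 1);
}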
last_cpu           71 arch/ia64/include/asm/switch_to.h 		      		      task_thread_info(current)->last_cpu))) { \
last_cpu           72 arch/ia64/include/asm/switch_to.h 		task_thread_info(current)->last_cpu = task_cpu(current);       \
last_cpu           28 arch/ia64/include/asm/thread_info.h 	__u32 last_cpu;			/* Last CPU thread ran on */
last_cpu           75 arch/powerpc/include/asm/kvm_book3s.h 	u16 last_cpu;
last_cpu          151 arch/powerpc/kvm/mpic.c 	int last_cpu;
last_cpu          449 arch/powerpc/kvm/mpic.c 	if (src->destmask == (1 << src->last_cpu)) {
last_cpu          451 arch/powerpc/kvm/mpic.c 		IRQ_local_pipe(opp, src->last_cpu, n_IRQ, active, was_active);
last_cpu          462 arch/powerpc/kvm/mpic.c 		for (i = src->last_cpu + 1; i != src->last_cpu; i++) {
last_cpu          469 arch/powerpc/kvm/mpic.c 				src->last_cpu = i;
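The arch/powerpc/kvm/mpic.c hits show a distribution scheme with a fast path: if the destination mask names exactly the previously picked CPU and nothing else, deliver straight to it, otherwise rotate through the mask starting after last_cpu. A hedged sketch of that shape; struct irq_source, route_irq() and deliver() here are stand-ins, not the KVM MPIC code.

#include <stdint.h>

#define NUM_CPUS 8

struct irq_source {
        uint32_t destmask;      /* one bit per destination CPU */
        int      last_cpu;      /* CPU picked on the previous delivery */
};

static void deliver(int cpu, int n_irq)
{
        (void)cpu;              /* placeholder for the real delivery */
        (void)n_irq;
}

static void route_irq(struct irq_source *src, int n_irq)
{
        if (src->destmask == (1u << src->last_cpu)) {
                /* single possible destination: skip the search */
                deliver(src->last_cpu, n_irq);
                return;
        }

        for (int i = 1; i <= NUM_CPUS; i++) {
                int cpu = (src->last_cpu + i) % NUM_CPUS;

                if (src->destmask & (1u << cpu)) {
                        src->last_cpu = cpu;
                        deliver(cpu, n_irq);
                        return;
                }
        }
}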
last_cpu         1214 arch/powerpc/xmon/xmon.c 	unsigned long cpu, first_cpu, last_cpu;
last_cpu         1220 arch/powerpc/xmon/xmon.c 		last_cpu = first_cpu = NR_CPUS;
last_cpu         1223 arch/powerpc/xmon/xmon.c 				if (cpu == last_cpu + 1) {
last_cpu         1224 arch/powerpc/xmon/xmon.c 					last_cpu = cpu;
last_cpu         1226 arch/powerpc/xmon/xmon.c 					if (last_cpu != first_cpu)
last_cpu         1227 arch/powerpc/xmon/xmon.c 						printf("-0x%lx", last_cpu);
last_cpu         1228 arch/powerpc/xmon/xmon.c 					last_cpu = first_cpu = cpu;
last_cpu         1233 arch/powerpc/xmon/xmon.c 		if (last_cpu != first_cpu)
last_cpu         1234 arch/powerpc/xmon/xmon.c 			printf("-0x%lx", last_cpu);
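The arch/powerpc/xmon/xmon.c hits fold consecutive CPU numbers into printed ranges, opening a run with " 0x%lx" and closing it with "-0x%lx" only when the run spans more than one CPU. A standalone sketch of the same loop over a plain bool array; print_cpu_ranges() and NR_CPUS here are local to the example.

#include <stdbool.h>
#include <stdio.h>

#define NR_CPUS 32

static void print_cpu_ranges(const bool *present)
{
        unsigned long cpu, first_cpu, last_cpu;

        last_cpu = first_cpu = NR_CPUS;         /* sentinel: no run open yet */
        for (cpu = 0; cpu < NR_CPUS; cpu++) {
                if (!present[cpu])
                        continue;
                if (cpu == last_cpu + 1) {
                        last_cpu = cpu;         /* extend the current run */
                } else {
                        if (last_cpu != first_cpu)
                                printf("-0x%lx", last_cpu);
                        last_cpu = first_cpu = cpu;
                        printf(" 0x%lx", cpu);
                }
        }
        if (last_cpu != first_cpu)
                printf("-0x%lx", last_cpu);     /* close a trailing run */
        printf("\n");
}

With CPUs 0, 1, 2 and 5 set, this prints " 0x0-0x2 0x5".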
last_cpu          193 arch/sparc/kernel/cpumap.c 	int n, id, cpu, prev_cpu, last_cpu, level;
last_cpu          233 arch/sparc/kernel/cpumap.c 	for (last_cpu = (num_possible_cpus() - 1); last_cpu >= 0; last_cpu--) {
last_cpu          234 arch/sparc/kernel/cpumap.c 		if (cpu_online(last_cpu))
last_cpu          238 arch/sparc/kernel/cpumap.c 	while (++cpu <= last_cpu) {
last_cpu          250 arch/sparc/kernel/cpumap.c 			if ((id != prev_id[level]) || (cpu == last_cpu)) {
last_cpu          256 arch/sparc/kernel/cpumap.c 				if (cpu == last_cpu)
last_cpu          268 arch/sparc/kernel/cpumap.c 					    (cpu == last_cpu) ? cpu : prev_cpu;
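The arch/sparc/kernel/cpumap.c hits scan downward from the highest possible CPU to find the highest online one, so the later walk knows which iteration is the last. A small sketch, with a bool array standing in for cpu_online():

#include <stdbool.h>

#define NUM_CPUS 16

static int highest_online_cpu(const bool *online)
{
        int last_cpu;

        for (last_cpu = NUM_CPUS - 1; last_cpu >= 0; last_cpu--)
                if (online[last_cpu])
                        break;

        return last_cpu;        /* -1 when no CPU is online */
}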
last_cpu          604 arch/x86/events/core.c 	event->hw.last_cpu = -1;
last_cpu         1064 arch/x86/events/core.c 	hwc->last_cpu = smp_processor_id();
last_cpu         1107 arch/x86/events/core.c 		hwc->last_cpu == smp_processor_id() &&
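The arch/x86/events/core.c hits (together with the include/linux/perf_event.h field further down) cache the CPU on which a counter assignment was made, initialized to -1, so rescheduling on the same CPU can take a fast path. A userspace sketch of that check built on sched_getcpu(); struct hw_counter and reuse_or_assign() are illustrative, not the perf core API.

#define _GNU_SOURCE
#include <sched.h>

struct hw_counter {
        int last_cpu;           /* CPU the cached assignment belongs to, -1 = none */
        int assigned_idx;       /* counter index chosen back then, -1 = none */
};

static int current_cpu(void)
{
        return sched_getcpu();  /* userspace stand-in for smp_processor_id() */
}

static int reuse_or_assign(struct hw_counter *hwc)
{
        /* fast path: still on the CPU that owns the cached assignment */
        if (hwc->last_cpu == current_cpu() && hwc->assigned_idx >= 0)
                return hwc->assigned_idx;

        /* slow path: pick a new counter and remember where it was picked */
        hwc->assigned_idx = 0;  /* placeholder for a real allocation */
        hwc->last_cpu = current_cpu();
        return hwc->assigned_idx;
}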
last_cpu          507 arch/x86/include/asm/fpu/internal.h 	fpu->last_cpu = -1;
last_cpu          512 arch/x86/include/asm/fpu/internal.h 	return fpu == this_cpu_read(fpu_fpregs_owner_ctx) && cpu == fpu->last_cpu;
last_cpu          545 arch/x86/include/asm/fpu/internal.h 		fpu->last_cpu = cpu;
last_cpu          576 arch/x86/include/asm/fpu/internal.h 			old_fpu->last_cpu = -1;
last_cpu          578 arch/x86/include/asm/fpu/internal.h 			old_fpu->last_cpu = cpu;
last_cpu          294 arch/x86/include/asm/fpu/types.h 	unsigned int			last_cpu;
last_cpu          172 arch/x86/kernel/fpu/core.c 	dst_fpu->last_cpu = -1;
last_cpu          370 arch/x86/kernel/fpu/core.c 	fpu->last_cpu = smp_processor_id();
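The arch/x86 FPU hits above implement a two-sided ownership test: register state cached in a CPU is reused only if that CPU's owner pointer still points at this context and the context still records that CPU as its last_cpu. A minimal sketch of the idea; the names below are stand-ins for the kernel's fpu_fpregs_owner_ctx machinery.

#include <stdbool.h>

#define NUM_CPUS 8

struct fpu_ctx {
        int last_cpu;                           /* -1: registers live nowhere */
};

/* per-CPU pointer to the context whose registers are loaded on that CPU */
static struct fpu_ctx *fpregs_owner[NUM_CPUS];

static bool fpregs_state_valid(struct fpu_ctx *fpu, int cpu)
{
        return fpu == fpregs_owner[cpu] && cpu == fpu->last_cpu;
}

static void fpregs_activate(struct fpu_ctx *fpu, int cpu)
{
        fpregs_owner[cpu] = fpu;
        fpu->last_cpu = cpu;
}

static void fpregs_deactivate(struct fpu_ctx *fpu)
{
        fpu->last_cpu = -1;     /* force a reload on next use */
}

Both halves of the test matter: another context may have taken over the CPU's registers since, or this context may have run on a different CPU and left the cached copy stale.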
last_cpu          248 arch/x86/kvm/svm.c 	unsigned int last_cpu;
last_cpu         5073 arch/x86/kvm/svm.c 	    svm->last_cpu == cpu)
last_cpu         5076 arch/x86/kvm/svm.c 	svm->last_cpu = cpu;
last_cpu           57 drivers/crypto/caam/qi.c static DEFINE_PER_CPU(int, last_cpu);
last_cpu          432 drivers/crypto/caam/qi.c 		int *pcpu = &get_cpu_var(last_cpu);
last_cpu          439 drivers/crypto/caam/qi.c 		put_cpu_var(last_cpu);
last_cpu          137 drivers/nvme/target/tcp.c 	int			last_cpu;
last_cpu         1502 drivers/nvme/target/tcp.c 	port->last_cpu = cpumask_next_wrap(port->last_cpu,
last_cpu         1504 drivers/nvme/target/tcp.c 	queue->cpu = port->last_cpu;
last_cpu         1602 drivers/nvme/target/tcp.c 	port->last_cpu = -1;
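The drivers/nvme/target/tcp.c hits spread accepted queues across online CPUs: last_cpu starts at -1 and each new queue takes the next online CPU after it, wrapping around. The sketch below approximates the effect of that cpumask_next_wrap() call with a plain bitmask; next_cpu_wrap() and struct port are illustrative.

#include <stdint.h>

#define NUM_CPUS 8

struct port {
        int last_cpu;           /* seeded with -1, as the driver does */
};

/* next set bit strictly after 'prev', wrapping around; -1 if the mask is empty */
static int next_cpu_wrap(int prev, uint32_t online_mask)
{
        for (int i = 1; i <= NUM_CPUS; i++) {
                int cpu = (prev + i) % NUM_CPUS;

                if (online_mask & (1u << cpu))
                        return cpu;
        }
        return -1;
}

static int assign_queue_cpu(struct port *port, uint32_t online_mask)
{
        port->last_cpu = next_cpu_wrap(port->last_cpu, online_mask);
        return port->last_cpu;  /* what the driver stores in queue->cpu */
}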
last_cpu          109 drivers/opp/cpu.c 				      int last_cpu)
last_cpu          117 drivers/opp/cpu.c 		if (cpu == last_cpu)
last_cpu          218 drivers/opp/opp.h void _dev_pm_opp_cpumask_remove_table(const struct cpumask *cpumask, int last_cpu);
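In the drivers/opp hits, last_cpu is an exclusive bound on an error path: the cleanup walks the CPUs and removes the table for every CPU handled before the one that failed. A hedged sketch of that shape; setup_cpu() and teardown_cpu() are hypothetical stand-ins for the OPP table calls.

#define NUM_CPUS 4

static int  setup_cpu(int cpu)    { (void)cpu; return 0; }      /* 0 = success */
static void teardown_cpu(int cpu) { (void)cpu; }

static void remove_tables(int last_cpu)
{
        for (int cpu = 0; cpu < NUM_CPUS; cpu++) {
                if (cpu == last_cpu)            /* stop before the CPU that failed */
                        break;
                teardown_cpu(cpu);
        }
}

static int setup_all(void)
{
        for (int cpu = 0; cpu < NUM_CPUS; cpu++) {
                if (setup_cpu(cpu)) {
                        remove_tables(cpu);     /* this cpu plays the last_cpu role */
                        return -1;
                }
        }
        return 0;
}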
last_cpu         1271 drivers/scsi/lpfc/lpfc_init.c 			if (eqcnt[eq->last_cpu] < 2)
last_cpu         1272 drivers/scsi/lpfc/lpfc_init.c 				eqcnt[eq->last_cpu]++;
last_cpu         1292 drivers/scsi/lpfc/lpfc_init.c 			if (eq->last_cpu != i) {
last_cpu         1294 drivers/scsi/lpfc/lpfc_init.c 						      eq->last_cpu);
last_cpu         8838 drivers/scsi/lpfc/lpfc_init.c 		qdesc->last_cpu = qdesc->chann;
last_cpu         8843 drivers/scsi/lpfc/lpfc_init.c 		eqi = per_cpu_ptr(phba->sli4_hba.eq_info, qdesc->last_cpu);
last_cpu         14251 drivers/scsi/lpfc/lpfc_sli.c 	fpeq->last_cpu = raw_smp_processor_id();
last_cpu          146 drivers/scsi/lpfc/lpfc_sli4.h 	uint16_t last_cpu;	/* most recent cpu */
last_cpu          293 drivers/xen/xen-acpi-processor.c 	unsigned int i, last_cpu, max_acpi_id = 0;
last_cpu          303 drivers/xen/xen-acpi-processor.c 	last_cpu = op.u.pcpu_info.max_present;
last_cpu          304 drivers/xen/xen-acpi-processor.c 	for (i = 0; i <= last_cpu; i++) {
last_cpu          131 include/linux/perf_event.h 			int		last_cpu;
last_cpu          176 tools/perf/builtin-ftrace.c 	int last_cpu;
last_cpu          178 tools/perf/builtin-ftrace.c 	last_cpu = cpu_map__cpu(cpumap, cpumap->nr - 1);
last_cpu          179 tools/perf/builtin-ftrace.c 	mask_size = last_cpu / 4 + 2; /* one more byte for EOS */
last_cpu          180 tools/perf/builtin-ftrace.c 	mask_size += last_cpu / 32; /* ',' is needed for every 32th cpus */
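The tools/perf/builtin-ftrace.c hits size a buffer for a cpumask printed in hex: one hex digit per 4 CPUs, a trailing NUL, and a ',' separator per 32-CPU word. The same arithmetic, wrapped in an illustrative helper:

#include <stddef.h>

static size_t cpumask_hex_size(int last_cpu)
{
        size_t mask_size;

        mask_size  = last_cpu / 4 + 2;  /* hex digits covering 0..last_cpu, plus NUL */
        mask_size += last_cpu / 32;     /* one ',' per extra 32-CPU word */
        return mask_size;
}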
last_cpu          527 tools/perf/util/cpumap.c 	int last_cpu = cpu_map__cpu(map, map->nr - 1);
last_cpu          532 tools/perf/util/cpumap.c 	bitmap = zalloc(last_cpu / 8 + 1);
last_cpu          543 tools/perf/util/cpumap.c 	for (cpu = last_cpu / 4 * 4; cpu >= 0; cpu -= 4) {
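The tools/perf/util/cpumap.c hits allocate a zeroed bitmap indexed by CPU number; last_cpu / 8 is the index of the byte holding bit last_cpu, so last_cpu / 8 + 1 bytes cover CPUs 0..last_cpu. A small sketch with calloc() standing in for zalloc(); the helper names are local to the example.

#include <stdlib.h>

static unsigned char *alloc_cpu_bitmap(int last_cpu)
{
        return calloc(last_cpu / 8 + 1, 1);     /* zero-filled, like zalloc() */
}

static void set_cpu_bit(unsigned char *bitmap, int cpu)
{
        bitmap[cpu / 8] |= 1u << (cpu % 8);
}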
last_cpu          144 tools/virtio/vringh_test.c 	unsigned int first_cpu, last_cpu;
last_cpu          168 tools/virtio/vringh_test.c 	find_cpus(&first_cpu, &last_cpu);
last_cpu          169 tools/virtio/vringh_test.c 	printf("Using CPUS %u and %u\n", first_cpu, last_cpu);