vcpu_is_preempted   42 arch/powerpc/include/asm/spinlock.h #define vcpu_is_preempted vcpu_is_preempted
vcpu_is_preempted   80 arch/x86/hyperv/hv_spinlock.c 	pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(hv_vcpu_is_preempted);
vcpu_is_preempted  662 arch/x86/include/asm/paravirt.h 	return PVOP_CALLEE1(bool, lock.vcpu_is_preempted, cpu);
vcpu_is_preempted  320 arch/x86/include/asm/paravirt_types.h 	struct paravirt_callee_save vcpu_is_preempted;
vcpu_is_preempted   58 arch/x86/include/asm/qspinlock.h #define vcpu_is_preempted vcpu_is_preempted
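The two "#define vcpu_is_preempted vcpu_is_preempted" hits above (powerpc spinlock.h and x86 qspinlock.h) are how an architecture opts out of the generic fallback: defining the macro to its own name makes the #ifndef guard in include/linux/sched.h a no-op. A minimal sketch of the x86 shape, assuming CONFIG_PARAVIRT_SPINLOCKS, where the inline simply forwards to the paravirt hook (shape only, not the exact kernel text):

/* Sketch: defining the macro suppresses the generic fallback in sched.h,
 * and the inline forwards to pv_ops.lock.vcpu_is_preempted through the
 * PVOP_CALLEE1 wrapper seen in arch/x86/include/asm/paravirt.h above. */
#ifdef CONFIG_PARAVIRT_SPINLOCKS
#define vcpu_is_preempted vcpu_is_preempted
static inline bool vcpu_is_preempted(long cpu)
{
	return pv_vcpu_is_preempted(cpu);
}
#endif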
vcpu_is_preempted  523 arch/x86/kernel/kvm.c 		if (vcpu_is_preempted(cpu)) {
vcpu_is_preempted  853 arch/x86/kernel/kvm.c 		pv_ops.lock.vcpu_is_preempted =
vcpu_is_preempted   32 arch/x86/kernel/paravirt-spinlocks.c 	return pv_ops.lock.vcpu_is_preempted.func ==
vcpu_is_preempted  445 arch/x86/kernel/paravirt.c 	.lock.vcpu_is_preempted		=
vcpu_is_preempted   72 arch/x86/kernel/paravirt_patch.c 	unsigned char vcpu_is_preempted[2];
vcpu_is_preempted   76 arch/x86/kernel/paravirt_patch.c 	.vcpu_is_preempted	= { 0x31, 0xc0 },	// xor %eax, %eax
vcpu_is_preempted  116 arch/x86/kernel/paravirt_patch.c 	case PARAVIRT_PATCH(lock.vcpu_is_preempted):
vcpu_is_preempted  118 arch/x86/kernel/paravirt_patch.c 			return PATCH(lock, vcpu_is_preempted, insn_buff, len);
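The paravirt_patch.c hits show the bare-metal case: at boot the indirect pv_ops call site is patched in place with the two-byte sequence 0x31 0xc0 ("xor %eax, %eax"), which is an inlined "return false" for the bool return value. A hedged sketch of such a byte template, with illustrative names rather than the kernel's exact structure:

/* Hedged sketch of a native patch template; demo_* names are illustrative. */
struct demo_patch_lock {
	unsigned char vcpu_is_preempted[2];
};

static const struct demo_patch_lock demo_patch_lock = {
	.vcpu_is_preempted = { 0x31, 0xc0 },	/* xor %eax, %eax => always false */
};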
vcpu_is_preempted  135 arch/x86/xen/spinlock.c 	pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(xen_vcpu_stolen);
vcpu_is_preempted 1846 include/linux/sched.h #ifndef vcpu_is_preempted
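For every other configuration, include/linux/sched.h supplies the fallback that the #ifndef at line 1846 guards: without an architecture or hypervisor hint, a vCPU is never reported as preempted, so the locking code below behaves exactly as on bare metal. A minimal sketch of that pattern:

/* Sketch of the generic fallback guarded by #ifndef vcpu_is_preempted. */
#ifndef vcpu_is_preempted
static inline bool vcpu_is_preempted(int cpu)
{
	return false;
}
#endif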
vcpu_is_preempted  569 kernel/locking/mutex.c 				vcpu_is_preempted(task_cpu(owner))) {
vcpu_is_preempted  605 kernel/locking/mutex.c 		retval = owner->on_cpu && !vcpu_is_preempted(task_cpu(owner));
vcpu_is_preempted  143 kernel/locking/osq_lock.c 		if (need_resched() || vcpu_is_preempted(node_cpu(node->prev)))
vcpu_is_preempted  653 kernel/locking/rwsem.c 	return owner->on_cpu && !vcpu_is_preempted(task_cpu(owner));
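The mutex, osq_lock and rwsem hits are the consumers: optimistic spinning is only worthwhile while the lock owner is genuinely running, so each spin loop bails out once the owner's (virtual) CPU is reported preempted by the host. A hedged sketch of that check, using a hypothetical helper name rather than a kernel symbol:

#include <linux/sched.h>

/* Hedged sketch of the owner-is-running test shared by the mutex/rwsem
 * spinners above; demo_owner_on_cpu() is a hypothetical name. */
static inline bool demo_owner_on_cpu(struct task_struct *owner)
{
	/* owner->on_cpu: the owner is currently executing on some CPU.
	 * vcpu_is_preempted(): but that CPU may itself be a vCPU the host
	 * has scheduled out, in which case spinning just burns cycles. */
	return owner->on_cpu && !vcpu_is_preempted(task_cpu(owner));
}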
vcpu_is_preempted 4645 kernel/sched/core.c 	if (vcpu_is_preempted(cpu))
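On the provider side, the x86 backends above (kvm.c, xen/spinlock.c, hv_spinlock.c) install their own callback into pv_ops at boot. A hedged sketch of that registration with a hypothetical callback; a real backend would consult per-vCPU state shared with the hypervisor (for example KVM's steal-time preempted flag) instead of returning false:

#include <asm/paravirt.h>

/* Hedged sketch of a hypervisor backend overriding the hook; the demo_*
 * names are illustrative, not kernel symbols. */
static __visible bool demo_vcpu_is_preempted(long cpu)
{
	/* A real implementation reads hypervisor-shared per-vCPU state here. */
	return false;
}
PV_CALLEE_SAVE_REGS_THUNK(demo_vcpu_is_preempted);

static void __init demo_spinlock_init(void)
{
	/* Mirrors the kvm.c:853, xen/spinlock.c:135 and hv_spinlock.c:80
	 * assignments listed above. */
	pv_ops.lock.vcpu_is_preempted = PV_CALLEE_SAVE(demo_vcpu_is_preempted);
}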