
Searched refs: kvm_vcpu_get_hsr (Results 1 – 9 of 9) sorted by relevance

/linux-4.4.14/arch/arm/include/asm/
kvm_emulate.h
96 static inline u32 kvm_vcpu_get_hsr(struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
118 return kvm_vcpu_get_hsr(vcpu) & HSR_ISV; in kvm_vcpu_dabt_isvalid()
123 return kvm_vcpu_get_hsr(vcpu) & HSR_WNR; in kvm_vcpu_dabt_iswrite()
128 return kvm_vcpu_get_hsr(vcpu) & HSR_SSE; in kvm_vcpu_dabt_issext()
133 return (kvm_vcpu_get_hsr(vcpu) & HSR_SRT_MASK) >> HSR_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
138 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_EA; in kvm_vcpu_dabt_isextabt()
143 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_S1PTW; in kvm_vcpu_dabt_iss1tw()
149 switch ((kvm_vcpu_get_hsr(vcpu) >> 22) & 0x3) { in kvm_vcpu_dabt_get_as()
165 return kvm_vcpu_get_hsr(vcpu) & HSR_IL; in kvm_vcpu_trap_il_is32bit()
170 return kvm_vcpu_get_hsr(vcpu) >> HSR_EC_SHIFT; in kvm_vcpu_trap_get_class()
[all …]
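
The helpers excerpted above all follow one pattern: kvm_vcpu_get_hsr() hands back the Hyp Syndrome Register value saved at the trap, and each kvm_vcpu_dabt_*() predicate is a single mask or shift on that word. A minimal stand-alone sketch of that decoding style follows; the bit positions are assumed from the ARMv7 HSR/ISS layout (the authoritative definitions live in arch/arm/include/asm/kvm_arm.h), so treat them as illustrative rather than a copy of the kernel headers.

#include <stdint.h>
#include <stdio.h>

/* Illustrative HSR bit positions (assumed from the ARMv7 HSR/ISS layout;
 * the kernel's authoritative values are in arch/arm/include/asm/kvm_arm.h). */
#define HSR_EC_SHIFT   26          /* exception class, top bits            */
#define HSR_ISV        (1U << 24)  /* instruction syndrome valid           */
#define HSR_SAS_SHIFT  22          /* access size, the ">> 22 & 0x3" above */
#define HSR_SSE        (1U << 21)  /* sign-extend the loaded value         */
#define HSR_SRT_SHIFT  16          /* transfer register                    */
#define HSR_SRT_MASK   (0xfU << HSR_SRT_SHIFT)
#define HSR_WNR        (1U << 6)   /* write-not-read                       */

/* Mirrors the shape of the kvm_vcpu_dabt_*() helpers: one mask/shift each. */
static inline int      dabt_isvalid(uint32_t hsr)  { return !!(hsr & HSR_ISV); }
static inline int      dabt_iswrite(uint32_t hsr)  { return !!(hsr & HSR_WNR); }
static inline int      dabt_issext(uint32_t hsr)   { return !!(hsr & HSR_SSE); }
static inline unsigned dabt_get_rd(uint32_t hsr)   { return (hsr & HSR_SRT_MASK) >> HSR_SRT_SHIFT; }
static inline unsigned dabt_get_as(uint32_t hsr)   { return 1U << ((hsr >> HSR_SAS_SHIFT) & 0x3); }
static inline unsigned trap_get_class(uint32_t hsr){ return hsr >> HSR_EC_SHIFT; }

int main(void)
{
	/* Fabricated syndrome: EC 0x24 (data abort from the guest), ISS valid,
	 * 4-byte write through r3. */
	uint32_t hsr = (0x24U << HSR_EC_SHIFT) | HSR_ISV | HSR_WNR |
		       (2U << HSR_SAS_SHIFT) | (3U << HSR_SRT_SHIFT);

	printf("class=%#x valid=%d write=%d rd=r%u size=%u\n",
	       trap_get_class(hsr), dabt_isvalid(hsr), dabt_iswrite(hsr),
	       dabt_get_rd(hsr), dabt_get_as(hsr));
	return 0;
}

The size decode here uses the 1 << SAS shortcut from the arm64 variant rather than the switch visible at line 149; for 1-, 2- and 4-byte accesses the result is the same.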
/linux-4.4.14/arch/arm64/include/asm/
kvm_emulate.h
139 static inline u32 kvm_vcpu_get_hsr(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
156 return kvm_vcpu_get_hsr(vcpu) & ESR_ELx_xVC_IMM_MASK; in kvm_vcpu_hvc_get_imm()
161 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_ISV); in kvm_vcpu_dabt_isvalid()
166 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_WNR); in kvm_vcpu_dabt_iswrite()
171 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SSE); in kvm_vcpu_dabt_issext()
176 return (kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SRT_MASK) >> ESR_ELx_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
181 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_EA); in kvm_vcpu_dabt_isextabt()
186 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_S1PTW); in kvm_vcpu_dabt_iss1tw()
191 return 1 << ((kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SAS) >> ESR_ELx_SAS_SHIFT); in kvm_vcpu_dabt_get_as()
197 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_IL); in kvm_vcpu_trap_il_is32bit()
[all …]
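
The arm64 accessor is the same idea under ESR_EL2 naming: the helpers normalize the masked bit to 0/1 with !!, and the access-size decode computes 1 << SAS rather than switching on the field as the 32-bit code does. A small sketch of those two details plus the HVC immediate mask, again with assumed bit positions (the real ones are defined in arch/arm64/include/asm/esr.h):

#include <stdint.h>
#include <stdio.h>

/* Illustrative ESR_ELx bit positions (assumed; the kernel's definitions are
 * in arch/arm64/include/asm/esr.h). */
#define ESR_ISV        (1U << 24)
#define ESR_SAS_SHIFT  22
#define ESR_SAS        (0x3U << ESR_SAS_SHIFT)
#define ESR_HVC_IMM    0xffffU     /* HVC/SVC immediate in the low bits */

static inline int      dabt_isvalid(uint32_t esr) { return !!(esr & ESR_ISV); }  /* !! gives 0/1 */
static inline unsigned dabt_get_as(uint32_t esr)  { return 1U << ((esr & ESR_SAS) >> ESR_SAS_SHIFT); }
static inline unsigned hvc_get_imm(uint32_t esr)  { return esr & ESR_HVC_IMM; }

int main(void)
{
	/* Fabricated syndrome: valid 2-byte access; the low bits are reused as
	 * an HVC immediate purely to exercise the mask. */
	uint32_t esr = ESR_ISV | (1U << ESR_SAS_SHIFT) | 0x1234;

	printf("valid=%d size=%u imm=%#x\n",
	       dabt_isvalid(esr), dabt_get_as(esr), hvc_get_imm(esr));
	return 0;
}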
/linux-4.4.14/arch/arm/kvm/
handle_exit.c
65 kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_pabt_hyp()
73 kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_dabt_hyp()
90 if (kvm_vcpu_get_hsr(vcpu) & HSR_WFI_IS_WFE) { in kvm_handle_wfx()
128 (unsigned int)kvm_vcpu_get_hsr(vcpu)); in kvm_get_exit_handler()
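
kvm_handle_wfx() above distinguishes WFE from WFI with a single ISS bit (HSR_WFI_IS_WFE here, ESR_ELx_WFx_ISS_WFE in the arm64 hit further down): WFE only yields the physical CPU, while WFI blocks the vCPU until a wakeup event. A toy sketch of that dispatch shape, with the bit position chosen for illustration:

#include <stdint.h>
#include <stdio.h>

/* Single ISS bit that tells WFE apart from WFI (HSR_WFI_IS_WFE on 32-bit ARM,
 * ESR_ELx_WFx_ISS_WFE on arm64); the position used here is illustrative. */
#define WFI_IS_WFE  (1U << 0)

/* Sketch of the kvm_handle_wfx() dispatch: WFE only yields to another
 * runnable vCPU, WFI blocks this vCPU until a wakeup event arrives. */
static void handle_wfx(uint32_t hsr)
{
	if (hsr & WFI_IS_WFE)
		printf("WFE trap: yield the physical CPU\n");
	else
		printf("WFI trap: block the vCPU until an interrupt\n");
}

int main(void)
{
	handle_wfx(WFI_IS_WFE);  /* guest executed WFE */
	handle_wfx(0);           /* guest executed WFI */
	return 0;
}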
coproc.c
477 params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_64()
478 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_64()
479 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_64()
482 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 16) & 0xf; in kvm_handle_cp15_64()
484 params.Rt2 = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_64()
509 params.CRm = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_32()
510 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_32()
511 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_32()
514 params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_32()
515 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 14) & 0x7; in kvm_handle_cp15_32()
[all …]
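
The coproc.c hits show how a trapped 64-bit cp15 access is decoded: every parameter of the access (register numbers, Op1, direction) is packed into the syndrome and peeled out with fixed shifts. The sketch below mirrors the shifts visible in the excerpt; the struct is an illustrative stand-in for the kernel's coproc_params and is not taken verbatim from coproc.c.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the kernel's coproc_params; only the fields
 * decoded in the excerpt above are included. */
struct cp15_64_params {
	unsigned CRn;       /* (hsr >> 1)  & 0xf */
	unsigned Rt1;       /* (hsr >> 5)  & 0xf */
	unsigned Rt2;       /* (hsr >> 10) & 0xf */
	unsigned Op1;       /* (hsr >> 16) & 0xf */
	int      is_write;  /* bit 0 clear means the guest is writing */
};

/* Mirrors the shifts shown in kvm_handle_cp15_64(). */
static struct cp15_64_params decode_cp15_64(uint32_t hsr)
{
	struct cp15_64_params p = {
		.CRn      = (hsr >> 1)  & 0xf,
		.Rt1      = (hsr >> 5)  & 0xf,
		.Rt2      = (hsr >> 10) & 0xf,
		.Op1      = (hsr >> 16) & 0xf,
		.is_write = ((hsr & 1) == 0),
	};
	return p;
}

int main(void)
{
	/* Fabricated syndrome: write with Op1=0, Rt1=r2, Rt2=r3, CRn field 14. */
	uint32_t hsr = (14U << 1) | (2U << 5) | (3U << 10);
	struct cp15_64_params p = decode_cp15_64(hsr);

	printf("CRn=%u Rt1=r%u Rt2=r%u Op1=%u write=%d\n",
	       p.CRn, p.Rt1, p.Rt2, p.Op1, p.is_write);
	return 0;
}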
emulate.c
181 if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid()
187 if ((kvm_vcpu_get_hsr(vcpu) & HSR_CV) >> HSR_CV_SHIFT) in kvm_condition_valid()
188 cond = (kvm_vcpu_get_hsr(vcpu) & HSR_COND) >> HSR_COND_SHIFT; in kvm_condition_valid()
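
kvm_condition_valid() uses the syndrome twice: trap classes whose top EC bits are set (hsr >> 30 != 0) are unconditional, and otherwise the COND field is only meaningful when the CV bit says so; failing that, the condition has to come from the guest's CPSR. A compact sketch of that decision, with HSR_CV/HSR_COND positions assumed from the ARMv7 layout:

#include <stdint.h>
#include <stdio.h>

/* Illustrative condition-field positions (assumed from the ARMv7 HSR layout;
 * the kernel defines HSR_CV/HSR_COND in arch/arm/include/asm/kvm_arm.h). */
#define HSR_CV_SHIFT    24
#define HSR_CV          (1U << HSR_CV_SHIFT)
#define HSR_COND_SHIFT  20
#define HSR_COND        (0xfU << HSR_COND_SHIFT)

/* Sketch of the kvm_condition_valid() decision: trap classes with the top EC
 * bits set never carry a condition; otherwise COND is only meaningful when CV
 * is set, and the real code falls back to the guest's CPSR when it is not. */
static int condition_field(uint32_t hsr)
{
	if (hsr >> 30)
		return -1;      /* unconditional trap class */
	if (hsr & HSR_CV)
		return (int)((hsr & HSR_COND) >> HSR_COND_SHIFT);
	return -2;              /* not encoded here: consult the CPSR */
}

int main(void)
{
	printf("%d\n", condition_field(HSR_CV | (0xeU << HSR_COND_SHIFT))); /* 14: AL */
	printf("%d\n", condition_field(0x90000000U));                       /* unconditional */
	return 0;
}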
mmu.c
1410 trace_kvm_guest_fault(*vcpu_pc(vcpu), kvm_vcpu_get_hsr(vcpu), in kvm_handle_guest_abort()
1420 (unsigned long)kvm_vcpu_get_hsr(vcpu)); in kvm_handle_guest_abort()
/linux-4.4.14/arch/arm64/kvm/
handle_exit.c
72 if (kvm_vcpu_get_hsr(vcpu) & ESR_ELx_WFx_ISS_WFE) { in kvm_handle_wfx()
99 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_guest_debug()
147 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_get_exit_handler()
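
On both architectures kvm_get_exit_handler() turns the exception class in the top syndrome bits into a table lookup, with a fallback for classes KVM does not expect (the 32-bit hit at line 128 is exactly that fallback printing the unknown HSR). A sketch of that dispatch, with illustrative class numbers:

#include <stdint.h>
#include <stdio.h>

#define EC_SHIFT  26     /* exception class in the top syndrome bits */
#define EC_WFx    0x01   /* class numbers below are illustrative     */
#define EC_HVC64  0x16
#define EC_DABT   0x24
#define EC_MAX    0x3f

typedef int (*exit_handler_fn)(uint32_t hsr);

static int handle_wfx(uint32_t hsr)  { (void)hsr; printf("WFx trap\n");         return 1; }
static int handle_hvc(uint32_t hsr)  { (void)hsr; printf("hypercall\n");        return 1; }
static int handle_dabt(uint32_t hsr) { (void)hsr; printf("guest data abort\n"); return 1; }
static int handle_unknown(uint32_t hsr)
{
	printf("unknown exit, hsr=%#x\n", hsr);
	return 0;
}

/* Sketch of the kvm_get_exit_handler() shape: the class indexes a handler
 * table, and anything unregistered falls through to the "unknown" path that
 * prints the raw syndrome, as in the 32-bit hit at line 128. */
static exit_handler_fn get_exit_handler(uint32_t hsr)
{
	static const exit_handler_fn handlers[EC_MAX + 1] = {
		[EC_WFx]   = handle_wfx,
		[EC_HVC64] = handle_hvc,
		[EC_DABT]  = handle_dabt,
	};
	exit_handler_fn fn = handlers[hsr >> EC_SHIFT];

	return fn ? fn : handle_unknown;
}

int main(void)
{
	uint32_t hsr = (uint32_t)EC_DABT << EC_SHIFT;

	return get_exit_handler(hsr)(hsr) ? 0 : 1;
}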
emulate.c
57 u32 esr = kvm_vcpu_get_hsr(vcpu); in kvm_vcpu_get_condition()
75 if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid32()
sys_regs.c
1056 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_64()
1108 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_32()
1223 unsigned long esr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_sys_reg()
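
kvm_handle_sys_reg() reads the whole syndrome into a local and decodes the trapped MSR/MRS from it: Op0/Op1/CRn/CRm/Op2 identify the system register, Rt names the transfer register, and bit 0 gives the direction. The field positions in the sketch below follow the ARMv8 ISS encoding for trapped MSR/MRS and are shown for illustration only; the kernel's decode in sys_regs.c is the reference.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the kernel's sys_reg_params. Field positions
 * follow the ARMv8 ISS encoding for a trapped MSR/MRS and are shown for
 * illustration; the reference decode is in arch/arm64/kvm/sys_regs.c. */
struct sys_reg_params {
	unsigned Op0, Op1, CRn, CRm, Op2;
	unsigned Rt;        /* general-purpose transfer register        */
	int      is_write;  /* bit 0 clear means MSR (write to the reg) */
};

static struct sys_reg_params decode_sys_reg(uint32_t esr)
{
	struct sys_reg_params p = {
		.Op0      = (esr >> 20) & 0x3,
		.Op2      = (esr >> 17) & 0x7,
		.Op1      = (esr >> 14) & 0x7,
		.CRn      = (esr >> 10) & 0xf,
		.Rt       = (esr >> 5)  & 0x1f,
		.CRm      = (esr >> 1)  & 0xf,
		.is_write = ((esr & 1) == 0),
	};
	return p;
}

int main(void)
{
	/* Fabricated ISS: Op0=3, Op1=0, CRn=1, CRm=0, Op2=0, written via x2. */
	uint32_t esr = (3U << 20) | (1U << 10) | (2U << 5);
	struct sys_reg_params p = decode_sys_reg(esr);

	printf("Op0=%u Op1=%u CRn=%u CRm=%u Op2=%u Rt=x%u write=%d\n",
	       p.Op0, p.Op1, p.CRn, p.CRm, p.Op2, p.Rt, p.is_write);
	return 0;
}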