Home
last modified time | relevance | path

Searched refs: kvm_vcpu_get_hsr (Results 1 – 9 of 9) sorted by relevance

/linux-4.1.27/arch/arm/include/asm/
Dkvm_emulate.h84 static inline u32 kvm_vcpu_get_hsr(struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
106 return kvm_vcpu_get_hsr(vcpu) & HSR_ISV; in kvm_vcpu_dabt_isvalid()
111 return kvm_vcpu_get_hsr(vcpu) & HSR_WNR; in kvm_vcpu_dabt_iswrite()
116 return kvm_vcpu_get_hsr(vcpu) & HSR_SSE; in kvm_vcpu_dabt_issext()
121 return (kvm_vcpu_get_hsr(vcpu) & HSR_SRT_MASK) >> HSR_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
126 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_EA; in kvm_vcpu_dabt_isextabt()
131 return kvm_vcpu_get_hsr(vcpu) & HSR_DABT_S1PTW; in kvm_vcpu_dabt_iss1tw()
137 switch ((kvm_vcpu_get_hsr(vcpu) >> 22) & 0x3) { in kvm_vcpu_dabt_get_as()
153 return kvm_vcpu_get_hsr(vcpu) & HSR_IL; in kvm_vcpu_trap_il_is32bit()
158 return kvm_vcpu_get_hsr(vcpu) >> HSR_EC_SHIFT; in kvm_vcpu_trap_get_class()
[all …]
/linux-4.1.27/arch/arm64/include/asm/
Dkvm_emulate.h131 static inline u32 kvm_vcpu_get_hsr(const struct kvm_vcpu *vcpu) in kvm_vcpu_get_hsr() function
148 return kvm_vcpu_get_hsr(vcpu) & ESR_ELx_xVC_IMM_MASK; in kvm_vcpu_hvc_get_imm()
153 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_ISV); in kvm_vcpu_dabt_isvalid()
158 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_WNR); in kvm_vcpu_dabt_iswrite()
163 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SSE); in kvm_vcpu_dabt_issext()
168 return (kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SRT_MASK) >> ESR_ELx_SRT_SHIFT; in kvm_vcpu_dabt_get_rd()
173 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_EA); in kvm_vcpu_dabt_isextabt()
178 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_S1PTW); in kvm_vcpu_dabt_iss1tw()
183 return 1 << ((kvm_vcpu_get_hsr(vcpu) & ESR_ELx_SAS) >> ESR_ELx_SAS_SHIFT); in kvm_vcpu_dabt_get_as()
189 return !!(kvm_vcpu_get_hsr(vcpu) & ESR_ELx_IL); in kvm_vcpu_trap_il_is32bit()
[all …]
/linux-4.1.27/arch/arm/kvm/
Dhandle_exit.c65 kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_pabt_hyp()
73 kvm_vcpu_get_hfar(vcpu), kvm_vcpu_get_hsr(vcpu)); in handle_dabt_hyp()
90 if (kvm_vcpu_get_hsr(vcpu) & HSR_WFI_IS_WFE) { in kvm_handle_wfx()
128 (unsigned int)kvm_vcpu_get_hsr(vcpu)); in kvm_get_exit_handler()
Dcoproc.c477 params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_64()
478 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_64()
479 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_64()
482 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 16) & 0xf; in kvm_handle_cp15_64()
484 params.Rt2 = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_64()
509 params.CRm = (kvm_vcpu_get_hsr(vcpu) >> 1) & 0xf; in kvm_handle_cp15_32()
510 params.Rt1 = (kvm_vcpu_get_hsr(vcpu) >> 5) & 0xf; in kvm_handle_cp15_32()
511 params.is_write = ((kvm_vcpu_get_hsr(vcpu) & 1) == 0); in kvm_handle_cp15_32()
514 params.CRn = (kvm_vcpu_get_hsr(vcpu) >> 10) & 0xf; in kvm_handle_cp15_32()
515 params.Op1 = (kvm_vcpu_get_hsr(vcpu) >> 14) & 0x7; in kvm_handle_cp15_32()
[all …]
Demulate.c181 if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid()
187 if ((kvm_vcpu_get_hsr(vcpu) & HSR_CV) >> HSR_CV_SHIFT) in kvm_condition_valid()
188 cond = (kvm_vcpu_get_hsr(vcpu) & HSR_COND) >> HSR_COND_SHIFT; in kvm_condition_valid()
Dmmu.c1409 trace_kvm_guest_fault(*vcpu_pc(vcpu), kvm_vcpu_get_hsr(vcpu), in kvm_handle_guest_abort()
1419 (unsigned long)kvm_vcpu_get_hsr(vcpu)); in kvm_handle_guest_abort()
/linux-4.1.27/arch/arm64/kvm/
Demulate.c57 u32 esr = kvm_vcpu_get_hsr(vcpu); in kvm_vcpu_get_condition()
75 if (kvm_vcpu_get_hsr(vcpu) >> 30) in kvm_condition_valid32()
Dhandle_exit.c72 if (kvm_vcpu_get_hsr(vcpu) & ESR_ELx_WFx_ISS_WFE) { in kvm_handle_wfx()
103 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_get_exit_handler()
Dsys_regs.c832 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_64()
888 u32 hsr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_cp_32()
1000 unsigned long esr = kvm_vcpu_get_hsr(vcpu); in kvm_handle_sys_reg()