/*
 * KVM/arm64 hypervisor entry points and exception vectors (EL2).
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_mmu.h>
#include <asm/mmu.h>

        .text
        .pushsection    .hyp.text, "ax"

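/*
 * do_el2_call: call the EL2 function whose address is in x0, passing
 * x1-x3 as its first three arguments and preserving lr across the call.
 */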
.macro do_el2_call
        /*
         * Shuffle the parameters before calling the function
         * pointed to in x0. Assumes the parameters are in x1-x3.
         */
        str     lr, [sp, #-16]!
        mov     lr, x0
        mov     x0, x1
        mov     x1, x2
        mov     x2, x3
        blr     lr
        ldr     lr, [sp], #16
.endm

el1_sync:                               // Synchronous exception from a lower EL

        mrs     x0, esr_el2
        lsr     x0, x0, #ESR_ELx_EC_SHIFT
        cmp     x0, #ESR_ELx_EC_HVC64
        ccmp    x0, #ESR_ELx_EC_HVC32, #4, ne
        b.ne    el1_trap

        mrs     x1, vttbr_el2           // If vttbr_el2 is non-zero, a guest
        cbnz    x1, el1_hvc_guest       // is loaded: the HVC came from it

        /* Here, we're pretty sure the host called HVC. */
        ldp     x0, x1, [sp], #16

        /* Check for a stub HVC call */
        cmp     x0, #HVC_STUB_HCALL_NR
        b.hs    1f

        /*
         * Compute the idmap address of __kvm_handle_stub_hvc and
         * jump there. Since we use kimage_voffset, do not use the
         * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
         * (loaded from the literal pool).
         *
         * x0-x4 are left untouched, as they may carry stub parameters.
         */
        ldr     x5, =__kvm_handle_stub_hvc
        ldr_l   x6, kimage_voffset

        /* x5 = __pa(x5) */
        sub     x5, x5, x6
        br      x5

1:
        /* Perform the EL2 call */
        kern_hyp_va     x0
        do_el2_call

        eret
        sb

el1_hvc_guest:
        /*
         * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
         * The workaround has already been applied on the host, so
         * just return to the guest as quickly as possible. x1 is not
         * restored, as SMCCC allows it to be clobbered by the call.
         */
        ldr     x1, [sp]                                // Guest's x0
        eor     w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
        cbz     w1, wa_epilogue

        /* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
        eor     w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
                          ARM_SMCCC_ARCH_WORKAROUND_2)
        cbnz    w1, el1_trap

#ifdef CONFIG_ARM64_SSBD
alternative_cb  arm64_enable_wa2_handling
        b       wa2_end
alternative_cb_end
        get_vcpu_ptr    x2, x0
        ldr     x0, [x2, #VCPU_WORKAROUND_FLAGS]

        /* Sanitize the argument and update the guest's flags */
        ldr     x1, [sp, #8]                    // Guest's x1
        clz     w1, w1                          // 32 if zero, 0..31 otherwise
        lsr     w1, w1, #5                      // 1 if zero, 0 otherwise
        eor     w1, w1, #1                      // w1 = (guest x1 != 0)
        bfi     x0, x1, #VCPU_WORKAROUND_2_FLAG_SHIFT, #1
        str     x0, [x2, #VCPU_WORKAROUND_FLAGS]

        /* Check that this CPU actually needs to perform the call */
        hyp_ldr_this_cpu x0, arm64_ssbd_callback_required, x2
        cbz     x0, wa2_end

        mov     w0, #ARM_SMCCC_ARCH_WORKAROUND_2
        smc     #0

        /* Don't leak data from the SMC call */
        mov     x3, xzr
wa2_end:
        mov     x2, xzr
        mov     x1, xzr
#endif

wa_epilogue:
        mov     x0, xzr                         // SMCCC success
        add     sp, sp, #16
        eret
        sb

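/*
 * Guest exit trampolines: x0 carries the exception code for the run
 * loop, x1 the vcpu pointer recovered by get_vcpu_ptr.
 */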
el1_trap:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_TRAP
        b       __guest_exit

el1_irq:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_IRQ
        b       __guest_exit

el1_error:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_EL1_SERROR
        b       __guest_exit

el2_sync:
        /* Check for an illegal exception return taken at EL2 */
        mrs     x0, spsr_el2

        /* Anything other than an illegal exception return is fatal */
        tst     x0, #PSR_IL_BIT
        b.eq    __hyp_panic

        /* Let's attempt a recovery from the illegal exception return */
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_IL
        b       __guest_exit

el2_error:
        ldp     x0, x1, [sp], #16

        /*
         * Only two possibilities:
         * 1) Either we come from the exit path, having just unmasked
         *    PSTATE.A: change the return code to an EL2 fault, and
         *    carry on, as we're already in a sane state to handle it.
         * 2) Or we come from anywhere else, and that's a bug: we panic.
         *
         * For (1), x0 contains the original return code and x1 doesn't
         * contain anything meaningful at that stage: they can be reused
         * as temporaries.
         * For (2), it hardly matters what they hold.
         */
        mrs     x0, elr_el2
        adr     x1, abort_guest_exit_start
        cmp     x0, x1
        adr     x1, abort_guest_exit_end
        ccmp    x0, x1, #4, ne
        b.ne    __hyp_panic
        mov     x0, #(1 << ARM_EXIT_WITH_SERROR_BIT)
        eret
        sb

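/*
 * Force an exception return to EL1h with all exceptions masked and
 * ELR pointing at panic(), so the host reports the failure.
 */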
ENTRY(__hyp_do_panic)
        mov     lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
                      PSR_MODE_EL1h)
        msr     spsr_el2, lr
        ldr     lr, =panic
        msr     elr_el2, lr
        eret
        sb
ENDPROC(__hyp_do_panic)

ENTRY(__hyp_panic)
        get_host_ctxt x0, x1
        b       hyp_panic
ENDPROC(__hyp_panic)

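/*
 * invalid_vector: emit a vector entry point that simply branches to
 * the given target (the panic path by default).
 */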
.macro invalid_vector   label, target = __hyp_panic
        .align  2
\label:
        b \target
ENDPROC(\label)
.endm

        /* None of these should ever happen */
        invalid_vector  el2t_sync_invalid
        invalid_vector  el2t_irq_invalid
        invalid_vector  el2t_fiq_invalid
        invalid_vector  el2t_error_invalid
        invalid_vector  el2h_sync_invalid
        invalid_vector  el2h_irq_invalid
        invalid_vector  el2h_fiq_invalid
        invalid_vector  el1_fiq_invalid

        .ltorg

        .align 11

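/*
 * Each vector entry below starts with a preamble of exactly
 * KVM_VECTOR_PREAMBLE bytes (for valid vectors: an ESB plus the
 * x0/x1 save); the check below enforces that size.
 */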
.macro check_preamble_length start, end
/* kvm_patch_vector_branch() generates branches that skip the preamble */
.if ((\end-\start) != KVM_VECTOR_PREAMBLE)
        .error "KVM vector preamble length mismatch"
.endif
.endm

.macro valid_vect target
        .align 7
661:
        esb
        stp     x0, x1, [sp, #-16]!
662:
        b       \target

check_preamble_length 661b, 662b
.endm

.macro invalid_vect target
        .align 7
661:
        b       \target
        nop
662:
        ldp     x0, x1, [sp], #16
        b       \target

check_preamble_length 661b, 662b
.endm

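/*
 * The KVM/arm64 EL2 vector table: sixteen 128-byte entries, 2KB
 * aligned as required by VBAR_EL2 (see the .align 11 above).
 */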
ENTRY(__kvm_hyp_vector)
        invalid_vect    el2t_sync_invalid       // Synchronous EL2t
        invalid_vect    el2t_irq_invalid        // IRQ EL2t
        invalid_vect    el2t_fiq_invalid        // FIQ EL2t
        invalid_vect    el2t_error_invalid      // Error EL2t

        valid_vect      el2_sync                // Synchronous EL2h
        invalid_vect    el2h_irq_invalid        // IRQ EL2h
        invalid_vect    el2h_fiq_invalid        // FIQ EL2h
        valid_vect      el2_error               // Error EL2h

        valid_vect      el1_sync                // Synchronous 64-bit EL1
        valid_vect      el1_irq                 // IRQ 64-bit EL1
        invalid_vect    el1_fiq_invalid         // FIQ 64-bit EL1
        valid_vect      el1_error               // Error 64-bit EL1

        valid_vect      el1_sync                // Synchronous 32-bit EL1
        valid_vect      el1_irq                 // IRQ 32-bit EL1
        invalid_vect    el1_fiq_invalid         // FIQ 32-bit EL1
        valid_vect      el1_error               // Error 32-bit EL1
ENDPROC(__kvm_hyp_vector)

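/*
 * Indirect vectors: each entry runs the preamble locally, then
 * branches into __kvm_hyp_vector just past that vector's own
 * preamble, either directly or through an address constructed at
 * runtime by kvm_patch_vector_branch().
 */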
#ifdef CONFIG_KVM_INDIRECT_VECTORS
.macro hyp_ventry
        .align 7
1:      esb
        .rept 26
        nop
        .endr

        /*
         * By default the sequence below saves x0/x1 and branches
         * straight into __kvm_hyp_vector, landing just after that
         * vector's preamble (hence the KVM_VECTOR_PREAMBLE offset),
         * since the equivalent work has already been done here.
         *
         * For ARM64_HARDEN_EL2_VECTORS configurations,
         * kvm_patch_vector_branch() rewrites this sequence at boot so
         * that the vector is reached through an address built at
         * runtime, still skipping the preamble.
         */
alternative_cb  kvm_patch_vector_branch
        stp     x0, x1, [sp, #-16]!
        b       __kvm_hyp_vector + (1b - 0b + KVM_VECTOR_PREAMBLE)
        nop
        nop
        nop
alternative_cb_end
.endm

.macro generate_vectors
0:
        .rept 16
        hyp_ventry
        .endr
        .org 0b + SZ_2K         // Safety measure: each slot is exactly 2KB
.endm

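/*
 * BP_HARDEN_EL2_SLOTS copies of the indirect vectors, one 2KB slot
 * each, so that a CPU-specific branch predictor hardening sequence
 * can be installed per slot.
 */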
        .align  11
ENTRY(__bp_harden_hyp_vecs_start)
        .rept BP_HARDEN_EL2_SLOTS
        generate_vectors
        .endr
ENTRY(__bp_harden_hyp_vecs_end)

        .popsection

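/*
 * SMC sequence for the ARM_SMCCC_ARCH_WORKAROUND_1 (Spectre-v2)
 * mitigation: issue the firmware call while preserving x0-x3.
 */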
ENTRY(__smccc_workaround_1_smc_start)
        esb
        sub     sp, sp, #(8 * 4)
        stp     x2, x3, [sp, #(8 * 0)]
        stp     x0, x1, [sp, #(8 * 2)]
        mov     w0, #ARM_SMCCC_ARCH_WORKAROUND_1
        smc     #0
        ldp     x2, x3, [sp, #(8 * 0)]
        ldp     x0, x1, [sp, #(8 * 2)]
        add     sp, sp, #(8 * 4)
ENTRY(__smccc_workaround_1_smc_end)
#endif