2 * Copyright (C) 2015 - ARM Ltd
3 * Author: Marc Zyngier <marc.zyngier@arm.com>
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License version 2 as
7 * published by the Free Software Foundation.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program. If not, see <http://www.gnu.org/licenses/>.
18 #include <linux/arm-smccc.h>
19 #include <linux/linkage.h>
21 #include <asm/alternative.h>
22 #include <asm/assembler.h>
23 #include <asm/cpufeature.h>
24 #include <asm/kvm_arm.h>
25 #include <asm/kvm_asm.h>
26 #include <asm/kvm_mmu.h>
28 .macro save_caller_saved_regs_vect
/*
 * Push the AAPCS64 caller-saved GPRs x0-x17 onto the stack as
 * descending pairs (sp stays 16-byte aligned throughout).
 * Must be unwound by restore_caller_saved_regs_vect in the exact
 * reverse order.
 * NOTE(review): the closing .endm is not visible in this chunk.
 */
29 stp x0, x1, [sp, #-16]!
30 stp x2, x3, [sp, #-16]!
31 stp x4, x5, [sp, #-16]!
32 stp x6, x7, [sp, #-16]!
33 stp x8, x9, [sp, #-16]!
34 stp x10, x11, [sp, #-16]!
35 stp x12, x13, [sp, #-16]!
36 stp x14, x15, [sp, #-16]!
37 stp x16, x17, [sp, #-16]!
40 .macro restore_caller_saved_regs_vect
/*
 * Pop the caller-saved GPRs pushed by save_caller_saved_regs_vect,
 * in reverse push order (x16/x17 were pushed last, so popped first).
 * NOTE(review): the remaining ldp pairs (x8/x9 down to x0/x1) and the
 * closing .endm are not visible in this chunk.
 */
41 ldp x16, x17, [sp], #16
42 ldp x14, x15, [sp], #16
43 ldp x12, x13, [sp], #16
44 ldp x10, x11, [sp], #16
53 .pushsection .hyp.text, "ax"
57 * Shuffle the parameters before calling the function
58 * pointed to in x0. Assumes parameters in x[1,2,3].
72 * We used to rely on having an exception return to get
73 * an implicit isb. In the E2H case, we don't have it anymore.
74 * Rather than changing all the leaf functions, just do it here
75 * before returning to the rest of the kernel.
79 ENDPROC(__vhe_hyp_call)
81 el1_sync: // Guest trapped into EL2
/*
 * Synchronous exception vector target. x0/x1 are freed up first so
 * they can be used as scratch while the exception class is decoded.
 */
82 stp x0, x1, [sp, #-16]!
/*
 * NOTE(review): the read of ESR_EL2 into x0 (lines between the push
 * and the shift) is not visible in this chunk; the lsr below assumes
 * x0 already holds the syndrome value.
 */
85 lsr x0, x0, #ESR_ELx_EC_SHIFT // x0 = exception class (EC) field
86 cmp x0, #ESR_ELx_EC_HVC64
87 ccmp x0, #ESR_ELx_EC_HVC32, #4, ne // Z set iff EC is HVC64 or HVC32
/*
 * NOTE(review): the conditional branch consuming the cmp/ccmp result
 * is not visible here.
 */
90 mrs x1, vttbr_el2 // If vttbr is valid, the guest
91 cbnz x1, el1_hvc_guest // called HVC
93 /* Here, we're pretty sure the host called HVC. */
/* NOTE(review): the reload of the pushed x0/x1 is not visible here. */
96 /* Check for a stub HVC call */
97 cmp x0, #HVC_STUB_HCALL_NR
101 * Compute the idmap address of __kvm_handle_stub_hvc and
102 * jump there. Since we use kimage_voffset, do not use the
103 * HYP VA for __kvm_handle_stub_hvc, but the kernel VA instead
104 * (by loading it from the constant pool).
106 * Preserve x0-x4, which may contain stub parameters.
108 ldr x5, =__kvm_handle_stub_hvc // kernel VA, via the literal pool
109 ldr_l x6, kimage_voffset
/*
 * Guest-originated HVC handling, including the SMCCC Spectre/SSBD
 * mitigation fast paths. NOTE(review): the el1_hvc_guest label and
 * several intervening instructions/branches are not visible in this
 * chunk; comments below hedge where context is missing.
 */
117 * Perform the EL2 call
126 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
127 * The workaround has already been applied on the host,
128 * so let's quickly get back to the guest. We don't bother
129 * restoring x1, as it can be clobbered anyway.
131 ldr x1, [sp] // Guest's x0
132 eor w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1 // w1 == 0 iff WA1 call
135 /* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
/* Undo the WA1 xor in one op: w1 == 0 iff the function ID was WA2. */
136 eor w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
137 ARM_SMCCC_ARCH_WORKAROUND_2)
140 #ifdef CONFIG_ARM64_SSBD
141 alternative_cb arm64_enable_wa2_handling
/*
 * NOTE(review): the patchable instruction and alternative_cb_end are
 * not visible here, nor is the load of the vcpu pointer into x2 that
 * the accesses below rely on.
 */
145 ldr x0, [x2, #VCPU_WORKAROUND_FLAGS]
147 // Sanitize the argument and update the guest flags
148 ldr x1, [sp, #8] // Guest's x1
149 clz w1, w1 // Murphy's device:
150 lsr w1, w1, #5 // w1 = !!w1 without using
151 eor w1, w1, #1 // the flags...
152 bfi x0, x1, #VCPU_WORKAROUND_2_FLAG_SHIFT, #1 // record WA2 state
153 str x0, [x2, #VCPU_WORKAROUND_FLAGS]
155 /* Check that we actually need to perform the call */
156 hyp_ldr_this_cpu x0, arm64_ssbd_callback_required, x2
159 mov w0, #ARM_SMCCC_ARCH_WORKAROUND_2
/* NOTE(review): the smc instruction itself is not visible here. */
162 /* Don't leak data from the SMC call */
162 /* Don't leak data from the SMC call */
178 lsr x0, x0, #ESR_ELx_EC_SHIFT
185 * We trap the first access to the FP/SIMD to save the host context
186 * and restore the guest context lazily.
187 * If FP/SIMD is not implemented, handle the trap and inject an
188 * undefined instruction exception to the guest.
190 alternative_if_not ARM64_HAS_NO_FPSIMD
191 cmp x0, #ESR_ELx_EC_FP_ASIMD
192 b.eq __fpsimd_guest_restore
193 alternative_else_nop_endif
195 mov x0, #ARM_EXCEPTION_TRAP
199 stp x0, x1, [sp, #-16]!
201 mov x0, #ARM_EXCEPTION_IRQ
205 stp x0, x1, [sp, #-16]!
207 mov x0, #ARM_EXCEPTION_EL1_SERROR
/*
 * Unexpected exception taken at EL2 itself: save the caller-saved
 * GPRs, call the C handler, restore. x29/x30 are pushed separately
 * because x30 (lr) is clobbered by the bl.
 * NOTE(review): the el2_sync/el2_error labels and the surrounding
 * ESR checks / exception returns are not visible in this chunk.
 */
211 save_caller_saved_regs_vect
212 stp x29, x30, [sp, #-16]!
213 bl kvm_unexpected_el2_exception
214 ldp x29, x30, [sp], #16
215 restore_caller_saved_regs_vect
/* Second handler body — same save/call/restore pattern. */
220 save_caller_saved_regs_vect
221 stp x29, x30, [sp, #-16]!
223 bl kvm_unexpected_el2_exception
225 ldp x29, x30, [sp], #16
226 restore_caller_saved_regs_vect
230 ENTRY(__hyp_do_panic)
/*
 * Build a target PSTATE with all of DAIF (Debug, SError, IRQ, FIQ)
 * masked for the exception return into the panic path.
 * NOTE(review): the continuation of this mov (the line after the
 * backslash) and the rest of the body up to ENDPROC are not visible
 * in this chunk.
 */
231 mov lr, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT |\
237 ENDPROC(__hyp_do_panic)
244 .macro invalid_vector label, target = __hyp_panic
/*
 * Stub generator for vectors that must never fire; each instantiation
 * defines \label and diverts to \target (default __hyp_panic).
 * NOTE(review): the macro body and .endm are not visible in this chunk.
 */
251 /* None of these should ever happen */
252 invalid_vector el2t_sync_invalid
253 invalid_vector el2t_irq_invalid
254 invalid_vector el2t_fiq_invalid
255 invalid_vector el2t_error_invalid
256 invalid_vector el2h_irq_invalid
257 invalid_vector el2h_fiq_invalid
258 invalid_vector el1_sync_invalid
259 invalid_vector el1_irq_invalid
260 invalid_vector el1_fiq_invalid
/*
 * EL2 exception vector table: four groups of four entries
 * (Synchronous / IRQ / FIQ / Error) for, in architectural order,
 * EL2 with SP_EL0 (EL2t), EL2 with SP_EL2 (EL2h), 64-bit lower EL,
 * and 32-bit lower EL. Only vectors that can legitimately fire have
 * real handlers; the rest funnel into invalid_vector stubs.
 */
266 ENTRY(__kvm_hyp_vector)
267 ventry el2t_sync_invalid // Synchronous EL2t
268 ventry el2t_irq_invalid // IRQ EL2t
269 ventry el2t_fiq_invalid // FIQ EL2t
270 ventry el2t_error_invalid // Error EL2t
272 ventry el2_sync // Synchronous EL2h
273 ventry el2h_irq_invalid // IRQ EL2h
274 ventry el2h_fiq_invalid // FIQ EL2h
275 ventry el2_error // Error EL2h
277 ventry el1_sync // Synchronous 64-bit EL1
278 ventry el1_irq // IRQ 64-bit EL1
279 ventry el1_fiq_invalid // FIQ 64-bit EL1
280 ventry el1_error // Error 64-bit EL1
282 ventry el1_sync // Synchronous 32-bit EL1
283 ventry el1_irq // IRQ 32-bit EL1
284 ventry el1_fiq_invalid // FIQ 32-bit EL1
285 ventry el1_error // Error 32-bit EL1
286 ENDPROC(__kvm_hyp_vector)