arch/arm64/include/asm/kvm_ptrauth.h
/* SPDX-License-Identifier: GPL-2.0 */
/* arch/arm64/include/asm/kvm_ptrauth.h: Guest/host ptrauth save/restore
 * Copyright 2019 Arm Limited
 * Authors: Mark Rutland <mark.rutland@arm.com>
 *          Amit Daniel Kachhap <amit.kachhap@arm.com>
 */
#ifndef __ASM_KVM_PTRAUTH_H
#define __ASM_KVM_PTRAUTH_H

#ifdef __ASSEMBLY__

#include <asm/sysreg.h>

#ifdef CONFIG_ARM64_PTR_AUTH

#define PTRAUTH_REG_OFFSET(x)	(x - CPU_APIAKEYLO_EL1)
/*
 * The CPU_AP*_EL1 values exceed the immediate offset range (512) of the
 * stp instruction, so the macros below take CPU_APIAKEYLO_EL1 as a base
 * and calculate each key's offset from that base, avoiding an extra add
 * instruction. These macros assume the key offsets follow the order of
 * the sysreg enum in kvm_host.h.
 */
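/*
 * Worked example (editorial illustration, not part of the original file):
 * assuming the CPU_AP*KEY*_EL1 constants are offsets of consecutive 8-byte
 * sys_regs slots laid out in the kvm_host.h enum order mentioned above,
 * the base-relative offsets used by the save/restore macros below come out
 * as:
 *
 *	PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1) =  0	(APIA lo/hi pair)
 *	PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1) = 16	(APIB lo/hi pair)
 *	PTRAUTH_REG_OFFSET(CPU_APDAKEYLO_EL1) = 32	(APDA lo/hi pair)
 *	PTRAUTH_REG_OFFSET(CPU_APDBKEYLO_EL1) = 48	(APDB lo/hi pair)
 *	PTRAUTH_REG_OFFSET(CPU_APGAKEYLO_EL1) = 64	(APGA lo/hi pair)
 *
 * Each of these fits within the stp immediate range, whereas the raw
 * CPU_AP*_EL1 offsets into the full context structure may not.
 */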
.macro ptrauth_save_state base, reg1, reg2
	mrs_s	\reg1, SYS_APIAKEYLO_EL1
	mrs_s	\reg2, SYS_APIAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1)]
	mrs_s	\reg1, SYS_APIBKEYLO_EL1
	mrs_s	\reg2, SYS_APIBKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1)]
	mrs_s	\reg1, SYS_APDAKEYLO_EL1
	mrs_s	\reg2, SYS_APDAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDAKEYLO_EL1)]
	mrs_s	\reg1, SYS_APDBKEYLO_EL1
	mrs_s	\reg2, SYS_APDBKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDBKEYLO_EL1)]
	mrs_s	\reg1, SYS_APGAKEYLO_EL1
	mrs_s	\reg2, SYS_APGAKEYHI_EL1
	stp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APGAKEYLO_EL1)]
.endm
.macro ptrauth_restore_state base, reg1, reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIAKEYLO_EL1)]
	msr_s	SYS_APIAKEYLO_EL1, \reg1
	msr_s	SYS_APIAKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APIBKEYLO_EL1)]
	msr_s	SYS_APIBKEYLO_EL1, \reg1
	msr_s	SYS_APIBKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDAKEYLO_EL1)]
	msr_s	SYS_APDAKEYLO_EL1, \reg1
	msr_s	SYS_APDAKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APDBKEYLO_EL1)]
	msr_s	SYS_APDBKEYLO_EL1, \reg1
	msr_s	SYS_APDBKEYHI_EL1, \reg2
	ldp	\reg1, \reg2, [\base, #PTRAUTH_REG_OFFSET(CPU_APGAKEYLO_EL1)]
	msr_s	SYS_APGAKEYLO_EL1, \reg1
	msr_s	SYS_APGAKEYHI_EL1, \reg2
.endm
/*
 * Both the ptrauth_switch_to_guest and ptrauth_switch_to_hyp macros check
 * for the presence of ARM64_HAS_ADDRESS_AUTH, which is defined as
 * (ARM64_HAS_ADDRESS_AUTH_ARCH || ARM64_HAS_ADDRESS_AUTH_IMP_DEF), and
 * only then proceed with the save/restore of the pointer authentication
 * key registers if they are enabled for the guest. An illustrative usage
 * sketch follows the two macro definitions below.
 */
.macro ptrauth_switch_to_guest g_ctxt, reg1, reg2, reg3
alternative_if_not ARM64_HAS_ADDRESS_AUTH
	b	.L__skip_switch\@
alternative_else_nop_endif
	mrs	\reg1, hcr_el2
	and	\reg1, \reg1, #(HCR_API | HCR_APK)
	cbz	\reg1, .L__skip_switch\@
	add	\reg1, \g_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_restore_state	\reg1, \reg2, \reg3
.L__skip_switch\@:
.endm
.macro ptrauth_switch_to_hyp g_ctxt, h_ctxt, reg1, reg2, reg3
alternative_if_not ARM64_HAS_ADDRESS_AUTH
	b	.L__skip_switch\@
alternative_else_nop_endif
	mrs	\reg1, hcr_el2
	and	\reg1, \reg1, #(HCR_API | HCR_APK)
	cbz	\reg1, .L__skip_switch\@
	add	\reg1, \g_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_save_state	\reg1, \reg2, \reg3
	add	\reg1, \h_ctxt, #CPU_APIAKEYLO_EL1
	ptrauth_restore_state	\reg1, \reg2, \reg3
	isb
.L__skip_switch\@:
.endm
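/*
 * Illustrative usage sketch (editorial addition, not part of the original
 * header): the KVM world-switch assembly is expected to invoke these
 * macros around guest entry and exit, passing the relevant kvm_cpu_context
 * base plus scratch general-purpose registers that the macros may clobber.
 * The register choices here are hypothetical, for illustration only:
 *
 *	// entering the guest: restore the guest's keys
 *	ptrauth_switch_to_guest	x1, x2, x3, x4
 *
 *	// leaving the guest: save the guest keys, restore the host keys
 *	ptrauth_switch_to_hyp	x1, x2, x3, x4, x5
 *
 * where x1 holds the guest context pointer and, for the second macro, x2
 * holds the host context pointer. When CONFIG_ARM64_PTR_AUTH is not set,
 * the stub definitions below expand to nothing, so callers need no #ifdefs
 * of their own.
 */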
#else /* !CONFIG_ARM64_PTR_AUTH */

.macro ptrauth_switch_to_guest g_ctxt, reg1, reg2, reg3
.endm

.macro ptrauth_switch_to_hyp g_ctxt, h_ctxt, reg1, reg2, reg3
.endm

#endif /* CONFIG_ARM64_PTR_AUTH */
#endif /* __ASSEMBLY__ */
#endif /* __ASM_KVM_PTRAUTH_H */