/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>
#include <linux/irqchip/arm-gic-v3.h>

#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/asm-offsets.h>
#include <asm/kvm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_arm.h>

	.text
	.pushsection	.hyp.text, "ax"

/*
 * We store LRs in reverse order to let the CPU deal with streaming
 * access. Use this macro to make it look saner...
 */
#define LR_OFFSET(n)	(VGIC_V3_CPU_LR + (15 - n) * 8)
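
/*
 * For instance, LR_OFFSET(15) is VGIC_V3_CPU_LR + 0 and LR_OFFSET(0) is
 * VGIC_V3_CPU_LR + 120: ICH_LR15_EL2 lives at the lowest address, so the
 * descending register accesses below turn into ascending, streamable
 * memory accesses.
 */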

/*
 * Save the VGIC CPU state into memory
 * x0: Register pointing to VCPU struct
 * Do not corrupt x1!!!
 */
.macro	save_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Make sure stores to the GIC via the memory mapped interface
	// are now visible to the system register interface
	dsb	st
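	// Note: the store-only barrier variant is enough here, as only the
	// completion of the earlier MMIO writes matters before the state
	// is read back through the system registers.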

	// Save all interesting registers
	mrs_s	x4, ICH_HCR_EL2
	mrs_s	x5, ICH_VMCR_EL2
	mrs_s	x6, ICH_MISR_EL2
	mrs_s	x7, ICH_EISR_EL2
	mrs_s	x8, ICH_ELSR_EL2

	str	w4, [x3, #VGIC_V3_CPU_HCR]
	str	w5, [x3, #VGIC_V3_CPU_VMCR]
	str	w6, [x3, #VGIC_V3_CPU_MISR]
	str	w7, [x3, #VGIC_V3_CPU_EISR]
	str	w8, [x3, #VGIC_V3_CPU_ELRSR]

	msr_s	ICH_HCR_EL2, xzr
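	// The write above clears ICH_HCR_EL2.En, disabling the virtual CPU
	// interface now that its state has been captured.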

	mrs_s	x21, ICH_VTR_EL2
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4
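
	// ICH_VTR_EL2.ListRegs (bits [4:0]) holds the number of implemented
	// LRs minus one, so for a 4-bit value n, ~n & 0xf == 15 - n. With
	// four LRs (ListRegs == 3), w23 = 12 * 4 = 48: the byte offset of
	// twelve 4-byte instructions, used to branch past the accesses to
	// the unimplemented LR15..LR4 below.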
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	mrs_s	x20, ICH_LR15_EL2
	mrs_s	x19, ICH_LR14_EL2
	mrs_s	x18, ICH_LR13_EL2
	mrs_s	x17, ICH_LR12_EL2
	mrs_s	x16, ICH_LR11_EL2
	mrs_s	x15, ICH_LR10_EL2
	mrs_s	x14, ICH_LR9_EL2
	mrs_s	x13, ICH_LR8_EL2
	mrs_s	x12, ICH_LR7_EL2
	mrs_s	x11, ICH_LR6_EL2
	mrs_s	x10, ICH_LR5_EL2
	mrs_s	x9, ICH_LR4_EL2
	mrs_s	x8, ICH_LR3_EL2
	mrs_s	x7, ICH_LR2_EL2
	mrs_s	x6, ICH_LR1_EL2
	mrs_s	x5, ICH_LR0_EL2

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	str	x20, [x3, #LR_OFFSET(15)]
	str	x19, [x3, #LR_OFFSET(14)]
	str	x18, [x3, #LR_OFFSET(13)]
	str	x17, [x3, #LR_OFFSET(12)]
	str	x16, [x3, #LR_OFFSET(11)]
	str	x15, [x3, #LR_OFFSET(10)]
	str	x14, [x3, #LR_OFFSET(9)]
	str	x13, [x3, #LR_OFFSET(8)]
	str	x12, [x3, #LR_OFFSET(7)]
	str	x11, [x3, #LR_OFFSET(6)]
	str	x10, [x3, #LR_OFFSET(5)]
	str	x9, [x3, #LR_OFFSET(4)]
	str	x8, [x3, #LR_OFFSET(3)]
	str	x7, [x3, #LR_OFFSET(2)]
	str	x6, [x3, #LR_OFFSET(1)]
	str	x5, [x3, #LR_OFFSET(0)]

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
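	// ICH_VTR_EL2.PRIbits (bits [31:29]) holds the number of virtual
	// priority bits minus one, i.e. 0b100, 0b101 or 0b110 for 5, 6 or
	// 7 bits. One 32-bit active-priority register covers 5 bits, two
	// cover 6 bits and four cover 7 bits, hence the two branches above.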

	mrs_s	x20, ICH_AP0R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	mrs_s	x19, ICH_AP0R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
6:	mrs_s	x18, ICH_AP0R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
5:	mrs_s	x17, ICH_AP0R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP0R]

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
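	// Same PRIbits test as above, this time for the group-1
	// active-priority registers.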

	mrs_s	x20, ICH_AP1R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	mrs_s	x19, ICH_AP1R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
6:	mrs_s	x18, ICH_AP1R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
5:	mrs_s	x17, ICH_AP1R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP1R]

	// Restore SRE_EL1 access and re-enable SRE at EL1.
	mrs_s	x5, ICC_SRE_EL2
	orr	x5, x5, #ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
	isb
	mov	x5, #1
	msr_s	ICC_SRE_EL1, x5
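	// The host expects ICC_SRE_EL1.SRE to be set so that it can keep
	// using the system register interface to the GIC.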
.endm

/*
 * Restore the VGIC CPU state from memory
 * x0: Register pointing to VCPU struct
 */
.macro	restore_vgic_v3_state
	// Disable SRE_EL1 access. Necessary, otherwise
	// ICH_VMCR_EL2.VFIQEn becomes one, and FIQ happens...
	msr_s	ICC_SRE_EL1, xzr
	isb
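	// (Architecturally, ICH_VMCR_EL2.VFIQEn behaves as RES1 while
	// ICC_SRE_EL1.SRE is set, so the VMCR write below could otherwise
	// not restore a zero VFIQEn for a GICv2-style guest.)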

	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Restore all interesting registers
	ldr	w4, [x3, #VGIC_V3_CPU_HCR]
	ldr	w5, [x3, #VGIC_V3_CPU_VMCR]

	msr_s	ICH_HCR_EL2, x4
	msr_s	ICH_VMCR_EL2, x5

	mrs_s	x21, ICH_VTR_EL2

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	ldr	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	msr_s	ICH_AP1R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
	msr_s	ICH_AP1R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
	msr_s	ICH_AP1R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP1R]
	msr_s	ICH_AP1R0_EL2, x17

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	ldr	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	msr_s	ICH_AP0R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
	msr_s	ICH_AP0R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
	msr_s	ICH_AP0R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP0R]
	msr_s	ICH_AP0R0_EL2, x17

	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4
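	// Same computed skip as in the save path: w23 is the byte offset
	// of the instructions covering the unimplemented LRs.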

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	ldr	x20, [x3, #LR_OFFSET(15)]
	ldr	x19, [x3, #LR_OFFSET(14)]
	ldr	x18, [x3, #LR_OFFSET(13)]
	ldr	x17, [x3, #LR_OFFSET(12)]
	ldr	x16, [x3, #LR_OFFSET(11)]
	ldr	x15, [x3, #LR_OFFSET(10)]
	ldr	x14, [x3, #LR_OFFSET(9)]
	ldr	x13, [x3, #LR_OFFSET(8)]
	ldr	x12, [x3, #LR_OFFSET(7)]
	ldr	x11, [x3, #LR_OFFSET(6)]
	ldr	x10, [x3, #LR_OFFSET(5)]
	ldr	x9, [x3, #LR_OFFSET(4)]
	ldr	x8, [x3, #LR_OFFSET(3)]
	ldr	x7, [x3, #LR_OFFSET(2)]
	ldr	x6, [x3, #LR_OFFSET(1)]
	ldr	x5, [x3, #LR_OFFSET(0)]

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	msr_s	ICH_LR15_EL2, x20
	msr_s	ICH_LR14_EL2, x19
	msr_s	ICH_LR13_EL2, x18
	msr_s	ICH_LR12_EL2, x17
	msr_s	ICH_LR11_EL2, x16
	msr_s	ICH_LR10_EL2, x15
	msr_s	ICH_LR9_EL2, x14
	msr_s	ICH_LR8_EL2, x13
	msr_s	ICH_LR7_EL2, x12
	msr_s	ICH_LR6_EL2, x11
	msr_s	ICH_LR5_EL2, x10
	msr_s	ICH_LR4_EL2, x9
	msr_s	ICH_LR3_EL2, x8
	msr_s	ICH_LR2_EL2, x7
	msr_s	ICH_LR1_EL2, x6
	msr_s	ICH_LR0_EL2, x5

	// Ensure that the above will have reached the
	// (re)distributors. This ensures the guest will read
	// the correct values from the memory-mapped interface.
	isb
	dsb	sy
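	// The isb makes the system register writes above take effect
	// before the dsb; the dsb then waits for them to propagate
	// system-wide, so a later guest access through the memory-mapped
	// interface observes the restored state.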

	// Prevent the guest from touching the GIC system registers
	mrs_s	x5, ICC_SRE_EL2
	and	x5, x5, #~ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
.endm

ENTRY(__save_vgic_v3_state)
	save_vgic_v3_state
	ret
ENDPROC(__save_vgic_v3_state)

ENTRY(__restore_vgic_v3_state)
	restore_vgic_v3_state
	ret
ENDPROC(__restore_vgic_v3_state)
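
/*
 * Read back ICH_VTR_EL2 for the host: the vgic probe code calls this
 * helper through kvm_call_hyp() to discover how many list registers
 * and priority bits the GIC implements.
 */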
ENTRY(__vgic_v3_get_ich_vtr_el2)
	mrs_s	x0, ICH_VTR_EL2
	ret
ENDPROC(__vgic_v3_get_ich_vtr_el2)

	.popsection