/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include <linux/linkage.h>
#include <linux/irqchip/arm-gic-v3.h>

#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/asm-offsets.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_arm.h>

	// All of this code must live in the EL2-mapped hyp text section.
	.pushsection	.hyp.text, "ax"

/*
 * We store LRs in reverse order to let the CPU deal with streaming
 * access. Use this macro to make it look saner...
 *
 * LR n lives at byte offset (15 - n) * 8 from VGIC_V3_CPU_LR, so the
 * descending ICH_LR15..ICH_LR0 access sequences below store/load at
 * ascending addresses.
 */
#define LR_OFFSET(n)	(VGIC_V3_CPU_LR + (15 - n) * 8)
/*
 * Save the VGIC CPU state into memory
 * x0: Register pointing to VCPU struct
 * Do not corrupt x1!!!
 *
 * Clobbers x3 and x5-x24. On exit ICH_HCR_EL2 is cleared (hyp interface
 * off) and system-register access (SRE) is re-enabled at EL1/EL2 for the
 * host.
 *
 * NOTE(review): this listing was garbled (stray line numbers, dropped
 * lines); the computed-branch dispatch, LR4-LR0 accesses, barriers and
 * .endm below are reconstructed — verify against the original.
 */
.macro	save_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Make sure stores to the GIC via the memory mapped interface
	// are now visible to the system register interface
	dsb	st

	// Save all interesting registers
	mrs_s	x5, ICH_VMCR_EL2
	mrs_s	x6, ICH_MISR_EL2
	mrs_s	x7, ICH_EISR_EL2
	mrs_s	x8, ICH_ELSR_EL2

	str	w5, [x3, #VGIC_V3_CPU_VMCR]
	str	w6, [x3, #VGIC_V3_CPU_MISR]
	str	w7, [x3, #VGIC_V3_CPU_EISR]
	str	w8, [x3, #VGIC_V3_CPU_ELRSR]

	// Turn the hypervisor interface off while the state is saved
	msr_s	ICH_HCR_EL2, xzr

	// ICH_VTR_EL2[3:0] = ListRegs = (number of implemented LRs - 1).
	// Build w23 = (15 - ListRegs) * 4, i.e. the byte distance to skip
	// over the accessors for the unimplemented List Registers (each
	// mrs_s/str below is exactly one 4-byte instruction).
	mrs_s	x21, ICH_VTR_EL2
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	mrs_s	x20, ICH_LR15_EL2
	mrs_s	x19, ICH_LR14_EL2
	mrs_s	x18, ICH_LR13_EL2
	mrs_s	x17, ICH_LR12_EL2
	mrs_s	x16, ICH_LR11_EL2
	mrs_s	x15, ICH_LR10_EL2
	mrs_s	x14, ICH_LR9_EL2
	mrs_s	x13, ICH_LR8_EL2
	mrs_s	x12, ICH_LR7_EL2
	mrs_s	x11, ICH_LR6_EL2
	mrs_s	x10, ICH_LR5_EL2
	mrs_s	x9, ICH_LR4_EL2
	mrs_s	x8, ICH_LR3_EL2
	mrs_s	x7, ICH_LR2_EL2
	mrs_s	x6, ICH_LR1_EL2
	mrs_s	x5, ICH_LR0_EL2

	// Same skip for the store half, so only the LRs actually read
	// above are written out.
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	str	x20, [x3, #LR_OFFSET(15)]
	str	x19, [x3, #LR_OFFSET(14)]
	str	x18, [x3, #LR_OFFSET(13)]
	str	x17, [x3, #LR_OFFSET(12)]
	str	x16, [x3, #LR_OFFSET(11)]
	str	x15, [x3, #LR_OFFSET(10)]
	str	x14, [x3, #LR_OFFSET(9)]
	str	x13, [x3, #LR_OFFSET(8)]
	str	x12, [x3, #LR_OFFSET(7)]
	str	x11, [x3, #LR_OFFSET(6)]
	str	x10, [x3, #LR_OFFSET(5)]
	str	x9, [x3, #LR_OFFSET(4)]
	str	x8, [x3, #LR_OFFSET(3)]
	str	x7, [x3, #LR_OFFSET(2)]
	str	x6, [x3, #LR_OFFSET(1)]
	str	x5, [x3, #LR_OFFSET(0)]

	// Save the active priority registers; how many AP0Rn exist
	// depends on the priority bits advertised in ICH_VTR_EL2.
	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	mrs_s	x20, ICH_AP0R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	mrs_s	x19, ICH_AP0R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
6:	mrs_s	x18, ICH_AP0R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
5:	mrs_s	x17, ICH_AP0R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP0R]

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	mrs_s	x20, ICH_AP1R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	mrs_s	x19, ICH_AP1R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
6:	mrs_s	x18, ICH_AP1R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
5:	mrs_s	x17, ICH_AP1R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP1R]

	// Restore SRE_EL1 access and re-enable SRE at EL1.
	mrs_s	x5, ICC_SRE_EL2
	orr	x5, x5, #ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
	isb
	mov	x5, #1
	msr_s	ICC_SRE_EL1, x5
.endm
/*
 * Restore the VGIC CPU state from memory
 * x0: Register pointing to VCPU struct
 *
 * Clobbers x3, x4, x5-x25. x25 holds the guest's saved ICC_SRE_EL1
 * value for the whole macro.
 *
 * NOTE(review): this listing was garbled (stray line numbers, dropped
 * lines); the isb, computed-branch dispatch, LR4-LR0 accesses, barrier
 * tail, SRE guard and .endm below are reconstructed — verify against
 * the original.
 */
.macro	restore_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Restore all interesting registers
	ldr	w4, [x3, #VGIC_V3_CPU_HCR]
	ldr	w5, [x3, #VGIC_V3_CPU_VMCR]
	ldr	w25, [x3, #VGIC_V3_CPU_SRE]

	msr_s	ICC_SRE_EL1, x25

	// make sure SRE is valid before writing the other registers
	isb

	msr_s	ICH_HCR_EL2, x4
	msr_s	ICH_VMCR_EL2, x5

	mrs_s	x21, ICH_VTR_EL2

	// Restore the active priority registers first; how many exist
	// depends on the priority bits advertised in ICH_VTR_EL2.
	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	ldr	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	msr_s	ICH_AP1R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
	msr_s	ICH_AP1R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
	msr_s	ICH_AP1R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP1R]
	msr_s	ICH_AP1R0_EL2, x17

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits

	ldr	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	msr_s	ICH_AP0R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
	msr_s	ICH_AP0R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
	msr_s	ICH_AP0R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP0R]
	msr_s	ICH_AP0R0_EL2, x17

	// ICH_VTR_EL2[3:0] = ListRegs = (number of implemented LRs - 1).
	// w23 = (15 - ListRegs) * 4 = bytes to skip over the accessors
	// for the unimplemented List Registers (one insn each).
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	ldr	x20, [x3, #LR_OFFSET(15)]
	ldr	x19, [x3, #LR_OFFSET(14)]
	ldr	x18, [x3, #LR_OFFSET(13)]
	ldr	x17, [x3, #LR_OFFSET(12)]
	ldr	x16, [x3, #LR_OFFSET(11)]
	ldr	x15, [x3, #LR_OFFSET(10)]
	ldr	x14, [x3, #LR_OFFSET(9)]
	ldr	x13, [x3, #LR_OFFSET(8)]
	ldr	x12, [x3, #LR_OFFSET(7)]
	ldr	x11, [x3, #LR_OFFSET(6)]
	ldr	x10, [x3, #LR_OFFSET(5)]
	ldr	x9, [x3, #LR_OFFSET(4)]
	ldr	x8, [x3, #LR_OFFSET(3)]
	ldr	x7, [x3, #LR_OFFSET(2)]
	ldr	x6, [x3, #LR_OFFSET(1)]
	ldr	x5, [x3, #LR_OFFSET(0)]

	// Same skip for the write half, so only the LRs actually loaded
	// above are written to the hardware.
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	msr_s	ICH_LR15_EL2, x20
	msr_s	ICH_LR14_EL2, x19
	msr_s	ICH_LR13_EL2, x18
	msr_s	ICH_LR12_EL2, x17
	msr_s	ICH_LR11_EL2, x16
	msr_s	ICH_LR10_EL2, x15
	msr_s	ICH_LR9_EL2, x14
	msr_s	ICH_LR8_EL2, x13
	msr_s	ICH_LR7_EL2, x12
	msr_s	ICH_LR6_EL2, x11
	msr_s	ICH_LR5_EL2, x10
	msr_s	ICH_LR4_EL2, x9
	msr_s	ICH_LR3_EL2, x8
	msr_s	ICH_LR2_EL2, x7
	msr_s	ICH_LR1_EL2, x6
	msr_s	ICH_LR0_EL2, x5

	// Ensure that the above will have reached the
	// (re)distributors. This ensure the guest will read
	// the correct values from the memory-mapped interface.
	dsb	sy
	isb

	// Prevent the guest from touching the GIC system registers
	// if SRE isn't enabled for GICv3 emulation
	// (x25 = guest ICC_SRE_EL1; bit 0 is SRE — skip when set)
	tbnz	x25, #0, 1f
	mrs_s	x5, ICC_SRE_EL2
	and	x5, x5, #~ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
1:
.endm
// void __save_vgic_v3_state(struct kvm_vcpu *vcpu)  [x0 = vcpu]
// The body (macro expansion + ret) was missing from the garbled
// listing; without the ret, execution falls through into the next
// function.
ENTRY(__save_vgic_v3_state)
	save_vgic_v3_state
	ret
ENDPROC(__save_vgic_v3_state)
// void __restore_vgic_v3_state(struct kvm_vcpu *vcpu)  [x0 = vcpu]
// Restore the missing ret so the function returns instead of falling
// through.
ENTRY(__restore_vgic_v3_state)
	restore_vgic_v3_state
	ret
ENDPROC(__restore_vgic_v3_state)
// u64 __vgic_v3_get_ich_vtr_el2(void) — return ICH_VTR_EL2 in x0 so
// the host can discover the number of implemented LRs/priority bits.
// Restore the missing ret.
ENTRY(__vgic_v3_get_ich_vtr_el2)
	mrs_s	x0, ICH_VTR_EL2
	ret
ENDPROC(__vgic_v3_get_ich_vtr_el2)