1 #include <linux/irqchip/arm-gic.h>
/* Byte offset of 32-bit usr-mode GPR number _reg_nr inside the vcpu struct */
#define VCPU_USR_REG(_reg_nr) (VCPU_USR_REGS + (_reg_nr * 4))
/* usr-mode SP (r13) and LR (r14) slots */
#define VCPU_USR_SP (VCPU_USR_REG(13))
#define VCPU_USR_LR (VCPU_USR_REG(14))
/* Byte offset of 32-bit cp15 register _cp15_reg_idx inside the vcpu struct */
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0
/* Save the current VFP state to the save area at \vfp_base.
 * Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions

	@ Make sure VFP is enabled so we can touch the registers.

	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture

	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX	r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable

	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
/* Load the VFP state from the save area at \vfp_base.
 * Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6	@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	VFPFMXR	FPINST2, r5, ne		@ only restore FPINST2 if FPEXC_EX set
	VFPFMXR	FPEXC, r2		@ FPEXC (last, in case !EN)
/* These are simply for the macros to work - the values don't have meaning */
/* Push the host's banked registers for \mode onto the stack
 * (restored later by pop_host_regs_mode). */
.macro push_host_regs_mode mode
/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */

	push	{r4-r12}		@ r0-r3 are always clobbered

	@ Save the banked registers of each host mode
	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq
/* Pop the host's banked registers for \mode off the stack
 * (counterpart of push_host_regs_mode). */
.macro pop_host_regs_mode mode
/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
	@ Pop per-mode banked registers in the reverse order they were pushed
	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc
/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset	@ r1 = base of this mode's saved regs
/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
	@ Banked SP/LR/SPSR of each guest mode
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	add	r1, vcpu, #VCPU_FIQ_REGS	@ r1 = base of FIQ register bank

	@ Guest return state
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]

	@ vcpu pointer (r0) is no longer preserved past this point
	add	vcpu, vcpu, #(VCPU_USR_REGS)
/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset	@ r2 = base of this mode's save area
/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers
	add	r2, vcpu, #VCPU_USR_REG(3)	@ save slot for r3 onwards
	add	r2, vcpu, #VCPU_USR_REG(0)	@ save slot for r0-r2
	pop	{r3, r4, r5}		@ r0, r1, r2

	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Guest return state (PC and CPSR)
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
/* Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 * otherwise to the VCPU struct pointed to by vcpup
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro read_cp15_state store_to_vcpu
	@ Batch 1: control and memory-translation registers
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)	@ 64-bit TTBR0 slot
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)	@ 64-bit TTBR1 slot
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]

	@ Batch 2: context-ID, thread-ID and fault status/address registers
	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]

	@ Batch 3: CNTKCTL and the 64-bit PAR
	mrc	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mrrc	p15, 0, r4, r5, c7	@ PAR

	.if \store_to_vcpu == 0
	str	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)	@ 64-bit PAR slot
/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 * otherwise from the VCPU struct pointed to by vcpup
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
	@ Batch 1: CNTKCTL and the 64-bit PAR
	.if \read_from_vcpu == 0
	ldr	r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
	add	r12, vcpu, #CP15_OFFSET(c7_PAR)	@ 64-bit PAR slot

	mcr	p15, 0, r2, c14, c1, 0	@ CNTKCTL
	mcrr	p15, 0, r4, r5, c7	@ PAR

	@ Batch 2: context-ID, thread-ID and fault status/address registers
	.if \read_from_vcpu == 0
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	@ Batch 3: control and memory-translation registers
	.if \read_from_vcpu == 0
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)	@ 64-bit TTBR0 slot
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)	@ 64-bit TTBR1 slot
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* Save all interesting registers */
	ldr	r3, [r2, #GICH_HCR]
	ldr	r4, [r2, #GICH_VMCR]
	ldr	r5, [r2, #GICH_MISR]
	ldr	r6, [r2, #GICH_EISR0]
	ldr	r7, [r2, #GICH_EISR1]
	ldr	r8, [r2, #GICH_ELRSR0]
	ldr	r9, [r2, #GICH_ELRSR1]
	ldr	r10, [r2, #GICH_APR]

	@ Store into struct vgic_cpu (EISR and ELRSR are two words each)
	str	r3, [r11, #VGIC_CPU_HCR]
	str	r4, [r11, #VGIC_CPU_VMCR]
	str	r5, [r11, #VGIC_CPU_MISR]
	str	r6, [r11, #VGIC_CPU_EISR]
	str	r7, [r11, #(VGIC_CPU_EISR + 4)]
	str	r8, [r11, #VGIC_CPU_ELRSR]
	str	r9, [r11, #(VGIC_CPU_ELRSR + 4)]
	str	r10, [r11, #VGIC_CPU_APR]

	str	r5, [r2, #GICH_HCR]

	/* Save list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]	@ number of LRs to copy
/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
	/* Get VGIC VCTRL base into r2 */
	ldr	r2, [vcpu, #VCPU_KVM]
	ldr	r2, [r2, #KVM_VGIC_VCTRL]

	/* Compute the address of struct vgic_cpu */
	add	r11, vcpu, #VCPU_VGIC_CPU

	/* We only restore a minimal set of registers */
	ldr	r3, [r11, #VGIC_CPU_HCR]
	ldr	r4, [r11, #VGIC_CPU_VMCR]
	ldr	r8, [r11, #VGIC_CPU_APR]

	str	r3, [r2, #GICH_HCR]
	str	r4, [r2, #GICH_VMCR]
	str	r8, [r2, #GICH_APR]

	/* Restore list registers */
	add	r2, r2, #GICH_LR0
	add	r3, r11, #VGIC_CPU_LR
	ldr	r4, [r11, #VGIC_CPU_NR_LR]	@ number of LRs to copy
/* CNTHCTL bits controlling PL1 access to the physical counter/timer */
#define CNTHCTL_PL1PCTEN (1 << 0)
#define CNTHCTL_PL1PCEN (1 << 1)
/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_timer_state
#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]	@ per-VM timer-enabled flag

	@ Save the virtual timer control register, then disable the timer
	mrc	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
	str	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	bic	r2, #1			@ Clear ENABLE
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL

	@ Save the 64-bit compare value
	mrrc	p15, 3, r2, r3, c14	@ CNTV_CVAL
	ldr	r4, =VCPU_TIMER_CNTV_CVAL

	@ Ensure host CNTVCT == CNTPCT
	mcrr	p15, 4, r2, r2, c14	@ CNTVOFF

	@ Allow physical timer/counter access for the host
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL
/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * to the guest.
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_timer_state
	@ Disallow physical timer access for the guest
	@ Physical counter access is allowed
	mrc	p15, 4, r2, c14, c1, 0	@ CNTHCTL
	orr	r2, r2, #CNTHCTL_PL1PCTEN	@ counter access on
	bic	r2, r2, #CNTHCTL_PL1PCEN	@ timer access off
	mcr	p15, 4, r2, c14, c1, 0	@ CNTHCTL

#ifdef CONFIG_KVM_ARM_TIMER
	ldr	r4, [vcpu, #VCPU_KVM]
	ldr	r2, [r4, #KVM_TIMER_ENABLED]	@ per-VM timer-enabled flag

	@ Restore the 64-bit virtual offset
	ldr	r2, [r4, #KVM_TIMER_CNTVOFF]
	ldr	r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
	mcrr	p15, 4, r2, r3, c14	@ CNTVOFF

	@ Restore the 64-bit compare value
	ldr	r4, =VCPU_TIMER_CNTV_CVAL
	mcrr	p15, 3, r2, r3, c14	@ CNTV_CVAL

	@ Restore the virtual timer control register
	ldr	r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
	mcr	p15, 0, r2, c14, c3, 1	@ CNTV_CTL
/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3	@ read current HSTR
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	mcr	p15, 4, r2, c1, c1, 3	@ write back HSTR
/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2. */
.macro set_hcptr operation, mask
	mrc	p15, 4, r2, c1, c1, 2	@ r2 = current HCPTR (preserved)
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	mcr	p15, 4, r3, c1, c1, 2	@ write new HCPTR (old value stays in r2)
/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1	@ read current HDCR
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)	@ perfmon trap bits
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	mcr	p15, 4, r2, c1, c1, 1	@ write back HDCR
/* Enable/Disable: stage-2 trans., trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
	mrc	p15, 4, r2, c1, c1, 0	@ HCR
	bic	r2, r2, #HCR_VIRT_EXCP_MASK	@ clear virtual exception bits
	ldr	r3, =HCR_GUEST_MASK
	.if \operation == vmentry
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]	@ pending virtual interrupt lines
	mcr	p15, 4, r2, c1, c1, 0	@ write back HCR
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR: load vcpu pointer into r0