arch/powerpc/kvm/book3s_64_slb.S

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *
 * Copyright SUSE Linux Products GmbH 2009
 *
 * Authors: Alexander Graf <agraf@suse.de>
 */

#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#define SHADOW_SLB_ENTRY_LEN    0x10
#define OFFSET_ESID(x)          (SHADOW_SLB_ENTRY_LEN * x)
#define OFFSET_VSID(x)          ((SHADOW_SLB_ENTRY_LEN * x) + 8)

/******************************************************************************
 *                                                                            *
 *                               Entry code                                   *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_GUEST_SEGMENTS

        /* Required state:
         *
         * MSR = ~(IR|DR)
         * R13 = PACA
         * R1 = host R1
         * R2 = host R2
         * R3 = shadow vcpu
         * all other volatile GPRS = free except R4, R6
         * SVCPU[CR]  = guest CR
         * SVCPU[XER] = guest XER
         * SVCPU[CTR] = guest CTR
         * SVCPU[LR]  = guest LR
         */

BEGIN_FW_FTR_SECTION

        /* Declare SLB shadow as 0 entries big */
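        /*
         * The byte at offset 3 is the low byte of the big-endian 32-bit
         * 'persistent' count at the start of the SLB shadow buffer;
         * zeroing it tells the hypervisor there are no bolted entries to
         * maintain while the guest SLB is loaded.
         */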

        ld      r11, PACA_SLBSHADOWPTR(r13)
        li      r8, 0
        stb     r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

        /* Flush SLB */
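        /*
         * slbia does not invalidate SLB entry 0, so clear entry 0
         * explicitly by writing an all-zero entry with slbmte before
         * invalidating the remaining entries with slbia.
         */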

        li      r10, 0
        slbmte  r10, r10
        slbia

        /* Fill SLB with our shadow */

        lbz     r12, SVCPU_SLB_MAX(r3)
        mulli   r12, r12, 16
        addi    r12, r12, SVCPU_SLB
        add     r12, r12, r3
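        /* r12 = one past the last used guest SLB entry in the shadow
         * vcpu (each entry is 16 bytes: esid followed by vsid).
         */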

        /* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */
        li      r11, SVCPU_SLB
        add     r11, r11, r3

slb_loop_enter:
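
        /* Load the entry's esid; if its valid bit is clear the slot is
         * unused and gets skipped, otherwise load the vsid as well and
         * install the pair into the SLB with slbmte.
         */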
        ld      r10, 0(r11)

        andis.  r9, r10, SLB_ESID_V@h
        beq     slb_loop_enter_skip

        ld      r9, 8(r11)
        slbmte  r9, r10

slb_loop_enter_skip:
        addi    r11, r11, 16
        cmpd    cr0, r11, r12
        blt     slb_loop_enter

slb_do_enter:

.endm

/******************************************************************************
 *                                                                            *
 *                               Exit code                                    *
 *                                                                            *
 *****************************************************************************/

.macro LOAD_HOST_SEGMENTS

        /* Register usage at this point:
         *
         * R1         = host R1
         * R2         = host R2
         * R12        = exit handler id
         * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
         * SVCPU.*    = guest *
         * SVCPU[CR]  = guest CR
         * SVCPU[XER] = guest XER
         * SVCPU[CTR] = guest CTR
         * SVCPU[LR]  = guest LR
         *
         */

        /* Remove all SLB entries that are in use. */
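        /* As on entry: clear entry 0 by hand with an all-zero slbmte,
         * then invalidate the remaining entries with slbia.
         */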

        li      r0, 0
        slbmte  r0, r0
        slbia

        /* Restore bolted entries from the shadow */

        ld      r11, PACA_SLBSHADOWPTR(r13)
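        /* r11 = this CPU's SLB shadow buffer; the host's bolted entries
         * are reloaded from its save areas below.
         */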

BEGIN_FW_FTR_SECTION

        /* Declare SLB shadow as SLB_NUM_BOLTED entries big */

        li      r8, SLB_NUM_BOLTED
        stb     r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

        /* Manually load all entries from shadow SLB */

        li      r8, SLBSHADOW_SAVEAREA
        li      r7, SLBSHADOW_SAVEAREA + 8
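        /* r8/r7 are the offsets of the (big-endian) esid/vsid words of
         * the first save area entry; both advance by SHADOW_SLB_ENTRY_LEN
         * per iteration, and entries with a zero esid are skipped.
         */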

        .rept   SLB_NUM_BOLTED
        LDX_BE  r10, r11, r8
        cmpdi   r10, 0
        beq     1f
        LDX_BE  r9, r11, r7
        slbmte  r9, r10
1:      addi    r7, r7, SHADOW_SLB_ENTRY_LEN
        addi    r8, r8, SHADOW_SLB_ENTRY_LEN
        .endr
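
        /* Context-synchronize so that execution continues with the
         * just-restored host SLB entries in effect.
         */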

        isync
        sync

slb_do_exit:

.endm