/*
 * Provenance (from git web viewer, not original source text):
 *   Merge tag 'for_linus' of git://git.kernel.org/pub/scm/linux/kernel/git/mst/vhost
 *   [cris-mirror.git] arch/powerpc/kvm/book3s_64_slb.S
 *   blob 688722acd692a63f61a0575cb8b7ce9bca591a66
 */
1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License, version 2, as
4  * published by the Free Software Foundation.
5  *
6  * This program is distributed in the hope that it will be useful,
7  * but WITHOUT ANY WARRANTY; without even the implied warranty of
8  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
9  * GNU General Public License for more details.
10  *
11  * You should have received a copy of the GNU General Public License
12  * along with this program; if not, write to the Free Software
13  * Foundation, 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
14  *
15  * Copyright SUSE Linux Products GmbH 2009
16  *
17  * Authors: Alexander Graf <agraf@suse.de>
18  */
/*
 * Each shadow SLB entry is 16 bytes: an ESID doubleword at offset 0
 * followed by a VSID doubleword at offset 8.  These macros give the
 * byte offset of entry x's fields within the shadow SLB array.
 */
#define SHADOW_SLB_ENTRY_LEN    0x10
#define OFFSET_ESID(x)          (SHADOW_SLB_ENTRY_LEN * x)
#define OFFSET_VSID(x)          ((SHADOW_SLB_ENTRY_LEN * x) + 8)
24 /******************************************************************************
25  *                                                                            *
26  *                               Entry code                                   *
27  *                                                                            *
28  *****************************************************************************/
.macro LOAD_GUEST_SEGMENTS

        /*
         * Replace the host's SLB contents with the guest's shadow SLB
         * entries (svcpu->slb[0 .. slb_max-1]) on guest entry.
         *
         * Required state:
         *
         * MSR = ~IR|DR            (translation off; we are rewriting the SLB)
         * R13 = PACA
         * R1 = host R1
         * R2 = host R2
         * R3 = shadow vcpu
         * all other volatile GPRS = free except R4, R6
         * SVCPU[CR]  = guest CR
         * SVCPU[XER] = guest XER
         * SVCPU[CTR] = guest CTR
         * SVCPU[LR]  = guest LR
         */

BEGIN_FW_FTR_SECTION

        /* Declare SLB shadow as 0 entries big */

        /*
         * On an LPAR the hypervisor maintains the SLB shadow buffer for us;
         * zeroing its entry count stops it from re-bolting host entries on
         * top of the guest SLB we are about to install.  stb to byte 3
         * presumably writes the low-order byte of a big-endian 32-bit count
         * field at the start of the shadow buffer — TODO confirm against
         * struct slb_shadow / PAPR.
         */
        ld      r11, PACA_SLBSHADOWPTR(r13)
        li      r8, 0
        stb     r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

        /* Flush SLB */

        /*
         * NOTE(review): the explicit slbmte with an all-zero (invalid)
         * entry before slbia looks intended to clear SLB slot 0, which
         * slbia may leave untouched — confirm against the ISA.
         */
        li      r10, 0
        slbmte  r10, r10
        slbia

        /* Fill SLB with our shadow */

        /* r12 = end pointer: &svcpu->slb[0] + slb_max * 16 */
        lbz     r12, SVCPU_SLB_MAX(r3)
        mulli   r12, r12, 16
        addi    r12, r12, SVCPU_SLB
        add     r12, r12, r3

        /* for (r11 = kvm_slb; r11 < kvm_slb + kvm_slb_size; r11+=slb_entry) */

        li      r11, SVCPU_SLB
        add     r11, r11, r3

slb_loop_enter:

        /* r10 = entry's ESID doubleword (offset 0) */
        ld      r10, 0(r11)

        /* Skip entries without the valid bit (SLB_ESID_V sits in the
         * upper halfword, hence andis.) */
        andis.  r9, r10, SLB_ESID_V@h
        beq     slb_loop_enter_skip

        /* r9 = entry's VSID doubleword (offset 8); install the pair */
        ld      r9, 8(r11)
        slbmte  r9, r10

slb_loop_enter_skip:
        addi    r11, r11, 16
        cmpd    cr0, r11, r12
        blt     slb_loop_enter

slb_do_enter:

.endm
92 /******************************************************************************
93  *                                                                            *
94  *                               Exit code                                    *
95  *                                                                            *
96  *****************************************************************************/
.macro LOAD_HOST_SEGMENTS

        /*
         * Restore the host's bolted SLB entries from the SLB shadow
         * buffer on guest exit, discarding whatever the guest run left
         * in the SLB.
         *
         * Register usage at this point:
         *
         * R1         = host R1
         * R2         = host R2
         * R12        = exit handler id
         * R13        = shadow vcpu - SHADOW_VCPU_OFF [=PACA on PPC64]
         * SVCPU.*    = guest *
         * SVCPU[CR]  = guest CR
         * SVCPU[XER] = guest XER
         * SVCPU[CTR] = guest CTR
         * SVCPU[LR]  = guest LR
         *
         */

        /* Remove all SLB entries that are in use. */

        /* Zero slbmte first — see NOTE in LOAD_GUEST_SEGMENTS: slbia may
         * not clear slot 0 by itself. */
        li      r0, 0
        slbmte  r0, r0
        slbia

        /* Restore bolted entries from the shadow */

        ld      r11, PACA_SLBSHADOWPTR(r13)

BEGIN_FW_FTR_SECTION

        /* Declare SLB shadow as SLB_NUM_BOLTED entries big */

        /* Undo the zeroing done on entry so the hypervisor resumes
         * maintaining the host's bolted entries (LPAR only).  Byte 3 is
         * presumably the low byte of a big-endian count — TODO confirm. */
        li      r8, SLB_NUM_BOLTED
        stb     r8, 3(r11)

END_FW_FTR_SECTION_IFSET(FW_FEATURE_LPAR)

        /* Manually load all entries from shadow SLB */

        /* r8/r9 walk the ESID/VSID doublewords of each save area entry;
         * LDX_BE loads them in big-endian byte order regardless of host
         * endianness (the shadow buffer layout is fixed big-endian). */
        li      r8, SLBSHADOW_SAVEAREA
        li      r7, SLBSHADOW_SAVEAREA + 8

        /* Unrolled: one iteration per bolted entry; a zero ESID word
         * means the slot is unused and is skipped. */
        .rept   SLB_NUM_BOLTED
        LDX_BE  r10, r11, r8
        cmpdi   r10, 0
        beq     1f
        LDX_BE  r9, r11, r7
        slbmte  r9, r10
1:      addi    r7, r7, SHADOW_SLB_ENTRY_LEN
        addi    r8, r8, SHADOW_SLB_ENTRY_LEN
        .endr

        /* Ensure the new SLB contents are visible to subsequent
         * instruction fetch and data accesses before continuing. */
        isync
        sync

slb_do_exit:

.endm