/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code.  This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */
.macro EXCEPTION_PROLOG handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
.endm
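
/*
 * EXCEPTION_PROLOG_0 frees r10/r11 by parking them in the scratch
 * SPRGs.  With CONFIG_VMAP_STACK it also copies SRR0/SRR1 (and, on
 * request, DAR/DSISR) into the thread_struct so they survive any TLB
 * miss taken while the frame is being built.  On exit r10 holds the
 * saved CR and cr0.eq is set if we came from kernel mode.
 */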
.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
#ifdef CONFIG_VMAP_STACK
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
	mfspr	r11, SPRN_DAR
	stw	r11, DAR(r10)
	mfspr	r11, SPRN_DSISR
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
#endif
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
#ifdef CONFIG_VMAP_STACK
	stw	r11, SRR1(r10)
#endif
	mfcr	r10
	andi.	r11, r11, MSR_PR
.endm
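
/*
 * EXCEPTION_PROLOG_1 computes the exception frame address in r11:
 * just below r1 when coming from the kernel (cr0.eq set), otherwise
 * at the top of the task's kernel stack.  With CONFIG_VMAP_STACK it
 * also branches to the stack_overflow handler when the frame would
 * land outside the THREAD_ALIGN-aligned stack; the mtcrf mask skips
 * cr0 so the user/kernel test survives for the code that follows.
 */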
.macro EXCEPTION_PROLOG_1 for_rtas=0
#ifdef CONFIG_VMAP_STACK
	.ifeq	\for_rtas
	li	r11, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r11
	isync
	.endif
	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
#else
	tophys(r11,r1)			/* use tophys(r1) if kernel */
	subi	r11, r11, INT_FRAME_SIZE	/* alloc exc. frame */
#endif
	beq	1f
	mfspr	r11,SPRN_SPRG_THREAD
	tovirt_vmstack r11, r11
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	tophys_novmstack r11, r11
1:
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x7f, r11
	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
#endif
.endm
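
/*
 * EXCEPTION_PROLOG_2 fills in the frame at r11: CR, LR and the
 * volatile GPRs are saved, SRR0/SRR1 are recovered into r12/r9 (from
 * the thread_struct copies under CONFIG_VMAP_STACK, straight from the
 * SPRs otherwise), r1 is pointed at the new frame and the MSR is
 * switched so that further exceptions can be taken.
 */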
.macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	mtcr	r10
FTR_SECTION_ELSE
	stw	r10, _CCR(r11)
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_HPTE_TABLE)
#else
	stw	r10,_CCR(r11)		/* save registers */
#endif
	mfspr	r10, SPRN_SPRG_SCRATCH0
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	stw	r10,GPR10(r11)
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	mfcr	r10
	stw	r10, _CCR(r11)
END_MMU_FTR_SECTION_IFSET(MMU_FTR_HPTE_TABLE)
#endif
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
#if defined(CONFIG_VMAP_STACK) && defined(CONFIG_PPC_BOOK3S)
BEGIN_MMU_FTR_SECTION
	andi.	r10, r9, MSR_PR
END_MMU_FTR_SECTION_IFSET(MMU_FTR_HPTE_TABLE)
#endif
	lwz	r12, SRR0(r12)
#else
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt_novmstack r1, r11	/* set new kernel sp */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
.endm
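
/*
 * SYSCALL_ENTRY is a trimmed-down prolog for the system call vector:
 * it saves only what the syscall path needs, clears the SO bit in the
 * saved CR, records \trapno + 1 in _TRAP, and RFIs to
 * transfer_to_syscall with the MMU re-enabled.
 */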
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
#ifdef CONFIG_VMAP_STACK
	mfspr	r9, SPRN_SRR0
	mfspr	r11, SPRN_SRR1
	stw	r9, SRR0(r12)
	stw	r11, SRR1(r12)
#endif
	mfcr	r10
	lwz	r11,TASK_STACK-THREAD(r12)
	rlwinm	r10,r10,0,4,2	/* Clear SO bit in CR */
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#ifdef CONFIG_VMAP_STACK
	li	r9, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r9
	isync
#endif
	tovirt_vmstack r12, r12
	tophys_novmstack r11, r11
	mflr	r9
	stw	r10,_CCR(r11)		/* save registers */
	stw	r9, _LINK(r11)
#ifdef CONFIG_VMAP_STACK
	lwz	r10, SRR0(r12)
	lwz	r9, SRR1(r12)
#else
	mfspr	r10,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt_novmstack r1, r11	/* set new kernel sp */
	stw	r10,_NIP(r11)
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD
	stw	r11,PT_REGS(r12)
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt_novmstack r2, r2		/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this
	 * point otherwise we might risk taking an interrupt before we tell
	 * lockdep they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	SYNC
	RFI				/* jump to handler, enable MMU */
.endm
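
/*
 * Under CONFIG_VMAP_STACK, DAR/DSISR were already captured by
 * EXCEPTION_PROLOG_0 and written to the frame by EXCEPTION_PROLOG_2,
 * so this helper reads the SPRs only when the stack is not vmapped.
 */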
.macro save_dar_dsisr_on_stack reg1, reg2, sp
#ifndef CONFIG_VMAP_STACK
	mfspr	\reg1, SPRN_DAR
	mfspr	\reg2, SPRN_DSISR
	stw	\reg1, _DAR(\sp)
	stw	\reg2, _DSISR(\sp)
#endif
.endm
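
/*
 * Same as above, but additionally leaves DAR/DSISR in \reg1/\reg2
 * for the caller.
 */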
.macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
#ifdef CONFIG_VMAP_STACK
	lwz	\reg1, _DAR(\sp)
	lwz	\reg2, _DSISR(\sp)
#else
	save_dar_dsisr_on_stack \reg1, \reg2, \sp
#endif
.endm
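
/*
 * Conditional address-space conversion helpers: the _vmstack variants
 * convert only when CONFIG_VMAP_STACK is enabled, the _novmstack
 * variants only when it is not; in the other configuration they
 * degenerate to a register move that is elided when dst == src.
 */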
.macro tovirt_vmstack dst, src
#ifdef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tovirt_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tophys_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tophys(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)		\
	. = n;					\
	DO_KVM n;				\
label:

#else
#define	START_EXCEPTION(n, label)		\
	. = n;					\
label:

#endif
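
/*
 * EXCEPTION() lays down a complete vector at address n: the common
 * prolog followed by a transfer to hdlr through the given xfer macro.
 * Illustrative use (names as in the 32-bit head_*.S files):
 *
 *	EXCEPTION(0x1300, Trap_13, unknown_exception, EXC_XFER_STD)
 */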
#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)
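
/*
 * EXC_XFER_TEMPLATE records the trap number in the frame, loads the
 * MSR the handler should run with, and branches to the transfer
 * routine; the handler and return addresses follow the bl as inline
 * words for the transfer code to pick up via LR.
 */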
#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
	li	r10,trap;					\
	stw	r10,_TRAP(r11);					\
	LOAD_REG_IMMEDIATE(r10, msr);				\
	bl	tfer;						\
	.long	hdlr;						\
	.long	ret

#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)
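
/*
 * On a vmapped-stack overflow, switch to this CPU's emergency context
 * (falling back to init_thread_union early in boot), build a frame
 * there and hand off to stack_overflow_exception.
 */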
.macro vmap_stack_overflow_exception
#ifdef CONFIG_VMAP_STACK
#ifdef CONFIG_SMP
	mfspr	r11, SPRN_SPRG_THREAD
	tovirt(r11, r11)
	lwz	r11, TASK_CPU - THREAD(r11)
	slwi	r11, r11, 3
	addis	r11, r11, emergency_ctx@ha
#else
	lis	r11, emergency_ctx@ha
#endif
	lwz	r11, emergency_ctx@l(r11)
	cmpwi	cr1, r11, 0
	bne	cr1, 1f
	lis	r11, init_thread_union@ha
	addi	r11, r11, init_thread_union@l
1:	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2
	SAVE_NVGPRS(r11)
	addi	r3, r1, STACK_FRAME_OVERHEAD
	EXC_XFER_STD(0, stack_overflow_exception)
#endif
.endm

#endif /* __HEAD_32_H__ */