/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp
	.macro	EX insn, reg, src
.ex\@:	\insn	\reg, \src
	.section __ex_table,"a"
	PTR	.ex\@, fault
	.previous
	.endm
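/*
 * Illustration of the assumed expansion (\@ is the assembler's
 * per-invocation counter, shown here as "1"): a userspace access such as
 *
 *	EX	sdc1 $f0, 0(a0)
 *
 * becomes a store at a unique local label plus a __ex_table entry naming
 * the fault handler, so a faulting access resumes at fault: below and the
 * routine returns -EFAULT instead of taking an unhandled exception:
 *
 * .ex1:	sdc1	$f0, 0(a0)
 *		.section __ex_table,"a"
 *		PTR	.ex1, fault
 *		.previous
 */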
/*
 * _save_fp_context() - save FP context from the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Save FP context, including the 32 FP data registers and the FP
 * control & status register, from the FPU to signal context.
 */
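/*
 * Sketch of the assumed C-side view of this entry point (hypothetical
 * prototype for illustration; the authoritative declaration lives in the
 * kernel headers):
 *
 *	extern int _save_fp_context(void __user *fpregs,
 *				    void __user *fpc_csr);
 *
 * On success the routine is expected to return 0 in v0; if any of the
 * EX-wrapped userspace accesses faults, the fault: handler at the end of
 * this file returns -EFAULT instead.
 */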
LEAF(_save_fp_context)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)

#ifdef CONFIG_CPU_MIPS32_R2
	bgez	t0, 1f				# skip storing odd if FR=0
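/*
 * With Status.FR = 0 the odd-numbered FP registers are not independent
 * 64-bit registers (they alias the upper halves of the even/odd pairs),
 * so the odd half of the save is skipped and the even-numbered stores
 * below capture the whole register file.
 */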
	/* Store the 16 odd double precision registers */

	/* Store the 16 even double precision registers */
	EX	sdc1 $f24, 192(a0)
	EX	sdc1 $f26, 208(a0)
	EX	sdc1 $f28, 224(a0)
	EX	sdc1 $f30, 240(a0)
	END(_save_fp_context)
/*
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
 */
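/*
 * Layout note: each double-precision register $fN occupies one 64-bit
 * slot in the fpregs area, i.e. it lives at byte offset 8 * N from a0.
 * For example, $f13 is reloaded from offset 104 (13 * 8) and $f30 from
 * offset 240 (30 * 8), matching the stores made by _save_fp_context.
 */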
LEAF(_restore_fp_context)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2) || \
		defined(CONFIG_CPU_MIPS32_R6)

#ifdef CONFIG_CPU_MIPS32_R2
	bgez	t0, 1f				# skip loading odd if FR=0
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)

	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	END(_restore_fp_context)
#ifdef CONFIG_CPU_HAS_MSA

	.macro	op_one_wr	op, idx, base
\idx:	\op	\idx, 0, \base
	jr	ra
	.endm

	.macro	op_msa_wr	name, op
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	op_msa_wr	read_msa_wr_b, st_b
	op_msa_wr	read_msa_wr_h, st_h
	op_msa_wr	read_msa_wr_w, st_w
	op_msa_wr	read_msa_wr_d, st_d

	op_msa_wr	write_msa_wr_b, ld_b
	op_msa_wr	write_msa_wr_h, ld_h
	op_msa_wr	write_msa_wr_w, ld_w
	op_msa_wr	write_msa_wr_d, ld_d
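/*
 * How the generated accessors are assumed to work (sketch): each
 * op_one_wr invocation expands to a small stub that performs
 * "\op \idx, 0, \base", i.e. it stores (st_*) or loads (ld_*) MSA vector
 * register \idx at the address held in a1, and then returns.  op_msa_wr
 * strings 32 such stubs together under one LEAF symbol, so that
 * read_msa_wr_{b,h,w,d} and write_msa_wr_{b,h,w,d} can dispatch on a
 * vector register index by branching to the stub for that index; the
 * b/h/w/d variants differ only in the element width of the MSA
 * store/load used.
 */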
#endif /* CONFIG_CPU_HAS_MSA */

#ifdef CONFIG_CPU_HAS_MSA
	.macro	save_msa_upper	wr, off, base
#ifdef CONFIG_64BIT
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX sw	$1, \off(\base)
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX sw	$1, (\off+4)(\base)
	EX sw	$1, \off(\base)
#endif
	.endm
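/*
 * save_msa_upper is assumed to move the upper 64 bits of MSA vector
 * register \wr (the half that does not alias the scalar FP register)
 * through $1 and store them at \off(\base).  On 64-bit kernels a single
 * sd suffices; on 32-bit kernels the value is written as two sw, with
 * the word order swapped between little- and big-endian builds so the
 * resulting memory image matches what the 64-bit sd would produce.
 */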
LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	END(_save_msa_all_upper)
	.macro	restore_msa_upper	wr, off, base
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	EX lw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	EX lw	$1, \off(\base)
#endif
	.endm
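/*
 * restore_msa_upper mirrors save_msa_upper: the upper 64 bits are read
 * back into $1 using the same per-endianness word order and are assumed
 * to be inserted into vector register \wr afterwards.  Both helpers
 * clobber $1 (the assembler temporary).
 */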
LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	END(_restore_msa_all_upper)
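/*
 * Assumed C-side usage (hypothetical prototypes for illustration only):
 *
 *	extern void _save_msa_all_upper(void *buf);
 *	extern void _restore_msa_all_upper(void *buf);
 *
 * where buf points to a 32 * 8 = 256 byte area holding the upper halves
 * of vector registers $w0..$w31 at offsets 0x00, 0x08, ..., 0xf8 as laid
 * out above.
 */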
#endif /* CONFIG_CPU_HAS_MSA */
fault:	li	v0, -EFAULT			# failure
	jr	ra