/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
 */
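
/*
 * USE_ALTERNATE_RESUME_IMPL tells the shared r4k_switch.S code not to
 * provide the generic resume(), so the Octeon-specific resume() below,
 * which also handles COP2 and CVMSEG state, is used instead while the
 * remaining helpers from that file are still shared.
 */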
#define USE_ALTERNATE_RESUME_IMPL 1

#include "r4k_switch.S"

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti, int usedfpu)
 */
        LONG_S  t1, THREAD_STATUS(a0)
        cpu_save_nonscratch a0
        LONG_S  ra, THREAD_REG31(a0)

        /*
         * Check if we need to save FPU registers.
         */
        PTR_L   t3, TASK_THREAD_INFO(a0)
        LONG_L  t0, TI_FLAGS(t3)
        LONG_S  t0, TI_FLAGS(t3)

        /*
         * Clear saved user stack CU1 bit.
         */
        fpu_save_double a0 t0 t1        # c0_status passed in t0
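        /*
         * fpu_save_double checks the FR bit of the Status value passed in
         * t0 to decide whether the odd-numbered FP registers need to be
         * saved as well.
         */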

        /* Check if we need to save COP2 registers */
        PTR_L   t2, TASK_THREAD_INFO(a0)
        /* Disable COP2 in the stored process state */
        /* Enable COP2 so we can save it */
        /* Disable COP2 now that we are done */

#if CONFIG_CAVIUM_OCTEON_CVMSEG_SIZE > 0
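        /*
         * CVMSEG is the Octeon per-core scratchpad (local memory).  When
         * the task has user access to it enabled, its contents are part of
         * the task context and are copied into the thread struct here.
         */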
        /* Check if we need to store CVMSEG state */
        mfc0    t0, $11,7               /* CvmMemCtl */
        bbit0   t0, 6, 3f               /* Is user access enabled? */

        /* Store the CVMSEG state */
        /* Extract the size of CVMSEG */
        /* Multiply by (cache line size / sizeof(long) / 2) */
        li      t1, -32768              /* Base address of CVMSEG */
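        /*
         * CVMSEG local memory occupies the top 32KB of the address space,
         * so the sign-extended -32768 above is its base address.
         */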
        LONG_ADDI t2, a0, THREAD_CVMSEG /* Where to store CVMSEG to */
2:      LONG_L  t8, 0(t1)               /* Load from CVMSEG */
        subu    t0, 1                   /* Decrement loop var */
        LONG_L  t9, LONGSIZE(t1)        /* Load from CVMSEG */
        LONG_ADDU t1, LONGSIZE*2        /* Increment loc in CVMSEG */
        LONG_S  t8, 0(t2)               /* Store CVMSEG to thread storage */
        LONG_ADDU t2, LONGSIZE*2        /* Increment loc in thread storage */
        bnez    t0, 2b                  /* Loop until we've copied it all */
         LONG_S t9, -LONGSIZE(t2)       /* Store CVMSEG to thread storage */
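        /*
         * The final LONG_S sits in the branch delay slot; t2 has already
         * been advanced by LONGSIZE*2, hence the -LONGSIZE offset.
         */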

        /* Disable access to CVMSEG */
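        /*
         * Bit 6 is known to be set on this path (the bbit0 above skips the
         * whole block when user access is already off), so the xori below
         * clears it.
         */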
        mfc0    t0, $11,7               /* CvmMemCtl */
        xori    t0, t0, 0x40            /* Bit 6 is CVMSEG user enable */
        mtc0    t0, $11,7               /* CvmMemCtl */
3:
#endif

#if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
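        /*
         * With stack protector on a uniprocessor kernel the canary lives in
         * the single global __stack_chk_guard, so it has to be switched to
         * the incoming task's per-task canary value here.
         */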
        PTR_LA  t8, __stack_chk_guard
        LONG_L  t9, TASK_STACK_CANARY(a1)
        LONG_S  t9, 0(t8)
#endif

        /*
         * The order of restoring the registers takes care of the race
         * when updating $28, $29 and kernelsp without disabling interrupts.
         */
        cpu_restore_nonscratch a1
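        /*
         * $28 (gp) holds the new thread_info, which sits at the bottom of
         * the task's kernel stack.  The code below points t0 32 bytes below
         * the top of that stack and set_saved_sp records it as the kernel
         * stack pointer to use on the next exception entry from user mode.
         */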
        PTR_ADDU t0, $28, _THREAD_SIZE - 32
        set_saved_sp t0, t1, t2

        mfc0    t1, CP0_STATUS          /* Do we really need this? */
        LONG_L  a2, THREAD_STATUS(a1)

/*
 * void octeon_cop2_save(struct octeon_cop2_state *a0)
 */
        LEAF(octeon_cop2_save)
        dmfc0   t9, $9,7                /* CvmCtl register. */
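        /*
         * CvmCtl advertises which COP2 units are present: NODFA_CP2
         * (bit 28) gates the DFA/LLM state and NOCRYPTO (bit 26) gates the
         * crypto state, so only the units that actually exist are saved.
         */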
        /* Save the COP2 CRC state */
        sd      t0, OCTEON_CP2_CRC_IV(a0)
        sd      t1, OCTEON_CP2_CRC_LENGTH(a0)
        sd      t2, OCTEON_CP2_CRC_POLY(a0)
        /* Skip next instructions if CvmCtl[NODFA_CP2] set */

        /* Save the LLM state */
        sd      t0, OCTEON_CP2_LLM_DAT(a0)
        sd      t1, OCTEON_CP2_LLM_DAT+8(a0)

1:      bbit1   t9, 26, 3f              /* done if CvmCtl[NOCRYPTO] set */

        /* Save the COP2 crypto state */
        /* This part is mostly common to both pass 1 and later revisions */
        sd      t0, OCTEON_CP2_3DES_IV(a0)
        sd      t1, OCTEON_CP2_3DES_KEY(a0)
        dmfc2   t1, 0x0111              /* only necessary for pass 1 */
        sd      t2, OCTEON_CP2_3DES_KEY+8(a0)
        sd      t3, OCTEON_CP2_3DES_KEY+16(a0)
        sd      t0, OCTEON_CP2_3DES_RESULT(a0)
        sd      t1, OCTEON_CP2_AES_INP0(a0)     /* only necessary for pass 1 */
        sd      t2, OCTEON_CP2_AES_IV(a0)
        sd      t3, OCTEON_CP2_AES_IV+8(a0)
        sd      t0, OCTEON_CP2_AES_KEY(a0)
        sd      t1, OCTEON_CP2_AES_KEY+8(a0)
        sd      t2, OCTEON_CP2_AES_KEY+16(a0)
        sd      t3, OCTEON_CP2_AES_KEY+24(a0)
        mfc0    t3, $15,0               /* Get the processor ID register */
        sd      t0, OCTEON_CP2_AES_KEYLEN(a0)
        li      t0, 0x000d0000          /* This is the processor ID of Octeon Pass1 */
        sd      t1, OCTEON_CP2_AES_RESULT(a0)
        sd      t2, OCTEON_CP2_AES_RESULT+8(a0)
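        /*
         * The remaining hash/GFM register layout differs between Octeon
         * pass 1 and later revisions, so the PRID read above is compared
         * against the pass 1 value to select which variant to save.
         */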
        /* Skip to the Pass1 version of the remainder of the COP2 state */

        /* The non-pass1 state when !CvmCtl[NOCRYPTO] */
        sd      t1, OCTEON_CP2_HSH_DATW(a0)
        sd      t2, OCTEON_CP2_HSH_DATW+8(a0)
        sd      t3, OCTEON_CP2_HSH_DATW+16(a0)
        sd      t0, OCTEON_CP2_HSH_DATW+24(a0)
        sd      t1, OCTEON_CP2_HSH_DATW+32(a0)
        sd      t2, OCTEON_CP2_HSH_DATW+40(a0)
        sd      t3, OCTEON_CP2_HSH_DATW+48(a0)
        sd      t0, OCTEON_CP2_HSH_DATW+56(a0)
        sd      t1, OCTEON_CP2_HSH_DATW+64(a0)
        sd      t2, OCTEON_CP2_HSH_DATW+72(a0)
        sd      t3, OCTEON_CP2_HSH_DATW+80(a0)
        sd      t0, OCTEON_CP2_HSH_DATW+88(a0)
        sd      t1, OCTEON_CP2_HSH_DATW+96(a0)
        sd      t2, OCTEON_CP2_HSH_DATW+104(a0)
        sd      t3, OCTEON_CP2_HSH_DATW+112(a0)
        sd      t0, OCTEON_CP2_HSH_IVW(a0)
        sd      t1, OCTEON_CP2_HSH_IVW+8(a0)
        sd      t2, OCTEON_CP2_HSH_IVW+16(a0)
        sd      t3, OCTEON_CP2_HSH_IVW+24(a0)
        sd      t0, OCTEON_CP2_HSH_IVW+32(a0)
        sd      t1, OCTEON_CP2_HSH_IVW+40(a0)
        sd      t2, OCTEON_CP2_HSH_IVW+48(a0)
        sd      t3, OCTEON_CP2_HSH_IVW+56(a0)
        sd      t0, OCTEON_CP2_GFM_MULT(a0)
        sd      t1, OCTEON_CP2_GFM_MULT+8(a0)
        sd      t2, OCTEON_CP2_GFM_POLY(a0)
        sd      t3, OCTEON_CP2_GFM_RESULT(a0)
        sd      t0, OCTEON_CP2_GFM_RESULT+8(a0)

2:      /* Pass 1 specific state when !CvmCtl[NOCRYPTO] */
        sd      t3, OCTEON_CP2_HSH_DATW(a0)
        sd      t0, OCTEON_CP2_HSH_DATW+8(a0)
        sd      t1, OCTEON_CP2_HSH_DATW+16(a0)
        sd      t2, OCTEON_CP2_HSH_DATW+24(a0)
        sd      t3, OCTEON_CP2_HSH_DATW+32(a0)
        sd      t0, OCTEON_CP2_HSH_DATW+40(a0)
        sd      t1, OCTEON_CP2_HSH_DATW+48(a0)
        sd      t2, OCTEON_CP2_HSH_IVW(a0)
        sd      t3, OCTEON_CP2_HSH_IVW+8(a0)
        sd      t0, OCTEON_CP2_HSH_IVW+16(a0)

3:      /* Pass 1, or CvmCtl[NOCRYPTO] set */
        END(octeon_cop2_save)

/*
 * void octeon_cop2_restore(struct octeon_cop2_state *a0)
 */
        LEAF(octeon_cop2_restore)
        /* First cache line was prefetched before the call */
        dmfc0   t9, $9,7                /* CvmCtl register. */
        ld      t0, OCTEON_CP2_CRC_IV(a0)
        ld      t1, OCTEON_CP2_CRC_LENGTH(a0)
        ld      t2, OCTEON_CP2_CRC_POLY(a0)
        /* Restore the COP2 CRC state */
        bbit1   t9, 28, 2f              /* Skip LLM if CvmCtl[NODFA_CP2] is set */

        /* Restore the LLM state */
        ld      t0, OCTEON_CP2_LLM_DAT(a0)
        ld      t1, OCTEON_CP2_LLM_DAT+8(a0)

2:
        bbit1   t9, 26, done_restore    /* done if CvmCtl[NOCRYPTO] set */

        /* Restore the COP2 crypto state common to pass 1 and pass 2 */
        ld      t0, OCTEON_CP2_3DES_IV(a0)
        ld      t1, OCTEON_CP2_3DES_KEY(a0)
        ld      t2, OCTEON_CP2_3DES_KEY+8(a0)
        ld      t0, OCTEON_CP2_3DES_KEY+16(a0)
        ld      t1, OCTEON_CP2_3DES_RESULT(a0)
        ld      t2, OCTEON_CP2_AES_INP0(a0)     /* only really needed for pass 1 */
        ld      t0, OCTEON_CP2_AES_IV(a0)
        ld      t1, OCTEON_CP2_AES_IV+8(a0)
        dmtc2   t2, 0x010A              /* only really needed for pass 1 */
        ld      t2, OCTEON_CP2_AES_KEY(a0)
        ld      t0, OCTEON_CP2_AES_KEY+8(a0)
        ld      t1, OCTEON_CP2_AES_KEY+16(a0)
        ld      t2, OCTEON_CP2_AES_KEY+24(a0)
        ld      t0, OCTEON_CP2_AES_KEYLEN(a0)
        ld      t1, OCTEON_CP2_AES_RESULT(a0)
        ld      t2, OCTEON_CP2_AES_RESULT+8(a0)
        mfc0    t3, $15,0               /* Get the processor ID register */
        li      t0, 0x000d0000          /* This is the processor ID of Octeon Pass1 */
        bne     t0, t3, 3f              /* Skip the pass 1 code on non-pass1 parts */

        /* This code is specific to pass 1 */
        ld      t0, OCTEON_CP2_HSH_DATW(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+8(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+16(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+24(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+32(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+40(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+48(a0)
        ld      t1, OCTEON_CP2_HSH_IVW(a0)
        ld      t2, OCTEON_CP2_HSH_IVW+8(a0)
        ld      t0, OCTEON_CP2_HSH_IVW+16(a0)

        b       done_restore            /* unconditional branch */

3:      /* This is post-pass1 code */
        ld      t2, OCTEON_CP2_HSH_DATW(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+8(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+16(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+24(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+32(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+40(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+48(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+56(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+64(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+72(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+80(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+88(a0)
        ld      t2, OCTEON_CP2_HSH_DATW+96(a0)
        ld      t0, OCTEON_CP2_HSH_DATW+104(a0)
        ld      t1, OCTEON_CP2_HSH_DATW+112(a0)
        ld      t2, OCTEON_CP2_HSH_IVW(a0)
        ld      t0, OCTEON_CP2_HSH_IVW+8(a0)
        ld      t1, OCTEON_CP2_HSH_IVW+16(a0)
        ld      t2, OCTEON_CP2_HSH_IVW+24(a0)
        ld      t0, OCTEON_CP2_HSH_IVW+32(a0)
        ld      t1, OCTEON_CP2_HSH_IVW+40(a0)
        ld      t2, OCTEON_CP2_HSH_IVW+48(a0)
        ld      t0, OCTEON_CP2_HSH_IVW+56(a0)
        ld      t1, OCTEON_CP2_GFM_MULT(a0)
        ld      t2, OCTEON_CP2_GFM_MULT+8(a0)
        ld      t0, OCTEON_CP2_GFM_POLY(a0)
        ld      t1, OCTEON_CP2_GFM_RESULT(a0)
        ld      t2, OCTEON_CP2_GFM_RESULT+8(a0)

        END(octeon_cop2_restore)

/*
 * void octeon_mult_save()
 * sp is assumed to point to a struct pt_regs
 *
 * NOTE: This is called in SAVE_SOME in stackframe.h.  It can only
 *       safely modify k0 and k1.
 */
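/*
 * Only k0 and k1 are safe here: they are the kernel scratch registers that
 * exception entry may clobber at any time, while the other GPRs may still
 * hold live, not-yet-saved state of the interrupted context.
 */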
        LEAF(octeon_mult_save)
        dmfc0   k0, $9,7                /* CvmCtl register. */
        bbit1   k0, 27, 1f              /* Skip if CvmCtl[NOMUL] is set */

        /* Save the multiplier state */
        sd      k0, PT_MTP(sp)          /* PT_MTP    has P0 */
        sd      k1, PT_MTP+8(sp)        /* PT_MTP+8  has P1 */
        sd      k0, PT_MTP+16(sp)       /* PT_MTP+16 has P2 */
        sd      k1, PT_MPL(sp)          /* PT_MPL    has MPL0 */
        sd      k0, PT_MPL+8(sp)        /* PT_MPL+8  has MPL1 */
        sd      k1, PT_MPL+16(sp)       /* PT_MPL+16 has MPL2 */

1:      /* Resume here if CvmCtl[NOMUL] */
        END(octeon_mult_save)

/*
 * void octeon_mult_restore()
 * sp is assumed to point to a struct pt_regs
 *
 * NOTE: This is called in RESTORE_SOME in stackframe.h.
 */
        LEAF(octeon_mult_restore)
        dmfc0   k1, $9,7                /* CvmCtl register. */
        ld      v0, PT_MPL(sp)          /* MPL0 */
        ld      v1, PT_MPL+8(sp)        /* MPL1 */
        ld      k0, PT_MPL+16(sp)       /* MPL2 */
        bbit1   k1, 27, 1f              /* Skip if CvmCtl[NOMUL] is set */
        /* Normally falls through, so no time wasted here */
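        /*
         * The PT_MPL loads above are issued before the NOMUL check so
         * their latency is hidden on the common path, where the multiplier
         * is present and the branch falls through.
         */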

        /* Restore the multiplier state */
        ld      k1, PT_MTP+16(sp)       /* P2 */
        ld      v0, PT_MTP+8(sp)        /* P1 */
        ld      v1, PT_MTP(sp)          /* P0 */

1:      /* Resume here if CvmCtl[NOMUL] */
        END(octeon_mult_restore)