 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * Copyright (C) 1994, 1995, 1996, 1998, 1999, 2002, 2003 Ralf Baechle
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1994, 1995, 1996, by Andreas Busse
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *    written by Carsten Langgaard, carstenl@mips.com
14 #include <asm/cachectl.h>
15 #include <asm/fpregdef.h>
16 #include <asm/mipsregs.h>
17 #include <asm/asm-offsets.h>
18 #include <asm/pgtable-bits.h>
19 #include <asm/regdef.h>
20 #include <asm/stackframe.h>
21 #include <asm/thread_info.h>
23 #include <asm/asmmacro.h>
 * Offset to the current process status flags, the first 32 bytes of the
 * stack are not used.
29 #define ST_OFF (_THREAD_SIZE - 32 - PT_SIZE + PT_STATUS)
31 #ifndef USE_ALTERNATE_RESUME_IMPL
 * task_struct *resume(task_struct *prev, task_struct *next,
 *                     struct thread_info *next_ti, s32 fp_save)
39 LONG_S t1, THREAD_STATUS(a0)
40 cpu_save_nonscratch a0
41 LONG_S ra, THREAD_REG31(a0)
 * Check whether we need to save any FP context. FP context is saved
 * iff the process has used the context with the scalar FPU or the MSA
 * ASE in the current time slice, as indicated by _TIF_USEDFPU and
 * _TIF_USEDMSA respectively. switch_to will have set fp_save
 * accordingly to an FP_SAVE_ enum value.
 * We do. Clear the saved CU1 bit for prev, such that next time it is
 * scheduled it will start in userland with the FPU disabled. If the
 * task uses the FPU then it will be enabled again via the do_cpu trap.
 * This allows us to lazily restore the FP context.
58 PTR_L t3, TASK_THREAD_INFO(a0)
	/* Check whether we're saving scalar or vector context. */
	/* Save 128b MSA vector context + scalar FP control & status. */
70 sw t1, THREAD_FCR31(a0)
73 1: /* Save 32b/64b scalar FP context. */
74 fpu_save_double a0 t0 t1 # c0_status passed in t0
78 #if defined(CONFIG_CC_STACKPROTECTOR) && !defined(CONFIG_SMP)
79 PTR_LA t8, __stack_chk_guard
80 LONG_L t9, TASK_STACK_CANARY(a1)
 * The order of restoring the registers takes care of the race
 * updating $28, $29 and kernelsp without disabling ints.
89 cpu_restore_nonscratch a1
91 PTR_ADDU t0, $28, _THREAD_SIZE - 32
92 set_saved_sp t0, t1, t2
93 mfc0 t1, CP0_STATUS /* Do we really need this? */
96 LONG_L a2, THREAD_STATUS(a1)
105 #endif /* USE_ALTERNATE_RESUME_IMPL */
 * Save a thread's fp context.
111 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
114 fpu_save_double a0 t0 t1 # clobbers t1
 * Restore a thread's fp context.
122 #if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPS32_R2)
125 fpu_restore_double a0 t0 t1 # clobbers t1
129 #ifdef CONFIG_CPU_HAS_MSA
 * Save a thread's MSA vector context.
 * Restore a thread's MSA vector context.
147 LEAF(_init_msa_upper)
 * Load the FPU with signalling NANS.  This bit pattern we're using has
 * the property that no matter whether considered as single or as double
 * precision represents signaling NANS.
 *
 * We initialize fcr31 to rounding to nearest, no exceptions.
162 #define FPU_DEFAULT 0x00000000
178 bgez t0, 1f # 16 / 32 register mode?
199 #ifdef CONFIG_CPU_MIPS32
233 #ifdef CONFIG_CPU_MIPS32_R2
236 sll t0, t0, 5 # is Status.FR set?
237 bgez t0, 1f # no: skip setting upper 32b
272 #endif /* CONFIG_CPU_MIPS32_R2 */