/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 *    Copyright (C) 1996 Paul Mackerras.
 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 */
#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>
#include <asm/export.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#ifdef CONFIG_VSX
#define __REST_32FPVSRS(n,c,base)				\
BEGIN_FTR_SECTION						\
	b	2f;						\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);				\
	REST_32FPRS(n,base);					\
	b	3f;						\
2:	REST_32VSRS(n,c,base);					\
3:

#define __SAVE_32FPVSRS(n,c,base)				\
BEGIN_FTR_SECTION						\
	b	2f;						\
END_FTR_SECTION_IFSET(CPU_FTR_VSX);				\
	SAVE_32FPRS(n,base);					\
	b	3f;						\
2:	SAVE_32VSRS(n,c,base);					\
3:
#else
#define __REST_32FPVSRS(n,b,base)	REST_32FPRS(n, base)
#define __SAVE_32FPVSRS(n,b,base)	SAVE_32FPRS(n, base)
#endif
#define REST_32FPVSRS(n,c,base) __REST_32FPVSRS(n,__REG_##c,__REG_##base)
#define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)
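
/*
 * Note on the helpers above (descriptive comment only, not functional
 * code): the BEGIN_FTR_SECTION/END_FTR_SECTION_IFSET(CPU_FTR_VSX) pair
 * marks the "b 2f" for runtime feature patching.  On CPUs with VSX the
 * branch is kept, so all 32 VSX registers are saved/restored (the FPRs
 * are aliased to doubleword 0 of vs0-vs31); on other CPUs the branch is
 * patched to a nop and only the 32 FPRs are touched.  The "c" argument
 * is a GPR the VSX forms may use as scratch; without CONFIG_VSX it is
 * ignored.
 */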
/*
 * Load state from memory into FP registers including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 */
_GLOBAL(load_fp_state)
	lfd	fr0,FPSTATE_FPSCR(r3)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, R4, R3)
	blr
EXPORT_SYMBOL(load_fp_state)
_ASM_NOKPROBE_SYMBOL(load_fp_state); /* used by restore_math */
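
/*
 * Caller's view (a sketch for illustration, not verified here): the C
 * prototype in <asm/switch_to.h> is roughly
 *
 *	void load_fp_state(struct thread_fp_state *fp);
 *
 * so r3 points at a struct thread_fp_state whose FPSCR image lives at
 * offset FPSTATE_FPSCR.  Callers such as restore_math() must have set
 * MSR_FP before getting here.
 */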
/*
 * Store FP state into memory, including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 */
_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r3)
	blr
EXPORT_SYMBOL(store_fp_state)
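
/*
 * Ordering note (descriptive only): store_fp_state saves the register
 * file before clobbering fr0 with mffs, and load_fp_state consumes fr0
 * via MTFSF_L before reloading it, so fr0 can serve as the FPSCR
 * scratch register in both directions without losing its saved value.
 */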
/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 * Note that on 32-bit this can only use registers that will be
 * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	MTMSRD(r5)			/* enable use of fpu now */
	isync
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD	/* current task's THREAD (phys) */
#ifdef CONFIG_VMAP_STACK
	tovirt(r5, r5)
#endif
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
	li	r4,1
	stb	r4,THREAD_LOAD_FP(r5)
	addi	r10,r5,THREAD_FPSTATE
	lfd	fr0,FPSTATE_FPSCR(r10)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, R4, R10)
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr
_ASM_NOKPROBE_SYMBOL(load_up_fpu)
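
/*
 * Background (a sketch of the surrounding flow, not verified here):
 * load_up_fpu is reached from the FP-unavailable exception.  r9 on
 * 32-bit and r12 on 64-bit are assumed to hold the MSR image that the
 * exception exit path will restore, which is why MSR_FP and the
 * thread's fpexc_mode bits are OR'd into them.  THREAD_LOAD_FP is set
 * so the context-switch code knows this task's FP state is live in the
 * registers.
 */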
/*
 * save_fpu(tsk)
 * Save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
_GLOBAL(save_fpu)
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r6,THREAD_FPSAVEAREA(r3)
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r6,0
	bne	2f
	addi	r6,r3,THREAD_FPSTATE
2:	SAVE_32FPVSRS(0, R4, R6)
	mffs	fr0
	stfd	fr0,FPSTATE_FPSCR(r6)
	blr
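
/*
 * Caller's view (a sketch for illustration, not verified here): the C
 * prototype is roughly
 *
 *	void save_fpu(struct task_struct *tsk);
 *
 * hence the immediate addi to reach tsk->thread.  If thread.fp_save_area
 * (THREAD_FPSAVEAREA) is NULL the state is written to thread.fp_state
 * (THREAD_FPSTATE) instead; typical callers such as __giveup_fpu() pass
 * the task that currently owns the FPU.
 */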