1 /* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * FPU support code, moved here from head.S so that it can be used
 * by chips which use other head-whatever.S files.
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 * Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 * Copyright (C) 1996 Paul Mackerras.
 * Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 */
15 #include <asm/pgtable.h>
16 #include <asm/cputable.h>
17 #include <asm/cache.h>
18 #include <asm/thread_info.h>
19 #include <asm/ppc_asm.h>
20 #include <asm/asm-offsets.h>
21 #include <asm/ptrace.h>
22 #include <asm/export.h>
23 #include <asm/asm-compat.h>
24 #include <asm/feature-fixups.h>
/*
 * Save/restore all 32 floating-point registers to/from memory at
 * (base), using the VSX forms (REST_32VSRS/SAVE_32VSRS, scratch reg c)
 * when the CPU has the CPU_FTR_VSX feature (selected at boot via the
 * feature-fixup alternative sections), and the plain FPR forms
 * otherwise.
 *
 * NOTE(review): this extraction is missing lines — the BEGIN_FTR_SECTION
 * openers, the "b 2f"/"b 3f" branches, the trailing "3:" labels, and the
 * #ifdef CONFIG_VSX / #else / #endif guards that select between the two
 * definition pairs below.  Confirm against the complete file before
 * assembling.
 */
27 #define __REST_32FPVSRS(n,c,base) \
30 END_FTR_SECTION_IFSET(CPU_FTR_VSX); \
31 REST_32FPRS(n,base); \
33 2: REST_32VSRS(n,c,base); \
36 #define __SAVE_32FPVSRS(n,c,base) \
39 END_FTR_SECTION_IFSET(CPU_FTR_VSX); \
40 SAVE_32FPRS(n,base); \
42 2: SAVE_32VSRS(n,c,base); \
45 #define __REST_32FPVSRS(n,b,base) REST_32FPRS(n, base)
46 #define __SAVE_32FPVSRS(n,b,base) SAVE_32FPRS(n, base)
/*
 * Public wrappers: translate R-prefixed register arguments (e.g. R4)
 * into the numeric __REG_R4 form expected by the helpers above.
 */
48 #define REST_32FPVSRS(n,c,base) __REST_32FPVSRS(n,__REG_##c,__REG_##base)
49 #define SAVE_32FPVSRS(n,c,base) __SAVE_32FPVSRS(n,__REG_##c,__REG_##base)
/*
 * load_fp_state(fp)  -- C equivalent: void load_fp_state(struct thread_fp_state *fp)
 *
 * Load state from memory into FP registers including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 *
 * In:      r3 = address of the fp_state to load
 * Clobbers: fr0 is used as scratch for the FPSCR image (then reloaded
 *           by the full register restore); r4 is the scratch register
 *           argument of the VSX restore path.
 */
_GLOBAL(load_fp_state)
	lfd	fr0,FPSTATE_FPSCR(r3)	/* fetch saved FPSCR image */
	MTFSF_L(fr0)			/* restore FPSCR *before* fr0 is overwritten below */
	REST_32FPVSRS(0, R4, R3)	/* restore fr0..fr31 (VSRs when VSX present) */
	blr				/* must return; falling through would run store_fp_state */
EXPORT_SYMBOL(load_fp_state)
_ASM_NOKPROBE_SYMBOL(load_fp_state); /* used by restore_math */
/*
 * store_fp_state(fp)  -- C equivalent: void store_fp_state(struct thread_fp_state *fp)
 *
 * Store FP state into memory, including FPSCR.
 * Assumes the caller has enabled FP in the MSR.
 *
 * In:      r3 = address of the fp_state to fill
 * Clobbers: fr0 (scratch for the FPSCR image, read after the registers
 *           have been saved); r4 is the scratch register argument of
 *           the VSX save path.
 */
_GLOBAL(store_fp_state)
	SAVE_32FPVSRS(0, R4, R3)	/* save fr0..fr31 first (fr0 still live) */
	mffs	fr0			/* then move FPSCR into scratch fr0 */
	stfd	fr0,FPSTATE_FPSCR(r3)	/* and store it after the register block */
	blr
EXPORT_SYMBOL(store_fp_state)
/*
 * load_up_fpu:
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 * Note that on 32-bit this can only use registers that will be
 * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
 *
 * NOTE(review): this is a fragment.  The _GLOBAL(load_up_fpu) entry
 * label, the mfmsr/oris sequence that builds the FP-enabled MSR value
 * in r5, and several #ifdef/#else/#endif guard lines are not visible in
 * this extraction.
 */
/* Feature-fixup tail; the matching BEGIN_FTR_SECTION is not visible here. */
89 END_FTR_SECTION_IFSET(CPU_FTR_VSX)
92 MTMSRD(r5) /* enable use of fpu now */
94 /* enable use of FP after return */
96 mfspr r5,SPRN_SPRG_THREAD /* current task's THREAD (phys) */
/* Presumably the 32-bit path: THREAD comes from SPRG, FP enabled in r9. */
97 #ifdef CONFIG_VMAP_STACK
100 lwz r4,THREAD_FPEXC_MODE(r5)
101 ori r9,r9,MSR_FP /* enable FP for current */
/*
 * Presumably the 64-bit (#else) path: current task via the PACA.
 * NOTE(review): the #else/#endif lines themselves are not visible here.
 */
104 ld r4,PACACURRENT(r13)
105 addi r5,r4,THREAD /* Get THREAD */
106 lwz r4,THREAD_FPEXC_MODE(r5)
/*
 * Bump the thread's load_fp counter.
 * NOTE(review): the instruction that actually increments r4 between the
 * lbz and the stb is not visible in this extraction.
 */
111 /* Don't care if r4 overflows, this is desired behaviour */
112 lbz r4,THREAD_LOAD_FP(r5)
114 stb r4,THREAD_LOAD_FP(r5)
/*
 * Point r10 at the thread's fpstate and reload FPSCR + all FPRs.
 * NOTE(review): the move of the fr0 FPSCR image into the FPSCR
 * (MTFSF_L-style) is not visible between the lfd and the restore.
 */
115 addi r10,r5,THREAD_FPSTATE
116 lfd fr0,FPSTATE_FPSCR(r10)
118 REST_32FPVSRS(0, R4, R10)
119 /* restore registers and return */
120 /* we haven't used ctr or xer or lr */
/* NOTE(review): the actual return to the exception-exit path is not visible here. */
/*
 * save_fpu (fragment -- the _GLOBAL(save_fpu) entry label is not
 * visible in this extraction):
 * Save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 *
 * In: r3 = task_struct pointer (immediately advanced to its THREAD).
 */
129 addi r3,r3,THREAD /* want THREAD of task */
130 PPC_LL r6,THREAD_FPSAVEAREA(r3)
131 PPC_LL r5,PT_REGS(r3)
/*
 * NOTE(review): the compare-and-branch that keeps the dedicated
 * fpsavearea in r6 when non-NULL (skipping the addi below to label 2:)
 * is not visible between these lines in this extraction.
 */
134 addi r6,r3,THREAD_FPSTATE
135 2: SAVE_32FPVSRS(0, R4, R6)
/*
 * NOTE(review): the mffs that places the FPSCR into fr0, and the final
 * blr, are not visible here -- as shown, this stfd stores a stale fr0.
 */
137 stfd fr0,FPSTATE_FPSCR(r6)
/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */