arch/loongarch/include/asm/fpu.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Author: Huacai Chen <chenhuacai@loongson.cn>
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_FPU_H
#define _ASM_FPU_H

#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/ptrace.h>
#include <linux/thread_info.h>
#include <linux/bitops.h>

#include <asm/cpu.h>
#include <asm/cpu-features.h>
#include <asm/current.h>
#include <asm/loongarch.h>
#include <asm/processor.h>
#include <asm/ptrace.h>

struct sigcontext;

#define kernel_fpu_available() cpu_has_fpu
extern void kernel_fpu_begin(void);
extern void kernel_fpu_end(void);
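
/*
 * Illustrative sketch (not part of the original header): kernel code that
 * wants to execute FP/SIMD instructions is expected to check availability
 * and bracket the region with kernel_fpu_begin()/kernel_fpu_end(). The
 * function below is a hypothetical example, not an API defined here:
 *
 *	static void example_kernel_fpu_user(void)
 *	{
 *		if (!kernel_fpu_available())
 *			return;
 *
 *		kernel_fpu_begin();
 *		// ... FP/LSX/LASX instructions ...
 *		kernel_fpu_end();
 *	}
 */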

extern void _init_fpu(unsigned int);
extern void _save_fp(struct loongarch_fpu *);
extern void _restore_fp(struct loongarch_fpu *);

extern void _save_lsx(struct loongarch_fpu *fpu);
extern void _restore_lsx(struct loongarch_fpu *fpu);
extern void _init_lsx_upper(void);
extern void _restore_lsx_upper(struct loongarch_fpu *fpu);

extern void _save_lasx(struct loongarch_fpu *fpu);
extern void _restore_lasx(struct loongarch_fpu *fpu);
extern void _init_lasx_upper(void);
extern void _restore_lasx_upper(struct loongarch_fpu *fpu);

static inline void enable_lsx(void);
static inline void disable_lsx(void);
static inline void save_lsx(struct task_struct *t);
static inline void restore_lsx(struct task_struct *t);

static inline void enable_lasx(void);
static inline void disable_lasx(void);
static inline void save_lasx(struct task_struct *t);
static inline void restore_lasx(struct task_struct *t);

/*
 * Mask the FCSR Cause bits according to the Enable bits, observing
 * that Unimplemented is always enabled.
 */
static inline unsigned long mask_fcsr_x(unsigned long fcsr)
{
	return fcsr & ((fcsr & FPU_CSR_ALL_E) <<
		(ffs(FPU_CSR_ALL_X) - ffs(FPU_CSR_ALL_E)));
}
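
/*
 * Worked example (added for illustration; the bit values are those of the
 * FPU_CSR_* definitions in <asm/loongarch.h> as currently defined): with
 * FPU_CSR_ALL_E == 0x0000001f and FPU_CSR_ALL_X == 0x1f000000, the shift
 * distance is ffs(0x1f000000) - ffs(0x1f) == 25 - 1 == 24, which moves the
 * Enable bits up onto the matching Cause bits. For fcsr == 0x05000001
 * (INE enabled; OVF and INE Causes set), mask_fcsr_x() returns 0x01000000:
 * only the Cause bit whose exception is enabled survives.
 */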

static inline int is_fp_enabled(void)
{
	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_FPEN) ?
		1 : 0;
}

static inline int is_lsx_enabled(void)
{
	if (!cpu_has_lsx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LSXEN) ?
		1 : 0;
}

static inline int is_lasx_enabled(void)
{
	if (!cpu_has_lasx)
		return 0;

	return (csr_read32(LOONGARCH_CSR_EUEN) & CSR_EUEN_LASXEN) ?
		1 : 0;
}

static inline int is_simd_enabled(void)
{
	return is_lsx_enabled() | is_lasx_enabled();
}

#define enable_fpu()		set_csr_euen(CSR_EUEN_FPEN)

#define disable_fpu()		clear_csr_euen(CSR_EUEN_FPEN)

#define clear_fpu_owner()	clear_thread_flag(TIF_USEDFPU)

static inline int is_fpu_owner(void)
{
	return test_thread_flag(TIF_USEDFPU);
}

static inline void __own_fpu(void)
{
	enable_fpu();
	set_thread_flag(TIF_USEDFPU);
	KSTK_EUEN(current) |= CSR_EUEN_FPEN;
}

static inline void own_fpu_inatomic(int restore)
{
	if (cpu_has_fpu && !is_fpu_owner()) {
		__own_fpu();
		if (restore)
			_restore_fp(&current->thread.fpu);
	}
}

static inline void own_fpu(int restore)
{
	preempt_disable();
	own_fpu_inatomic(restore);
	preempt_enable();
}

static inline void lose_fpu_inatomic(int save, struct task_struct *tsk)
{
	if (is_fpu_owner()) {
		if (!is_simd_enabled()) {
			if (save)
				_save_fp(&tsk->thread.fpu);
			disable_fpu();
		} else {
			if (save) {
				if (!is_lasx_enabled())
					save_lsx(tsk);
				else
					save_lasx(tsk);
			}
			disable_fpu();
			disable_lsx();
			disable_lasx();
			clear_tsk_thread_flag(tsk, TIF_USEDSIMD);
		}
		clear_tsk_thread_flag(tsk, TIF_USEDFPU);
	}
	KSTK_EUEN(tsk) &= ~(CSR_EUEN_FPEN | CSR_EUEN_LSXEN | CSR_EUEN_LASXEN);
}

static inline void lose_fpu(int save)
{
	preempt_disable();
	lose_fpu_inatomic(save, current);
	preempt_enable();
}
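
/*
 * Illustrative sketch (not part of the original header): a hypothetical
 * caller that wants the current task's live FP/SIMD state written back into
 * current->thread.fpu (for example before inspecting the saved registers)
 * could flush it like this:
 *
 *	lose_fpu(1);	// save state into thread.fpu, then disable the units
 *	// ... read current->thread.fpu ...
 *	own_fpu(1);	// optionally re-enable the FPU and reload the state
 *
 * The argument selects whether the hardware registers are written back (1)
 * or simply discarded (0).
 */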

static inline void init_fpu(void)
{
	unsigned int fcsr = current->thread.fpu.fcsr;

	__own_fpu();
	_init_fpu(fcsr);
	set_used_math();
}

static inline void save_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_save_fp(&tsk->thread.fpu);
}

static inline void restore_fp(struct task_struct *tsk)
{
	if (cpu_has_fpu)
		_restore_fp(&tsk->thread.fpu);
}

static inline void save_fpu_regs(struct task_struct *tsk)
{
	unsigned int euen;

	if (tsk == current) {
		preempt_disable();

		euen = csr_read32(LOONGARCH_CSR_EUEN);

#ifdef CONFIG_CPU_HAS_LASX
		if (euen & CSR_EUEN_LASXEN)
			_save_lasx(&current->thread.fpu);
		else
#endif
#ifdef CONFIG_CPU_HAS_LSX
		if (euen & CSR_EUEN_LSXEN)
			_save_lsx(&current->thread.fpu);
		else
#endif
		if (euen & CSR_EUEN_FPEN)
			_save_fp(&current->thread.fpu);

		preempt_enable();
	}
}
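
/*
 * Note added for illustration: the #ifdef'ed if/else ladder above saves only
 * the widest enabled unit, since the LASX register file extends the LSX one,
 * which in turn extends the scalar FP registers. A hypothetical consumer
 * (e.g. a regset or core-dump path; not defined here) would call
 * save_fpu_regs(task) first so that task->thread.fpu holds up-to-date values
 * before copying them out.
 */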

static inline int is_simd_owner(void)
{
	return test_thread_flag(TIF_USEDSIMD);
}

#ifdef CONFIG_CPU_HAS_LSX

static inline void enable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(CSR_EUEN_LSXEN, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void disable_lsx(void)
{
	if (cpu_has_lsx)
		csr_xchg32(0, CSR_EUEN_LSXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_save_lsx(&t->thread.fpu);
}

static inline void restore_lsx(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx(&t->thread.fpu);
}

static inline void init_lsx_upper(void)
{
	if (cpu_has_lsx)
		_init_lsx_upper();
}

static inline void restore_lsx_upper(struct task_struct *t)
{
	if (cpu_has_lsx)
		_restore_lsx_upper(&t->thread.fpu);
}

#else
static inline void enable_lsx(void) {}
static inline void disable_lsx(void) {}
static inline void save_lsx(struct task_struct *t) {}
static inline void restore_lsx(struct task_struct *t) {}
static inline void init_lsx_upper(void) {}
static inline void restore_lsx_upper(struct task_struct *t) {}
#endif

#ifdef CONFIG_CPU_HAS_LASX

static inline void enable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(CSR_EUEN_LASXEN, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}

static inline void disable_lasx(void)
{
	if (cpu_has_lasx)
		csr_xchg32(0, CSR_EUEN_LASXEN, LOONGARCH_CSR_EUEN);
}

static inline void save_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_save_lasx(&t->thread.fpu);
}

static inline void restore_lasx(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx(&t->thread.fpu);
}

static inline void init_lasx_upper(void)
{
	if (cpu_has_lasx)
		_init_lasx_upper();
}

static inline void restore_lasx_upper(struct task_struct *t)
{
	if (cpu_has_lasx)
		_restore_lasx_upper(&t->thread.fpu);
}

#else
static inline void enable_lasx(void) {}
static inline void disable_lasx(void) {}
static inline void save_lasx(struct task_struct *t) {}
static inline void restore_lasx(struct task_struct *t) {}
static inline void init_lasx_upper(void) {}
static inline void restore_lasx_upper(struct task_struct *t) {}
#endif

static inline int thread_lsx_context_live(void)
{
	if (!cpu_has_lsx)
		return 0;

	return test_thread_flag(TIF_LSX_CTX_LIVE);
}

static inline int thread_lasx_context_live(void)
{
	if (!cpu_has_lasx)
		return 0;

	return test_thread_flag(TIF_LASX_CTX_LIVE);
}
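
/*
 * Illustrative sketch (not part of the original header): code that builds a
 * per-task FP/SIMD context image, such as a hypothetical signal-frame or
 * core-dump writer, might use these helpers to pick the widest context the
 * task has ever used:
 *
 *	if (thread_lasx_context_live())
 *		size = 32 * 4 * sizeof(u64);	// 256-bit vector registers
 *	else if (thread_lsx_context_live())
 *		size = 32 * 2 * sizeof(u64);	// 128-bit vector registers
 *	else
 *		size = 32 * sizeof(u64);	// scalar FP registers
 */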

#endif /* _ASM_FPU_H */