/*
 * (gitweb extraction header, preserved as a comment)
 * kvm tools, setup: Create private directory
 * [linux-2.6/next.git] / arch / powerpc / kernel / fpu.S
 * blob de369558bf0a183c58e94f5f9871fc497f06b971
 */
/*
 *  FPU support code, moved here from head.S so that it can be used
 *  by chips which use other head-whatever.S files.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *    Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 *    Copyright (C) 1996 Paul Mackerras.
 *    Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 *
 *  This program is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU General Public License
 *  as published by the Free Software Foundation; either version
 *  2 of the License, or (at your option) any later version.
 *
 */
#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/ptrace.h>

/*
 * REST_32FPVSRS(n, c, base) / SAVE_32FPVSRS(n, c, base):
 * restore/save the 32 FP registers starting at fr<n> from/to the thread
 * area addressed by <base>.  On CONFIG_VSX kernels the feature-section
 * machinery patches the "b 2f" in at runtime on CPUs with CPU_FTR_VSX,
 * selecting the wider VSR save/restore (which takes <c> as an extra
 * register argument); other CPUs fall through to the plain FPR variant.
 *
 * NOTE(review): the terminating "3:" label of each macro was lost in the
 * damaged extract and is restored here — without it the trailing "\"
 * continuation would swallow the following preprocessor line, and the
 * "b 3f" branches would have no target.
 */
#ifdef CONFIG_VSX
#define REST_32FPVSRS(n,c,base)                                         \
BEGIN_FTR_SECTION                                                       \
	b	2f;                                                     \
END_FTR_SECTION_IFSET(CPU_FTR_VSX);                                     \
	REST_32FPRS(n,base);                                            \
	b	3f;                                                     \
2:	REST_32VSRS(n,c,base);                                          \
3:

#define SAVE_32FPVSRS(n,c,base)                                         \
BEGIN_FTR_SECTION                                                       \
	b	2f;                                                     \
END_FTR_SECTION_IFSET(CPU_FTR_VSX);                                     \
	SAVE_32FPRS(n,base);                                            \
	b	3f;                                                     \
2:	SAVE_32VSRS(n,c,base);                                          \
3:
#else
#define REST_32FPVSRS(n,b,base) REST_32FPRS(n, base)
#define SAVE_32FPVSRS(n,b,base) SAVE_32FPRS(n, base)
#endif
/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 */
_GLOBAL(load_up_fpu)
	/* Turn FP (and VSX where the CPU has it) on in the kernel's MSR
	 * so the FP register file can be touched below. */
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another.  Instead we call giveup_fpu in switch_to.
 */
#ifndef CONFIG_SMP
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f			/* no lazy owner: nothing to save */
	toreal(r4)
	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
	SAVE_32FPVSRS(0, r5, r4)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	/* NOTE(review): label below was missing from the damaged extract;
	 * it is the target of the "beq 1f" above and is restored here. */
1:
#endif /* CONFIG_SMP */
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG_THREAD		/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	/* NOTE(review): r9 (PPC32) / r12 (PPC64) presumably hold the MSR
	 * value restored on return to the task — confirm against the
	 * exception entry path that calls this. */
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)
#endif
	/* Load this task's FPSCR and FP/VSX registers from its thread_struct. */
	lfd	fr0,THREAD_FPSCR(r5)
	MTFSF_L(fr0)
	REST_32FPVSRS(0, r4, r5)
#ifndef CONFIG_SMP
	/* Record current as the new lazy owner of the FPU. */
	subi	r4,r5,THREAD
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr
/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument,
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
_GLOBAL(giveup_fpu)
	/* Turn FP (and VSX where available) on in the kernel's MSR so the
	 * task's FP state can be saved below. */
	mfmsr	r5
	ori	r5,r5,MSR_FP
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r5,r5,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	SYNC_601
	ISYNC_601
	MTMSRD(r5)			/* enable use of fpu now */
	SYNC_601
	isync
	PPC_LCMPI	0,r3,0
	beqlr-				/* if no previous owner, done */
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r5,0
	SAVE_32FPVSRS(0, r4, r3)
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)
	beq	1f			/* task has no pt_regs: skip MSR fixup */
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
#ifdef CONFIG_VSX
BEGIN_FTR_SECTION
	oris	r3,r3,MSR_VSX@h
END_FTR_SECTION_IFSET(CPU_FTR_VSX)
#endif
	andc	r4,r4,r3		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	/* NOTE(review): label below was missing from the damaged extract;
	 * it is the target of the "beq 1f" above and is restored here. */
1:
#ifndef CONFIG_SMP
	/* Clear lazy ownership: no task owns the FPU any more. */
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)
#endif /* CONFIG_SMP */
	blr
/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 */
/*
 * cvt_fd(float *r3, double *r4): widen the single at *r3 to a double
 * at *r4, via the FP load/store conversion done by lfs/stfd.
 */
_GLOBAL(cvt_fd)
	lfs	0,0(r3)			/* lfs converts single -> double on load */
	stfd	0,0(r4)
	blr
/*
 * cvt_df(double *r3, float *r4): narrow the double at *r3 to a single
 * at *r4, via the FP load/store conversion done by lfd/stfs.
 */
_GLOBAL(cvt_df)
	lfd	0,0(r3)
	stfs	0,0(r4)			/* stfs converts double -> single on store */
	blr