include/asm-m68knommu/entry.h
#ifndef __M68KNOMMU_ENTRY_H
#define __M68KNOMMU_ENTRY_H

#include <asm/setup.h>
#include <asm/page.h>

/*
 * Stack layout in 'ret_from_exception':
 *
 * This allows access to the syscall arguments in registers d1-d5
 *
 *	 0(sp) - d1
 *	 4(sp) - d2
 *	 8(sp) - d3
 *	 C(sp) - d4
 *	10(sp) - d5
 *	14(sp) - a0
 *	18(sp) - a1
 *	1C(sp) - a2
 *	20(sp) - d0
 *	24(sp) - orig_d0
 *	28(sp) - stack adjustment
 *	2C(sp) - [ sr              ] [ format & vector ]
 *	2E(sp) - [ pc-hiword       ] [ sr              ]
 *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
 *	32(sp) - [ format & vector ] [ pc-loword       ]
 *	          ^^^^^^^^^^^^^^^^^   ^^^^^^^^^^^^^^^^^
 *	                 M68K              COLDFIRE
 */
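
/*
 * For reference only: the layout above mirrors a pt_regs structure along
 * the lines of the sketch below.  This is an illustrative sketch, not a
 * definition to compile from this header; the real structure lives in
 * <asm/ptrace.h>, and the PT_* offsets used by the macros below are
 * generated from it.
 *
 *	struct pt_regs {
 *		long d1, d2, d3, d4, d5;
 *		long a0, a1, a2;
 *		long d0;
 *		long orig_d0;
 *		long stkadj;			// stack adjustment
 *	#ifdef CONFIG_COLDFIRE
 *		unsigned format : 4;		// frame format specifier
 *		unsigned vector : 12;		// vector offset
 *		unsigned short sr;
 *		unsigned long  pc;
 *	#else
 *		unsigned short sr;
 *		unsigned long  pc;
 *		unsigned format : 4;
 *		unsigned vector : 12;
 *	#endif
 *	};
 */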

#define ALLOWINT 0xf8ff	/* %sr mask that clears the interrupt priority bits */

#ifdef __ASSEMBLY__

/* process bits for task_struct.flags */
PF_TRACESYS_OFF = 3
PF_TRACESYS_BIT = 5
PF_PTRACED_OFF = 3
PF_PTRACED_BIT = 4
PF_DTRACE_OFF = 1
PF_DTRACE_BIT = 5

LENOSYS = 38

#define SWITCH_STACK_SIZE (6*4+4)	/* Includes return address */

/*
 * This defines the normal kernel pt-regs layout.
 *
 * Registers a2-a6 and d6-d7 are preserved by C code;
 * the kernel doesn't mess with usp unless it needs to.
 */
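
/*
 * A companion sketch (again illustrative only; the real declaration is in
 * <asm/ptrace.h>): SAVE_SWITCH_STACK/RESTORE_SWITCH_STACK below spill the
 * remaining C-preserved registers (a3-a6 and d6-d7; a2 already sits in
 * pt_regs) plus the return address, which is where SWITCH_STACK_SIZE
 * (6*4+4) comes from:
 *
 *	struct switch_stack {
 *		unsigned long d6, d7;
 *		unsigned long a3, a4, a5, a6;
 *		unsigned long retpc;
 *	};
 */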

#ifdef CONFIG_COLDFIRE
/*
 * This is made a little more tricky on the ColdFire. There are no
 * separate kernel and user stack pointers, so we need to artificially
 * construct a usp in software. While doing this we need to disable
 * interrupts, otherwise bad things could happen.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	btst	#5,%sp@(2)		/* from user? */
	bnes	6f			/* no, skip */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* remove exception */
	movel	sw_ksp,%sp		/* kernel sp */
	subql	#8,%sp			/* room for exception */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_FORMATVEC) /* copy exception format/vector/sr */
	bra	7f
6:
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
7:
.endm

.macro RESTORE_ALL
	btst	#5,%sp@(PT_SR)		/* going user? */
	bnes	8f			/* no, skip */
	move	#0x2700,%sr		/* disable intrs */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_PC),%a0@-	/* copy exception program counter */
	movel	%sp@(PT_FORMATVEC),%a0@- /* copy exception format/vector/sr */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	addql	#8,%sp			/* remove exception */
	movel	%sp,sw_ksp		/* save ksp */
	subql	#8,sw_usp		/* set exception */
	movel	sw_usp,%sp		/* restore usp */
	rte
8:
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

/*
 * Quick exception save, use current stack only.
 */
.macro SAVE_LOCAL
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
.endm

.macro RESTORE_LOCAL
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
.endm

/*
 * Software copy of the user and kernel stack pointers... Ugh...
 * Need these to get around ColdFire not having separate kernel
 * and user stack pointers.
 */
.globl sw_usp
.globl sw_ksp
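
/*
 * Illustrative only, not defined by this header: C code that needs the
 * saved user stack pointer while the ColdFire is running in the kernel
 * can refer to the software copy directly.  A hypothetical helper (the
 * name is an assumption for illustration) might look like:
 *
 *	extern unsigned long sw_usp;
 *
 *	static inline unsigned long read_saved_usp(void)
 *	{
 *		return sw_usp;	// valid only after SAVE_ALL from user mode
 *	}
 */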

#else /* !CONFIG_COLDFIRE */

/*
 * Standard 68k interrupt entry and exit macros.
 */
.macro SAVE_ALL
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-
.endm

.macro RESTORE_ALL
	moveml	%sp@+,%a0-%a2/%d1-%d5
	movel	%sp@+,%d0
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
.endm
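
/*
 * Usage sketch, not part of this header: the platform entry.S context
 * switch wraps its register spill/reload in these macros, very roughly
 * along the lines below.  The symbol and offset names (resume,
 * TASK_THREAD, THREAD_KSP) are assumptions for illustration only:
 *
 *	ENTRY(resume)
 *		SAVE_SWITCH_STACK
 *		movel	%sp,%a0@(TASK_THREAD+THREAD_KSP)  | save old kernel sp
 *		movel	%a1@(TASK_THREAD+THREAD_KSP),%sp  | switch to new kernel sp
 *		RESTORE_SWITCH_STACK
 *		rts
 */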

#endif /* !CONFIG_COLDFIRE */
#endif /* __ASSEMBLY__ */
#endif /* __M68KNOMMU_ENTRY_H */