#ifndef __M68KNOMMU_ENTRY_H
#define __M68KNOMMU_ENTRY_H
 * Stack layout in 'ret_from_exception':
 *
 * This allows access to the syscall arguments in registers d1-d5
 *
 *	28(sp) - stack adjustment
 *	2C(sp) - [ sr              ] [ format & vector ]
 *	2E(sp) - [ pc-hiword       ] [ sr              ]
 *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
 *	32(sp) - [ format & vector ] [ pc-loword       ]
 *		  ^^^^^^^^^^^^^^^^^   ^^^^^^^^^^^^^^^^^
 *		        M68K               COLDFIRE
#define ALLOWINT 0xf8ff		/* ANDed into %sr to clear the interrupt priority (IPL) bits */
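/*
 * Purely illustrative sketch, not part of the original header: a C view
 * of the exception frame laid out in the comment above.  The real
 * definition is struct pt_regs in <asm/ptrace.h>; the struct name below
 * is hypothetical and the field order simply mirrors the offsets shown.
 */
struct entry_frame_sketch {
	long		d1, d2, d3, d4, d5;	/*  0(sp) .. 10(sp) */
	long		a0, a1, a2;		/* 14(sp) .. 1C(sp) */
	long		d0;			/* 20(sp) */
	long		orig_d0;		/* 24(sp) */
	long		stkadj;			/* 28(sp) - stack adjustment */
#ifdef CONFIG_COLDFIRE
	unsigned	format : 4;		/* 2C(sp) - frame format */
	unsigned	vector : 12;		/*          vector offset */
	unsigned short	sr;			/* 2E(sp) - status register */
	unsigned long	pc;			/* 30(sp) - program counter */
#else
	unsigned short	sr;			/* 2C(sp) - status register */
	unsigned long	pc;			/* 2E(sp) - program counter */
	unsigned	format : 4;		/* 32(sp) - frame format */
	unsigned	vector : 12;		/*          vector offset */
#endif
};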
/* process bits for task_struct.flags */
#define SWITCH_STACK_SIZE (6*4+4)	/* Includes return address */
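/*
 * Illustrative only: a C view of the frame saved by SAVE_SWITCH_STACK
 * below, sized to match SWITCH_STACK_SIZE (six callee-saved registers
 * plus the return address).  The real definition is struct switch_stack
 * in <asm/ptrace.h>; the name here is hypothetical, and the block is
 * kept behind "#if 0" so the preprocessor strips it before this
 * assembler-only part of the header reaches the assembler.
 */
#if 0
struct switch_stack_sketch {
	unsigned long	d6;
	unsigned long	d7;
	unsigned long	a3;
	unsigned long	a4;
	unsigned long	a5;
	unsigned long	a6;
	unsigned long	retpc;		/* return address: the extra "+4" */
};
#endif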
 * This defines the normal kernel pt-regs layout.
 *
 * Registers a2-a6 and d6-d7 are preserved by C code;
 * the kernel doesn't mess with usp unless it needs to.
#ifdef CONFIG_COLDFIRE
 * This is made a little more tricky on the ColdFire. There are no
 * separate kernel and user stack pointers, so a usp has to be
 * constructed artificially in software. While doing this we need to
 * disable interrupts, otherwise bad things could happen.
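 *
 * The 8-byte stack adjustments in the code below ("remove exception",
 * "room for exception") correspond to the ColdFire hardware exception
 * frame: a 16-bit format/vector word, the 16-bit sr and the 32-bit pc.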
	move	#0x2700,%sr		/* disable intrs */
	btst	#5,%sp@(2)		/* from user? */
	bnes	6f			/* no, skip */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* remove exception */
	movel	sw_ksp,%sp		/* kernel sp */
	subql	#8,%sp			/* room for exception */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_OFF_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_OFF_FORMATVEC) /* copy exception format/vector/sr */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	btst	#5,%sp@(PT_SR)		/* going user? */
	bnes	8f			/* no, skip */
	move	#0x2700,%sr		/* disable intrs */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
	movel	%sp@(PT_OFF_FORMATVEC),%a0@- /* copy exception format/vector/sr */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	addql	#8,%sp			/* remove exception */
	movel	%sp,sw_ksp		/* save ksp */
	subql	#8,sw_usp		/* set exception */
	movel	sw_usp,%sp		/* restore usp */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
 * Quick exception save, use current stack only.
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
 * Software copy of the user and kernel stack pointers... Ugh...
 * Need these to get around ColdFire not having separate kernel
 * and user stack pointers.
#else /* !CONFIG_COLDFIRE */
 * Standard 68k interrupt entry and exit macros.
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-
	moveml	%sp@+,%a0-%a2/%d1-%d5
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
#endif /* !CONFIG_COLDFIRE */
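/*
 * Illustrative usage only, not part of the original header: an
 * exception entry point typically brackets its C handler with the
 * macros above, roughly as sketched below.  The entry label and the
 * handler name are hypothetical; the real users are the platform
 * entry code (entry.S).  The block is kept behind "#if 0" so it is
 * never assembled.
 */
#if 0
.globl sample_trap_entry
sample_trap_entry:
	SAVE_ALL			/* build the register frame on the stack */
	movel	%sp,%sp@-		/* pass the frame pointer as the C argument */
	jsr	sample_trap_handler	/* hypothetical C handler */
	addql	#4,%sp			/* pop the argument */
	RESTORE_ALL			/* unwind the frame and return from exception */
#endif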
#endif /* __ASSEMBLY__ */
#endif /* __M68KNOMMU_ENTRY_H */