#ifndef __M68KNOMMU_ENTRY_H
#define __M68KNOMMU_ENTRY_H

/*
 * Stack layout in 'ret_from_exception':
 *
 * This allows access to the syscall arguments in registers d1-d5
 *
 *	 0(sp) - d1
 *	 4(sp) - d2
 *	 8(sp) - d3
 *	 C(sp) - d4
 *	10(sp) - d5
 *	14(sp) - a0
 *	18(sp) - a1
 *	1C(sp) - a2
 *	20(sp) - d0
 *	24(sp) - orig_d0
 *	28(sp) - stack adjustment
 *	2C(sp) - [ sr              ] [ format & vector ]
 *	2E(sp) - [ pc-hiword       ] [ sr              ]
 *	30(sp) - [ pc-loword       ] [ pc-hiword       ]
 *	32(sp) - [ format & vector ] [ pc-loword       ]
 *		  ^^^^^^^^^^^^^^^^^   ^^^^^^^^^^^^^^^^^
 *		        M68K              COLDFIRE
 */
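
/*
 * For reference, a sketch of the C view of this frame, in the same order as
 * the offsets above. This is an illustration only; the authoritative layout
 * is struct pt_regs in <asm/ptrace.h>:
 *
 *	struct pt_regs {
 *		long	d1, d2, d3, d4, d5;
 *		long	a0, a1, a2;
 *		long	d0;
 *		long	orig_d0;
 *		long	stkadj;
 *		...	sr, pc and format/vector follow, in the M68K or
 *			COLDFIRE order shown above
 *	};
 */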

#define ALLOWINT (~0x700)
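
/*
 * ALLOWINT clears the interrupt priority mask (bits 8-10) of %sr when it is
 * ANDed in. A usage sketch (assumption: this mirrors what the arch's
 * interrupt-enable helper does):
 *
 *	__asm__ __volatile__ ("andiw %0,%%sr" : : "i" (ALLOWINT) : "memory");
 */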

#ifdef __ASSEMBLY__

#define SWITCH_STACK_SIZE (6*4+4)	/* Includes return address */
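
/*
 * SWITCH_STACK_SIZE is six 32-bit registers (a3-a6/d6-d7) plus the return
 * address, 28 bytes in total. A sketch of the matching C view (illustration
 * only; the authoritative definition is struct switch_stack in
 * <asm/ptrace.h>):
 *
 *	struct switch_stack {
 *		unsigned long	d6, d7;
 *		unsigned long	a3, a4, a5, a6;
 *		unsigned long	retpc;
 *	};
 */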

/*
 * This defines the normal kernel pt-regs layout.
 *
 * Registers a2-a6 and d6-d7 are preserved by C code;
 * the kernel doesn't mess with usp unless it needs to.
 */

#ifdef CONFIG_COLDFIRE
/*
 * This is made a little more tricky on the ColdFire. There are no
 * separate kernel and user stack pointers, so a usp has to be
 * constructed artificially in software. When doing this we need to
 * disable interrupts, otherwise bad things could happen.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs */
	btst	#5,%sp@(2)		/* from user? */
	bnes	6f			/* no, skip */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* remove exception */
	movel	sw_ksp,%sp		/* kernel sp */
	subql	#8,%sp			/* room for exception */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_OFF_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_OFF_FORMATVEC) /* copy exception format/vector/sr */
	bra	7f
6:
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
7:
.endm

.macro RESTORE_ALL
	btst	#5,%sp@(PT_SR)		/* going user? */
	bnes	8f			/* no, skip */
	move	#0x2700,%sr		/* disable intrs */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_OFF_PC),%a0@-	/* copy exception program counter */
	movel	%sp@(PT_OFF_FORMATVEC),%a0@- /* copy exception format/vector/sr */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0		/* restore d0 */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	addql	#8,%sp			/* remove exception */
	movel	%sp,sw_ksp		/* save ksp */
	subql	#8,sw_usp		/* set exception */
	movel	sw_usp,%sp		/* restore usp */
	rte
8:
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0		/* restore d0 */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

/*
 * Quick exception save, use current stack only.
 */
.macro SAVE_LOCAL
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
.endm

.macro RESTORE_LOCAL
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* space for 8 regs */
	movel	%sp@+,%d0		/* restore d0 */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
.endm
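
/*
 * Usage sketch (illustration only, not part of this header): entry code
 * typically brackets a call into C that may need the callee-saved registers
 * (for example scheduling or syscall tracing) with these macros:
 *
 *	SAVE_SWITCH_STACK
 *	jbsr	schedule
 *	RESTORE_SWITCH_STACK
 */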

/*
 * Software copy of the user and kernel stack pointers... Ugh...
 * Need these to get around ColdFire not having separate kernel
 * and user stack pointers.
 */
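
/*
 * A minimal sketch (assumption, not part of this header) of how the sw_usp
 * and sw_ksp variables used above are expected to be provided by the
 * platform startup code:
 *
 *	.globl	sw_usp
 *	sw_usp:	.long	0		software copy of the user stack pointer
 *	.globl	sw_ksp
 *	sw_ksp:	.long	0		software copy of the kernel stack pointer
 */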

#else /* !CONFIG_COLDFIRE */

/*
 * Standard 68k interrupt entry and exit macros.
 */
.macro SAVE_ALL
	clrl	%sp@-			/* stkadj */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-
.endm

.macro RESTORE_ALL
	moveml	%sp@+,%a0-%a2/%d1-%d5
	movel	%sp@+,%d0		/* restore d0 */
	addql	#4,%sp			/* orig d0 */
	addl	%sp@+,%sp		/* stkadj */
	rte
.endm

.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm

.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
.endm

#endif /* !CONFIG_COLDFIRE */
#endif /* __ASSEMBLY__ */
#endif /* __M68KNOMMU_ENTRY_H */