/*

 x86 function call convention, 64-bit:
 -------------------------------------
 arguments            | callee-saved       | extra caller-saved | return
 [callee-clobbered]   |                    | [callee-clobbered] |
 ---------------------------------------------------------------------------
 rdi rsi rdx rcx r8-9 | rbx rbp [*] r12-15 | r10-11             | rax, rdx [**]

 ( rsp is obviously invariant across normal function calls. (gcc can 'merge'
   functions when it sees tail-call optimization possibilities) rflags is
   clobbered. Leftover arguments are passed over the stack frame.)

 [*]  In the frame-pointers case rbp is fixed to the stack frame.

 [**] for struct return values wider than 64 bits the return convention is a
      bit more complex: up to 128 bits width we return small structures
      straight in rax, rdx. For structures larger than that (3 words or
      larger) the caller puts a pointer to an on-stack return struct
      [allocated in the caller's stack frame] into the first argument - i.e.
      into rdi. All other arguments shift up by one in this case.
      Fortunately this case is rare in the kernel.
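
      ( Illustrative example of the two cases above - a sketch, not part of
        the original description:

            struct small { long a, b; };      up to 128 bits: returned in
                                              rax:rdx
            struct big   { long a, b, c; };   larger: the caller passes a
                                              pointer to the result slot in
                                              rdi and the real arguments
                                              shift up by one register )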

 For 32-bit we have the following conventions - kernel is built with
 -mregparm=3 and -freg-struct-return:

 x86 function calling convention, 32-bit:
 ----------------------------------------
 arguments          | callee-saved        | extra caller-saved | return
 [callee-clobbered] |                     | [callee-clobbered] |
 -------------------------------------------------------------------------
 eax edx ecx        | ebx edi esi ebp [*] | <none>             | eax, edx [**]

 ( here too esp is obviously invariant across normal function calls. eflags
   is clobbered. Leftover arguments are passed over the stack frame. )

 [*]  In the frame-pointers case ebp is fixed to the stack frame.

 [**] We build with -freg-struct-return, which on 32-bit means similar
      semantics as on 64-bit: edx can be used for a second return value
      (i.e. covering integer and structure sizes up to 64 bits) - after that
      it gets more complex and more expensive: 3-word or larger struct returns
      get done in the caller's frame and the pointer to the return struct goes
      into regparm0, i.e. eax - the other arguments shift up and the
      function's register parameters degenerate to regparm=2 in essence.

*/
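
/*
 * Illustrative example of the 32-bit struct-return case described above -
 * a sketch, not part of the original description: with -mregparm=3 a call
 * like
 *
 *	struct big { int a, b, c; };
 *	struct big f(int x, int y);
 *
 * is effectively compiled as f(&result, x, y) with &result in eax, x in edx
 * and y in ecx - i.e. only two register slots remain for real arguments.
 */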

/*
 * 64-bit system call stack frame layout defines and helpers,
 * for assembly code:
 */

#define R15		  0
#define R14		  8
#define R13		 16
#define R12		 24
#define RBP		 32
#define RBX		 40

/* arguments: interrupts/non tracing syscalls only save up to here: */
#define R11		 48
#define R10		 56
#define R9		 64
#define R8		 72
#define RAX		 80
#define RCX		 88
#define RDX		 96
#define RSI		104
#define RDI		112
#define ORIG_RAX	120	/* + error_code */
/* end of arguments */

/* cpu exception frame or undefined in case of fast syscall: */
#define RIP		128
#define CS		136
#define EFLAGS		144
#define RSP		152
#define SS		160

#define ARGOFFSET	R11
#define SWFRAME		ORIG_RAX
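
/*
 * Note (added for clarity): the offsets above are byte offsets from the
 * bottom of the complete saved-register frame, i.e. from %rsp once both
 * SAVE_ARGS and SAVE_REST (below) have run.  SAVE_REST stores r15..rbx at
 * 0..40 and SAVE_ARGS stores r11..rdi at 48..112, so the two macros
 * together cover exactly ORIG_RAX (15*8 = 120) bytes below the orig_rax
 * slot.
 */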

	.macro SAVE_ARGS addskip=0, norcx=0, nor891011=0
	subq	$9*8+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	9*8+\addskip
	movq	%rdi, 8*8(%rsp)
	CFI_REL_OFFSET	rdi, 8*8
	movq	%rsi, 7*8(%rsp)
	CFI_REL_OFFSET	rsi, 7*8
	movq	%rdx, 6*8(%rsp)
	CFI_REL_OFFSET	rdx, 6*8
	.if \norcx
	.else
	movq	%rcx, 5*8(%rsp)
	CFI_REL_OFFSET	rcx, 5*8
	.endif
	movq	%rax, 4*8(%rsp)
	CFI_REL_OFFSET	rax, 4*8
	.if \nor891011
	.else
	movq	%r8, 3*8(%rsp)
	CFI_REL_OFFSET	r8, 3*8
	movq	%r9, 2*8(%rsp)
	CFI_REL_OFFSET	r9, 2*8
	movq	%r10, 1*8(%rsp)
	CFI_REL_OFFSET	r10, 1*8
	movq	%r11, (%rsp)
	CFI_REL_OFFSET	r11, 0*8
	.endif
	.endm
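
/*
 * Usage sketch (illustrative only): with SAVE_ARGS alone only the argument
 * part of the frame exists, so the R11..ORIG_RAX offsets above have to be
 * rebased by ARGOFFSET, for example:
 *
 *	SAVE_ARGS 8			# extra 8 bytes leave room for orig_rax
 *	movq %rax, ORIG_RAX-ARGOFFSET(%rsp)
 *
 * Once SAVE_REST has run as well, the offsets apply to %rsp directly.
 */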

#define ARG_SKIP	9*8

	.macro RESTORE_ARGS skiprax=0, addskip=0, skiprcx=0, skipr11=0, \
			    skipr8910=0, skiprdx=0
	.if \skipr11
	.else
	movq	(%rsp), %r11
	CFI_RESTORE r11
	.endif
	.if \skipr8910
	.else
	movq	1*8(%rsp), %r10
	CFI_RESTORE r10
	movq	2*8(%rsp), %r9
	CFI_RESTORE r9
	movq	3*8(%rsp), %r8
	CFI_RESTORE r8
	.endif
	.if \skiprax
	.else
	movq	4*8(%rsp), %rax
	CFI_RESTORE rax
	.endif
	.if \skiprcx
	.else
	movq	5*8(%rsp), %rcx
	CFI_RESTORE rcx
	.endif
	.if \skiprdx
	.else
	movq	6*8(%rsp), %rdx
	CFI_RESTORE rdx
	.endif
	movq	7*8(%rsp), %rsi
	CFI_RESTORE rsi
	movq	8*8(%rsp), %rdi
	CFI_RESTORE rdi
	.if ARG_SKIP+\addskip > 0
	addq	$ARG_SKIP+\addskip, %rsp
	CFI_ADJUST_CFA_OFFSET	-(ARG_SKIP+\addskip)
	.endif
	.endm
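
/*
 * Note (added for clarity): the skip* parameters let an exit path leave
 * individual registers unrestored, e.g. when the live value must be kept
 * or the register is reloaded from another slot.  Passing addskip=-ARG_SKIP
 * makes ARG_SKIP+\addskip zero, so the final stack adjustment is skipped
 * entirely.
 */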

	.macro LOAD_ARGS offset, skiprax=0
	movq	\offset(%rsp), %r11
	movq	\offset+8(%rsp), %r10
	movq	\offset+16(%rsp), %r9
	movq	\offset+24(%rsp), %r8
	movq	\offset+40(%rsp), %rcx
	movq	\offset+48(%rsp), %rdx
	movq	\offset+56(%rsp), %rsi
	movq	\offset+64(%rsp), %rdi
	.if \skiprax
	.else
	movq	\offset+72(%rsp), %rax
	.endif
	.endm
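
/*
 * Note (added for clarity): relative to \offset these loads follow the
 * R11..RDI layout above but skip the RAX slot at +32; %rax is instead
 * reloaded from +72, the ORIG_RAX slot (the saved syscall number /
 * error_code).
 */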

#define REST_SKIP	6*8

	.macro SAVE_REST
	subq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	REST_SKIP
	movq	%rbx, 5*8(%rsp)
	CFI_REL_OFFSET	rbx, 5*8
	movq	%rbp, 4*8(%rsp)
	CFI_REL_OFFSET	rbp, 4*8
	movq	%r12, 3*8(%rsp)
	CFI_REL_OFFSET	r12, 3*8
	movq	%r13, 2*8(%rsp)
	CFI_REL_OFFSET	r13, 2*8
	movq	%r14, 1*8(%rsp)
	CFI_REL_OFFSET	r14, 1*8
	movq	%r15, (%rsp)
	CFI_REL_OFFSET	r15, 0*8
	.endm

	.macro RESTORE_REST
	movq	(%rsp), %r15
	CFI_RESTORE r15
	movq	1*8(%rsp), %r14
	CFI_RESTORE r14
	movq	2*8(%rsp), %r13
	CFI_RESTORE r13
	movq	3*8(%rsp), %r12
	CFI_RESTORE r12
	movq	4*8(%rsp), %rbp
	CFI_RESTORE rbp
	movq	5*8(%rsp), %rbx
	CFI_RESTORE rbx
	addq	$REST_SKIP, %rsp
	CFI_ADJUST_CFA_OFFSET	-(REST_SKIP)
	.endm

	.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
	.endm

	.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0, \addskip
	.endm
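
/*
 * Note (added for clarity): SAVE_ALL builds the full 15*8-byte register
 * frame (the argument/caller-clobbered registers plus the callee-saved
 * set), so afterwards the R15..RDI offsets above apply to %rsp without any
 * ARGOFFSET adjustment; RESTORE_ALL tears the same frame down in reverse
 * order.
 */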

	.macro icebp
	.byte 0xf1
	.endm
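
/*
 * Note (added for clarity): 0xf1 is the undocumented ICEBP/INT1 opcode - a
 * one-byte instruction that raises a #DB debug trap, much as the one-byte
 * int3 (0xcc) raises #BP.
 */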