include/asm-x86_64/calling.h
/*
 * Some macros to handle stack frames in assembly.
 */

#include <linux/config.h>
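
/*
 * Byte offsets of each saved register within the full stack frame that
 * SAVE_ARGS followed by SAVE_REST builds (the same layout as struct
 * pt_regs).  All offsets are relative to %rsp once both macros have run.
 */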
#define R15 0
#define R14 8
#define R13 16
#define R12 24
#define RBP 32
#define RBX 40
/* arguments: interrupts/non-tracing syscalls only save up to here */
#define R11 48
#define R10 56
#define R9 64
#define R8 72
#define RAX 80
#define RCX 88
#define RDX 96
#define RSI 104
#define RDI 112
#define ORIG_RAX 120 /* + error_code */
/* end of arguments */
/* cpu exception frame or undefined in case of fast syscall. */
#define RIP 128
#define CS 136
#define EFLAGS 144
#define RSP 152
#define SS 160
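
/*
 * ARGOFFSET is the offset of the lowest argument-save slot (r11) within the
 * full frame.  When only SAVE_ARGS has run, the frame is that much shorter
 * than a full pt_regs, so entry code typically addresses the upper slots as
 * e.g. ORIG_RAX-ARGOFFSET(%rsp).  SWFRAME appears to name the end of the
 * software-saved registers, just below the exception frame.
 */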
#define ARGOFFSET R11
#define SWFRAME ORIG_RAX
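
/*
 * SAVE_ARGS stores the registers that the C ABI does not preserve across a
 * call: the six argument registers plus %rax, %r10 and %r11.  The full
 * 9*8 bytes are always reserved; \norcx and \nor891011 merely omit the
 * corresponding stores (e.g. when the syscall instruction has already
 * clobbered %rcx and %r11), and \addskip reserves extra stack space on top.
 */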
.macro SAVE_ARGS addskip=0,norcx=0,nor891011=0
	subq $9*8+\addskip,%rsp
	CFI_ADJUST_CFA_OFFSET 9*8+\addskip
	movq %rdi,8*8(%rsp)
	CFI_REL_OFFSET rdi,8*8
	movq %rsi,7*8(%rsp)
	CFI_REL_OFFSET rsi,7*8
	movq %rdx,6*8(%rsp)
	CFI_REL_OFFSET rdx,6*8
	.if \norcx
	.else
	movq %rcx,5*8(%rsp)
	CFI_REL_OFFSET rcx,5*8
	.endif
	movq %rax,4*8(%rsp)
	CFI_REL_OFFSET rax,4*8
	.if \nor891011
	.else
	movq %r8,3*8(%rsp)
	CFI_REL_OFFSET r8,3*8
	movq %r9,2*8(%rsp)
	CFI_REL_OFFSET r9,2*8
	movq %r10,1*8(%rsp)
	CFI_REL_OFFSET r10,1*8
	movq %r11,(%rsp)
	CFI_REL_OFFSET r11,0*8
	.endif
.endm
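
/*
 * RESTORE_ARGS is the inverse of SAVE_ARGS: it reloads the registers saved
 * there and then pops ARG_SKIP (+\addskip) bytes.  Each skip* flag leaves
 * the corresponding register untouched, e.g. when it already holds a return
 * value that must not be overwritten.
 */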
#define ARG_SKIP 9*8
.macro RESTORE_ARGS skiprax=0,addskip=0,skiprcx=0,skipr11=0,skipr8910=0,skiprdx=0
	.if \skipr11
	.else
	movq (%rsp),%r11
	CFI_RESTORE r11
	.endif
	.if \skipr8910
	.else
	movq 1*8(%rsp),%r10
	CFI_RESTORE r10
	movq 2*8(%rsp),%r9
	CFI_RESTORE r9
	movq 3*8(%rsp),%r8
	CFI_RESTORE r8
	.endif
	.if \skiprax
	.else
	movq 4*8(%rsp),%rax
	CFI_RESTORE rax
	.endif
	.if \skiprcx
	.else
	movq 5*8(%rsp),%rcx
	CFI_RESTORE rcx
	.endif
	.if \skiprdx
	.else
	movq 6*8(%rsp),%rdx
	CFI_RESTORE rdx
	.endif
	movq 7*8(%rsp),%rsi
	CFI_RESTORE rsi
	movq 8*8(%rsp),%rdi
	CFI_RESTORE rdi
	.if ARG_SKIP+\addskip > 0
	addq $ARG_SKIP+\addskip,%rsp
	CFI_ADJUST_CFA_OFFSET -(ARG_SKIP+\addskip)
	.endif
.endm
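
/*
 * LOAD_ARGS refills the argument registers from a frame that already exists
 * at \offset(%rsp), without adjusting the stack.  Note that %rax is loaded
 * from \offset+72, which in the full layout is the ORIG_RAX slot (the saved
 * syscall number), not the saved %rax.  Typically used when a tracer may
 * have modified the saved registers.
 */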
.macro LOAD_ARGS offset
	movq \offset(%rsp),%r11
	movq \offset+8(%rsp),%r10
	movq \offset+16(%rsp),%r9
	movq \offset+24(%rsp),%r8
	movq \offset+40(%rsp),%rcx
	movq \offset+48(%rsp),%rdx
	movq \offset+56(%rsp),%rsi
	movq \offset+64(%rsp),%rdi
	movq \offset+72(%rsp),%rax
.endm
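
/*
 * SAVE_REST saves the remaining callee-saved registers (%rbx, %rbp and
 * %r12-%r15) below the argument area, completing the pt_regs-style frame.
 */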
#define REST_SKIP 6*8
.macro SAVE_REST
	subq $REST_SKIP,%rsp
	CFI_ADJUST_CFA_OFFSET REST_SKIP
	movq %rbx,5*8(%rsp)
	CFI_REL_OFFSET rbx,5*8
	movq %rbp,4*8(%rsp)
	CFI_REL_OFFSET rbp,4*8
	movq %r12,3*8(%rsp)
	CFI_REL_OFFSET r12,3*8
	movq %r13,2*8(%rsp)
	CFI_REL_OFFSET r13,2*8
	movq %r14,1*8(%rsp)
	CFI_REL_OFFSET r14,1*8
	movq %r15,(%rsp)
	CFI_REL_OFFSET r15,0*8
.endm
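
/*
 * RESTORE_REST undoes SAVE_REST: the callee-saved registers are reloaded
 * and REST_SKIP bytes are popped.
 */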
.macro RESTORE_REST
	movq (%rsp),%r15
	CFI_RESTORE r15
	movq 1*8(%rsp),%r14
	CFI_RESTORE r14
	movq 2*8(%rsp),%r13
	CFI_RESTORE r13
	movq 3*8(%rsp),%r12
	CFI_RESTORE r12
	movq 4*8(%rsp),%rbp
	CFI_RESTORE rbp
	movq 5*8(%rsp),%rbx
	CFI_RESTORE rbx
	addq $REST_SKIP,%rsp
	CFI_ADJUST_CFA_OFFSET -(REST_SKIP)
.endm
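
/*
 * SAVE_ALL builds the complete frame: the argument registers first, then
 * the callee-saved set.  A minimal usage sketch with hypothetical labels
 * (not taken from entry.S):
 *
 *	my_handler:
 *		SAVE_ALL
 *		call do_work
 *		RESTORE_ALL
 *		iretq
 */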
.macro SAVE_ALL
	SAVE_ARGS
	SAVE_REST
.endm
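
/*
 * RESTORE_ALL is the inverse of SAVE_ALL; \addskip is passed through to
 * RESTORE_ARGS for any extra stack space the caller reserved.
 */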
.macro RESTORE_ALL addskip=0
	RESTORE_REST
	RESTORE_ARGS 0,\addskip
.endm
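
/*
 * icebp emits the one-byte 0xf1 opcode (ICEBP/INT1), which raises a debug
 * trap; useful as a hard-coded breakpoint in low-level entry code.
 */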
.macro icebp
	.byte 0xf1
.endm