/*
 * include/asm-xtensa/asmmacro.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Tensilica Inc.
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/core.h>

/*
 * Some little helpers for loops. Use zero-overhead-loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	ar	register initialized with the start address
 *	at	scratch register used by macro
 *	size	size immediate value
 *	incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	ar	register initialized with the start address
 *	as	register initialized with the size
 *	at	scratch register used by macro
 *	incr_log2	increment [in log2]
 *	mask_log2	mask [in log2]
 *	cond		true condition (used in loop'cond')
 *	ncond		false condition (used in b'ncond')
 *
 * __loop as
 *	restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	ar	start address (modified)
 *	as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	incr	increment
 */
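
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): store the word in a4 across a 16-byte region starting at a2,
 * 4 bytes per iteration. The register choices and the size are assumptions.
 *
 *	__loopi	a2, a3, 16, 4		# a3 is the scratch register
 *	s32i	a4, a2, 0		# loop body: store one word
 *	__endla	a2, a3, 4		# a2 += 4, close the loop
 */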

/*
 * loop for given size as immediate
 */

	.macro	__loopi ar, at, size, incr

#if XCHAL_HAVE_LOOPS
		movi	\at, ((\size + \incr - 1) / (\incr))
		loop	\at, 99f
#else
		addi	\at, \ar, \size
98:
#endif

	.endm

/*
 * loop for given size in register
 */

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond

#if XCHAL_HAVE_LOOPS
		.ifgt \incr_log2 - 1
			addi	\at, \as, (1 << \incr_log2) - 1
			.ifnc \mask_log2,
				extui	\at, \at, \incr_log2, \mask_log2
			.else
				srli	\at, \at, \incr_log2
			.endif
		.endif
		loop\cond	\at, 99f
#else
		.ifnc \mask_log2,
			extui	\at, \as, \incr_log2, \mask_log2
		.else
			.ifnc \ncond,
				srli	\at, \as, \incr_log2
			.endif
		.endif
		.ifnc \ncond,
			b\ncond	\at, 99f

		.endif
		.ifnc \mask_log2,
			slli	\at, \at, \incr_log2
			add	\at, \ar, \at
		.else
			add	\at, \ar, \as
		.endif
#endif
98:

	.endm
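
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): store the word in a6 over a byte count held in a4, starting at
 * a2, with a5 as the scratch register; incr_log2 = 2 advances the address
 * by 4 bytes per pass. All register choices are assumptions.
 *
 *	__loops	a2, a4, a5, 2		# trip count is (a4 + 3) >> 2
 *	s32i	a6, a2, 0		# loop body: store one word
 *	__endla	a2, a5, 4		# a2 += 4, close the loop
 */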

/*
 * loop from ar to as
 */

	.macro	__loopt	ar, as, at, incr_log2

#if XCHAL_HAVE_LOOPS
		sub	\at, \as, \ar
		.ifgt	\incr_log2 - 1
			addi	\at, \at, (1 << \incr_log2) - 1
			srli	\at, \at, \incr_log2
		.endif
		loop	\at, 99f
#else
98:
#endif

	.endm
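
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): zero every word between a start address in a2 and an end address
 * in a3, using a4 as the scratch register. Register choices are assumptions.
 *
 *	movi	a5, 0
 *	__loopt	a2, a3, a4, 2		# iterate from a2 up to a3
 *	s32i	a5, a2, 0		# loop body: clear one word
 *	__endla	a2, a3, 4		# a2 += 4, close the loop
 */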

/*
 * restart loop. registers must be unchanged
 */

	.macro	__loop	as

#if XCHAL_HAVE_LOOPS
		loop	\as, 99f
#else
98:
#endif

	.endm

/*
 * end of loop with no increment of the address.
 */

	.macro	__endl	ar, as
#if !XCHAL_HAVE_LOOPS
		bltu	\ar, \as, 98b
#endif
99:
	.endm

/*
 * end of loop with increment of the address.
 */

	.macro	__endla	ar, as, incr
		addi	\ar, \ar, \incr
		__endl	\ar \as
	.endm

/* Load or store instructions that may cause exceptions use the EX macro. */

#define EX(handler)				\
	.section __ex_table, "a";		\
	.word	97f, handler;			\
	.previous				\
97:
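
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): a load that may fault, with a local fixup label as the handler.
 * The register names and the label number are assumptions.
 *
 * EX(10f)	l32i	a6, a3, 0	# label 97 lands on this instruction
 *	...
 * 10:					# fixup code for the faulting load
 */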

/*
 * Extract unaligned word that is split between two registers w0 and w1
 * into r regardless of machine endianness. SAR must be loaded with the
 * starting bit of the word (see __ssa8).
 */

	.macro __src_b	r, w0, w1
#ifdef __XTENSA_EB__
		src	\r, \w0, \w1
#else
		src	\r, \w1, \w0
#endif
	.endm

/*
 * Load 2 lowest address bits of r into SAR for __src_b to extract unaligned
 * word starting at r from two registers loaded from consecutive aligned
 * addresses covering r regardless of machine endianness.
 *
 *      r   0   1   2   3
 * LE SAR   0   8  16  24
 * BE SAR  32  24  16   8
 */

	.macro __ssa8	r
#ifdef __XTENSA_EB__
		ssa8b	\r
#else
		ssa8l	\r
#endif
	.endm
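
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): read the 32-bit word at an unaligned address in a3 by combining
 * the two aligned words that cover it. Register choices are assumptions.
 *
 *	__ssa8	a3			# SAR from the low 2 bits of a3
 *	srli	a4, a3, 2
 *	slli	a4, a4, 2		# a4 = a3 aligned down to a word boundary
 *	l32i	a6, a4, 0		# first covering word
 *	l32i	a7, a4, 4		# second covering word
 *	__src_b	a5, a6, a7		# a5 = word originally at a3
 */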

#define XTENSA_STACK_ALIGNMENT		16

#if defined(__XTENSA_WINDOWED_ABI__)
#define XTENSA_FRAME_SIZE_RESERVE	16
#define XTENSA_SPILL_STACK_RESERVE	32

#define abi_entry(frame_size) \
	entry sp, (XTENSA_FRAME_SIZE_RESERVE + \
		   (((frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		    -XTENSA_STACK_ALIGNMENT))
#define abi_entry_default abi_entry(0)

#define abi_ret(frame_size) retw
#define abi_ret_default retw
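
/*
 * Worked example (added for this edit): with XTENSA_STACK_ALIGNMENT = 16 and
 * XTENSA_FRAME_SIZE_RESERVE = 16, abi_entry(4) rounds the 4-byte frame up to
 * 16 and adds the reserve, expanding to "entry sp, 32"; abi_entry(0) expands
 * to "entry sp, 16".
 */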

#elif defined(__XTENSA_CALL0_ABI__)

#define XTENSA_SPILL_STACK_RESERVE	0

#define abi_entry(frame_size) __abi_entry (frame_size)

	.macro	__abi_entry frame_size
	.ifgt \frame_size
	addi sp, sp, -(((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		       -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_entry_default

#define abi_ret(frame_size) __abi_ret (frame_size)

	.macro	__abi_ret frame_size
	.ifgt \frame_size
	addi sp, sp, (((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		      -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_ret_default ret

#else
#error Unsupported Xtensa ABI
#endif
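
/*
 * Illustrative usage sketch (added for this edit, not part of the original
 * header): a small assembly routine written against the ABI-neutral helpers
 * so it builds for both the windowed and the call0 ABI. The function name is
 * an assumption.
 *
 * ENTRY(example_func)
 *	abi_entry_default		# open a minimal stack frame
 *	movi	a2, 0			# ... function body, return value in a2 ...
 *	abi_ret_default			# return for the configured ABI
 * ENDPROC(example_func)
 */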

#define __XTENSA_HANDLER	.section ".exception.text", "ax"

#endif /* _XTENSA_ASMMACRO_H */