/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#include <linux/stringify.h>
#include <asm/compiler.h>

#define ___ssnop \
	sll	$0, $0, 1

#define ___ehb \
	sll	$0, $0, 3
/*
 * TLB hazards
 */
#if (defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)) && \
	!defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */
#define __mtc0_tlbw_hazard \
	___ehb

#define __tlbw_use_hazard \
	___ehb

#define __tlb_probe_hazard \
	___ehb

#define __irq_enable_hazard \
	___ehb

#define __irq_disable_hazard \
	___ehb

#define __back_to_back_c0_hazard \
	___ehb
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard() \
do { \
	unsigned long tmp; \
 \
	__asm__ __volatile__( \
	"	.set "MIPS_ISA_LEVEL"			\n" \
	"	dla	%0, 1f				\n" \
	"	jr.hb	%0				\n" \
	"	.set	mips0				\n" \
	"1:						\n" \
	: "=r" (tmp)); \
} while (0)
#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.
 */
#define __mtc0_tlbw_hazard \
	___ssnop; \
	___ssnop; \
	___ehb

#define __tlbw_use_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop; \
	___ehb

#define __tlb_probe_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop; \
	___ehb

#define __irq_enable_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop; \
	___ehb

#define __irq_disable_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop; \
	___ehb

#define __back_to_back_c0_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop; \
	___ehb
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard() \
do { \
	unsigned long tmp; \
 \
	__asm__ __volatile__( \
	"	.set	mips64r2			\n" \
	"	dla	%0, 1f				\n" \
	"	jr.hb	%0				\n" \
	"	.set	mips0				\n" \
	"1:						\n" \
	: "=r" (tmp)); \
} while (0)

#define instruction_hazard() \
do { \
	if (cpu_has_mips_r2_r6) \
		__instruction_hazard(); \
} while (0)
#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500) || defined(CONFIG_CPU_XLR)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */
#define __mtc0_tlbw_hazard

#define __tlbw_use_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
#define __mtc0_tlbw_hazard

#define __tlbw_use_hazard

#define __tlb_probe_hazard

#define __irq_enable_hazard

#define __irq_disable_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop

#define __back_to_back_c0_hazard

#define instruction_hazard() do { } while (0)
#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to have optimal code for a range of
 * processors.
 */
#define __mtc0_tlbw_hazard \
	nop; \
	nop

#define __tlbw_use_hazard \
	nop; \
	nop; \
	nop

#define __tlb_probe_hazard \
	nop; \
	nop; \
	nop

#define __irq_enable_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop

#define __irq_disable_hazard \
	nop; \
	nop; \
	nop

#define __back_to_back_c0_hazard \
	___ssnop; \
	___ssnop; \
	___ssnop

#define instruction_hazard() do { } while (0)

#endif
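
/*
 * Illustrative usage sketch (not part of this header): instruction_hazard()
 * belongs between code that writes new instructions and the first execution
 * of those instructions, once the caches have been dealt with.  The buffer
 * name and length below are hypothetical; local_flush_icache_range() is
 * assumed to come from <asm/cacheflush.h>.
 *
 *	memcpy(tramp, tramp_template, tramp_size);
 *	local_flush_icache_range((unsigned long)tramp,
 *				 (unsigned long)tramp + tramp_size);
 *	instruction_hazard();
 */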

/* FPU hazards */

#if defined(CONFIG_CPU_SB1)

#define __enable_fpu_hazard \
	.set	push; \
	.set	mips64; \
	.set	noreorder; \
	___ssnop; \
	bnezl	$0, .+4; \
	___ssnop; \
	.set	pop
#define __disable_fpu_hazard

#elif defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)

#define __enable_fpu_hazard \
	___ehb

#define __disable_fpu_hazard \
	___ehb

#else

#define __enable_fpu_hazard \
	nop; \
	nop; \
	nop; \
	nop

#define __disable_fpu_hazard \
	___ehb

#endif
#ifdef __ASSEMBLY__

#define _ssnop ___ssnop
#define _ehb ___ehb
#define mtc0_tlbw_hazard __mtc0_tlbw_hazard
#define tlbw_use_hazard __tlbw_use_hazard
#define tlb_probe_hazard __tlb_probe_hazard
#define irq_enable_hazard __irq_enable_hazard
#define irq_disable_hazard __irq_disable_hazard
#define back_to_back_c0_hazard __back_to_back_c0_hazard
#define enable_fpu_hazard __enable_fpu_hazard
#define disable_fpu_hazard __disable_fpu_hazard
#else

#define _ssnop() \
do { \
	__asm__ __volatile__( \
	__stringify(___ssnop) \
	); \
} while (0)

#define _ehb() \
do { \
	__asm__ __volatile__( \
	__stringify(___ehb) \
	); \
} while (0)

#define mtc0_tlbw_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__mtc0_tlbw_hazard) \
	); \
} while (0)

#define tlbw_use_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__tlbw_use_hazard) \
	); \
} while (0)
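
/*
 * Illustrative usage sketch (not part of this header): TLB updates typically
 * bracket the CP0 writes and the TLB write with the matching barriers, along
 * the lines of the sequence below (the write_c0_entry*() and
 * tlb_write_indexed() helpers are assumed to come from <asm/mipsregs.h>):
 *
 *	write_c0_entryhi(entryhi);
 *	write_c0_entrylo0(entrylo0);
 *	write_c0_entrylo1(entrylo1);
 *	mtc0_tlbw_hazard();
 *	tlb_write_indexed();
 *	tlbw_use_hazard();
 */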

#define tlb_probe_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__tlb_probe_hazard) \
	); \
} while (0)

#define irq_enable_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__irq_enable_hazard) \
	); \
} while (0)

#define irq_disable_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__irq_disable_hazard) \
	); \
} while (0)

#define back_to_back_c0_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__back_to_back_c0_hazard) \
	); \
} while (0)
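
/*
 * Illustrative usage sketch (not part of this header): a CP0 read that must
 * observe the effect of an immediately preceding CP0 write is separated from
 * it by back_to_back_c0_hazard(), e.g. when reprogramming the count/compare
 * timer (the variables are hypothetical):
 *
 *	write_c0_compare(cnt);
 *	back_to_back_c0_hazard();
 *	missed = (int)(read_c0_count() - cnt) >= 0;
 */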

#define enable_fpu_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__enable_fpu_hazard) \
	); \
} while (0)
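
/*
 * Illustrative usage sketch (not part of this header): enabling the FPU by
 * setting Status.CU1 is followed by enable_fpu_hazard() before the first
 * CP1 instruction is issued (set_c0_status() and ST0_CU1 are assumed to
 * come from <asm/mipsregs.h>):
 *
 *	set_c0_status(ST0_CU1);
 *	enable_fpu_hazard();
 *	... first ctc1/mtc1/FP instruction ...
 */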

#define disable_fpu_hazard() \
do { \
	__asm__ __volatile__( \
	__stringify(__disable_fpu_hazard) \
	); \
} while (0)

/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif /* __ASSEMBLY__ */

#endif /* _ASM_HAZARDS_H */