2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
6 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
7 * Copyright (C) 1996 by Paul M. Antoine
8 * Copyright (C) 1999 Silicon Graphics
9 * Copyright (C) 2000 MIPS Technologies, Inc.
11 #ifndef _ASM_IRQFLAGS_H
12 #define _ASM_IRQFLAGS_H
16 #include <linux/compiler.h>
17 #include <asm/hazards.h>
/*
 * NOTE(review): this chunk is heavily elided -- most original lines are
 * missing, so only fragments of each definition are visible from here on.
 *
 * Fragment of an __asm__ block defining the "arch_local_irq_disable"
 * assembler macro.  On MIPS R2 without SMTC, interrupts can presumably be
 * disabled with a single atomic instruction (likely "di") -- TODO confirm
 * against the full file; only the macro header and the hazard barrier
 * that follows the (elided) disable sequence are visible.
 */
19 #if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_MIPS_MT_SMTC)
22 " .macro arch_local_irq_disable\n"
26 " irq_disable_hazard \n"
/*
 * Disable local interrupts (R2 fast path).  The body is elided here;
 * presumably an asm volatile that expands the "arch_local_irq_disable"
 * assembler macro defined above -- TODO confirm against full file.
 */
30 static inline void arch_local_irq_disable(void)
33 "arch_local_irq_disable"
/*
 * Fragment of the "arch_local_irq_save" assembler macro: it disables
 * interrupts and returns the previous state in \result.  The visible
 * "andi \result, 1" masks the result down to bit 0 (the Status.IE bit);
 * the instruction that captures the old Status value is elided.
 */
41 " .macro arch_local_irq_save result \n"
46 " andi \\result, 1 \n"
47 " irq_disable_hazard \n"
/*
 * Disable local interrupts and return the previous IRQ-enable state
 * (as produced by the arch_local_irq_save assembler macro above).
 * Body elided: only the asm volatile invoking the macro is visible.
 */
51 static inline unsigned long arch_local_irq_save(void)
54 asm volatile("arch_local_irq_save\t%0"
/*
 * Fragment of the "arch_local_irq_restore" assembler macro: restores the
 * IRQ-enable state previously returned by arch_local_irq_save.  Two
 * variants are visible:
 *  - CONFIG_IRQ_CPU: a branch-based slow path ("beqz \flags, 1f") that
 *    avoids a long-standing race condition (per the original comment);
 *  - otherwise: a fast path that uses R2's "ins" to insert \flags into
 *    bit 0 (Status.IE) of a (elided) copy of the Status register in $1.
 */
63 " .macro arch_local_irq_restore flags \n"
67 #if defined(CONFIG_IRQ_CPU)
69 * Slow, but doesn't suffer from a relatively unlikely race
70 * condition we're having since days 1.
72 " beqz \\flags, 1f \n"
78 * Fast, dangerous. Life is fun, life is good.
81 " ins $1, \\flags, 0, 1 \n"
84 " irq_disable_hazard \n"
/*
 * Restore the IRQ state saved by arch_local_irq_save().  Body elided:
 * only the asm string invoking the assembler macro is visible.
 */
88 static inline void arch_local_irq_restore(unsigned long flags
)
93 "arch_local_irq_restore\t%0"
/*
 * Double-underscore variant of arch_local_irq_restore(); presumably used
 * internally where the tracing/annotation of the public version must be
 * bypassed -- TODO confirm against full file.  A local __tmp1 is declared
 * but its use is elided (likely a dummy output operand for the asm).
 */
99 static inline void __arch_local_irq_restore(unsigned long flags
)
101 unsigned long __tmp1
;
103 __asm__
__volatile__(
104 "arch_local_irq_restore\t%0"
/*
 * Non-R2 / SMTC path: the irq disable/save/restore primitives cannot be
 * simple inline asm here, so they are implemented out of line in
 * mips-atomic.c (they need preempt_disable()/preempt_enable(), per the
 * original comment).  Only the prototypes are declared in this branch.
 */
110 /* Functions that require preempt_{dis,en}able() are in mips-atomic.c */
111 void arch_local_irq_disable(void);
112 unsigned long arch_local_irq_save(void);
113 void arch_local_irq_restore(unsigned long flags
);
114 void __arch_local_irq_restore(unsigned long flags
);
115 #endif /* if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_MIPS_MT_SMTC) */
/*
 * Fragment of the "arch_local_irq_enable" assembler macro.  Per-config
 * variants are visible:
 *  - SMTC: reads TCStatus (CP0 reg 2, sel 1) into $1 so the per-thread
 *    IXMT interrupt-inhibit bit can be cleared (clearing itself elided);
 *  - R2: variant elided (presumably a single "ei" -- TODO confirm).
 * The irq_enable_hazard barrier closes the (elided) enable sequence.
 */
119 " .macro arch_local_irq_enable \n"
123 #ifdef CONFIG_MIPS_MT_SMTC
124 " mfc0 $1, $2, 1 # SMTC - clear TCStatus.IXMT \n"
128 #elif defined(CONFIG_CPU_MIPSR2)
136 " irq_enable_hazard \n"
/* Out-of-line helper used by the SMTC enable path below to replay IPIs. */
140 extern void smtc_ipi_replay(void);
/*
 * Enable local interrupts.  On SMTC this first does a software replay of
 * queued IPIs (via smtc_ipi_replay(), per the original comment -- the
 * call itself is elided), accepting call overhead on every
 * local_irq_enable().  The asm then expands the arch_local_irq_enable
 * assembler macro defined above.
 */
142 static inline void arch_local_irq_enable(void)
144 #ifdef CONFIG_MIPS_MT_SMTC
146 * SMTC kernel needs to do a software replay of queued
147 * IPIs, at the cost of call overhead on each local_irq_enable()
151 __asm__
__volatile__(
152 "arch_local_irq_enable"
/*
 * Fragment of the "arch_local_save_flags" assembler macro: reads the
 * current interrupt state into \flags without changing it.
 *  - SMTC: reads per-thread TCStatus (CP0 reg 2, sel 1);
 *  - otherwise: reads the CP0 Status register ($12).
 */
160 " .macro arch_local_save_flags flags \n"
163 #ifdef CONFIG_MIPS_MT_SMTC
164 " mfc0 \\flags, $2, 1 \n"
166 " mfc0 \\flags, $12 \n"
/*
 * Return the current interrupt state (CP0 Status, or TCStatus on SMTC)
 * without modifying it.  Declaration of the local "flags" variable and
 * the return statement are elided.
 */
171 static inline unsigned long arch_local_save_flags(void)
174 asm volatile("arch_local_save_flags %0" : "=r" (flags
));
/*
 * Test whether a flags value from arch_local_save_flags() means
 * "interrupts disabled".  On SMTC the per-thread TCStatus.IXMT bit
 * (0x400) inhibits interrupts, so a set bit means disabled; the non-SMTC
 * branch (presumably testing Status.IE, inverted) is elided -- TODO
 * confirm against full file.
 */
179 static inline int arch_irqs_disabled_flags(unsigned long flags
)
181 #ifdef CONFIG_MIPS_MT_SMTC
183 * SMTC model uses TCStatus.IXMT to disable interrupts for a thread/CPU
185 return flags
& 0x400;
191 #endif /* #ifndef __ASSEMBLY__ */
/*
 * Assembly-side hooks for lockdep IRQ-state tracing.  When
 * CONFIG_TRACE_IRQFLAGS is on, low-level entry code calls
 * trace_hardirqs_on()/trace_hardirqs_off() around interrupt state
 * changes; otherwise the macros expand to nothing.
 */
194 * Do the CPU's IRQ-state tracing from assembly code.
196 #ifdef CONFIG_TRACE_IRQFLAGS
197 /* Reload some registers clobbered by trace_hardirqs_on */
/*
 * First variant reloads $4-$11 (eight argument registers -- presumably
 * the 64-bit ABI case; the #if condition is elided, TODO confirm).
 */
199 # define TRACE_IRQS_RELOAD_REGS \
200 LONG_L $11, PT_R11(sp); \
201 LONG_L $10, PT_R10(sp); \
202 LONG_L $9, PT_R9(sp); \
203 LONG_L $8, PT_R8(sp); \
204 LONG_L $7, PT_R7(sp); \
205 LONG_L $6, PT_R6(sp); \
206 LONG_L $5, PT_R5(sp); \
207 LONG_L $4, PT_R4(sp); \
/* Second variant (elided #else arm): only $4-$7 are argument registers. */
210 # define TRACE_IRQS_RELOAD_REGS \
211 LONG_L $7, PT_R7(sp); \
212 LONG_L $6, PT_R6(sp); \
213 LONG_L $5, PT_R5(sp); \
214 LONG_L $4, PT_R4(sp); \
/*
 * TRACE_IRQS_ON disables interrupts (CLI) before calling
 * trace_hardirqs_on() so the tracer runs at kernel level; the _RELOAD
 * variant additionally restores the argument registers the call
 * clobbered.
 */
217 # define TRACE_IRQS_ON \
218 CLI; /* make sure trace_hardirqs_on() is called in kernel level */ \
219 jal trace_hardirqs_on
220 # define TRACE_IRQS_ON_RELOAD \
222 TRACE_IRQS_RELOAD_REGS
223 # define TRACE_IRQS_OFF \
224 jal trace_hardirqs_off
/* !CONFIG_TRACE_IRQFLAGS (elided #else): tracing hooks compile away. */
226 # define TRACE_IRQS_ON
227 # define TRACE_IRQS_ON_RELOAD
228 # define TRACE_IRQS_OFF
231 #endif /* _ASM_IRQFLAGS_H */