/*
 *  include/asm-s390/irqflags.h
 *
 *    Copyright (C) IBM Corp. 2006
 *    Author(s): Heiko Carstens <heiko.carstens@de.ibm.com>
 */

#ifndef __ASM_IRQFLAGS_H
#define __ASM_IRQFLAGS_H

#ifdef __KERNEL__

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
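
/*
 * Two variants of the primitives follow.  With gcc newer than 3.2 the
 * "Q" constraint lets the compiler address __mask directly as a
 * base-plus-short-displacement memory operand; the fallback variant
 * further below instead passes the address of __mask in an address
 * register ("a" constraint) and dereferences it explicitly as 0(%reg).
 */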
/* store then or system mask. */
#define __raw_local_irq_stosm(__or) \
({ \
	unsigned long __mask; \
	asm volatile( \
		" stosm %0,%1" \
		: "=Q" (__mask) : "i" (__or) : "memory"); \
	__mask; \
})
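
/*
 * stosm stores the current PSW system-mask byte at the __mask operand
 * and then ORs the immediate into the system mask, so the macro
 * returns the previous interrupt state while atomically enabling the
 * bits named by __or.
 */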
/* store then and system mask. */
#define __raw_local_irq_stnsm(__and) \
({ \
	unsigned long __mask; \
	asm volatile( \
		" stnsm %0,%1" \
		: "=Q" (__mask) : "i" (__and) : "memory"); \
	__mask; \
})
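
/*
 * stnsm is the disabling counterpart: it stores the old system mask
 * and then ANDs the immediate into it, clearing whichever mask bits
 * are zero in __and.
 */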
/* set system mask. */
#define __raw_local_irq_ssm(__mask) \
({ \
	asm volatile("ssm %0" : : "Q" (__mask) : "memory"); \
})
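
/*
 * ssm reloads the PSW system mask from memory; it is used to restore
 * a mask previously saved by stosm/stnsm.
 */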

#else /* __GNUC__ */

/* store then or system mask. */
#define __raw_local_irq_stosm(__or) \
({ \
	unsigned long __mask; \
	asm volatile( \
		" stosm 0(%1),%2" \
		: "=m" (__mask) \
		: "a" (&__mask), "i" (__or) : "memory"); \
	__mask; \
})

/* store then and system mask. */
#define __raw_local_irq_stnsm(__and) \
({ \
	unsigned long __mask; \
	asm volatile( \
		" stnsm 0(%1),%2" \
		: "=m" (__mask) \
		: "a" (&__mask), "i" (__and) : "memory"); \
	__mask; \
})

/* set system mask. */
#define __raw_local_irq_ssm(__mask) \
({ \
	asm volatile( \
		" ssm 0(%0)" \
		: : "a" (&__mask), "m" (__mask) : "memory"); \
})

#endif /* __GNUC__ */

/* interrupt control.. */
static inline unsigned long raw_local_irq_enable(void)
{
	return __raw_local_irq_stosm(0x03);
}

static inline unsigned long raw_local_irq_disable(void)
{
	return __raw_local_irq_stnsm(0xfc);
}
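
/*
 * 0x03 sets and 0xfc clears the external- and I/O-interruption mask
 * bits of the PSW system mask; both helpers hand back the previous
 * mask so it can later be restored.
 */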

#define raw_local_save_flags(x) \
do { \
	typecheck(unsigned long, x); \
	(x) = __raw_local_irq_stosm(0x00); \
} while (0)
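
/*
 * With an OR mask of 0x00, stosm only stores the current system mask,
 * so the flags are sampled without changing the interrupt state.
 */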

static inline void raw_local_irq_restore(unsigned long flags)
{
	__raw_local_irq_ssm(flags);
}

static inline int raw_irqs_disabled_flags(unsigned long flags)
{
	return !(flags & (3UL << (BITS_PER_LONG - 8)));
}
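
/*
 * stosm/stnsm store a single byte at the start of the flags word, so
 * on this big-endian machine the external and I/O mask bits end up as
 * the two low-order bits of the most significant byte, i.e. at bit
 * positions BITS_PER_LONG-8 and BITS_PER_LONG-7; interrupts count as
 * disabled when both are clear.
 */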

/* For spinlocks etc */
#define raw_local_irq_save(x)	((x) = raw_local_irq_disable())

#endif /* __KERNEL__ */
#endif /* __ASM_IRQFLAGS_H */
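
/*
 * Usage sketch (illustrative, not part of the original header): the
 * generic local_irq_save()/local_irq_restore() helpers are built on
 * these raw_* primitives, so a critical section on s390 boils down to
 * roughly the following.
 *
 *	unsigned long flags;
 *
 *	raw_local_irq_save(flags);	stnsm 0xfc: IRQs off, old mask kept
 *	... code that must not race with external or I/O interrupts ...
 *	raw_local_irq_restore(flags);	ssm: previous mask reinstated
 */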