#ifndef __ASM_SH64_BITOPS_H
#define __ASM_SH64_BITOPS_H

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * include/asm-sh64/bitops.h
 *
 * Copyright (C) 2000, 2001  Paolo Alberelli
 * Copyright (C) 2003  Paul Mundt
 */

#ifdef __KERNEL__

#include <linux/compiler.h>
#include <asm/system.h>
/* For __swab32 */
#include <asm/byteorder.h>
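
/*
 * The atomic bitops below serialize by disabling interrupts around the
 * read-modify-write rather than by using atomic instructions, which only
 * protects against concurrent access from the local CPU (i.e. it assumes
 * a UP configuration).  Bits are addressed as 'nr' within an array of
 * 32-bit words: word nr >> 5, bit nr & 0x1f.
 */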
static __inline__ void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a |= mask;
	local_irq_restore(flags);
}

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

static inline void clear_bit(int nr, volatile unsigned long *a)
{
	int mask;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a &= ~mask;
	local_irq_restore(flags);
}

static __inline__ void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a ^= mask;
	local_irq_restore(flags);
}
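
/*
 * The test_and_*() variants return the previous value of the bit
 * (non-zero if it was set) and perform the update inside the same
 * interrupts-off region, so the test and the modification happen
 * atomically with respect to local interrupt handlers.
 */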
static __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	local_irq_restore(flags);

	return retval;
}

static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	local_irq_restore(flags);

	return retval;
}

static __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	local_irq_restore(flags);

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>
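
/*
 * ffz(): find the index of the first zero bit in @word.  The SHmedia
 * asm below tests bit 0 and keeps shifting @word right by one until a
 * clear bit turns up, counting iterations into the result; the tr0
 * branch-target register is saved and restored around the loop.  As
 * with other ffz() implementations, the result is undefined if @word
 * contains no zero bit.
 */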
static __inline__ unsigned long ffz(unsigned long word)
{
	unsigned long result, __d2, __d3;

	__asm__("gettr	tr0, %2\n\t"
		"pta	$+32, tr0\n\t"
		"andi	%1, 1, %3\n\t"
		"beq	%3, r63, tr0\n\t"
		"pta	$+4, tr0\n"
		"0:\n\t"
		"shlri.l	%1, 1, %1\n\t"
		"addi	%0, 1, %0\n\t"
		"andi	%1, 1, %3\n\t"
		"beqi	%3, 1, tr0\n"
		"1:\n\t"
		"ptabs	%2, tr0\n\t"
		: "=r" (result), "=r" (word), "=r" (__d2), "=r" (__d3)
		: "0" (0L), "1" (word));

	return result;
}
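
/*
 * Everything else (bit searching, ffs/fls, hweight, scheduler bitops,
 * and the ext2 and minix filesystem bitops) comes from the generic
 * implementations.
 */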
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* __KERNEL__ */

#endif /* __ASM_SH64_BITOPS_H */