/* (gitweb scrape header — commit subject, path, blob id — not part of the file) */
#ifndef __ASM_SH64_BITOPS_H
#define __ASM_SH64_BITOPS_H
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * include/asm-sh64/bitops.h
 *
 * Copyright (C) 2000, 2001  Paolo Alberelli
 * Copyright (C) 2003  Paul Mundt
 */
#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/system.h>
/* For __swab32 */
#include <asm/byteorder.h>
26 static __inline__ void set_bit(int nr, volatile void * addr)
28 int mask;
29 volatile unsigned int *a = addr;
30 unsigned long flags;
32 a += nr >> 5;
33 mask = 1 << (nr & 0x1f);
34 local_irq_save(flags);
35 *a |= mask;
36 local_irq_restore(flags);
40 * clear_bit() doesn't provide any barrier for the compiler.
42 #define smp_mb__before_clear_bit() barrier()
43 #define smp_mb__after_clear_bit() barrier()
44 static inline void clear_bit(int nr, volatile unsigned long *a)
46 int mask;
47 unsigned long flags;
49 a += nr >> 5;
50 mask = 1 << (nr & 0x1f);
51 local_irq_save(flags);
52 *a &= ~mask;
53 local_irq_restore(flags);
56 static __inline__ void change_bit(int nr, volatile void * addr)
58 int mask;
59 volatile unsigned int *a = addr;
60 unsigned long flags;
62 a += nr >> 5;
63 mask = 1 << (nr & 0x1f);
64 local_irq_save(flags);
65 *a ^= mask;
66 local_irq_restore(flags);
69 static __inline__ int test_and_set_bit(int nr, volatile void * addr)
71 int mask, retval;
72 volatile unsigned int *a = addr;
73 unsigned long flags;
75 a += nr >> 5;
76 mask = 1 << (nr & 0x1f);
77 local_irq_save(flags);
78 retval = (mask & *a) != 0;
79 *a |= mask;
80 local_irq_restore(flags);
82 return retval;
85 static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
87 int mask, retval;
88 volatile unsigned int *a = addr;
89 unsigned long flags;
91 a += nr >> 5;
92 mask = 1 << (nr & 0x1f);
93 local_irq_save(flags);
94 retval = (mask & *a) != 0;
95 *a &= ~mask;
96 local_irq_restore(flags);
98 return retval;
101 static __inline__ int test_and_change_bit(int nr, volatile void * addr)
103 int mask, retval;
104 volatile unsigned int *a = addr;
105 unsigned long flags;
107 a += nr >> 5;
108 mask = 1 << (nr & 0x1f);
109 local_irq_save(flags);
110 retval = (mask & *a) != 0;
111 *a ^= mask;
112 local_irq_restore(flags);
114 return retval;
#include <asm-generic/bitops/non-atomic.h>
119 static __inline__ unsigned long ffz(unsigned long word)
121 unsigned long result, __d2, __d3;
123 __asm__("gettr tr0, %2\n\t"
124 "pta $+32, tr0\n\t"
125 "andi %1, 1, %3\n\t"
126 "beq %3, r63, tr0\n\t"
127 "pta $+4, tr0\n"
128 "0:\n\t"
129 "shlri.l %1, 1, %1\n\t"
130 "addi %0, 1, %0\n\t"
131 "andi %1, 1, %3\n\t"
132 "beqi %3, 1, tr0\n"
133 "1:\n\t"
134 "ptabs %2, tr0\n\t"
135 : "=r" (result), "=r" (word), "=r" (__d2), "=r" (__d3)
136 : "0" (0L), "1" (word));
138 return result;
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* __KERNEL__ */

#endif /* __ASM_SH64_BITOPS_H */