/*
 * (source provenance: Linux kernel mirror,
 *  arch/mn10300/include/asm/bitops.h,
 *  blob fe6f8e2c3617de08fc0804ff2098fbe6386504fc)
 */
/* MN10300 bit operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 *
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
#ifndef __ASM_BITOPS_H
#define __ASM_BITOPS_H

#include <asm/cpu-regs.h>
#include <asm/barrier.h>
/*
 * set bit
 */
#define __set_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	/* bset ORs the mask into the target byte in one	\
	 * instruction						\
	 */							\
	asm volatile("bset %2,(%1) # set_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

/* per the header comment, the bit-setting insn is atomic, so the
 * atomic and non-atomic variants are the same operation
 */
#define set_bit(nr, addr) __set_bit((nr), (addr))
/*
 * clear bit
 */
#define ___clear_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	/* bclr clears the masked bits of the target byte in	\
	 * one instruction					\
	 */							\
	asm volatile("bclr %2,(%1) # clear_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define clear_bit(nr, addr) ___clear_bit((nr), (addr))
/* Non-atomic clear bit: plain C read-modify-write on the containing
 * 32-bit word.  Callers must provide their own exclusion if the word
 * may be touched concurrently.
 */
static inline void __clear_bit(unsigned long nr, volatile void *addr)
{
	unsigned int *a = (unsigned int *) addr;
	int mask;

	a += nr >> 5;			/* index of the containing word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	*a &= ~mask;
}
/*
 * test bit
 * - returns 1 if bit @nr of the bitmap at @addr is set, 0 otherwise
 * - reads the containing 32-bit word; no ordering guarantees implied
 */
static inline int test_bit(unsigned long nr, const volatile void *addr)
{
	return 1UL & (((const volatile unsigned int *) addr)[nr >> 5] >> (nr & 31));
}
/*
 * change bit
 * - non-atomic toggle of bit @nr in the bitmap at @addr (plain C
 *   read-modify-write; caller provides exclusion if needed)
 */
static inline void __change_bit(unsigned long nr, volatile void *addr)
{
	int mask;
	unsigned int *a = (unsigned int *) addr;

	a += nr >> 5;			/* index of the containing word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	*a ^= mask;
}

/* atomic variant implemented out of line */
extern void change_bit(unsigned long nr, volatile void *addr);
/*
 * test and set bit
 * - returns 0 if the bit was clear before the operation, != 0 if set
 *   (bset leaves the old-value test in the EPSW Z flag, captured by
 *   the following "mov epsw" -- see the header comment)
 */
#define __test_and_set_bit(nr,addr)					\
({									\
	volatile unsigned char *_a = (unsigned char *)(addr);		\
	const unsigned shift = (nr) & 7;				\
	unsigned epsw;							\
	_a += (nr) >> 3;						\
									\
	asm volatile("bset %3,(%2) # test_set_bit reg\n"		\
		     "mov epsw,%1"					\
		     : "=m"(*_a), "=d"(epsw)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)		\
		     : "memory", "cc");					\
									\
	!(epsw & EPSW_FLAG_Z);						\
})

#define test_and_set_bit(nr, addr) __test_and_set_bit((nr), (addr))
/*
 * test and clear bit
 * - returns 0 if the bit was clear before the operation, != 0 if set
 *   (bclr leaves the old-value test in the EPSW Z flag, captured by
 *   the following "mov epsw" -- see the header comment)
 */
#define __test_and_clear_bit(nr, addr)					\
({									\
	volatile unsigned char *_a = (unsigned char *)(addr);		\
	const unsigned shift = (nr) & 7;				\
	unsigned epsw;							\
	_a += (nr) >> 3;						\
									\
	asm volatile("bclr %3,(%2) # test_clear_bit reg\n"		\
		     "mov epsw,%1"					\
		     : "=m"(*_a), "=d"(epsw)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)		\
		     : "memory", "cc");					\
									\
	!(epsw & EPSW_FLAG_Z);						\
})

#define test_and_clear_bit(nr, addr) __test_and_clear_bit((nr), (addr))
/*
 * test and change bit
 * - non-atomic toggle; returns 1 if the bit was set before the
 *   operation, 0 if it was clear
 */
static inline int __test_and_change_bit(unsigned long nr, volatile void *addr)
{
	int mask, retval;
	unsigned int *a = (unsigned int *)addr;

	a += nr >> 5;			/* index of the containing word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */
	retval = (mask & *a) != 0;	/* sample old value first */
	*a ^= mask;

	return retval;
}

/* atomic variant implemented out of line */
extern int test_and_change_bit(unsigned long nr, volatile void *addr);
#include <asm-generic/bitops/lock.h>

#ifdef __KERNEL__
156 * __ffs - find first bit set
157 * @x: the word to search
159 * - return 31..0 to indicate bit 31..0 most least significant bit set
160 * - if no bits are set in x, the result is undefined
162 static inline __attribute__((const))
163 unsigned long __ffs(unsigned long x)
165 int bit;
166 asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(x & -x) : "cc");
167 return bit;
171 * special slimline version of fls() for calculating ilog2_u32()
172 * - note: no protection against n == 0
174 static inline __attribute__((const))
175 int __ilog2_u32(u32 n)
177 int bit;
178 asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(n) : "cc");
179 return bit;
/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1 to indicate bit 31..0 most significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls(int x)
{
	/* __ilog2_u32() is undefined for 0, so guard that case here */
	return (x != 0) ? __ilog2_u32(x) + 1 : 0;
}
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __ilog2_u32(word);
}
/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1 to indicate bit 31..0 most least significant bit set
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>
#include <asm-generic/bitops/le.h>

#endif /* __KERNEL__ */
#endif /* __ASM_BITOPS_H */