arch/powerpc/lib/bitops.c
#include <linux/types.h>
#include <linux/module.h>
#include <asm/byteorder.h>
#include <asm/bitops.h>

/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The maximum size to search
 */
unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
			    unsigned long offset)
{
	const unsigned long *p = addr + BITOP_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG-1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		/* Handle a partial first word. */
		tmp = *(p++);
		tmp &= (~0UL << offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	/* Scan whole words. */
	while (size & ~(BITS_PER_LONG-1)) {
		if ((tmp = *(p++)))
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	/* Only a partial final word remains. */
	tmp = *p;

found_first:
	tmp &= (~0UL >> (BITS_PER_LONG - size));
	if (tmp == 0UL)		/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + __ffs(tmp);
}
EXPORT_SYMBOL(find_next_bit);
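
/*
 * Illustrative sketch, not part of the original file: one common way a
 * caller might walk every set bit of a bitmap with find_next_bit().
 * The helper name and its parameters are hypothetical.
 */
static inline void example_for_each_set_bit(const unsigned long *bitmap,
					    unsigned long nbits)
{
	unsigned long bit;

	for (bit = find_next_bit(bitmap, nbits, 0);
	     bit < nbits;
	     bit = find_next_bit(bitmap, nbits, bit + 1)) {
		/* 'bit' is the index of a set bit; act on it here. */
	}
}
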
/*
 * This implementation of find_{first,next}_zero_bit was stolen from
 * Linus' asm-alpha/bitops.h.
 */
unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size,
				 unsigned long offset)
{
	const unsigned long *p = addr + BITOP_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG-1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		/* Handle a partial first word. */
		tmp = *(p++);
		tmp |= ~0UL >> (BITS_PER_LONG - offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	/* Scan whole words. */
	while (size & ~(BITS_PER_LONG-1)) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	/* Only a partial final word remains. */
	tmp = *p;

found_first:
	tmp |= ~0UL << size;
	if (tmp == ~0UL)	/* Are any bits zero? */
		return result + size;	/* Nope. */
found_middle:
	return result + ffz(tmp);
}
EXPORT_SYMBOL(find_next_zero_bit);
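
/*
 * Illustrative sketch, not part of the original file: the usual pattern
 * around find_next_zero_bit() -- locate a clear bit and claim it.  The
 * helper name is hypothetical; the caller is assumed to hold any lock
 * protecting the bitmap.
 */
static inline unsigned long example_claim_free_slot(unsigned long *bitmap,
						    unsigned long nbits)
{
	unsigned long bit = find_next_zero_bit(bitmap, nbits, 0);

	if (bit < nbits)
		__set_bit(bit, bitmap);	/* mark the slot busy */
	return bit;			/* == nbits if the map was full */
}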

/* log2 of a non-zero word via the PowerPC count-leading-zeros instruction. */
static inline unsigned int ext2_ilog2(unsigned int x)
{
	int lz;

	asm("cntlzw %0,%1": "=r"(lz):"r"(x));
	return 31 - lz;
}

/* First zero bit in a 32-bit word; returns 32 if every bit is set. */
static inline unsigned int ext2_ffz(unsigned int x)
{
	u32 rc;
	if ((x = ~x) == 0)
		return 32;
	rc = ext2_ilog2(x & -x);	/* isolate the lowest set bit of ~x */
	return rc;
}

/*
 * Like find_next_zero_bit(), but for a bitmap stored in little-endian
 * (ext2 on-disk) order, processed 32 bits at a time.
 */
unsigned long find_next_zero_le_bit(const unsigned long *addr,
				    unsigned long size, unsigned long offset)
{
	const unsigned int *p = ((const unsigned int *)addr) + (offset >> 5);
	unsigned int result = offset & ~31;
	unsigned int tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31;
	if (offset) {
		/* Handle a partial first word. */
		tmp = cpu_to_le32p(p++);
		tmp |= ~0U >> (32 - offset);	/* bug or feature ? */
		if (size < 32)
			goto found_first;
		if (tmp != ~0)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	/* Scan whole 32-bit words. */
	while (size >= 32) {
		if ((tmp = cpu_to_le32p(p++)) != ~0)
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	/* Only a partial final word remains. */
	tmp = cpu_to_le32p(p);
found_first:
	tmp |= ~0 << size;
	if (tmp == ~0)		/* Are any bits zero? */
		return result + size;	/* Nope. */
found_middle:
	return result + ext2_ffz(tmp);
}
EXPORT_SYMBOL(find_next_zero_le_bit);
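
/*
 * Illustrative sketch, not part of the original file: searching a
 * little-endian (ext2 on-disk style) bitmap for a free bit starting from a
 * cached position and wrapping to the front if nothing is free past 'start'.
 * The helper name and parameters are hypothetical.
 */
static inline unsigned long example_find_free_le_bit(const unsigned long *le_map,
						     unsigned long nbits,
						     unsigned long start)
{
	unsigned long bit = find_next_zero_le_bit(le_map, nbits, start);

	if (bit >= nbits && start) {
		/* Nothing free at or after 'start'; search the front part. */
		bit = find_next_zero_le_bit(le_map, start, 0);
		if (bit >= start)
			bit = nbits;	/* the whole bitmap is in use */
	}
	return bit;
}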