/*
 * These are too big to be inlined.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/bitops.h>
#include <asm/byteorder.h>
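/*
 * find_next_zero_bit - find the first cleared bit at or above offset.
 * Returns the bit number of the first zero bit in the range
 * [offset, size), or size if every bit in that range is set.
 * The bitmap is an array of native 64-bit words.
 */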
unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size,
				 unsigned long offset)
{
	const unsigned long *p = addr + (offset >> 6);
	unsigned long result = offset & ~63UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 63UL;
	if (offset) {
		/* Partial first word: mask off the bits below offset. */
		tmp = *(p++);
		tmp |= ~0UL >> (64 - offset);
		if (size < 64)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 64;
		result += 64;
	}
	/* Scan whole 64-bit words. */
	while (size & ~63UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 64;
		size -= 64;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp |= ~0UL << size;
	if (tmp == ~0UL)	/* Are any bits zero? */
		return result + size;	/* Nope. */
found_middle:
	return result + ffz(tmp);
}

EXPORT_SYMBOL(find_next_zero_bit);
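/*
 * find_next_bit - find the first set bit at or above offset.
 * Returns the bit number of the first set bit in the range
 * [offset, size), or size if no bit in that range is set.
 */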
unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
			    unsigned long offset)
{
	const unsigned long *p = addr + (offset >> 6);
	unsigned long result = offset & ~63UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 63UL;
	if (offset) {
		/* Partial first word: clear the bits below offset. */
		tmp = *(p++);
		tmp &= (~0UL << offset);
		if (size < 64)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= 64;
		result += 64;
	}
	/* Scan whole 64-bit words. */
	while (size & ~63UL) {
		if ((tmp = *(p++)))
			goto found_middle;
		result += 64;
		size -= 64;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp &= (~0UL >> (64 - size));
	if (tmp == 0UL)		/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + __ffs(tmp);
}

EXPORT_SYMBOL(find_next_bit);
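/*
 * Little-endian (ext2 on-disk order) bitmap helpers.  These operate on
 * 32-bit words and use the PowerPC cntlzw (count leading zeros word)
 * instruction to locate bits.
 */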
static inline unsigned int ext2_ilog2(unsigned int x)
{
	int lz;

	asm("cntlzw %0,%1": "=r"(lz):"r"(x));
	return 31 - lz;
}
static inline unsigned int ext2_ffz(unsigned int x)
{
	u32 rc = 32;

	if ((x = ~x) != 0)	/* any zero bits at all? */
		rc = ext2_ilog2(x & -x);
	return rc;
}
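/*
 * find_next_zero_le_bit - little-endian variant of find_next_zero_bit,
 * used for ext2 bitmaps stored as arrays of little-endian 32-bit words.
 */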
unsigned long find_next_zero_le_bit(const unsigned long *addr, unsigned long size,
				    unsigned long offset)
{
	const unsigned int *p = ((const unsigned int *)addr) + (offset >> 5);
	unsigned int result = offset & ~31;
	unsigned int tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31;
	if (offset) {
		/* Partial first word: mask off the bits below offset. */
		tmp = cpu_to_le32p(p++);
		tmp |= ~0U >> (32 - offset);	/* bug or feature ? */
		if (size < 32)
			goto found_first;
		if (tmp != ~0)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	/* Scan whole 32-bit words. */
	while (size & ~31) {
		if ((tmp = cpu_to_le32p(p++)) != ~0)
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = cpu_to_le32p(p);
found_first:
	tmp |= ~0 << size;
	if (tmp == ~0)		/* Are any bits zero? */
		return result + size;	/* Nope. */
found_middle:
	return result + ext2_ffz(tmp);
}

EXPORT_SYMBOL(find_next_zero_le_bit);