/*
 * lib/bitmap.c
 * Helper functions for bitmap.h.
 *
 * This source code is licensed under the GNU General Public License,
 * Version 2.  See the file COPYING for more details.
 */
#include <linux/export.h>
#include <linux/thread_info.h>
#include <linux/ctype.h>
#include <linux/errno.h>
#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/bug.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

#include <asm/page.h>

/*
 * bitmaps provide an array of bits, implemented using an
 * array of unsigned longs.  The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'.  The implementation makes
 * no particular effort to keep them zero.  It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * These operations actually hold to a slightly stronger rule:
 * if you don't input any bitmaps to these ops that have some
 * unused bits set, then they won't output any set unused bits
 * in output bitmaps.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures.  See the big-endian headers
 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
 * for the best explanations of this ordering.
 */
int __bitmap_equal(const unsigned long *bitmap1,
		const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] != bitmap2[k])
			return 0;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 0;

	return 1;
}
EXPORT_SYMBOL(__bitmap_equal);

void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		dst[k] = ~src[k];

	if (bits % BITS_PER_LONG)
		dst[k] = ~src[k];
}
EXPORT_SYMBOL(__bitmap_complement);

/**
 * __bitmap_shift_right - logical right shift of the bits in a bitmap
 *   @dst : destination bitmap
 *   @src : source bitmap
 *   @shift : shift by this many bits
 *   @nbits : bitmap size, in bits
 *
 * Shifting right (dividing) means moving bits in the MS -> LS bit
 * direction.  Zeros are fed into the vacated MS positions and the
 * LS bits shifted off the bottom are lost.
 */
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			unsigned shift, unsigned nbits)
{
	unsigned k, lim = BITS_TO_LONGS(nbits);
	unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
	for (k = 0; off + k < lim; ++k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take lower rem bits of
		 * word above and make them the top rem bits of result.
		 */
		if (!rem || off + k + 1 >= lim)
			upper = 0;
		else {
			upper = src[off + k + 1];
			if (off + k + 1 == lim - 1)
				upper &= mask;
			upper <<= (BITS_PER_LONG - rem);
		}
		lower = src[off + k];
		if (off + k == lim - 1)
			lower &= mask;
		lower >>= rem;
		dst[k] = lower | upper;
	}
	if (off)
		memset(&dst[lim - off], 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_right);

/**
 * __bitmap_shift_left - logical left shift of the bits in a bitmap
 *   @dst : destination bitmap
 *   @src : source bitmap
 *   @shift : shift by this many bits
 *   @nbits : bitmap size, in bits
 *
 * Shifting left (multiplying) means moving bits in the LS -> MS
 * direction.  Zeros are fed into the vacated LS bit positions
 * and those MS bits shifted off the top are lost.
 */
void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			unsigned int shift, unsigned int nbits)
{
	int k;
	unsigned int lim = BITS_TO_LONGS(nbits);
	unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	for (k = lim - off - 1; k >= 0; --k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take upper rem bits of
		 * word below and make them the bottom rem bits of result.
		 */
		if (rem && k > 0)
			lower = src[k - 1] >> (BITS_PER_LONG - rem);
		else
			lower = 0;
		upper = src[k] << rem;
		dst[k + off] = lower | upper;
	}
	if (off)
		memset(dst, 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_left);

int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_and);

void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_or);

void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] ^ bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_xor);

int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_andnot);

int __bitmap_intersects(const unsigned long *bitmap1,
			const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & bitmap2[k])
			return 1;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 1;
	return 0;
}
EXPORT_SYMBOL(__bitmap_intersects);

int __bitmap_subset(const unsigned long *bitmap1,
		    const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & ~bitmap2[k])
			return 0;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return 0;
	return 1;
}
EXPORT_SYMBOL(__bitmap_subset);

int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	int w = 0;

	for (k = 0; k < lim; k++)
		w += hweight_long(bitmap[k]);

	if (bits % BITS_PER_LONG)
		w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));

	return w;
}
EXPORT_SYMBOL(__bitmap_weight);

void __bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
EXPORT_SYMBOL(__bitmap_set);

void __bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
EXPORT_SYMBOL(__bitmap_clear);
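
/*
 * Illustrative usage sketch (not part of the original file): most callers
 * reach these helpers through the bitmap_set()/bitmap_clear() wrappers in
 * linux/bitmap.h, e.g. to mark bits 2..9 busy and then free bits 4..5
 * again in a hypothetical 16-bit map:
 *
 *	DECLARE_BITMAP(map, 16);
 *
 *	bitmap_zero(map, 16);
 *	bitmap_set(map, 2, 8);		// sets bits 2..9
 *	bitmap_clear(map, 4, 2);	// clears bits 4 and 5
 */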

/**
 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 * @align_offset: Alignment offset for zero area.
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds plus @align_offset
 * is a multiple of that power of 2.
 */
unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset)
{
	unsigned long index, end, i;
again:
	index = find_next_zero_bit(map, size, start);

	/* Align allocation */
	index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;

	end = index + nr;
	if (end > size)
		return end;
	i = find_next_bit(map, end, index);
	if (i < end) {
		start = i + 1;
		goto again;
	}
	return index;
}
EXPORT_SYMBOL(bitmap_find_next_zero_area_off);
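
/*
 * Illustrative sketch (not from the original file): looking for four clear
 * bits aligned to a multiple of four in a hypothetical 64-bit map where
 * bits 0..4 are already busy.  A return value >= @size means no fit.
 *
 *	DECLARE_BITMAP(map, 64);
 *	unsigned long pos;
 *
 *	bitmap_zero(map, 64);
 *	bitmap_set(map, 0, 5);
 *	pos = bitmap_find_next_zero_area_off(map, 64, 0, 4, 3, 0);
 *	if (pos >= 64)
 *		return -ENOMEM;		// no aligned run of 4 zero bits
 *	bitmap_set(map, pos, 4);	// pos == 8 in this example
 */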

/*
 * Bitmap printing & parsing functions: first version by Nadia Yvette Chambers,
 * second version by Paul Jackson, third by Joe Korty.
 */

#define CHUNKSZ				32
#define nbits_to_hold_value(val)	fls(val)
#define BASEDEC 10		/* fancier cpuset lists input in decimal */

/**
 * __bitmap_parse - convert an ASCII hex string into a bitmap.
 * @buf: pointer to buffer containing string.
 * @buflen: buffer size in bytes.  If string is smaller than this
 *    then it must be terminated with a \0.
 * @is_user: location of buffer, 0 indicates kernel space
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Commas group hex digits into chunks.  Each chunk defines exactly 32
 * bits of the resultant bitmask.  No chunk may specify a value larger
 * than 32 bits (%-EOVERFLOW), and if a chunk specifies a smaller value
 * then leading 0-bits are prepended.  %-EINVAL is returned for illegal
 * characters and for grouping errors such as "1,,5", ",44", "," and "".
 * Leading and trailing whitespace accepted, but not embedded whitespace.
 */
int __bitmap_parse(const char *buf, unsigned int buflen,
		int is_user, unsigned long *maskp,
		int nmaskbits)
{
	int c, old_c, totaldigits, ndigits, nchunks, nbits;
	u32 chunk;
	const char __user __force *ubuf = (const char __user __force *)buf;

	bitmap_zero(maskp, nmaskbits);

	nchunks = nbits = totaldigits = c = 0;
	do {
		chunk = 0;
		ndigits = totaldigits;

		/* Get the next chunk of the bitmap */
		while (buflen) {
			old_c = c;
			if (is_user) {
				if (__get_user(c, ubuf++))
					return -EFAULT;
			}
			else
				c = *buf++;
			buflen--;
			if (isspace(c))
				continue;

			/*
			 * If the last character was a space and the current
			 * character isn't '\0', we've got embedded whitespace.
			 * This is a no-no, so throw an error.
			 */
			if (totaldigits && c && isspace(old_c))
				return -EINVAL;

			/* A '\0' or a ',' signal the end of the chunk */
			if (c == '\0' || c == ',')
				break;

			if (!isxdigit(c))
				return -EINVAL;

			/*
			 * Make sure there are at least 4 free bits in 'chunk'.
			 * If not, this hexdigit will overflow 'chunk', so
			 * throw an error.
			 */
			if (chunk & ~((1UL << (CHUNKSZ - 4)) - 1))
				return -EOVERFLOW;

			chunk = (chunk << 4) | hex_to_bin(c);
			totaldigits++;
		}
		if (ndigits == totaldigits)
			return -EINVAL;
		if (nchunks == 0 && chunk == 0)
			continue;

		__bitmap_shift_left(maskp, maskp, CHUNKSZ, nmaskbits);
		*maskp |= chunk;
		nchunks++;
		nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
		if (nbits > nmaskbits)
			return -EOVERFLOW;
	} while (buflen && c == ',');

	return 0;
}
EXPORT_SYMBOL(__bitmap_parse);
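
/*
 * Illustrative sketch (not from the original file): parsing the hypothetical
 * hex string "ff,00000001" into a 64-bit mask.  Chunks are comma-separated,
 * most significant chunk first, so this sets bits 0 and 32-39:
 *
 *	DECLARE_BITMAP(mask, 64);
 *	const char *buf = "ff,00000001";
 *	int err = __bitmap_parse(buf, strlen(buf), 0, mask, 64);
 *	// err == 0 on success; kernel-space callers normally use the
 *	// bitmap_parse() wrapper, which passes is_user == 0 for them.
 */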

/**
 * bitmap_parse_user - convert an ASCII hex string in a user buffer into a bitmap
 *
 * @ubuf: pointer to user buffer containing string.
 * @ulen: buffer size in bytes.  If string is smaller than this
 *    then it must be terminated with a \0.
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Wrapper for __bitmap_parse(), providing it with user buffer.
 *
 * We cannot have this as an inline function in bitmap.h because it needs
 * linux/uaccess.h to get the access_ok() declaration and this causes
 * cyclic dependencies.
 */
int bitmap_parse_user(const char __user *ubuf,
			unsigned int ulen, unsigned long *maskp,
			int nmaskbits)
{
	if (!access_ok(VERIFY_READ, ubuf, ulen))
		return -EFAULT;
	return __bitmap_parse((const char __force *)ubuf,
				ulen, 1, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parse_user);

/**
 * bitmap_print_to_pagebuf - convert bitmap to list or hex format ASCII string
 * @list: indicates whether the bitmap must be list
 * @buf: page aligned buffer into which string is placed
 * @maskp: pointer to bitmap to convert
 * @nmaskbits: size of bitmap, in bits
 *
 * Output format is a comma-separated list of decimal numbers and
 * ranges if list is specified or hex digits grouped into comma-separated
 * sets of 8 digits/set.  Returns the number of characters written to buf.
 *
 * It is assumed that @buf is a pointer into a PAGE_SIZE area and that
 * sufficient storage remains at @buf to accommodate the
 * bitmap_print_to_pagebuf() output.
 */
int bitmap_print_to_pagebuf(bool list, char *buf, const unsigned long *maskp,
			    int nmaskbits)
{
	ptrdiff_t len = PTR_ALIGN(buf + PAGE_SIZE - 1, PAGE_SIZE) - buf;
	int n = 0;

	if (len > 1)
		n = list ? scnprintf(buf, len, "%*pbl\n", nmaskbits, maskp) :
			   scnprintf(buf, len, "%*pb\n", nmaskbits, maskp);
	return n;
}
EXPORT_SYMBOL(bitmap_print_to_pagebuf);
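
/*
 * Illustrative sketch (not from the original file): a typical caller is a
 * sysfs ->show() method whose @buf already points into a PAGE_SIZE buffer.
 * The foo_mask/foo_nbits names below are hypothetical:
 *
 *	static ssize_t foo_show(struct device *dev,
 *				struct device_attribute *attr, char *buf)
 *	{
 *		return bitmap_print_to_pagebuf(true, buf, foo_mask, foo_nbits);
 *	}
 */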

/**
 * __bitmap_parselist - convert list format ASCII string to bitmap
 * @buf: read nul-terminated user string from this buffer
 * @buflen: buffer size in bytes.  If string is smaller than this
 *    then it must be terminated with a \0.
 * @is_user: location of buffer, 0 indicates kernel space
 * @maskp: write resulting mask here
 * @nmaskbits: number of bits in mask to be written
 *
 * Input format is a comma-separated list of decimal numbers and
 * ranges.  Consecutively set bits are shown as two hyphen-separated
 * decimal numbers, the smallest and largest bit numbers set in
 * the range.
 * Optionally each range can be postfixed to denote that only parts of it
 * should be set.  The range is then divided into groups of a specific size,
 * and only a defined number of bits from each group is used.
 * Syntax: range:used_size/group_size
 * Example: 0-1023:2/256 ==> 0,1,256,257,512,513,768,769
 *
 * Returns: 0 on success, -errno on invalid input strings.  Error values:
 *
 *   - ``-EINVAL``: second number in range smaller than first
 *   - ``-EINVAL``: invalid character in string
 *   - ``-ERANGE``: bit number specified too large for mask
 */
static int __bitmap_parselist(const char *buf, unsigned int buflen,
		int is_user, unsigned long *maskp,
		int nmaskbits)
{
	unsigned int a, b, old_a, old_b;
	unsigned int group_size, used_size, off;
	int c, old_c, totaldigits, ndigits;
	const char __user __force *ubuf = (const char __user __force *)buf;
	int at_start, in_range, in_partial_range;

	totaldigits = c = 0;
	old_a = old_b = 0;
	group_size = used_size = 0;
	bitmap_zero(maskp, nmaskbits);
	do {
		at_start = 1;
		in_range = 0;
		in_partial_range = 0;
		a = b = 0;
		ndigits = totaldigits;

		/* Get the next cpu# or a range of cpu#'s */
		while (buflen) {
			old_c = c;
			if (is_user) {
				if (__get_user(c, ubuf++))
					return -EFAULT;
			} else
				c = *buf++;
			buflen--;
			if (isspace(c))
				continue;

			/* A '\0' or a ',' signal the end of a cpu# or range */
			if (c == '\0' || c == ',')
				break;
			/*
			 * Whitespace between digits is not allowed, but it's
			 * OK at the head or tail of the string.  When old_c is
			 * whitespace and totaldigits == ndigits, the whitespace
			 * was at the head.  Trailing whitespace cannot reach
			 * this point: c would then be ',' or '\0' and the
			 * check above would already have broken out of the loop.
			 */
			if ((totaldigits != ndigits) && isspace(old_c))
				return -EINVAL;

			if (c == '/') {
				used_size = a;
				at_start = 1;
				in_range = 0;
				a = b = 0;
				continue;
			}

			if (c == ':') {
				old_a = a;
				old_b = b;
				at_start = 1;
				in_range = 0;
				in_partial_range = 1;
				a = b = 0;
				continue;
			}

			if (c == '-') {
				if (at_start || in_range)
					return -EINVAL;
				b = 0;
				in_range = 1;
				at_start = 1;
				continue;
			}

			if (!isdigit(c))
				return -EINVAL;

			b = b * 10 + (c - '0');
			if (!in_range)
				a = b;
			at_start = 0;
			totaldigits++;
		}
		if (ndigits == totaldigits)
			continue;
		if (in_partial_range) {
			group_size = a;
			a = old_a;
			b = old_b;
			old_a = old_b = 0;
		} else {
			used_size = group_size = b - a + 1;
		}
		/* if no digit is after '-', it's wrong */
		if (at_start && in_range)
			return -EINVAL;
		if (!(a <= b) || group_size == 0 || !(used_size <= group_size))
			return -EINVAL;
		if (b >= nmaskbits)
			return -ERANGE;
		while (a <= b) {
			off = min(b - a + 1, used_size);
			bitmap_set(maskp, a, off);
			a += group_size;
		}
	} while (buflen && c == ',');
	return 0;
}

int bitmap_parselist(const char *bp, unsigned long *maskp, int nmaskbits)
{
	char *nl = strchrnul(bp, '\n');
	int len = nl - bp;

	return __bitmap_parselist(bp, len, 0, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parselist);
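
/*
 * Illustrative sketch (not from the original file): parsing a hypothetical
 * list string into a 128-bit mask.  Plain numbers and ranges may be mixed:
 *
 *	DECLARE_BITMAP(mask, 128);
 *	int err = bitmap_parselist("0-3,32,64-65", mask, 128);
 *	// err == 0; bits 0-3, 32, 64 and 65 are now set.
 *	// "0-1023:2/256" (used_size/group_size) would instead set bits
 *	// 0,1,256,257,512,513,768,769 in a sufficiently large mask.
 */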

/**
 * bitmap_parselist_user()
 *
 * @ubuf: pointer to user buffer containing string.
 * @ulen: buffer size in bytes.  If string is smaller than this
 *    then it must be terminated with a \0.
 * @maskp: pointer to bitmap array that will contain result.
 * @nmaskbits: size of bitmap, in bits.
 *
 * Wrapper for bitmap_parselist(), providing it with user buffer.
 *
 * We cannot have this as an inline function in bitmap.h because it needs
 * linux/uaccess.h to get the access_ok() declaration and this causes
 * cyclic dependencies.
 */
int bitmap_parselist_user(const char __user *ubuf,
			unsigned int ulen, unsigned long *maskp,
			int nmaskbits)
{
	if (!access_ok(VERIFY_READ, ubuf, ulen))
		return -EFAULT;
	return __bitmap_parselist((const char __force *)ubuf,
					ulen, 1, maskp, nmaskbits);
}
EXPORT_SYMBOL(bitmap_parselist_user);

/**
 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
 *	@buf: pointer to a bitmap
 *	@pos: a bit position in @buf (0 <= @pos < @nbits)
 *	@nbits: number of valid bit positions in @buf
 *
 * Map the bit at position @pos in @buf (of length @nbits) to the
 * ordinal of which set bit it is.  If it is not set or if @pos
 * is not a valid bit position, map to -1.
 *
 * If for example, just bits 4 through 7 are set in @buf, then @pos
 * values 4 through 7 will get mapped to 0 through 3, respectively,
 * and other @pos values will get mapped to -1.  When @pos value 7
 * gets mapped to (returns) @ord value 3 in this example, that means
 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
 *
 * The bit positions 0 through @nbits-1 are valid positions in @buf.
 */
static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
{
	if (pos >= nbits || !test_bit(pos, buf))
		return -1;

	return __bitmap_weight(buf, pos);
}

/**
 * bitmap_ord_to_pos - find position of n-th set bit in bitmap
 *	@buf: pointer to bitmap
 *	@ord: ordinal bit position (n-th set bit, n >= 0)
 *	@nbits: number of valid bit positions in @buf
 *
 * Map the ordinal offset of bit @ord in @buf to its position in @buf.
 * Value of @ord should be in range 0 <= @ord < weight(buf).  If @ord
 * >= weight(buf), returns @nbits.
 *
 * If for example, just bits 4 through 7 are set in @buf, then @ord
 * values 0 through 3 will get mapped to 4 through 7, respectively,
 * and all other @ord values return @nbits.  When @ord value 3
 * gets mapped to (returns) @pos value 7 in this example, that means
 * that the 3rd set bit (starting with 0th) is at position 7 in @buf.
 *
 * The bit positions 0 through @nbits-1 are valid positions in @buf.
 */
unsigned int bitmap_ord_to_pos(const unsigned long *buf, unsigned int ord, unsigned int nbits)
{
	unsigned int pos;

	for (pos = find_first_bit(buf, nbits);
	     pos < nbits && ord;
	     pos = find_next_bit(buf, nbits, pos + 1))
		ord--;

	return pos;
}

/**
 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
 *	@dst: remapped result
 *	@src: subset to be remapped
 *	@old: defines domain of map
 *	@new: defines range of map
 *	@nbits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * If either of the @old and @new bitmaps are empty, or if @src and
 * @dst point to the same location, then this routine copies @src
 * to @dst.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to @src, placing the result in
 * @dst, clearing any bits previously set in @dst.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @src comes into this routine
 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
 * 13 and 15 set.
 */
void bitmap_remap(unsigned long *dst, const unsigned long *src,
		const unsigned long *old, const unsigned long *new,
		unsigned int nbits)
{
	unsigned int oldbit, w;

	if (dst == src)		/* following doesn't handle inplace remaps */
		return;
	bitmap_zero(dst, nbits);

	w = bitmap_weight(new, nbits);
	for_each_set_bit(oldbit, src, nbits) {
		int n = bitmap_pos_to_ord(old, oldbit, nbits);

		if (n < 0 || w == 0)
			set_bit(oldbit, dst);	/* identity map */
		else
			set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst);
	}
}
EXPORT_SYMBOL(bitmap_remap);
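
/*
 * Illustrative sketch (not from the original file) of the example given in
 * the comment above, using hypothetical 16-bit maps:
 *
 *	DECLARE_BITMAP(old, 16);
 *	DECLARE_BITMAP(new, 16);
 *	DECLARE_BITMAP(src, 16);
 *	DECLARE_BITMAP(dst, 16);
 *
 *	bitmap_zero(old, 16);
 *	bitmap_zero(new, 16);
 *	bitmap_zero(src, 16);
 *	bitmap_set(old, 4, 4);		// old = {4,5,6,7}
 *	bitmap_set(new, 12, 4);		// new = {12,13,14,15}
 *	set_bit(1, src);
 *	set_bit(5, src);
 *	set_bit(7, src);
 *	bitmap_remap(dst, src, old, new, 16);
 *	// dst now has bits 1 (identity), 13 and 15 set
 */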

/**
 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 *	@oldbit: bit position to be mapped
 *	@old: defines domain of map
 *	@new: defines range of map
 *	@bits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to bit position @oldbit, returning
 * the new bit position.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @oldbit is 5, then this routine
 * returns 13.
 */
int bitmap_bitremap(int oldbit, const unsigned long *old,
				const unsigned long *new, int bits)
{
	int w = bitmap_weight(new, bits);
	int n = bitmap_pos_to_ord(old, oldbit, bits);
	if (n < 0 || w == 0)
		return oldbit;
	else
		return bitmap_ord_to_pos(new, n % w, bits);
}
EXPORT_SYMBOL(bitmap_bitremap);
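
/*
 * Illustrative sketch (not from the original file): with @old and @new set
 * up exactly as in the bitmap_remap() example above (old = {4..7},
 * new = {12..15}), a single bit can be translated directly:
 *
 *	int newbit = bitmap_bitremap(5, old, new, 16);	// newbit == 13
 *	int same   = bitmap_bitremap(1, old, new, 16);	// same == 1 (identity)
 */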

/**
 * bitmap_onto - translate one bitmap relative to another
 *	@dst: resulting translated bitmap
 *	@orig: original untranslated bitmap
 *	@relmap: bitmap relative to which translated
 *	@bits: number of bits in each of these bitmaps
 *
 * Set the n-th bit of @dst iff there exists some m such that the
 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
 * (If you understood the previous sentence the first time you
 * read it, you're overqualified for your current job.)
 *
 * In other words, @orig is mapped onto (surjectively) @dst,
 * using the map { <n, m> | the n-th bit of @relmap is the
 * m-th set bit of @relmap }.
 *
 * Any set bits in @orig above bit number W, where W is the
 * weight of (number of set bits in) @relmap are mapped nowhere.
 * In particular, if for all bits m set in @orig, m >= W, then
 * @dst will end up empty.  In situations where the possibility
 * of such an empty result is not desired, one way to avoid it is
 * to use the bitmap_fold() operator, below, to first fold the
 * @orig bitmap over itself so that all its set bits x are in the
 * range 0 <= x < W.  The bitmap_fold() operator does this by
 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
 *
 * Example [1] for bitmap_onto():
 *  Let's say @relmap has bits 30-39 set, and @orig has bits
 *  1, 3, 5, 7, 9 and 11 set.  Then on return from this routine,
 *  @dst will have bits 31, 33, 35, 37 and 39 set.
 *
 *  When bit 0 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the first bit (if any)
 *  that is turned on in @relmap.  Since bit 0 was off in the
 *  above example, we leave off that bit (bit 30) in @dst.
 *
 *  When bit 1 is set in @orig (as in the above example), it
 *  means turn on the bit in @dst corresponding to whatever
 *  is the second bit that is turned on in @relmap.  The second
 *  bit in @relmap that was turned on in the above example was
 *  bit 31, so we turned on bit 31 in @dst.
 *
 *  Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
 *  because they were the 4th, 6th, 8th and 10th set bits
 *  set in @relmap, and the 4th, 6th, 8th and 10th bits of
 *  @orig (i.e. bits 3, 5, 7 and 9) were also set.
 *
 *  When bit 11 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the twelfth bit that is
 *  turned on in @relmap.  In the above example, there were
 *  only ten bits turned on in @relmap (30..39), so the fact
 *  that bit 11 was set in @orig had no effect on @dst.
 *
 * Example [2] for bitmap_fold() + bitmap_onto():
 *  Let's say @relmap has these ten bits set::
 *
 *	40 41 42 43 45 48 53 61 74 95
 *
 *  (for the curious, that's 40 plus the first ten terms of the
 *  Fibonacci sequence.)
 *
 *  Further, let's say we use the following code, invoking
 *  bitmap_fold() then bitmap_onto(), as suggested above to
 *  avoid the possibility of an empty @dst result::
 *
 *	unsigned long *tmp;	// a temporary bitmap's bits
 *
 *	bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
 *	bitmap_onto(dst, tmp, relmap, bits);
 *
 *  Then this table shows what various values of @dst would be, for
 *  various @orig's.  I list the zero-based positions of each set bit.
 *  The tmp column shows the intermediate result, as computed by
 *  using bitmap_fold() to fold the @orig bitmap modulo ten
 *  (the weight of @relmap):
 *
 *      =============== ============== =================
 *      @orig           tmp            @dst
 *      0               0              40
 *      1               1              41
 *      9               9              95
 *      10              0              40 [#f1]_
 *      1 3 5 7         1 3 5 7        41 43 48 61
 *      0 1 2 3 4       0 1 2 3 4      40 41 42 43 45
 *      0 9 18 27       0 9 8 7        40 61 74 95
 *      0 10 20 30      0              40
 *      0 11 22 33      0 1 2 3        40 41 42 43
 *      0 12 24 36      0 2 4 6        40 42 45 53
 *      78 102 211      1 2 8          41 42 74 [#f1]_
 *      =============== ============== =================
 *
 * .. [#f1]
 *
 *     For these marked lines, if we hadn't first done bitmap_fold()
 *     into tmp, then the @dst result would have been empty.
 *
 * If either of @orig or @relmap is empty (no set bits), then @dst
 * will be returned empty.
 *
 * If (as explained above) the only set bits in @orig are in positions
 * m where m >= W, (where W is the weight of @relmap) then @dst will
 * once again be returned empty.
 *
 * All bits in @dst not set by the above rule are cleared.
 */
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
			const unsigned long *relmap, unsigned int bits)
{
	unsigned int n, m;	/* same meaning as in above comment */

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, bits);

	/*
	 * The following code is a more efficient, but less
	 * obvious, equivalent to the loop:
	 *	for (m = 0; m < bitmap_weight(relmap, bits); m++) {
	 *		n = bitmap_ord_to_pos(orig, m, bits);
	 *		if (test_bit(m, orig))
	 *			set_bit(n, dst);
	 *	}
	 */

	m = 0;
	for_each_set_bit(n, relmap, bits) {
		/* m == bitmap_pos_to_ord(relmap, n, bits) */
		if (test_bit(m, orig))
			set_bit(n, dst);
		m++;
	}
}
EXPORT_SYMBOL(bitmap_onto);

/**
 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
 *	@dst: resulting smaller bitmap
 *	@orig: original larger bitmap
 *	@sz: specified size
 *	@nbits: number of bits in each of these bitmaps
 *
 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
 * Clear all other bits in @dst.  See further the comment and
 * Example [2] for bitmap_onto() for why and how to use this.
 */
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
			unsigned int sz, unsigned int nbits)
{
	unsigned int oldbit;

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, nbits);

	for_each_set_bit(oldbit, orig, nbits)
		set_bit(oldbit % sz, dst);
}
EXPORT_SYMBOL(bitmap_fold);

/*
 * Common code for bitmap_*_region() routines.
 *	bitmap: array of unsigned longs corresponding to the bitmap
 *	pos: the beginning of the region
 *	order: region size (log base 2 of number of bits)
 *	reg_op: operation(s) to perform on that region of bitmap
 *
 * Can set, verify and/or release a region of bits in a bitmap,
 * depending on which combination of REG_OP_* flag bits is set.
 *
 * A region of a bitmap is a sequence of bits in the bitmap, of
 * some size '1 << order' (a power of two), aligned to that same
 * '1 << order' power of two.
 *
 * Returns 1 if REG_OP_ISFREE succeeds (region is all zero bits).
 * Returns 0 in all other cases and reg_ops.
 */

enum {
	REG_OP_ISFREE,		/* true if region is all zero bits */
	REG_OP_ALLOC,		/* set all bits in region */
	REG_OP_RELEASE,		/* clear all bits in region */
};

static int __reg_op(unsigned long *bitmap, unsigned int pos, int order, int reg_op)
{
	int nbits_reg;		/* number of bits in region */
	int index;		/* index first long of region in bitmap */
	int offset;		/* bit offset region in bitmap[index] */
	int nlongs_reg;		/* num longs spanned by region in bitmap */
	int nbitsinlong;	/* num bits of region in each spanned long */
	unsigned long mask;	/* bitmask for one long of region */
	int i;			/* scans bitmap by longs */
	int ret = 0;		/* return value */

	/*
	 * Either nlongs_reg == 1 (for small orders that fit in one long)
	 * or (offset == 0 && mask == ~0UL) (for larger multiword orders.)
	 */
	nbits_reg = 1 << order;
	index = pos / BITS_PER_LONG;
	offset = pos - (index * BITS_PER_LONG);
	nlongs_reg = BITS_TO_LONGS(nbits_reg);
	nbitsinlong = min(nbits_reg, BITS_PER_LONG);

	/*
	 * Can't do "mask = (1UL << nbitsinlong) - 1", as that
	 * overflows if nbitsinlong == BITS_PER_LONG.
	 */
	mask = (1UL << (nbitsinlong - 1));
	mask += mask - 1;
	mask <<= offset;

	switch (reg_op) {
	case REG_OP_ISFREE:
		for (i = 0; i < nlongs_reg; i++) {
			if (bitmap[index + i] & mask)
				goto done;
		}
		ret = 1;	/* all bits in region free (zero) */
		break;

	case REG_OP_ALLOC:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] |= mask;
		break;

	case REG_OP_RELEASE:
		for (i = 0; i < nlongs_reg; i++)
			bitmap[index + i] &= ~mask;
		break;
	}
done:
	return ret;
}

/**
 * bitmap_find_free_region - find a contiguous aligned mem region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@bits: number of bits in the bitmap
 *	@order: region size (log base 2 of number of bits) to find
 *
 * Find a region of free (zero) bits in a @bitmap of @bits bits and
 * allocate them (set them to one).  Only consider regions of length
 * a power (@order) of two, aligned to that power of two, which
 * makes the search algorithm much faster.
 *
 * Return the bit offset in bitmap of the allocated region,
 * or -errno on failure.
 */
int bitmap_find_free_region(unsigned long *bitmap, unsigned int bits, int order)
{
	unsigned int pos, end;		/* scans bitmap by regions of size order */

	for (pos = 0 ; (end = pos + (1U << order)) <= bits; pos = end) {
		if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
			continue;
		__reg_op(bitmap, pos, order, REG_OP_ALLOC);
		return pos;
	}
	return -ENOMEM;
}
EXPORT_SYMBOL(bitmap_find_free_region);
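
/*
 * Illustrative sketch (not from the original file): claiming and later
 * releasing an aligned block of 8 bits (order 3) in a hypothetical map:
 *
 *	DECLARE_BITMAP(map, 64);
 *	int pos;
 *
 *	bitmap_zero(map, 64);
 *	pos = bitmap_find_free_region(map, 64, 3);
 *	if (pos < 0)
 *		return pos;			// -ENOMEM, nothing free
 *	// ... use bits pos..pos+7 ...
 *	bitmap_release_region(map, pos, 3);
 */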

/**
 * bitmap_release_region - release allocated bitmap region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@pos: beginning of bit region to release
 *	@order: region size (log base 2 of number of bits) to release
 *
 * This is the complement to bitmap_find_free_region() and releases
 * the found region (by clearing it in the bitmap).
 *
 * No return value.
 */
void bitmap_release_region(unsigned long *bitmap, unsigned int pos, int order)
{
	__reg_op(bitmap, pos, order, REG_OP_RELEASE);
}
EXPORT_SYMBOL(bitmap_release_region);

/**
 * bitmap_allocate_region - allocate bitmap region
 *	@bitmap: array of unsigned longs corresponding to the bitmap
 *	@pos: beginning of bit region to allocate
 *	@order: region size (log base 2 of number of bits) to allocate
 *
 * Allocate (set bits in) a specified region of a bitmap.
 *
 * Return 0 on success, or %-EBUSY if specified region wasn't
 * free (not all bits were zero).
 */
int bitmap_allocate_region(unsigned long *bitmap, unsigned int pos, int order)
{
	if (!__reg_op(bitmap, pos, order, REG_OP_ISFREE))
		return -EBUSY;
	return __reg_op(bitmap, pos, order, REG_OP_ALLOC);
}
EXPORT_SYMBOL(bitmap_allocate_region);

/**
 * bitmap_from_u32array - copy the contents of a u32 array of bits to bitmap
 *	@bitmap: array of unsigned longs, the destination bitmap, non NULL
 *	@nbits: number of bits in @bitmap
 *	@buf: array of u32 (in host byte order), the source bitmap, non NULL
 *	@nwords: number of u32 words in @buf
 *
 * copy min(nbits, 32*nwords) bits from @buf to @bitmap; the remaining
 * bits between 32*nwords and nbits in @bitmap (if any) are cleared. In
 * the last word of @bitmap, the bits beyond nbits (if any) are kept
 * unchanged.
 *
 * Return the number of bits effectively copied.
 */
unsigned int
bitmap_from_u32array(unsigned long *bitmap, unsigned int nbits,
		     const u32 *buf, unsigned int nwords)
{
	unsigned int dst_idx, src_idx;

	for (src_idx = dst_idx = 0; dst_idx < BITS_TO_LONGS(nbits); ++dst_idx) {
		unsigned long part = 0;

		if (src_idx < nwords)
			part = buf[src_idx++];

#if BITS_PER_LONG == 64
		if (src_idx < nwords)
			part |= ((unsigned long) buf[src_idx++]) << 32;
#endif

		if (dst_idx < nbits/BITS_PER_LONG)
			bitmap[dst_idx] = part;
		else {
			unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);

			bitmap[dst_idx] = (bitmap[dst_idx] & ~mask)
				| (part & mask);
		}
	}

	return min_t(unsigned int, nbits, 32*nwords);
}
EXPORT_SYMBOL(bitmap_from_u32array);

/**
 * bitmap_to_u32array - copy the contents of bitmap to a u32 array of bits
 *	@buf: array of u32 (in host byte order), the dest bitmap, non NULL
 *	@nwords: number of u32 words in @buf
 *	@bitmap: array of unsigned longs, the source bitmap, non NULL
 *	@nbits: number of bits in @bitmap
 *
 * copy min(nbits, 32*nwords) bits from @bitmap to @buf. Remaining
 * bits after nbits in @buf (if any) are cleared.
 *
 * Return the number of bits effectively copied.
 */
unsigned int
bitmap_to_u32array(u32 *buf, unsigned int nwords,
		   const unsigned long *bitmap, unsigned int nbits)
{
	unsigned int dst_idx = 0, src_idx = 0;

	while (dst_idx < nwords) {
		unsigned long part = 0;

		if (src_idx < BITS_TO_LONGS(nbits)) {
			part = bitmap[src_idx];
			if (src_idx >= nbits/BITS_PER_LONG)
				part &= BITMAP_LAST_WORD_MASK(nbits);
			src_idx++;
		}

		buf[dst_idx++] = part & 0xffffffffUL;

#if BITS_PER_LONG == 64
		if (dst_idx < nwords) {
			part >>= 32;
			buf[dst_idx++] = part & 0xffffffffUL;
		}
#endif
	}

	return min_t(unsigned int, nbits, 32*nwords);
}
EXPORT_SYMBOL(bitmap_to_u32array);
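
/*
 * Illustrative sketch (not from the original file): round-tripping a
 * hypothetical 64-bit map through a two-word u32 array:
 *
 *	u32 words[2] = { 0x0000000f, 0x00000001 };
 *	DECLARE_BITMAP(map, 64);
 *
 *	bitmap_from_u32array(map, 64, words, 2);   // bits 0-3 and 32 set
 *	bitmap_to_u32array(words, 2, map, 64);     // words unchanged
 */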

/**
 * bitmap_copy_le - copy a bitmap, putting the bits into little-endian order.
 * @dst:   destination buffer
 * @src:   bitmap to copy
 * @nbits: number of bits in the bitmap
 *
 * Require nbits % BITS_PER_LONG == 0.
 */
#ifdef __BIG_ENDIAN
void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits)
{
	unsigned int i;

	for (i = 0; i < nbits/BITS_PER_LONG; i++) {
		if (BITS_PER_LONG == 64)
			dst[i] = cpu_to_le64(src[i]);
		else
			dst[i] = cpu_to_le32(src[i]);
	}
}
EXPORT_SYMBOL(bitmap_copy_le);
#endif

unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags)
{
	return kmalloc_array(BITS_TO_LONGS(nbits), sizeof(unsigned long),
			     flags);
}
EXPORT_SYMBOL(bitmap_alloc);

unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags)
{
	return bitmap_alloc(nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL(bitmap_zalloc);

void bitmap_free(const unsigned long *bitmap)
{
	kfree(bitmap);
}
EXPORT_SYMBOL(bitmap_free);
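
/*
 * Illustrative sketch (not from the original file): the usual lifecycle for
 * a dynamically sized bitmap, assuming a hypothetical nbits known at runtime:
 *
 *	unsigned long *mask = bitmap_zalloc(nbits, GFP_KERNEL);
 *
 *	if (!mask)
 *		return -ENOMEM;
 *	// ... bitmap_set(), test_bit(), etc. on mask ...
 *	bitmap_free(mask);
 */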