// SPDX-License-Identifier: GPL-2.0-only
/*
 * lib/bitmap.c
 * Helper functions for bitmap.h.
 */

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/ctype.h>
#include <linux/device.h>
#include <linux/export.h>
#include <linux/slab.h>
/**
 * DOC: bitmap introduction
 *
 * bitmaps provide an array of bits, implemented using an
 * array of unsigned longs.  The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'.  The implementation makes
 * no particular effort to keep them zero.  It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures.  See the big-endian headers
 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
 * for the best explanations of this ordering.
 */
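
/*
 * Usage sketch (illustrative, not part of the original file; names are
 * made up): a fixed-size bitmap is usually declared with DECLARE_BITMAP()
 * and explicitly initialised, since the backing words start out
 * uninitialised.  The unused tail of the last word never influences the
 * filtered operations described above:
 *
 *	DECLARE_BITMAP(mask, 100);	// two words on a 64-bit kernel
 *
 *	bitmap_zero(mask, 100);
 *	bitmap_set(mask, 10, 5);	// set bits 10..14
 *	// bitmap_weight(mask, 100) == 5, regardless of bits 100..127
 */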
bool __bitmap_equal(const unsigned long *bitmap1,
		    const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;

	for (k = 0; k < lim; ++k)
		if (bitmap1[k] != bitmap2[k])
			return false;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return false;

	return true;
}
EXPORT_SYMBOL(__bitmap_equal);
bool __bitmap_or_equal(const unsigned long *bitmap1,
		       const unsigned long *bitmap2,
		       const unsigned long *bitmap3,
		       unsigned int bits)
{
	unsigned int k, lim = bits / BITS_PER_LONG;
	unsigned long tmp;

	for (k = 0; k < lim; ++k) {
		if ((bitmap1[k] | bitmap2[k]) != bitmap3[k])
			return false;
	}

	if (!(bits % BITS_PER_LONG))
		return true;

	tmp = (bitmap1[k] | bitmap2[k]) ^ bitmap3[k];
	return (tmp & BITMAP_LAST_WORD_MASK(bits)) == 0;
}
void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
{
	unsigned int k, lim = BITS_TO_LONGS(bits);

	for (k = 0; k < lim; ++k)
		dst[k] = ~src[k];
}
EXPORT_SYMBOL(__bitmap_complement);
/**
 * __bitmap_shift_right - logical right shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting right (dividing) means moving bits in the MS -> LS bit
 * direction.  Zeros are fed into the vacated MS positions and the
 * LS bits shifted off the bottom are lost.
 */
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			unsigned shift, unsigned nbits)
{
	unsigned k, lim = BITS_TO_LONGS(nbits);
	unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);

	for (k = 0; off + k < lim; ++k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take lower rem bits of
		 * word above and make them the top rem bits of result.
		 */
		if (!rem || off + k + 1 >= lim)
			upper = 0;
		else {
			upper = src[off + k + 1];
			if (off + k + 1 == lim - 1)
				upper &= mask;
			upper <<= (BITS_PER_LONG - rem);
		}
		lower = src[off + k];
		if (off + k == lim - 1)
			lower &= mask;
		lower >>= rem;
		dst[k] = lower | upper;
	}
	if (off)
		memset(&dst[lim - off], 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_right);
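
/*
 * Usage sketch (illustrative, not part of the original file): callers
 * normally use the bitmap_shift_right() wrapper from linux/bitmap.h
 * rather than this helper directly.  With only bit 65 set, a right shift
 * by 64 leaves only bit 1 set:
 *
 *	DECLARE_BITMAP(src, 128);
 *	DECLARE_BITMAP(dst, 128);
 *
 *	bitmap_zero(src, 128);
 *	set_bit(65, src);
 *	bitmap_shift_right(dst, src, 64, 128);	// only bit 1 is set in dst
 */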
/**
 * __bitmap_shift_left - logical left shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting left (multiplying) means moving bits in the LS -> MS
 * direction.  Zeros are fed into the vacated LS bit positions
 * and those MS bits shifted off the top are lost.
 */
void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			unsigned int shift, unsigned int nbits)
{
	int k;
	unsigned int lim = BITS_TO_LONGS(nbits);
	unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;

	for (k = lim - off - 1; k >= 0; --k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take upper rem bits of
		 * word below and make them the bottom rem bits of result.
		 */
		if (rem && k > 0)
			lower = src[k - 1] >> (BITS_PER_LONG - rem);
		else
			lower = 0;
		upper = src[k] << rem;
		dst[k + off] = lower | upper;
	}
	if (off)
		memset(dst, 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_left);
/**
 * bitmap_cut() - remove bit region from bitmap and right shift remaining bits
 * @dst: destination bitmap, might overlap with src
 * @src: source bitmap
 * @first: start bit of region to be removed
 * @cut: number of bits to remove
 * @nbits: bitmap size, in bits
 *
 * Set the n-th bit of @dst iff the n-th bit of @src is set and
 * n is less than @first, or the m-th bit of @src is set for any
 * m such that @first <= n < nbits, and m = n + @cut.
 *
 * In pictures, example for a big-endian 32-bit architecture:
 *
 * The @src bitmap is::
 *
 *   10000000 11000001 11110010 00010101  10000000 11000001 01110010 00010101
 *
 * if @cut is 3, and @first is 14, bits 14-16 in @src are cut and @dst is::
 *
 *   10110000 00011000 00110010 00010101  00010000 00011000 00101110 01000010
 *
 * Note that @dst and @src might overlap partially or entirely.
 *
 * This is implemented in the obvious way, with a shift and carry
 * step for each moved bit.  Optimisation is left as an exercise
 * for the compiler.
 */
void bitmap_cut(unsigned long *dst, const unsigned long *src,
		unsigned int first, unsigned int cut, unsigned int nbits)
{
	unsigned int len = BITS_TO_LONGS(nbits);
	unsigned long keep = 0, carry;
	int i;

	if (first % BITS_PER_LONG) {
		keep = src[first / BITS_PER_LONG] &
		       (~0UL >> (BITS_PER_LONG - first % BITS_PER_LONG));
	}

	memmove(dst, src, len * sizeof(*dst));

	while (cut--) {
		for (i = first / BITS_PER_LONG; i < len; i++) {
			if (i < len - 1)
				carry = dst[i + 1] & 1UL;
			else
				carry = 0;

			dst[i] = (dst[i] >> 1) | (carry << (BITS_PER_LONG - 1));
		}
	}

	dst[first / BITS_PER_LONG] &= ~0UL << (first % BITS_PER_LONG);
	dst[first / BITS_PER_LONG] |= keep;
}
EXPORT_SYMBOL(bitmap_cut);
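
/*
 * Usage sketch (illustrative, not part of the original file): removing a
 * region from the middle of a bitmap closes the gap, pulling the higher
 * bits down by @cut positions.  With bits 0 and 20 set, cutting 5 bits
 * starting at bit 8 leaves bits 0 and 15 set:
 *
 *	DECLARE_BITMAP(map, 64);
 *
 *	bitmap_zero(map, 64);
 *	set_bit(0, map);
 *	set_bit(20, map);
 *	bitmap_cut(map, map, 8, 5, 64);	// in-place: bits 0 and 15 now set
 */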
bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_and);
void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_or);
void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] ^ bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_xor);
bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_andnot);
void __bitmap_replace(unsigned long *dst,
		      const unsigned long *old, const unsigned long *new,
		      const unsigned long *mask, unsigned int nbits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(nbits);

	for (k = 0; k < nr; k++)
		dst[k] = (old[k] & ~mask[k]) | (new[k] & mask[k]);
}
EXPORT_SYMBOL(__bitmap_replace);
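
/*
 * Usage sketch (illustrative, not part of the original file): the
 * bitmap_replace() wrapper from linux/bitmap.h copies @old to @dst,
 * except that the bit positions selected by @mask are taken from @new:
 *
 *	DECLARE_BITMAP(dst, 64);
 *	DECLARE_BITMAP(old, 64);
 *	DECLARE_BITMAP(new, 64);
 *	DECLARE_BITMAP(mask, 64);
 *
 *	// dst = (old & ~mask) | (new & mask), evaluated word by word
 *	bitmap_replace(dst, old, new, mask, 64);
 */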
bool __bitmap_intersects(const unsigned long *bitmap1,
			 const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;

	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & bitmap2[k])
			return true;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return true;
	return false;
}
EXPORT_SYMBOL(__bitmap_intersects);
bool __bitmap_subset(const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;

	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & ~bitmap2[k])
			return false;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return false;
	return true;
}
EXPORT_SYMBOL(__bitmap_subset);
#define BITMAP_WEIGHT(FETCH, bits)					\
({									\
	unsigned int __bits = (bits), idx, w = 0;			\
									\
	for (idx = 0; idx < __bits / BITS_PER_LONG; idx++)		\
		w += hweight_long(FETCH);				\
									\
	if (__bits % BITS_PER_LONG)					\
		w += hweight_long((FETCH) & BITMAP_LAST_WORD_MASK(__bits)); \
									\
	w;								\
})

unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight);
unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap1[idx] & bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_and);
unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
				const unsigned long *bitmap2, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap1[idx] & ~bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_andnot);
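
/*
 * Usage sketch (illustrative, not part of the original file): the
 * bitmap_weight*() wrappers count set bits while masking off the unused
 * tail of the last word:
 *
 *	DECLARE_BITMAP(a, 70);
 *	DECLARE_BITMAP(b, 70);
 *
 *	bitmap_fill(a, 70);
 *	bitmap_zero(b, 70);
 *	bitmap_set(b, 0, 8);
 *
 *	// bitmap_weight(a, 70) == 70
 *	// bitmap_weight_and(a, b, 70) == 8
 *	// bitmap_weight_andnot(a, b, 70) == 62
 */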
void __bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
EXPORT_SYMBOL(__bitmap_set);
void __bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
EXPORT_SYMBOL(__bitmap_clear);
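
/*
 * Usage sketch (illustrative, not part of the original file): the
 * bitmap_set()/bitmap_clear() wrappers operate on an arbitrary run of
 * bits, crossing word boundaries as needed:
 *
 *	DECLARE_BITMAP(map, 256);
 *
 *	bitmap_zero(map, 256);
 *	bitmap_set(map, 60, 10);	// set bits 60..69, spanning two words
 *	bitmap_clear(map, 64, 2);	// clear bits 64 and 65 again
 */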
/**
 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 * @align_offset: Alignment offset for zero area.
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds plus @align_offset
 * is a multiple of that power of 2.
 */
unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset)
{
	unsigned long index, end, i;
again:
	index = find_next_zero_bit(map, size, start);

	/* Align allocation */
	index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;

	end = index + nr;
	if (end > size)
		return end;
	i = find_next_bit(map, end, index);
	if (i < end) {
		start = i + 1;
		goto again;
	}
	return index;
}
EXPORT_SYMBOL(bitmap_find_next_zero_area_off);
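
/*
 * Usage sketch (illustrative, not part of the original file; 'map', 'size'
 * and 'pos' are made-up names): simple allocators usually go through the
 * bitmap_find_next_zero_area() wrapper (align_offset of 0) to reserve an
 * aligned run of clear bits:
 *
 *	// look for 4 clear bits, aligned to 4 bits, starting at bit 0
 *	pos = bitmap_find_next_zero_area(map, size, 0, 4, 3);
 *	if (pos >= size)
 *		return -ENOMEM;		// no suitable area found
 *	bitmap_set(map, pos, 4);	// claim the area
 */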
/**
 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
 * @buf: pointer to a bitmap
 * @pos: a bit position in @buf (0 <= @pos < @nbits)
 * @nbits: number of valid bit positions in @buf
 *
 * Map the bit at position @pos in @buf (of length @nbits) to the
 * ordinal of which set bit it is.  If it is not set or if @pos
 * is not a valid bit position, map to -1.
 *
 * If for example, just bits 4 through 7 are set in @buf, then @pos
 * values 4 through 7 will get mapped to 0 through 3, respectively,
 * and other @pos values will get mapped to -1.  When @pos value 7
 * gets mapped to (returns) @ord value 3 in this example, that means
 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
 *
 * The bit positions 0 through @nbits - 1 are valid positions in @buf.
 */
static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
{
	if (pos >= nbits || !test_bit(pos, buf))
		return -1;

	return bitmap_weight(buf, pos);
}
/**
 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
 * @dst: remapped result
 * @src: subset to be remapped
 * @old: defines domain of map
 * @new: defines range of map
 * @nbits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * If either of the @old and @new bitmaps are empty, or if @src and
 * @dst point to the same location, then this routine copies @src
 * to @dst.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to @src, placing the result in
 * @dst, clearing any bits previously set in @dst.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @src comes into this routine
 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
 * 13 and 15 set.
 */
void bitmap_remap(unsigned long *dst, const unsigned long *src,
		const unsigned long *old, const unsigned long *new,
		unsigned int nbits)
{
	unsigned int oldbit, w;

	if (dst == src)		/* following doesn't handle inplace remaps */
		return;
	bitmap_zero(dst, nbits);

	w = bitmap_weight(new, nbits);
	for_each_set_bit(oldbit, src, nbits) {
		int n = bitmap_pos_to_ord(old, oldbit, nbits);

		if (n < 0 || w == 0)
			set_bit(oldbit, dst);	/* identity map */
		else
			set_bit(find_nth_bit(new, nbits, n % w), dst);
	}
}
EXPORT_SYMBOL(bitmap_remap);
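
/*
 * Usage sketch (illustrative, not part of the original file), following the
 * example in the comment above: with @old = bits 4-7 and @new = bits 12-15,
 * a @src holding bits {1, 5, 7} remaps to {1, 13, 15}:
 *
 *	DECLARE_BITMAP(src, 32);
 *	DECLARE_BITMAP(dst, 32);
 *	DECLARE_BITMAP(old, 32);
 *	DECLARE_BITMAP(new, 32);
 *
 *	bitmap_zero(old, 32);  bitmap_set(old, 4, 4);	// bits 4..7
 *	bitmap_zero(new, 32);  bitmap_set(new, 12, 4);	// bits 12..15
 *	bitmap_zero(src, 32);
 *	set_bit(1, src);  set_bit(5, src);  set_bit(7, src);
 *
 *	bitmap_remap(dst, src, old, new, 32);	// dst: bits 1, 13 and 15
 */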
/**
 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 * @oldbit: bit position to be mapped
 * @old: defines domain of map
 * @new: defines range of map
 * @bits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new.  In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to bit position @oldbit, returning
 * the new bit position.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set.  This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged.  So if say @oldbit is 5, then this routine
 * returns 13.
 */
int bitmap_bitremap(int oldbit, const unsigned long *old,
				const unsigned long *new, int bits)
{
	int w = bitmap_weight(new, bits);
	int n = bitmap_pos_to_ord(old, oldbit, bits);

	if (n < 0 || w == 0)
		return oldbit; /* identity map */
	return find_nth_bit(new, bits, n % w);
}
EXPORT_SYMBOL(bitmap_bitremap);
#ifdef CONFIG_NUMA
/**
 * bitmap_onto - translate one bitmap relative to another
 * @dst: resulting translated bitmap
 * @orig: original untranslated bitmap
 * @relmap: bitmap relative to which translated
 * @bits: number of bits in each of these bitmaps
 *
 * Set the n-th bit of @dst iff there exists some m such that the
 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
 * (If you understood the previous sentence the first time you
 * read it, you're overqualified for your current job.)
 *
 * In other words, @orig is mapped onto (surjectively) @dst,
 * using the map { <n, m> | the n-th bit of @relmap is the
 * m-th set bit of @relmap }.
 *
 * Any set bits in @orig above bit number W, where W is the
 * weight of (number of set bits in) @relmap are mapped nowhere.
 * In particular, if for all bits m set in @orig, m >= W, then
 * @dst will end up empty.  In situations where the possibility
 * of such an empty result is not desired, one way to avoid it is
 * to use the bitmap_fold() operator, below, to first fold the
 * @orig bitmap over itself so that all its set bits x are in the
 * range 0 <= x < W.  The bitmap_fold() operator does this by
 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
 *
 * Example [1] for bitmap_onto():
 * Let's say @relmap has bits 30-39 set, and @orig has bits
 * 1, 3, 5, 7, 9 and 11 set.  Then on return from this routine,
 * @dst will have bits 31, 33, 35, 37 and 39 set.
 *
 * When bit 0 is set in @orig, it means turn on the bit in
 * @dst corresponding to whatever is the first bit (if any)
 * that is turned on in @relmap.  Since bit 0 was off in the
 * above example, we leave off that bit (bit 30) in @dst.
 *
 * When bit 1 is set in @orig (as in the above example), it
 * means turn on the bit in @dst corresponding to whatever
 * is the second bit that is turned on in @relmap.  The second
 * bit in @relmap that was turned on in the above example was
 * bit 31, so we turned on bit 31 in @dst.
 *
 * Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
 * because they were the 4th, 6th, 8th and 10th set bits
 * set in @relmap, and the 4th, 6th, 8th and 10th bits of
 * @orig (i.e. bits 3, 5, 7 and 9) were also set.
 *
 * When bit 11 is set in @orig, it means turn on the bit in
 * @dst corresponding to whatever is the twelfth bit that is
 * turned on in @relmap.  In the above example, there were
 * only ten bits turned on in @relmap (30..39), so the fact
 * that bit 11 was set in @orig had no effect on @dst.
 *
 * Example [2] for bitmap_fold() + bitmap_onto():
 * Let's say @relmap has these ten bits set::
 *
 *	40 41 42 43 45 48 53 61 74 95
 *
 * (for the curious, that's 40 plus the first ten terms of the
 * Fibonacci sequence.)
 *
 * Further let's say we use the following code, invoking
 * bitmap_fold() then bitmap_onto(), as suggested above to
 * avoid the possibility of an empty @dst result::
 *
 *	unsigned long *tmp;	// a temporary bitmap's bits
 *
 *	bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
 *	bitmap_onto(dst, tmp, relmap, bits);
 *
 * Then this table shows what various values of @dst would be, for
 * various @orig's.  I list the zero-based positions of each set bit.
 * The tmp column shows the intermediate result, as computed by
 * using bitmap_fold() to fold the @orig bitmap modulo ten
 * (the weight of @relmap):
 *
 *	=============== ============== =================
 *	@orig           tmp            @dst
 *	1 3 5 7         1 3 5 7        41 43 48 61
 *	0 1 2 3 4       0 1 2 3 4      40 41 42 43 45
 *	0 9 18 27       0 9 8 7        40 61 74 95
 *	0 11 22 33      0 1 2 3        40 41 42 43
 *	0 12 24 36      0 2 4 6        40 42 45 53
 *	78 102 211      1 2 8          41 42 74 [#f1]_
 *	=============== ============== =================
 *
 * .. [#f1]
 *
 *	For these marked lines, if we hadn't first done bitmap_fold()
 *	into tmp, then the @dst result would have been empty.
 *
 * If either of @orig or @relmap is empty (no set bits), then @dst
 * will be returned empty.
 *
 * If (as explained above) the only set bits in @orig are in positions
 * m where m >= W, (where W is the weight of @relmap) then @dst will
 * once again be returned empty.
 *
 * All bits in @dst not set by the above rule are cleared.
 */
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
			const unsigned long *relmap, unsigned int bits)
{
	unsigned int n, m;	/* same meaning as in above comment */

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, bits);

	/*
	 * The following code is a more efficient, but less
	 * obvious, equivalent to the loop:
	 *	for (m = 0; m < bitmap_weight(relmap, bits); m++) {
	 *		n = find_nth_bit(orig, bits, m);
	 *		if (test_bit(m, orig))
	 *			set_bit(n, dst);
	 *	}
	 */

	m = 0;
	for_each_set_bit(n, relmap, bits) {
		/* m == bitmap_pos_to_ord(relmap, n, bits) */
		if (test_bit(m, orig))
			set_bit(n, dst);
		m++;
	}
}
/**
 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
 * @dst: resulting smaller bitmap
 * @orig: original larger bitmap
 * @sz: specified size
 * @nbits: number of bits in each of these bitmaps
 *
 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
 * Clear all other bits in @dst.  See further the comment and
 * Example [2] for bitmap_onto() for why and how to use this.
 */
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
			unsigned int sz, unsigned int nbits)
{
	unsigned int oldbit;

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, nbits);

	for_each_set_bit(oldbit, orig, nbits)
		set_bit(oldbit % sz, dst);
}
#endif /* CONFIG_NUMA */
unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags)
{
	return kmalloc_array(BITS_TO_LONGS(nbits), sizeof(unsigned long),
			     flags);
}
EXPORT_SYMBOL(bitmap_alloc);
unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags)
{
	return bitmap_alloc(nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL(bitmap_zalloc);
unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node)
{
	return kmalloc_array_node(BITS_TO_LONGS(nbits), sizeof(unsigned long),
				  flags, node);
}
EXPORT_SYMBOL(bitmap_alloc_node);
unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node)
{
	return bitmap_alloc_node(nbits, flags | __GFP_ZERO, node);
}
EXPORT_SYMBOL(bitmap_zalloc_node);
void bitmap_free(const unsigned long *bitmap)
{
	kfree(bitmap);
}
EXPORT_SYMBOL(bitmap_free);
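
/*
 * Usage sketch (illustrative, not part of the original file): heap-backed
 * bitmaps pair bitmap_zalloc() (or bitmap_alloc()) with bitmap_free():
 *
 *	unsigned long *map;
 *
 *	map = bitmap_zalloc(1024, GFP_KERNEL);
 *	if (!map)
 *		return -ENOMEM;
 *	...
 *	bitmap_free(map);
 */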
static void devm_bitmap_free(void *data)
{
	unsigned long *bitmap = data;

	bitmap_free(bitmap);
}
unsigned long *devm_bitmap_alloc(struct device *dev,
				 unsigned int nbits, gfp_t flags)
{
	unsigned long *bitmap;
	int ret;

	bitmap = bitmap_alloc(nbits, flags);
	if (!bitmap)
		return NULL;

	ret = devm_add_action_or_reset(dev, devm_bitmap_free, bitmap);
	if (ret)
		return NULL;

	return bitmap;
}
EXPORT_SYMBOL_GPL(devm_bitmap_alloc);
unsigned long *devm_bitmap_zalloc(struct device *dev,
				  unsigned int nbits, gfp_t flags)
{
	return devm_bitmap_alloc(dev, nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL_GPL(devm_bitmap_zalloc);
#if BITS_PER_LONG == 64
/**
 * bitmap_from_arr32 - copy the contents of u32 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u32 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf, unsigned int nbits)
{
	unsigned int i, halfwords;

	halfwords = DIV_ROUND_UP(nbits, 32);
	for (i = 0; i < halfwords; i++) {
		bitmap[i/2] = (unsigned long) buf[i];
		if (++i < halfwords)
			bitmap[i/2] |= ((unsigned long) buf[i]) << 32;
	}

	/* Clear tail bits in last word beyond nbits. */
	if (nbits % BITS_PER_LONG)
		bitmap[(halfwords - 1) / 2] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr32);
/**
 * bitmap_to_arr32 - copy the contents of bitmap to a u32 array of bits
 * @buf: array of u32 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap, unsigned int nbits)
{
	unsigned int i, halfwords;

	halfwords = DIV_ROUND_UP(nbits, 32);
	for (i = 0; i < halfwords; i++) {
		buf[i] = (u32) (bitmap[i/2] & UINT_MAX);
		if (++i < halfwords)
			buf[i] = (u32) (bitmap[i/2] >> 32);
	}

	/* Clear tail bits in last element of array beyond nbits. */
	if (nbits % BITS_PER_LONG)
		buf[halfwords - 1] &= (u32) (UINT_MAX >> ((-nbits) & 31));
}
EXPORT_SYMBOL(bitmap_to_arr32);
#endif
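
/*
 * Usage sketch (illustrative, not part of the original file): these helpers
 * convert between a bitmap and a fixed-width u32 array, e.g. when a bitmap
 * has to cross a 32-bit ABI boundary regardless of BITS_PER_LONG:
 *
 *	DECLARE_BITMAP(map, 100);
 *	u32 arr[DIV_ROUND_UP(100, 32)];
 *
 *	bitmap_to_arr32(arr, map, 100);		// export bitmap as u32 words
 *	bitmap_from_arr32(map, arr, 100);	// import it back
 */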
#if BITS_PER_LONG == 32
/**
 * bitmap_from_arr64 - copy the contents of u64 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u64 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits)
{
	int n;

	for (n = nbits; n > 0; n -= 64) {
		u64 val = *buf++;

		*bitmap++ = val;
		if (n > 32)
			*bitmap++ = val >> 32;
	}

	/*
	 * Clear tail bits in the last word beyond nbits.
	 *
	 * Negative index is OK because here we point to the word next
	 * to the last word of the bitmap, except for nbits == 0, which
	 * is tested implicitly.
	 */
	if (nbits % BITS_PER_LONG)
		bitmap[-1] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr64);
/**
 * bitmap_to_arr64 - copy the contents of bitmap to a u64 array of bits
 * @buf: array of u64 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits)
{
	const unsigned long *end = bitmap + BITS_TO_LONGS(nbits);

	while (bitmap < end) {
		*buf = *bitmap++;
		if (bitmap < end)
			*buf |= (u64)(*bitmap++) << 32;
		buf++;
	}

	/* Clear tail bits in the last element of array beyond nbits. */
	if (nbits % 64)
		buf[-1] &= GENMASK_ULL((nbits - 1) % 64, 0);
}
EXPORT_SYMBOL(bitmap_to_arr64);
#endif