Linux kernel mirror (for testing)
git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
// SPDX-License-Identifier: GPL-2.0-only
/*
 * lib/bitmap.c
 * Helper functions for bitmap.h.
 */

#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/ctype.h>
#include <linux/device.h>
#include <linux/export.h>
#include <linux/slab.h>

/**
 * DOC: bitmap introduction
 *
 * bitmaps provide an array of bits, implemented using an
 * array of unsigned longs. The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'. The implementation makes
 * no particular effort to keep them zero. It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures. See the big-endian headers
 * include/asm-ppc64/bitops.h and include/asm-s390/bitops.h
 * for the best explanations of this ordering.
 */

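/*
 * Editor's illustration, not part of the original source: a minimal usage
 * sketch of the representation described above, using only the standard
 * <linux/bitmap.h> API (names and sizes are illustrative):
 *
 *	DECLARE_BITMAP(map, 100);	// two words on a 64-bit machine
 *
 *	bitmap_zero(map, 100);
 *	bitmap_set(map, 90, 10);	// set bits 90..99 in the last word
 *
 *	// Bits 100..127 of the last word are "don't care"; helpers such as
 *	// bitmap_weight() and bitmap_empty() mask them off with
 *	// BITMAP_LAST_WORD_MASK() before computing their result.
 *	pr_info("weight=%u\n", bitmap_weight(map, 100));	// prints 10
 */
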
bool __bitmap_equal(const unsigned long *bitmap1,
		    const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] != bitmap2[k])
			return false;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return false;

	return true;
}
EXPORT_SYMBOL(__bitmap_equal);

bool __bitmap_or_equal(const unsigned long *bitmap1,
		       const unsigned long *bitmap2,
		       const unsigned long *bitmap3,
		       unsigned int bits)
{
	unsigned int k, lim = bits / BITS_PER_LONG;
	unsigned long tmp;

	for (k = 0; k < lim; ++k) {
		if ((bitmap1[k] | bitmap2[k]) != bitmap3[k])
			return false;
	}

	if (!(bits % BITS_PER_LONG))
		return true;

	tmp = (bitmap1[k] | bitmap2[k]) ^ bitmap3[k];
	return (tmp & BITMAP_LAST_WORD_MASK(bits)) == 0;
}

void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits)
{
	unsigned int k, lim = BITS_TO_LONGS(bits);
	for (k = 0; k < lim; ++k)
		dst[k] = ~src[k];
}
EXPORT_SYMBOL(__bitmap_complement);

/**
 * __bitmap_shift_right - logical right shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting right (dividing) means moving bits in the MS -> LS bit
 * direction. Zeros are fed into the vacated MS positions and the
 * LS bits shifted off the bottom are lost.
 */
void __bitmap_shift_right(unsigned long *dst, const unsigned long *src,
			  unsigned shift, unsigned nbits)
{
	unsigned k, lim = BITS_TO_LONGS(nbits);
	unsigned off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	unsigned long mask = BITMAP_LAST_WORD_MASK(nbits);
	for (k = 0; off + k < lim; ++k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take lower rem bits of
		 * word above and make them the top rem bits of result.
		 */
		if (!rem || off + k + 1 >= lim)
			upper = 0;
		else {
			upper = src[off + k + 1];
			if (off + k + 1 == lim - 1)
				upper &= mask;
			upper <<= (BITS_PER_LONG - rem);
		}
		lower = src[off + k];
		if (off + k == lim - 1)
			lower &= mask;
		lower >>= rem;
		dst[k] = lower | upper;
	}
	if (off)
		memset(&dst[lim - off], 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_right);

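/*
 * Editor's illustration, not part of the original source: a hedged sketch
 * of the right-shift semantics documented above, through the
 * bitmap_shift_right() wrapper from <linux/bitmap.h> (values illustrative):
 *
 *	DECLARE_BITMAP(src, 16);
 *	DECLARE_BITMAP(dst, 16);
 *
 *	bitmap_zero(src, 16);
 *	bitmap_set(src, 8, 4);			// bits 8..11 set
 *	bitmap_shift_right(dst, src, 8, 16);	// calls __bitmap_shift_right()
 *	// dst now has bits 0..3 set; the vacated MS positions are zero.
 */
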

/**
 * __bitmap_shift_left - logical left shift of the bits in a bitmap
 * @dst : destination bitmap
 * @src : source bitmap
 * @shift : shift by this many bits
 * @nbits : bitmap size, in bits
 *
 * Shifting left (multiplying) means moving bits in the LS -> MS
 * direction. Zeros are fed into the vacated LS bit positions
 * and those MS bits shifted off the top are lost.
 */

void __bitmap_shift_left(unsigned long *dst, const unsigned long *src,
			 unsigned int shift, unsigned int nbits)
{
	int k;
	unsigned int lim = BITS_TO_LONGS(nbits);
	unsigned int off = shift/BITS_PER_LONG, rem = shift % BITS_PER_LONG;
	for (k = lim - off - 1; k >= 0; --k) {
		unsigned long upper, lower;

		/*
		 * If shift is not word aligned, take upper rem bits of
		 * word below and make them the bottom rem bits of result.
		 */
		if (rem && k > 0)
			lower = src[k - 1] >> (BITS_PER_LONG - rem);
		else
			lower = 0;
		upper = src[k] << rem;
		dst[k + off] = lower | upper;
	}
	if (off)
		memset(dst, 0, off*sizeof(unsigned long));
}
EXPORT_SYMBOL(__bitmap_shift_left);

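/*
 * Editor's illustration, not part of the original source: the mirror image
 * of the previous sketch, for the left shift (values illustrative):
 *
 *	bitmap_zero(src, 16);
 *	bitmap_set(src, 0, 4);			// bits 0..3 set
 *	bitmap_shift_left(dst, src, 8, 16);	// calls __bitmap_shift_left()
 *	// dst now has bits 8..11 set; bits shifted past bit 15 would be lost.
 */
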
/**
 * bitmap_cut() - remove bit region from bitmap and right shift remaining bits
 * @dst: destination bitmap, might overlap with src
 * @src: source bitmap
 * @first: start bit of region to be removed
 * @cut: number of bits to remove
 * @nbits: bitmap size, in bits
 *
 * Set the n-th bit of @dst iff the n-th bit of @src is set and
 * n is less than @first, or the m-th bit of @src is set for any
 * m such that @first <= n < nbits, and m = n + @cut.
 *
 * In pictures, example for a big-endian 32-bit architecture:
 *
 * The @src bitmap is::
 *
 *  31                                   63
 *  |                                    |
 *  10000000 11000001 11110010 00010101  10000000 11000001 01110010 00010101
 *                  |  |               |                                   |
 *                 16  14              0                                  32
 *
 * if @cut is 3, and @first is 14, bits 14-16 in @src are cut and @dst is::
 *
 *  31                                   63
 *  |                                    |
 *  10110000 00011000 00110010 00010101  00010000 00011000 00101110 01000010
 *                     |               |                                   |
 *                     14 (bit 17      0                                  32
 *                      from @src)
 *
 * Note that @dst and @src might overlap partially or entirely.
 *
 * This is implemented in the obvious way, with a shift and carry
 * step for each moved bit. Optimisation is left as an exercise
 * for the compiler.
 */
void bitmap_cut(unsigned long *dst, const unsigned long *src,
		unsigned int first, unsigned int cut, unsigned int nbits)
{
	unsigned int len = BITS_TO_LONGS(nbits);
	unsigned long keep = 0, carry;
	int i;

	if (first % BITS_PER_LONG) {
		keep = src[first / BITS_PER_LONG] &
		       (~0UL >> (BITS_PER_LONG - first % BITS_PER_LONG));
	}

	memmove(dst, src, len * sizeof(*dst));

	while (cut--) {
		for (i = first / BITS_PER_LONG; i < len; i++) {
			if (i < len - 1)
				carry = dst[i + 1] & 1UL;
			else
				carry = 0;

			dst[i] = (dst[i] >> 1) | (carry << (BITS_PER_LONG - 1));
		}
	}

	dst[first / BITS_PER_LONG] &= ~0UL << (first % BITS_PER_LONG);
	dst[first / BITS_PER_LONG] |= keep;
}
EXPORT_SYMBOL(bitmap_cut);

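/*
 * Editor's illustration, not part of the original source: a small sketch
 * of bitmap_cut() matching the picture above (values illustrative):
 *
 *	DECLARE_BITMAP(map, 64);
 *
 *	// ... fill map ...
 *	bitmap_cut(map, map, 14, 3, 64);	// drop bits 14-16 in place
 *	// Bits 0..13 are unchanged; former bits 17..63 now occupy 14..60,
 *	// and the vacated top bits of the region are zero-filled.
 */
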
bool __bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_and);

void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
		 const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_or);

void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
		  const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] ^ bitmap2[k];
}
EXPORT_SYMBOL(__bitmap_xor);

bool __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int lim = bits/BITS_PER_LONG;
	unsigned long result = 0;

	for (k = 0; k < lim; k++)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
	if (bits % BITS_PER_LONG)
		result |= (dst[k] = bitmap1[k] & ~bitmap2[k] &
			   BITMAP_LAST_WORD_MASK(bits));
	return result != 0;
}
EXPORT_SYMBOL(__bitmap_andnot);

void __bitmap_replace(unsigned long *dst,
		      const unsigned long *old, const unsigned long *new,
		      const unsigned long *mask, unsigned int nbits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(nbits);

	for (k = 0; k < nr; k++)
		dst[k] = (old[k] & ~mask[k]) | (new[k] & mask[k]);
}
EXPORT_SYMBOL(__bitmap_replace);

bool __bitmap_intersects(const unsigned long *bitmap1,
			 const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & bitmap2[k])
			return true;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return true;
	return false;
}
EXPORT_SYMBOL(__bitmap_intersects);

bool __bitmap_subset(const unsigned long *bitmap1,
		     const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k, lim = bits/BITS_PER_LONG;
	for (k = 0; k < lim; ++k)
		if (bitmap1[k] & ~bitmap2[k])
			return false;

	if (bits % BITS_PER_LONG)
		if ((bitmap1[k] & ~bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits))
			return false;
	return true;
}
EXPORT_SYMBOL(__bitmap_subset);

#define BITMAP_WEIGHT(FETCH, bits)	\
({	\
	unsigned int __bits = (bits), idx, w = 0;	\
	\
	for (idx = 0; idx < __bits / BITS_PER_LONG; idx++)	\
		w += hweight_long(FETCH);	\
	\
	if (__bits % BITS_PER_LONG)	\
		w += hweight_long((FETCH) & BITMAP_LAST_WORD_MASK(__bits));	\
	\
	w;	\
})

unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight);

unsigned int __bitmap_weight_and(const unsigned long *bitmap1,
				 const unsigned long *bitmap2, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap1[idx] & bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_and);

unsigned int __bitmap_weight_andnot(const unsigned long *bitmap1,
				    const unsigned long *bitmap2, unsigned int bits)
{
	return BITMAP_WEIGHT(bitmap1[idx] & ~bitmap2[idx], bits);
}
EXPORT_SYMBOL(__bitmap_weight_andnot);

unsigned int __bitmap_weighted_or(unsigned long *dst, const unsigned long *bitmap1,
				  const unsigned long *bitmap2, unsigned int bits)
{
	return BITMAP_WEIGHT(({dst[idx] = bitmap1[idx] | bitmap2[idx]; dst[idx]; }), bits);
}

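/*
 * Editor's illustration, not part of the original source: a short sketch
 * of the weight semantics implemented by BITMAP_WEIGHT() above, through
 * the bitmap_weight() wrapper from <linux/bitmap.h> (values illustrative):
 *
 *	DECLARE_BITMAP(a, 70);
 *
 *	bitmap_fill(a, 70);
 *	// Only the 70 valid bits are counted; the tail of the last word is
 *	// masked off with BITMAP_LAST_WORD_MASK(70) before hweight_long().
 *	pr_info("weight=%u\n", bitmap_weight(a, 70));	// prints 70
 */
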
void __bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
EXPORT_SYMBOL(__bitmap_set);

void __bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
EXPORT_SYMBOL(__bitmap_clear);

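/*
 * Editor's illustration, not part of the original source: the word-at-a-time
 * loops above back the bitmap_set()/bitmap_clear() helpers; a minimal sketch
 * for a 64-bit machine (values illustrative):
 *
 *	DECLARE_BITMAP(map, 128);
 *
 *	bitmap_zero(map, 128);
 *	bitmap_set(map, 60, 10);	// spans the word boundary at bit 64
 *	bitmap_clear(map, 64, 2);	// clears bits 64 and 65 again
 */
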
/**
 * bitmap_find_next_zero_area_off - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bitnumber to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 * @align_offset: Alignment offset for zero area.
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of all zero areas this function finds plus @align_offset
 * is a multiple of that power of 2.
 */
unsigned long bitmap_find_next_zero_area_off(unsigned long *map,
					     unsigned long size,
					     unsigned long start,
					     unsigned int nr,
					     unsigned long align_mask,
					     unsigned long align_offset)
{
	unsigned long index, end, i;
again:
	index = find_next_zero_bit(map, size, start);

	/* Align allocation */
	index = __ALIGN_MASK(index + align_offset, align_mask) - align_offset;

	end = index + nr;
	if (end > size)
		return end;
	i = find_next_bit(map, end, index);
	if (i < end) {
		start = i + 1;
		goto again;
	}
	return index;
}
EXPORT_SYMBOL(bitmap_find_next_zero_area_off);

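/*
 * Editor's illustration, not part of the original source: a hedged,
 * allocator-style sketch of the search above, through the
 * bitmap_find_next_zero_area() wrapper (align_offset == 0); names and
 * sizes are illustrative:
 *
 *	DECLARE_BITMAP(pool, 128);
 *	unsigned long pos;
 *
 *	pos = bitmap_find_next_zero_area(pool, 128, 0, 8, 7);	// 8 bits, 8-aligned
 *	if (pos >= 128)
 *		return -ENOMEM;			// no suitably aligned free run
 *	bitmap_set(pool, pos, 8);		// claim the region
 */
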
/**
 * bitmap_pos_to_ord - find ordinal of set bit at given position in bitmap
 * @buf: pointer to a bitmap
 * @pos: a bit position in @buf (0 <= @pos < @nbits)
 * @nbits: number of valid bit positions in @buf
 *
 * Map the bit at position @pos in @buf (of length @nbits) to the
 * ordinal of which set bit it is. If it is not set or if @pos
 * is not a valid bit position, map to -1.
 *
 * If, for example, just bits 4 through 7 are set in @buf, then @pos
 * values 4 through 7 will get mapped to 0 through 3, respectively,
 * and other @pos values will get mapped to -1. When @pos value 7
 * gets mapped to (returns) @ord value 3 in this example, that means
 * that bit 7 is the 3rd (starting with 0th) set bit in @buf.
 *
 * The bit positions 0 through @nbits-1 are valid positions in @buf.
 */
static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigned int nbits)
{
	if (pos >= nbits || !test_bit(pos, buf))
		return -1;

	return bitmap_weight(buf, pos);
}

/**
 * bitmap_remap - Apply map defined by a pair of bitmaps to another bitmap
 * @dst: remapped result
 * @src: subset to be remapped
 * @old: defines domain of map
 * @new: defines range of map
 * @nbits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new. In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * If either of the @old and @new bitmaps are empty, or if @src and
 * @dst point to the same location, then this routine copies @src
 * to @dst.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to @src, placing the result in
 * @dst, clearing any bits previously set in @dst.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set. This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged. So if say @src comes into this routine
 * with bits 1, 5 and 7 set, then @dst should leave with bits 1,
 * 13 and 15 set.
 */
void bitmap_remap(unsigned long *dst, const unsigned long *src,
		  const unsigned long *old, const unsigned long *new,
		  unsigned int nbits)
{
	unsigned int oldbit, w;

	if (dst == src)		/* following doesn't handle inplace remaps */
		return;
	bitmap_zero(dst, nbits);

	w = bitmap_weight(new, nbits);
	for_each_set_bit(oldbit, src, nbits) {
		int n = bitmap_pos_to_ord(old, oldbit, nbits);

		if (n < 0 || w == 0)
			set_bit(oldbit, dst);	/* identity map */
		else
			set_bit(find_nth_bit(new, nbits, n % w), dst);
	}
}
EXPORT_SYMBOL(bitmap_remap);

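/*
 * Editor's illustration, not part of the original source: the example from
 * the comment above, spelled out as a hedged sketch (values illustrative):
 *
 *	DECLARE_BITMAP(old, 16);
 *	DECLARE_BITMAP(new, 16);
 *	DECLARE_BITMAP(src, 16);
 *	DECLARE_BITMAP(dst, 16);
 *
 *	bitmap_zero(old, 16);  bitmap_set(old, 4, 4);	// domain: bits 4..7
 *	bitmap_zero(new, 16);  bitmap_set(new, 12, 4);	// range: bits 12..15
 *	bitmap_zero(src, 16);
 *	set_bit(1, src);  set_bit(5, src);  set_bit(7, src);
 *
 *	bitmap_remap(dst, src, old, new, 16);
 *	// dst now has bits 1 (identity), 13 (from 5) and 15 (from 7) set.
 */
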
/**
 * bitmap_bitremap - Apply map defined by a pair of bitmaps to a single bit
 * @oldbit: bit position to be mapped
 * @old: defines domain of map
 * @new: defines range of map
 * @bits: number of bits in each of these bitmaps
 *
 * Let @old and @new define a mapping of bit positions, such that
 * whatever position is held by the n-th set bit in @old is mapped
 * to the n-th set bit in @new. In the more general case, allowing
 * for the possibility that the weight 'w' of @new is less than the
 * weight of @old, map the position of the n-th set bit in @old to
 * the position of the m-th set bit in @new, where m == n % w.
 *
 * The positions of unset bits in @old are mapped to themselves
 * (the identity map).
 *
 * Apply the above specified mapping to bit position @oldbit, returning
 * the new bit position.
 *
 * For example, let's say that @old has bits 4 through 7 set, and
 * @new has bits 12 through 15 set. This defines the mapping of bit
 * position 4 to 12, 5 to 13, 6 to 14 and 7 to 15, and of all other
 * bit positions unchanged. So if say @oldbit is 5, then this routine
 * returns 13.
 */
int bitmap_bitremap(int oldbit, const unsigned long *old,
		    const unsigned long *new, int bits)
{
	int w = bitmap_weight(new, bits);
	int n = bitmap_pos_to_ord(old, oldbit, bits);
	if (n < 0 || w == 0)
		return oldbit;
	else
		return find_nth_bit(new, bits, n % w);
}
EXPORT_SYMBOL(bitmap_bitremap);

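/*
 * Editor's note, not part of the original source: the single-bit variant of
 * the sketch above. With the same illustrative old/new maps,
 * bitmap_bitremap(5, old, new, 16) would return 13, while an @oldbit that
 * is not set in old (say 1) maps to itself.
 */
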
#ifdef CONFIG_NUMA
/**
 * bitmap_onto - translate one bitmap relative to another
 * @dst: resulting translated bitmap
 * @orig: original untranslated bitmap
 * @relmap: bitmap relative to which translated
 * @bits: number of bits in each of these bitmaps
 *
 * Set the n-th bit of @dst iff there exists some m such that the
 * n-th bit of @relmap is set, the m-th bit of @orig is set, and
 * the n-th bit of @relmap is also the m-th _set_ bit of @relmap.
 * (If you understood the previous sentence the first time you
 * read it, you're overqualified for your current job.)
 *
 * In other words, @orig is mapped onto (surjectively) @dst,
 * using the map { <n, m> | the n-th bit of @relmap is the
 * m-th set bit of @relmap }.
 *
 * Any set bits in @orig above bit number W, where W is the
 * weight of (number of set bits in) @relmap are mapped nowhere.
 * In particular, if for all bits m set in @orig, m >= W, then
 * @dst will end up empty. In situations where the possibility
 * of such an empty result is not desired, one way to avoid it is
 * to use the bitmap_fold() operator, below, to first fold the
 * @orig bitmap over itself so that all its set bits x are in the
 * range 0 <= x < W. The bitmap_fold() operator does this by
 * setting the bit (m % W) in @dst, for each bit (m) set in @orig.
 *
 * Example [1] for bitmap_onto():
 *  Let's say @relmap has bits 30-39 set, and @orig has bits
 *  1, 3, 5, 7, 9 and 11 set. Then on return from this routine,
 *  @dst will have bits 31, 33, 35, 37 and 39 set.
 *
 *  When bit 0 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the first bit (if any)
 *  that is turned on in @relmap. Since bit 0 was off in the
 *  above example, we leave off that bit (bit 30) in @dst.
 *
 *  When bit 1 is set in @orig (as in the above example), it
 *  means turn on the bit in @dst corresponding to whatever
 *  is the second bit that is turned on in @relmap. The second
 *  bit in @relmap that was turned on in the above example was
 *  bit 31, so we turned on bit 31 in @dst.
 *
 *  Similarly, we turned on bits 33, 35, 37 and 39 in @dst,
 *  because they were the 4th, 6th, 8th and 10th set bits
 *  set in @relmap, and the 4th, 6th, 8th and 10th bits of
 *  @orig (i.e. bits 3, 5, 7 and 9) were also set.
 *
 *  When bit 11 is set in @orig, it means turn on the bit in
 *  @dst corresponding to whatever is the twelfth bit that is
 *  turned on in @relmap. In the above example, there were
 *  only ten bits turned on in @relmap (30..39), so the fact
 *  that bit 11 was set in @orig had no effect on @dst.
 *
 * Example [2] for bitmap_fold() + bitmap_onto():
 *  Let's say @relmap has these ten bits set::
 *
 *	40 41 42 43 45 48 53 61 74 95
 *
 *  (for the curious, that's 40 plus the first ten terms of the
 *  Fibonacci sequence.)
 *
 *  Further, let's say we use the following code, invoking
 *  bitmap_fold() then bitmap_onto(), as suggested above to
 *  avoid the possibility of an empty @dst result::
 *
 *	unsigned long *tmp;	// a temporary bitmap's bits
 *
 *	bitmap_fold(tmp, orig, bitmap_weight(relmap, bits), bits);
 *	bitmap_onto(dst, tmp, relmap, bits);
 *
 *  Then this table shows what various values of @dst would be, for
 *  various @orig's. I list the zero-based positions of each set bit.
 *  The tmp column shows the intermediate result, as computed by
 *  using bitmap_fold() to fold the @orig bitmap modulo ten
 *  (the weight of @relmap):
 *
 *	=============== ============== =================
 *	@orig           tmp            @dst
 *	0               0              40
 *	1               1              41
 *	9               9              95
 *	10              0              40 [#f1]_
 *	1 3 5 7         1 3 5 7        41 43 48 61
 *	0 1 2 3 4       0 1 2 3 4      40 41 42 43 45
 *	0 9 18 27       0 9 8 7        40 61 74 95
 *	0 10 20 30      0              40
 *	0 11 22 33      0 1 2 3        40 41 42 43
 *	0 12 24 36      0 2 4 6        40 42 45 53
 *	78 102 211      1 2 8          41 42 74 [#f1]_
 *	=============== ============== =================
 *
 * .. [#f1]
 *
 *     For these marked lines, if we hadn't first done bitmap_fold()
 *     into tmp, then the @dst result would have been empty.
 *
 * If either of @orig or @relmap is empty (no set bits), then @dst
 * will be returned empty.
 *
 * If (as explained above) the only set bits in @orig are in positions
 * m where m >= W, (where W is the weight of @relmap) then @dst will
 * once again be returned empty.
 *
 * All bits in @dst not set by the above rule are cleared.
 */
void bitmap_onto(unsigned long *dst, const unsigned long *orig,
		 const unsigned long *relmap, unsigned int bits)
{
	unsigned int n, m;	/* same meaning as in above comment */

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, bits);

	/*
	 * The following code is a more efficient, but less
	 * obvious, equivalent to the loop:
	 *	for (m = 0; m < bitmap_weight(relmap, bits); m++) {
	 *		n = find_nth_bit(relmap, bits, m);
	 *		if (test_bit(m, orig))
	 *			set_bit(n, dst);
	 *	}
	 */

	m = 0;
	for_each_set_bit(n, relmap, bits) {
		/* m == bitmap_pos_to_ord(relmap, n, bits) */
		if (test_bit(m, orig))
			set_bit(n, dst);
		m++;
	}
}

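/*
 * Editor's illustration, not part of the original source: Example [1]
 * above as a hedged code sketch (values illustrative; CONFIG_NUMA only):
 *
 *	DECLARE_BITMAP(relmap, 64);
 *	DECLARE_BITMAP(orig, 64);
 *	DECLARE_BITMAP(dst, 64);
 *
 *	bitmap_zero(relmap, 64);  bitmap_set(relmap, 30, 10);	// bits 30..39
 *	bitmap_zero(orig, 64);
 *	set_bit(1, orig);  set_bit(3, orig);  set_bit(5, orig);
 *	set_bit(7, orig);  set_bit(9, orig);  set_bit(11, orig);
 *
 *	bitmap_onto(dst, orig, relmap, 64);
 *	// dst now has bits 31, 33, 35, 37 and 39 set; bit 11 of orig is
 *	// beyond the weight of relmap (10) and maps nowhere.
 */
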
/**
 * bitmap_fold - fold larger bitmap into smaller, modulo specified size
 * @dst: resulting smaller bitmap
 * @orig: original larger bitmap
 * @sz: specified size
 * @nbits: number of bits in each of these bitmaps
 *
 * For each bit oldbit in @orig, set bit oldbit mod @sz in @dst.
 * Clear all other bits in @dst. See further the comment and
 * Example [2] for bitmap_onto() for why and how to use this.
 */
void bitmap_fold(unsigned long *dst, const unsigned long *orig,
		 unsigned int sz, unsigned int nbits)
{
	unsigned int oldbit;

	if (dst == orig)	/* following doesn't handle inplace mappings */
		return;
	bitmap_zero(dst, nbits);

	for_each_set_bit(oldbit, orig, nbits)
		set_bit(oldbit % sz, dst);
}
#endif /* CONFIG_NUMA */

unsigned long *bitmap_alloc(unsigned int nbits, gfp_t flags)
{
	return kmalloc_array(BITS_TO_LONGS(nbits), sizeof(unsigned long),
			     flags);
}
EXPORT_SYMBOL(bitmap_alloc);

unsigned long *bitmap_zalloc(unsigned int nbits, gfp_t flags)
{
	return bitmap_alloc(nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL(bitmap_zalloc);

unsigned long *bitmap_alloc_node(unsigned int nbits, gfp_t flags, int node)
{
	return kmalloc_array_node(BITS_TO_LONGS(nbits), sizeof(unsigned long),
				  flags, node);
}
EXPORT_SYMBOL(bitmap_alloc_node);

unsigned long *bitmap_zalloc_node(unsigned int nbits, gfp_t flags, int node)
{
	return bitmap_alloc_node(nbits, flags | __GFP_ZERO, node);
}
EXPORT_SYMBOL(bitmap_zalloc_node);

void bitmap_free(const unsigned long *bitmap)
{
	kfree(bitmap);
}
EXPORT_SYMBOL(bitmap_free);

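/*
 * Editor's illustration, not part of the original source: the typical
 * pairing of the allocation helpers above (error handling kept minimal):
 *
 *	unsigned long *map;
 *
 *	map = bitmap_zalloc(1024, GFP_KERNEL);	// zero-initialised, 1024 bits
 *	if (!map)
 *		return -ENOMEM;
 *	// ... use map ...
 *	bitmap_free(map);
 */
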
static void devm_bitmap_free(void *data)
{
	unsigned long *bitmap = data;

	bitmap_free(bitmap);
}

unsigned long *devm_bitmap_alloc(struct device *dev,
				 unsigned int nbits, gfp_t flags)
{
	unsigned long *bitmap;
	int ret;

	bitmap = bitmap_alloc(nbits, flags);
	if (!bitmap)
		return NULL;

	ret = devm_add_action_or_reset(dev, devm_bitmap_free, bitmap);
	if (ret)
		return NULL;

	return bitmap;
}
EXPORT_SYMBOL_GPL(devm_bitmap_alloc);

unsigned long *devm_bitmap_zalloc(struct device *dev,
				  unsigned int nbits, gfp_t flags)
{
	return devm_bitmap_alloc(dev, nbits, flags | __GFP_ZERO);
}
EXPORT_SYMBOL_GPL(devm_bitmap_zalloc);

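/*
 * Editor's illustration, not part of the original source: a hedged
 * driver-probe sketch of the device-managed variants above; the bitmap is
 * released automatically when the device is detached (names illustrative):
 *
 *	static int foo_probe(struct platform_device *pdev)
 *	{
 *		unsigned long *mask;
 *
 *		mask = devm_bitmap_zalloc(&pdev->dev, 64, GFP_KERNEL);
 *		if (!mask)
 *			return -ENOMEM;
 *		// no explicit bitmap_free() needed on error or remove paths
 *		return 0;
 *	}
 */
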
#if BITS_PER_LONG == 64
/**
 * bitmap_from_arr32 - copy the contents of u32 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u32 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr32(unsigned long *bitmap, const u32 *buf, unsigned int nbits)
{
	unsigned int i, halfwords;

	halfwords = DIV_ROUND_UP(nbits, 32);
	for (i = 0; i < halfwords; i++) {
		bitmap[i/2] = (unsigned long) buf[i];
		if (++i < halfwords)
			bitmap[i/2] |= ((unsigned long) buf[i]) << 32;
	}

	/* Clear tail bits in last word beyond nbits. */
	if (nbits % BITS_PER_LONG)
		bitmap[(halfwords - 1) / 2] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr32);

/**
 * bitmap_to_arr32 - copy the contents of bitmap to a u32 array of bits
 * @buf: array of u32 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr32(u32 *buf, const unsigned long *bitmap, unsigned int nbits)
{
	unsigned int i, halfwords;

	halfwords = DIV_ROUND_UP(nbits, 32);
	for (i = 0; i < halfwords; i++) {
		buf[i] = (u32) (bitmap[i/2] & UINT_MAX);
		if (++i < halfwords)
			buf[i] = (u32) (bitmap[i/2] >> 32);
	}

	/* Clear tail bits in last element of array beyond nbits. */
	if (nbits % BITS_PER_LONG)
		buf[halfwords - 1] &= (u32) (UINT_MAX >> ((-nbits) & 31));
}
EXPORT_SYMBOL(bitmap_to_arr32);
#endif

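/*
 * Editor's illustration, not part of the original source: these helpers
 * (together with the matching definitions used on 32-bit builds, which
 * live in <linux/bitmap.h>) let a bitmap round-trip through a fixed u32[]
 * layout independent of BITS_PER_LONG; a hedged sketch with illustrative
 * names:
 *
 *	DECLARE_BITMAP(map, 70);
 *	u32 wire[DIV_ROUND_UP(70, 32)];
 *
 *	bitmap_to_arr32(wire, map, 70);		// export, tail bits cleared
 *	bitmap_from_arr32(map, wire, 70);	// import back
 */
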
#if BITS_PER_LONG == 32
/**
 * bitmap_from_arr64 - copy the contents of u64 array of bits to bitmap
 * @bitmap: array of unsigned longs, the destination bitmap
 * @buf: array of u64 (in host byte order), the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_from_arr64(unsigned long *bitmap, const u64 *buf, unsigned int nbits)
{
	int n;

	for (n = nbits; n > 0; n -= 64) {
		u64 val = *buf++;

		*bitmap++ = val;
		if (n > 32)
			*bitmap++ = val >> 32;
	}

	/*
	 * Clear tail bits in the last word beyond nbits.
	 *
	 * Negative index is OK because here we point to the word next
	 * to the last word of the bitmap, except for nbits == 0, which
	 * is tested implicitly.
	 */
	if (nbits % BITS_PER_LONG)
		bitmap[-1] &= BITMAP_LAST_WORD_MASK(nbits);
}
EXPORT_SYMBOL(bitmap_from_arr64);

/**
 * bitmap_to_arr64 - copy the contents of bitmap to a u64 array of bits
 * @buf: array of u64 (in host byte order), the dest bitmap
 * @bitmap: array of unsigned longs, the source bitmap
 * @nbits: number of bits in @bitmap
 */
void bitmap_to_arr64(u64 *buf, const unsigned long *bitmap, unsigned int nbits)
{
	const unsigned long *end = bitmap + BITS_TO_LONGS(nbits);

	while (bitmap < end) {
		*buf = *bitmap++;
		if (bitmap < end)
			*buf |= (u64)(*bitmap++) << 32;
		buf++;
	}

	/* Clear tail bits in the last element of array beyond nbits. */
	if (nbits % 64)
		buf[-1] &= GENMASK_ULL((nbits - 1) % 64, 0);
}
EXPORT_SYMBOL(bitmap_to_arr64);
#endif