bitmap: unify find_bit operations

bitmap_for_each_{set,clear}_region() are similar to the for_each_bit()
family of macros in include/linux/find.h, but their interface and
implementation differ.

This patch adds the for_each_{set,clear}_bitrange() macros, switches the
existing users over to them, and drops the now-unused bitmap_*_region()
API for the sake of unification.
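
As a rough before/after sketch (illustrative only, not taken from the patch;
the bitmap, nbits and handle_range() names are made up), a caller converts
like this:

	unsigned int rs, re;

	/* before: old bitmap.h iterator, explicit start offset */
	bitmap_for_each_set_region(bitmap, rs, re, 0, nbits)
		handle_range(rs, re);

	/* after: unified find.h style, iteration starts at bit 0 */
	for_each_set_bitrange(rs, re, bitmap, nbits)
		handle_range(rs, re);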

Signed-off-by: Yury Norov <yury.norov@gmail.com>
Tested-by: Wolfram Sang <wsa+renesas@sang-engineering.com>
Acked-by: Dennis Zhou <dennis@kernel.org>
Acked-by: Ulf Hansson <ulf.hansson@linaro.org> # For MMC

+65 -46
+1 -1
drivers/mmc/host/renesas_sdhi_core.c
···
 	 * is at least SH_MOBILE_SDHI_MIN_TAP_ROW probes long then use the
 	 * center index as the tap, otherwise bail out.
 	 */
-	bitmap_for_each_set_region(bitmap, rs, re, 0, taps_size) {
+	for_each_set_bitrange(rs, re, bitmap, taps_size) {
 		if (re - rs > tap_cnt) {
 			tap_end = re;
 			tap_start = rs;
-33
include/linux/bitmap.h
···
  * bitmap_clear(dst, pos, nbits)                       Clear specified bit area
  * bitmap_find_next_zero_area(buf, len, pos, n, mask)  Find bit free area
  * bitmap_find_next_zero_area_off(buf, len, pos, n, mask, mask_off)  as above
- * bitmap_next_clear_region(map, &start, &end, nbits)  Find next clear region
- * bitmap_next_set_region(map, &start, &end, nbits)    Find next set region
- * bitmap_for_each_clear_region(map, rs, re, start, end)
- *                                                     Iterate over all clear regions
- * bitmap_for_each_set_region(map, rs, re, start, end)
- *                                                     Iterate over all set regions
  * bitmap_shift_right(dst, src, n, nbits)              *dst = *src >> n
  * bitmap_shift_left(dst, src, n, nbits)               *dst = *src << n
  * bitmap_cut(dst, src, first, n, nbits)               Cut n bits from first, copy rest
···
 	__bitmap_replace(dst, old, new, mask, nbits);
 }
 
-static inline void bitmap_next_clear_region(unsigned long *bitmap,
-					    unsigned int *rs, unsigned int *re,
-					    unsigned int end)
-{
-	*rs = find_next_zero_bit(bitmap, end, *rs);
-	*re = find_next_bit(bitmap, end, *rs + 1);
-}
-
 static inline void bitmap_next_set_region(unsigned long *bitmap,
 					   unsigned int *rs, unsigned int *re,
 					   unsigned int end)
···
 	*rs = find_next_bit(bitmap, end, *rs);
 	*re = find_next_zero_bit(bitmap, end, *rs + 1);
 }
-
-/*
- * Bitmap region iterators.  Iterates over the bitmap between [@start, @end).
- * @rs and @re should be integer variables and will be set to start and end
- * index of the current clear or set region.
- */
-#define bitmap_for_each_clear_region(bitmap, rs, re, start, end)	\
-	for ((rs) = (start),						\
-	     bitmap_next_clear_region((bitmap), &(rs), &(re), (end));	\
-	     (rs) < (re);						\
-	     (rs) = (re) + 1,						\
-	     bitmap_next_clear_region((bitmap), &(rs), &(re), (end)))
-
-#define bitmap_for_each_set_region(bitmap, rs, re, start, end)		\
-	for ((rs) = (start),						\
-	     bitmap_next_set_region((bitmap), &(rs), &(re), (end));	\
-	     (rs) < (re);						\
-	     (rs) = (re) + 1,						\
-	     bitmap_next_set_region((bitmap), &(rs), &(re), (end)))
 
 /**
  * BITMAP_FROM_U64() - Represent u64 value in the format suitable for bitmap.
+56
include/linux/find.h
···
 	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
 
 /**
+ * for_each_set_bitrange - iterate over all set bit ranges [b; e)
+ * @b: bit offset of start of current bitrange (first set bit)
+ * @e: bit offset of end of current bitrange (first unset bit)
+ * @addr: bitmap address to base the search on
+ * @size: bitmap size in number of bits
+ */
+#define for_each_set_bitrange(b, e, addr, size)			\
+	for ((b) = find_next_bit((addr), (size), 0),			\
+	     (e) = find_next_zero_bit((addr), (size), (b) + 1);	\
+	     (b) < (size);						\
+	     (b) = find_next_bit((addr), (size), (e) + 1),		\
+	     (e) = find_next_zero_bit((addr), (size), (b) + 1))
+
+/**
+ * for_each_set_bitrange_from - iterate over all set bit ranges [b; e)
+ * @b: bit offset of start of current bitrange (first set bit); must be initialized
+ * @e: bit offset of end of current bitrange (first unset bit)
+ * @addr: bitmap address to base the search on
+ * @size: bitmap size in number of bits
+ */
+#define for_each_set_bitrange_from(b, e, addr, size)		\
+	for ((b) = find_next_bit((addr), (size), (b)),			\
+	     (e) = find_next_zero_bit((addr), (size), (b) + 1);	\
+	     (b) < (size);						\
+	     (b) = find_next_bit((addr), (size), (e) + 1),		\
+	     (e) = find_next_zero_bit((addr), (size), (b) + 1))
+
+/**
+ * for_each_clear_bitrange - iterate over all unset bit ranges [b; e)
+ * @b: bit offset of start of current bitrange (first unset bit)
+ * @e: bit offset of end of current bitrange (first set bit)
+ * @addr: bitmap address to base the search on
+ * @size: bitmap size in number of bits
+ */
+#define for_each_clear_bitrange(b, e, addr, size)			\
+	for ((b) = find_next_zero_bit((addr), (size), 0),		\
+	     (e) = find_next_bit((addr), (size), (b) + 1);		\
+	     (b) < (size);						\
+	     (b) = find_next_zero_bit((addr), (size), (e) + 1),	\
+	     (e) = find_next_bit((addr), (size), (b) + 1))
+
+/**
+ * for_each_clear_bitrange_from - iterate over all unset bit ranges [b; e)
+ * @b: bit offset of start of current bitrange (first unset bit); must be initialized
+ * @e: bit offset of end of current bitrange (first set bit)
+ * @addr: bitmap address to base the search on
+ * @size: bitmap size in number of bits
+ */
+#define for_each_clear_bitrange_from(b, e, addr, size)		\
+	for ((b) = find_next_zero_bit((addr), (size), (b)),		\
+	     (e) = find_next_bit((addr), (size), (b) + 1);		\
+	     (b) < (size);						\
+	     (b) = find_next_zero_bit((addr), (size), (e) + 1),	\
+	     (e) = find_next_bit((addr), (size), (b) + 1))
+
+/**
  * for_each_set_clump8 - iterate over bitmap for each 8-bit clump with set bits
  * @start: bit offset to start search and to store the current iteration offset
  * @clump: location to store copy of current 8-bit clump
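
A minimal usage sketch of the _from flavour (illustrative, not part of the
patch; the map, nbits, start and do_clear() names are made up): unlike the
plain variant, the caller seeds @b with the first bit to consider instead
of starting from bit 0:

	unsigned int b = start, e;

	/* visit every clear (free) range at or after 'start' */
	for_each_clear_bitrange_from(b, e, map, nbits)
		do_clear(b, e - b);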
+8 -12
mm/percpu.c
···
 {
 	struct pcpu_block_md *block = chunk->md_blocks + index;
 	unsigned long *alloc_map = pcpu_index_alloc_map(chunk, index);
-	unsigned int rs, re, start;	/* region start, region end */
+	unsigned int start, end;	/* region start, region end */
 
 	/* promote scan_hint to contig_hint */
 	if (block->scan_hint) {
···
 	block->right_free = 0;
 
 	/* iterate over free areas and update the contig hints */
-	bitmap_for_each_clear_region(alloc_map, rs, re, start,
-				     PCPU_BITMAP_BLOCK_BITS)
-		pcpu_block_update(block, rs, re);
+	for_each_clear_bitrange_from(start, end, alloc_map, PCPU_BITMAP_BLOCK_BITS)
+		pcpu_block_update(block, start, end);
 }
 
 /**
···
 
 	/* populate if not all pages are already there */
 	if (!is_atomic) {
-		unsigned int page_start, page_end, rs, re;
+		unsigned int page_end, rs, re;
 
-		page_start = PFN_DOWN(off);
+		rs = PFN_DOWN(off);
 		page_end = PFN_UP(off + size);
 
-		bitmap_for_each_clear_region(chunk->populated, rs, re,
-					     page_start, page_end) {
+		for_each_clear_bitrange_from(rs, re, chunk->populated, page_end) {
 			WARN_ON(chunk->immutable);
 
 			ret = pcpu_populate_chunk(chunk, rs, re, pcpu_gfp);
···
 	list_for_each_entry_safe(chunk, next, &to_free, list) {
 		unsigned int rs, re;
 
-		bitmap_for_each_set_region(chunk->populated, rs, re, 0,
-					   chunk->nr_pages) {
+		for_each_set_bitrange(rs, re, chunk->populated, chunk->nr_pages) {
 			pcpu_depopulate_chunk(chunk, rs, re);
 			spin_lock_irq(&pcpu_lock);
 			pcpu_chunk_depopulated(chunk, rs, re);
···
 			continue;
 
 		/* @chunk can't go away while pcpu_alloc_mutex is held */
-		bitmap_for_each_clear_region(chunk->populated, rs, re, 0,
-					     chunk->nr_pages) {
+		for_each_clear_bitrange(rs, re, chunk->populated, chunk->nr_pages) {
 			int nr = min_t(int, re - rs, nr_to_pop);
 
 			spin_unlock_irq(&pcpu_lock);