Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

find: Switch from inline to __always_inline

The 'inline' keyword is only a recommendation to the compiler. If it decides
not to inline the find_bit nodemask functions, the whole small_const_nbits()
machinery doesn't work.

This is what a standard GCC 11.3.0 does for my x86_64 build now. This patch
replaces the 'inline' directive with the unconditional '__always_inline' to
make sure that there's always a chance for compile-time optimization. It
doesn't change the size of the kernel image, according to bloat-o-meter.

[[ Brian: split out from:
Subject: [PATCH 1/3] bitmap: switch from inline to __always_inline
https://lore.kernel.org/all/20221027043810.350460-2-yury.norov@gmail.com/
But rewritten, as there were too many conflicts. ]]

Co-developed-by: Brian Norris <briannorris@chromium.org>
Signed-off-by: Brian Norris <briannorris@chromium.org>
Reviewed-by: Kees Cook <kees@kernel.org>
Reviewed-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Yury Norov <yury.norov@gmail.com>

+25 -25
+25 -25
include/linux/find.h
··· 52 52 * Returns the bit number for the next set bit 53 53 * If no bits are set, returns @size. 54 54 */ 55 - static inline 55 + static __always_inline 56 56 unsigned long find_next_bit(const unsigned long *addr, unsigned long size, 57 57 unsigned long offset) 58 58 { ··· 81 81 * Returns the bit number for the next set bit 82 82 * If no bits are set, returns @size. 83 83 */ 84 - static inline 84 + static __always_inline 85 85 unsigned long find_next_and_bit(const unsigned long *addr1, 86 86 const unsigned long *addr2, unsigned long size, 87 87 unsigned long offset) ··· 112 112 * Returns the bit number for the next set bit 113 113 * If no bits are set, returns @size. 114 114 */ 115 - static inline 115 + static __always_inline 116 116 unsigned long find_next_andnot_bit(const unsigned long *addr1, 117 117 const unsigned long *addr2, unsigned long size, 118 118 unsigned long offset) ··· 142 142 * Returns the bit number for the next set bit 143 143 * If no bits are set, returns @size. 144 144 */ 145 - static inline 145 + static __always_inline 146 146 unsigned long find_next_or_bit(const unsigned long *addr1, 147 147 const unsigned long *addr2, unsigned long size, 148 148 unsigned long offset) ··· 171 171 * Returns the bit number of the next zero bit 172 172 * If no bits are zero, returns @size. 173 173 */ 174 - static inline 174 + static __always_inline 175 175 unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size, 176 176 unsigned long offset) 177 177 { ··· 198 198 * Returns the bit number of the first set bit. 199 199 * If no bits are set, returns @size. 200 200 */ 201 - static inline 201 + static __always_inline 202 202 unsigned long find_first_bit(const unsigned long *addr, unsigned long size) 203 203 { 204 204 if (small_const_nbits(size)) { ··· 224 224 * Returns the bit number of the N'th set bit. 225 225 * If no such, returns >= @size. 
226 226 */ 227 - static inline 227 + static __always_inline 228 228 unsigned long find_nth_bit(const unsigned long *addr, unsigned long size, unsigned long n) 229 229 { 230 230 if (n >= size) ··· 249 249 * Returns the bit number of the N'th set bit. 250 250 * If no such, returns @size. 251 251 */ 252 - static inline 252 + static __always_inline 253 253 unsigned long find_nth_and_bit(const unsigned long *addr1, const unsigned long *addr2, 254 254 unsigned long size, unsigned long n) 255 255 { ··· 276 276 * Returns the bit number of the N'th set bit. 277 277 * If no such, returns @size. 278 278 */ 279 - static inline 279 + static __always_inline 280 280 unsigned long find_nth_andnot_bit(const unsigned long *addr1, const unsigned long *addr2, 281 281 unsigned long size, unsigned long n) 282 282 { ··· 332 332 * Returns the bit number for the next set bit 333 333 * If no bits are set, returns @size. 334 334 */ 335 - static inline 335 + static __always_inline 336 336 unsigned long find_first_and_bit(const unsigned long *addr1, 337 337 const unsigned long *addr2, 338 338 unsigned long size) ··· 357 357 * Returns the bit number for the first set bit 358 358 * If no bits are set, returns @size. 359 359 */ 360 - static inline 360 + static __always_inline 361 361 unsigned long find_first_and_and_bit(const unsigned long *addr1, 362 362 const unsigned long *addr2, 363 363 const unsigned long *addr3, ··· 381 381 * Returns the bit number of the first cleared bit. 382 382 * If no bits are zero, returns @size. 383 383 */ 384 - static inline 384 + static __always_inline 385 385 unsigned long find_first_zero_bit(const unsigned long *addr, unsigned long size) 386 386 { 387 387 if (small_const_nbits(size)) { ··· 402 402 * 403 403 * Returns the bit number of the last set bit, or size. 
404 404 */ 405 - static inline 405 + static __always_inline 406 406 unsigned long find_last_bit(const unsigned long *addr, unsigned long size) 407 407 { 408 408 if (small_const_nbits(size)) { ··· 425 425 * Returns the bit number for the next set bit, or first set bit up to @offset 426 426 * If no bits are set, returns @size. 427 427 */ 428 - static inline 428 + static __always_inline 429 429 unsigned long find_next_and_bit_wrap(const unsigned long *addr1, 430 430 const unsigned long *addr2, 431 431 unsigned long size, unsigned long offset) ··· 448 448 * Returns the bit number for the next set bit, or first set bit up to @offset 449 449 * If no bits are set, returns @size. 450 450 */ 451 - static inline 451 + static __always_inline 452 452 unsigned long find_next_bit_wrap(const unsigned long *addr, 453 453 unsigned long size, unsigned long offset) 454 454 { ··· 465 465 * Helper for for_each_set_bit_wrap(). Make sure you're doing right thing 466 466 * before using it alone. 467 467 */ 468 - static inline 468 + static __always_inline 469 469 unsigned long __for_each_wrap(const unsigned long *bitmap, unsigned long size, 470 470 unsigned long start, unsigned long n) 471 471 { ··· 506 506 507 507 #if defined(__LITTLE_ENDIAN) 508 508 509 - static inline unsigned long find_next_zero_bit_le(const void *addr, 510 - unsigned long size, unsigned long offset) 509 + static __always_inline 510 + unsigned long find_next_zero_bit_le(const void *addr, unsigned long size, unsigned long offset) 511 511 { 512 512 return find_next_zero_bit(addr, size, offset); 513 513 } 514 514 515 - static inline unsigned long find_next_bit_le(const void *addr, 516 - unsigned long size, unsigned long offset) 515 + static __always_inline 516 + unsigned long find_next_bit_le(const void *addr, unsigned long size, unsigned long offset) 517 517 { 518 518 return find_next_bit(addr, size, offset); 519 519 } 520 520 521 - static inline unsigned long find_first_zero_bit_le(const void *addr, 522 - unsigned long 
size) 521 + static __always_inline 522 + unsigned long find_first_zero_bit_le(const void *addr, unsigned long size) 523 523 { 524 524 return find_first_zero_bit(addr, size); 525 525 } ··· 527 527 #elif defined(__BIG_ENDIAN) 528 528 529 529 #ifndef find_next_zero_bit_le 530 - static inline 530 + static __always_inline 531 531 unsigned long find_next_zero_bit_le(const void *addr, unsigned 532 532 long size, unsigned long offset) 533 533 { ··· 546 546 #endif 547 547 548 548 #ifndef find_first_zero_bit_le 549 - static inline 549 + static __always_inline 550 550 unsigned long find_first_zero_bit_le(const void *addr, unsigned long size) 551 551 { 552 552 if (small_const_nbits(size)) { ··· 560 560 #endif 561 561 562 562 #ifndef find_next_bit_le 563 - static inline 563 + static __always_inline 564 564 unsigned long find_next_bit_le(const void *addr, unsigned 565 565 long size, unsigned long offset) 566 566 {