
linux/bitmap: Force inlining of bitmap weight functions

With this config:

http://busybox.net/~vda/kernel_config_OPTIMIZE_INLINING_and_Os

gcc-4.7.2 generates many copies of these tiny functions:

bitmap_weight (55 copies):
    55                      push   %rbp
    48 89 e5                mov    %rsp,%rbp
    e8 3f 3a 8b 00          callq  __bitmap_weight
    5d                      pop    %rbp
    c3                      retq

hweight_long (23 copies):
    55                      push   %rbp
    e8 b5 65 8e 00          callq  __sw_hweight64
    48 89 e5                mov    %rsp,%rbp
    5d                      pop    %rbp
    c3                      retq

See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66122

This patch fixes the problem via s/inline/__always_inline/.

While at it, replace the two "__inline__"s with the usual "inline"
(the rest of the source file uses the latter).
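
For context, a minimal sketch of the difference (simplified macro and
made-up function names; the kernel's real definitions live in
include/linux/compiler*.h):

/* Simplified sketch -- the kernel's actual macro is equivalent but
 * spelled differently across the compiler*.h headers.
 */
#define __always_inline	inline __attribute__((__always_inline__))

/* With CONFIG_OPTIMIZE_INLINING=y, plain "inline" is only a hint:
 * gcc may still emit an out-of-line copy per translation unit,
 * which is where the 55 bitmap_weight() wrappers above come from.
 */
static inline int hinted(int x)
{
	return x * 2;
}

/* __always_inline removes that freedom: the body must be inlined
 * at every call site, so no per-object-file copies remain.
 */
static __always_inline int forced(int x)
{
	return x * 2;
}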

    text      data       bss        dec  filename
86971357  17195880  36659200  140826437  vmlinux.before
86971120  17195912  36659200  140826232  vmlinux
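
That is a net shrink of 205 bytes: text falls by 237 bytes while data
grows by 32.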

Signed-off-by: Denys Vlasenko <dvlasenk@redhat.com>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: David Rientjes <rientjes@google.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Thomas Graf <tgraf@suug.ch>
Cc: linux-kernel@vger.kernel.org
Link: http://lkml.kernel.org/r/1438697716-28121-1-git-send-email-dvlasenk@redhat.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>


 include/linux/bitmap.h | 2 +-
 include/linux/bitops.h | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

--- a/include/linux/bitmap.h
+++ b/include/linux/bitmap.h
@@ -295,7 +295,7 @@
 	return find_first_zero_bit(src, nbits) == nbits;
 }
 
-static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
+static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));

--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -57,7 +57,7 @@
 	     (bit) < (size);					\
 	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
 
-static __inline__ int get_bitmask_order(unsigned int count)
+static inline int get_bitmask_order(unsigned int count)
 {
 	int order;
 
@@ -65,7 +65,7 @@
 	return order;	/* We could be slightly more clever with -1 here... */
 }
 
-static __inline__ int get_count_order(unsigned int count)
+static inline int get_count_order(unsigned int count)
 {
 	int order;
 
@@ -75,7 +75,7 @@
 	return order;
 }
 
-static inline unsigned long hweight_long(unsigned long w)
+static __always_inline unsigned long hweight_long(unsigned long w)
 {
 	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
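
For illustration, a hypothetical caller (not part of the patch) showing
what the forced inlining buys when nbits is a compile-time constant:

#include <linux/bitmap.h>

/* Hypothetical example: with a constant nbits <= BITS_PER_LONG,
 * small_const_nbits() is true, so the now-__always_inline
 * bitmap_weight() reduces to
 * hweight_long(*mask & BITMAP_LAST_WORD_MASK(32)) at the call
 * site -- no out-of-line wrapper copy is emitted at all.
 */
static int count_set_low32(const unsigned long *mask)
{
	return bitmap_weight(mask, 32);
}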