commit     1a1d48a4a8fde49aedc045d894efe67173d59fe0
tree       b1e4dc0bd4c7621c98610947d3baa417d73ae6ef
parent     c2f3ba745d1c2013811cac6308c4abf9527c478e
author     Denys Vlasenko <dvlasenk@redhat.com>   2015-08-04 10:15:14 -0400
committer  Ingo Molnar <mingo@kernel.org>         2015-08-05 03:38:08 -0400
linux/bitmap: Force inlining of bitmap weight functions
With this config:
http://busybox.net/~vda/kernel_config_OPTIMIZE_INLINING_and_Os
gcc-4.7.2 generates many copies of these tiny functions:
bitmap_weight (55 copies):
  55                      push   %rbp
  48 89 e5                mov    %rsp,%rbp
  e8 3f 3a 8b 00          callq  __bitmap_weight
  5d                      pop    %rbp
  c3                      retq

hweight_long (23 copies):
  55                      push   %rbp
  e8 b5 65 8e 00          callq  __sw_hweight64
  48 89 e5                mov    %rsp,%rbp
  5d                      pop    %rbp
  c3                      retq
See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66122
This patch fixes that via s/inline/__always_inline/.
While at it, it also replaces two "__inline__" with the usual "inline"
(the rest of the source file uses the latter).
    text     data      bss       dec  filename
86971357 17195880 36659200 140826437  vmlinux.before
86971120 17195912 36659200 140826232  vmlinux

Net effect: text shrinks by 237 bytes (data grows by 32).
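A minimal sketch of the mechanism, for illustration only (the my_* names below are hypothetical, not kernel identifiers; __builtin_popcountl stands in for the arch hweight helpers): the kernel's __always_inline maps to GCC's always_inline function attribute, which forces inlining even under -Os with CONFIG_OPTIMIZE_INLINING, where a plain "inline" is merely a hint the compiler may ignore.

  /* Sketch only, not kernel code: the kernel's __always_inline
   * expands to roughly this attribute form. */
  #define my_always_inline inline __attribute__((__always_inline__))

  /* With a plain "inline" hint, gcc -Os may emit one out-of-line
   * copy of this tiny wrapper per object file -- the push/call/pop
   * stubs quoted above. With always_inline, the body must be
   * inlined at every call site. */
  static my_always_inline unsigned long my_hweight_long(unsigned long w)
  {
          return (unsigned long)__builtin_popcountl(w);
  }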
Signed-off-by: Denys Vlasenko <dvlasenk@redhat.com>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: David Rientjes <rientjes@google.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Thomas Graf <tgraf@suug.ch>
Cc: linux-kernel@vger.kernel.org
Link: http://lkml.kernel.org/r/1438697716-28121-1-git-send-email-dvlasenk@redhat.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
 include/linux/bitmap.h | 2 +-
 include/linux/bitops.h | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/include/linux/bitmap.h b/include/linux/bitmap.h
index ea17cca9e685..9653fdb76a42 100644
--- a/include/linux/bitmap.h
+++ b/include/linux/bitmap.h
@@ -295,7 +295,7 @@ static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
 	return find_first_zero_bit(src, nbits) == nbits;
 }
 
-static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
+static __always_inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
 {
 	if (small_const_nbits(nbits))
 		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index 297f5bda4fdf..e63553386ae7 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -57,7 +57,7 @@ extern unsigned long __sw_hweight64(__u64 w);
 	     (bit) < (size); \
 	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
 
-static __inline__ int get_bitmask_order(unsigned int count)
+static inline int get_bitmask_order(unsigned int count)
 {
 	int order;
 
@@ -65,7 +65,7 @@ static __inline__ int get_bitmask_order(unsigned int count)
 	return order; /* We could be slightly more clever with -1 here... */
 }
 
-static __inline__ int get_count_order(unsigned int count)
+static inline int get_count_order(unsigned int count)
 {
 	int order;
 
@@ -75,7 +75,7 @@ static __inline__ int get_count_order(unsigned int count)
 	return order;
 }
 
-static inline unsigned long hweight_long(unsigned long w)
+static __always_inline unsigned long hweight_long(unsigned long w)
 {
 	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
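Why forcing inlining matters for bitmap_weight() in particular: its small_const_nbits() fast path can only collapse to a single popcount when the body is actually inlined into a caller whose nbits is a compile-time constant. A self-contained sketch of that call-site folding (the MY_*/my_* names are hypothetical, not kernel identifiers; __builtin_popcountl again stands in for the kernel's hweight helpers):

  #include <limits.h>

  #define MY_BITS_PER_LONG ((int)(sizeof(long) * CHAR_BIT))

  /* stand-in for the kernel's small_const_nbits(): a compile-time
   * constant that fits in a single word */
  #define MY_SMALL_CONST_NBITS(nbits) \
          (__builtin_constant_p(nbits) && (nbits) > 0 && \
           (nbits) <= MY_BITS_PER_LONG)

  static inline __attribute__((__always_inline__))
  int my_bitmap_weight(const unsigned long *src, unsigned int nbits)
  {
          if (MY_SMALL_CONST_NBITS(nbits))
                  /* mask off the bits past nbits, popcount one word */
                  return __builtin_popcountl(*src &
                          (~0UL >> (MY_BITS_PER_LONG - nbits)));

          /* multi-word fallback; the kernel calls __bitmap_weight()
           * here (partial trailing word omitted in this sketch) */
          {
                  int w = 0;
                  unsigned int i;

                  for (i = 0; i < nbits / MY_BITS_PER_LONG; i++)
                          w += __builtin_popcountl(src[i]);
                  return w;
          }
  }

  /* Because the function must be inlined, gcc sees nbits == 16 as a
   * constant: this compiles down to an AND plus a popcount in the
   * caller, with no push/call/pop wrapper left behind. */
  int count_low16(const unsigned long *map)
  {
          return my_bitmap_weight(map, 16);
  }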