author     Andi Kleen <ak@suse.de>                2006-09-26 04:52:38 -0400
committer  Andi Kleen <andi@basil.nowhere.org>    2006-09-26 04:52:38 -0400
commit     0136611c62e8650e354b95c76dff6d2ce6030eff (patch)
tree       9ba66105bccc4d83b84663b8dda7e51962c22a04 /include/asm-x86_64/bitops.h
parent     8380aabb99719af583447133f19a4d8074b5c337 (diff)
[PATCH] optimize hweight64 for x86_64
Based on patch from David Rientjes <rientjes@google.com>, but changed by AK.

Optimizes the 64-bit hamming weight for x86_64 processors assuming they have fast multiplication. Uses five fewer bitops than the generic hweight64. Benchmark on one EMT64 showed ~25% speedup with 2^24 consecutive calls.

Define a new ARCH_HAS_FAST_MULTIPLIER that can be set by other architectures that can also multiply fast.

Signed-off-by: Andi Kleen <ak@suse.de>
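For context, the optimization the message describes is the classic multiply-based population count: after the usual pair/nibble/byte folding steps, a single multiplication by 0x0101010101010101 sums the eight per-byte counts into the top byte, replacing the remaining shift-and-add folds. Below is a minimal, self-contained sketch in plain C; it is not the kernel code itself, and the function name is illustrative.

#include <stdint.h>

/* Multiply-based 64-bit population count ("SWAR" popcount).
 * The first three steps fold bit counts into 2-, 4- and 8-bit fields;
 * the final multiply sums all eight byte counts into the top byte. */
static inline unsigned int hweight64_fastmul(uint64_t w)
{
	w -= (w >> 1) & 0x5555555555555555ULL;               /* per-pair counts   */
	w  = (w & 0x3333333333333333ULL) +
	     ((w >> 2) & 0x3333333333333333ULL);             /* per-nibble counts */
	w  = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL;         /* per-byte counts   */
	return (unsigned int)((w * 0x0101010101010101ULL) >> 56);
}

The multiplier-free variant finishes instead with three shift-and-add folds plus a final mask (seven operations), where the multiply path needs only one multiply and one shift; that difference accounts for the "five fewer bitops" figure in the commit message.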
Diffstat (limited to 'include/asm-x86_64/bitops.h')
-rw-r--r--  include/asm-x86_64/bitops.h | 2 ++
1 file changed, 2 insertions, 0 deletions
diff --git a/include/asm-x86_64/bitops.h b/include/asm-x86_64/bitops.h
index f7ba57b1cc08..5b535eaf5309 100644
--- a/include/asm-x86_64/bitops.h
+++ b/include/asm-x86_64/bitops.h
@@ -399,6 +399,8 @@ static __inline__ int fls(int x)
 	return r+1;
 }
 
+#define ARCH_HAS_FAST_MULTIPLIER 1
+
 #include <asm-generic/bitops/hweight.h>
 
 #endif /* __KERNEL__ */
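The two added lines are the whole arch-side change: because the #define appears before the #include of asm-generic/bitops/hweight.h, the generic hweight code can test the macro and choose the multiply-based tail. The following is only a hedged sketch of how such a selection can look, not the exact generic implementation; the function name is illustrative.

#include <stdint.h>

/* Sketch of a generic hweight64() that keys off ARCH_HAS_FAST_MULTIPLIER. */
static inline unsigned int generic_hweight64(uint64_t w)
{
	w -= (w >> 1) & 0x5555555555555555ULL;
	w  = (w & 0x3333333333333333ULL) + ((w >> 2) & 0x3333333333333333ULL);
	w  = (w + (w >> 4)) & 0x0f0f0f0f0f0f0f0fULL;
#ifdef ARCH_HAS_FAST_MULTIPLIER
	/* one multiply and one shift replace the folds below */
	return (unsigned int)((w * 0x0101010101010101ULL) >> 56);
#else
	/* multiplier-free fallback: fold the byte counts by shifting and adding */
	w += w >> 8;
	w += w >> 16;
	w += w >> 32;
	return (unsigned int)(w & 0x7f);
#endif
}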