author	Paul Mackerras <paulus@samba.org>	2008-04-18 00:26:08 -0400
committer	Paul Mackerras <paulus@samba.org>	2008-04-18 02:25:15 -0400
commit	9f264be6101c42cb9e471c58322fb83a5cde1461 (patch)
tree	5d96ec582d5483c67e5511f0aa0c61d869a48640 /include/asm-powerpc
parent	945feb174b14e7098cc7ecf0cf4768d35bc52f9c (diff)
[POWERPC] Optimize fls64() on 64-bit processors
64-bit powerpc processors can find the leftmost 1 bit in a 64-bit
doubleword in one instruction, so use that rather than using the
generic fls64(), which does two 32-bit fls() calls.

Signed-off-by: Paul Mackerras <paulus@samba.org>
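For context, the generic fallback that remains in use on 32-bit splits the
doubleword and scans each half with the 32-bit fls(). This is only a sketch
of include/asm-generic/bitops/fls64.h, shown here to make clear what the
single cntlzd replaces; it is not part of the patch:

	/* Sketch of the generic fallback: try the upper 32-bit word first,
	 * then fall back to the lower word, using 32-bit fls() each time. */
	static inline int fls64(__u64 x)
	{
		__u32 h = x >> 32;

		if (h)
			return fls(h) + 32;	/* set bit in the upper word */
		return fls(x);			/* otherwise scan the lower word */
	}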
Diffstat (limited to 'include/asm-powerpc')
-rw-r--r--	include/asm-powerpc/bitops.h	17
1 file changed, 17 insertions, 0 deletions
diff --git a/include/asm-powerpc/bitops.h b/include/asm-powerpc/bitops.h
index 36c8f3a43792..a99a74929475 100644
--- a/include/asm-powerpc/bitops.h
+++ b/include/asm-powerpc/bitops.h
@@ -312,7 +312,24 @@ static __inline__ int fls(unsigned int x)
 	asm ("cntlzw %0,%1" : "=r" (lz) : "r" (x));
 	return 32 - lz;
 }
+
+/*
+ * 64-bit can do this using one cntlzd (count leading zeroes doubleword)
+ * instruction; for 32-bit we use the generic version, which does two
+ * 32-bit fls calls.
+ */
+#ifdef __powerpc64__
+static __inline__ int fls64(__u64 x)
+{
+	int lz;
+
+	asm ("cntlzd %0,%1" : "=r" (lz) : "r" (x));
+	return 64 - lz;
+}
+#else
 #include <asm-generic/bitops/fls64.h>
+#endif /* __powerpc64__ */
+
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/find.h>
 
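To see why "64 - lz" yields the conventional fls64() result (the 1-based
position of the most significant set bit, or 0 when no bit is set), here is
a standalone user-space sketch that mimics the instruction with GCC's
__builtin_clzll; it is an illustration only, not part of the patch:

	#include <assert.h>
	#include <stdint.h>

	/* Stand-in for the patched fls64(): count leading zeroes and
	 * subtract from 64.  __builtin_clzll is undefined for 0, so that
	 * case is handled explicitly; the real cntlzd instruction simply
	 * returns 64 for a zero input, giving 64 - 64 = 0. */
	static int fls64_sketch(uint64_t x)
	{
		return x ? 64 - __builtin_clzll(x) : 0;
	}

	int main(void)
	{
		assert(fls64_sketch(0) == 0);		/* no bits set */
		assert(fls64_sketch(1) == 1);		/* bit 0 set */
		assert(fls64_sketch(1ULL << 63) == 64);	/* top bit set */
		return 0;
	}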