author     Linus Torvalds <torvalds@linux-foundation.org>   2010-02-28 13:20:25 -0500
committer  Linus Torvalds <torvalds@linux-foundation.org>   2010-02-28 13:20:25 -0500
commit     6556a6743549defc32e5f90ee2cb1ecd833a44c3 (patch)
tree       622306583d4a3c13235a8bfc012854c125c597f1 /include/linux/bitops.h
parent     e0d272429a34ff143bfa04ee8e29dd4eed2964c7 (diff)
parent     1dd2980d990068e20045b90c424518cc7f3657ff (diff)
Merge branch 'perf-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip
* 'perf-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip: (172 commits)
  perf_event, amd: Fix spinlock initialization
  perf_event: Fix preempt warning in perf_clock()
  perf tools: Flush maps on COMM events
  perf_events, x86: Split PMU definitions into separate files
  perf annotate: Handle samples not at objdump output addr boundaries
  perf_events, x86: Remove superflous MSR writes
  perf_events: Simplify code by removing cpu argument to hw_perf_group_sched_in()
  perf_events, x86: AMD event scheduling
  perf_events: Add new start/stop PMU callbacks
  perf_events: Report the MMAP pgoff value in bytes
  perf annotate: Defer allocating sym_priv->hist array
  perf symbols: Improve debugging information about symtab origins
  perf top: Use a macro instead of a constant variable
  perf symbols: Check the right return variable
  perf/scripts: Tag syscall_name helper as not yet available
  perf/scripts: Add perf-trace-python Documentation
  perf/scripts: Remove unnecessary PyTuple resizes
  perf/scripts: Add syscall tracing scripts
  perf/scripts: Add Python scripting engine
  perf/scripts: Remove check-perf-trace from listed scripts
  ...

Fix trivial conflict in tools/perf/util/probe-event.c
Diffstat (limited to 'include/linux/bitops.h')
-rw-r--r--   include/linux/bitops.h | 29
1 file changed, 27 insertions(+), 2 deletions(-)
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
index c05a29cb9bb2..25b8b2f33ae9 100644
--- a/include/linux/bitops.h
+++ b/include/linux/bitops.h
@@ -25,7 +25,7 @@
 static __inline__ int get_bitmask_order(unsigned int count)
 {
         int order;
 
         order = fls(count);
         return order;   /* We could be slightly more clever with -1 here... */
 }
@@ -33,7 +33,7 @@ static __inline__ int get_count_order(unsigned int count)
 static __inline__ int get_count_order(unsigned int count)
 {
         int order;
 
         order = fls(count) - 1;
         if (count & (count - 1))
                 order++;
@@ -45,6 +45,31 @@ static inline unsigned long hweight_long(unsigned long w)
         return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
 
+/*
+ * Clearly slow versions of the hweightN() functions, their benefit is
+ * of course compile time evaluation of constant arguments.
+ */
+#define HWEIGHT8(w)                                     \
+      ( BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) +   \
+        (!!((w) & (1ULL << 0))) +                       \
+        (!!((w) & (1ULL << 1))) +                       \
+        (!!((w) & (1ULL << 2))) +                       \
+        (!!((w) & (1ULL << 3))) +                       \
+        (!!((w) & (1ULL << 4))) +                       \
+        (!!((w) & (1ULL << 5))) +                       \
+        (!!((w) & (1ULL << 6))) +                       \
+        (!!((w) & (1ULL << 7))) )
+
+#define HWEIGHT16(w) (HWEIGHT8(w)  + HWEIGHT8((w) >> 8))
+#define HWEIGHT32(w) (HWEIGHT16(w) + HWEIGHT16((w) >> 16))
+#define HWEIGHT64(w) (HWEIGHT32(w) + HWEIGHT32((w) >> 32))
+
+/*
+ * Type invariant version that simply casts things to the
+ * largest type.
+ */
+#define HWEIGHT(w)   HWEIGHT64((u64)(w))
+
 /**
  * rol32 - rotate a 32-bit value left
  * @word: value to rotate
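
A note on the two helpers visible in the first hunks: get_bitmask_order() returns fls(count), i.e. one past the index of the most significant set bit, while get_count_order() computes the ceiling of log2(count). For example, fls(4) = 3, so get_count_order(4) = 3 - 1 = 2 with no rounding (4 is a power of two), whereas for count = 5 the count & (count - 1) test is non-zero and bumps the result to 3.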
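
The HWEIGHT*() macros this merge adds are integer constant expressions, so unlike the run-time hweight*() helpers they can appear where the compiler demands a constant: array sizes, enum values, static initializers. The BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) term turns any non-constant argument into a build error. The sketch below re-creates the macros in a standalone userspace file to show that behaviour; it is a minimal sketch under stated assumptions, not kernel code: the BUILD_BUG_ON_ZERO() stand-in (the kernel's own lives in include/linux/kernel.h), the eight_slots array, and main() are illustrative, and the negative-width-bitfield trick is a GCC-ism.

#include <stdio.h>
#include <stdint.h>

typedef uint64_t u64;

/* Stand-in for the kernel's BUILD_BUG_ON_ZERO(): evaluates to 0 when e is
 * false, and forces a compile error (negative bitfield width) when e is
 * true. GCC-specific, as in the kernel. */
#define BUILD_BUG_ON_ZERO(e) (sizeof(struct { int:-!!(e); }))

/* The macros from the diff above, verbatim apart from whitespace. */
#define HWEIGHT8(w)                                     \
      ( BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) +   \
        (!!((w) & (1ULL << 0))) +                       \
        (!!((w) & (1ULL << 1))) +                       \
        (!!((w) & (1ULL << 2))) +                       \
        (!!((w) & (1ULL << 3))) +                       \
        (!!((w) & (1ULL << 4))) +                       \
        (!!((w) & (1ULL << 5))) +                       \
        (!!((w) & (1ULL << 6))) +                       \
        (!!((w) & (1ULL << 7))) )

#define HWEIGHT16(w) (HWEIGHT8(w)  + HWEIGHT8((w) >> 8))
#define HWEIGHT32(w) (HWEIGHT16(w) + HWEIGHT16((w) >> 16))
#define HWEIGHT64(w) (HWEIGHT32(w) + HWEIGHT32((w) >> 32))
#define HWEIGHT(w)   HWEIGHT64((u64)(w))

/* A constant expression can size a static array -- something the run-time
 * hweight8() cannot do. 0xff has 8 set bits, so this array has length 8. */
static char eight_slots[HWEIGHT8(0xff)];

int main(void)
{
        printf("HWEIGHT8(0xA5)        = %d\n", (int)HWEIGHT8(0xA5));        /* 4  */
        printf("HWEIGHT32(0xdeadbeef) = %d\n", (int)HWEIGHT32(0xdeadbeef)); /* 24 */
        printf("HWEIGHT(~0ULL)        = %d\n", (int)HWEIGHT(~0ULL));        /* 64 */
        printf("sizeof(eight_slots)   = %zu\n", sizeof(eight_slots));       /* 8  */

        /* int n = 0xA5; HWEIGHT8(n);  -- would fail to build, because the
         * argument is not a compile-time constant. */
        return 0;
}

The guard is the design point the "clearly slow" comment hints at: the bit-by-bit expansion would be far slower than a hardware popcount if it ever survived to run time, so rejecting non-constant arguments at compile time keeps run-time values on hweight8()/hweight16()/hweight32()/hweight64().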