author     Linus Torvalds <torvalds@ppc970.osdl.org>  2005-04-16 18:20:36 -0400
committer  Linus Torvalds <torvalds@ppc970.osdl.org>  2005-04-16 18:20:36 -0400
commit     1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch)
tree       0bba044c4ce775e45a88a51686b5d9f90697ea9d  /include/linux/bitops.h
Linux-2.6.12-rc2 (tag: v2.6.12-rc2)
Initial git repository build. I'm not bothering with the full history, even though we have it. We can create a separate "historical" git archive of that later if we want to, and in the meantime it's about 3.2GB when imported into git - space that would just make the early git days unnecessarily complicated, when we don't have a lot of good infrastructure for it. Let it rip!
Diffstat (limited to 'include/linux/bitops.h')
-rw-r--r--  include/linux/bitops.h  159
1 file changed, 159 insertions, 0 deletions
diff --git a/include/linux/bitops.h b/include/linux/bitops.h
new file mode 100644
index 000000000000..cb3c3ef50f50
--- /dev/null
+++ b/include/linux/bitops.h
@@ -0,0 +1,159 @@
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from ffz (man ffs).
 */

static inline int generic_ffs(int x)
{
        int r = 1;

        if (!x)
                return 0;
        if (!(x & 0xffff)) {
                x >>= 16;
                r += 16;
        }
        if (!(x & 0xff)) {
                x >>= 8;
                r += 8;
        }
        if (!(x & 0xf)) {
                x >>= 4;
                r += 4;
        }
        if (!(x & 3)) {
                x >>= 2;
                r += 2;
        }
        if (!(x & 1)) {
                x >>= 1;
                r += 1;
        }
        return r;
}

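/*
 * Editor's illustrative sketch, not part of the original 2.6.12-rc2 header:
 * generic_ffs() numbers bits starting at 1, so generic_ffs(0x01) == 1,
 * generic_ffs(0x08) == 4 and generic_ffs(0) == 0.  A common pattern is to
 * convert that to a 0-based bit index, as the hypothetical helper below does.
 */
static inline int example_lowest_bit_index(unsigned int mask)
{
        /* -1 for an empty mask, otherwise the 0-based index of the
         * least significant set bit. */
        return mask ? generic_ffs(mask) - 1 : -1;
}
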
/*
 * fls: find last bit set.
 */

static __inline__ int generic_fls(int x)
{
        int r = 32;

        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}

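/*
 * Editor's illustrative sketch, not part of the original header:
 * generic_fls() returns the 1-based position of the most significant set
 * bit: generic_fls(1) == 1, generic_fls(0x80000000) == 32 and
 * generic_fls(0) == 0.  One common use is rounding up to the next power
 * of two, as in the hypothetical helper below (valid for x <= 1u << 31).
 */
static inline unsigned int example_roundup_pow_of_two(unsigned int x)
{
        /* 0 and 1 both map to 1; otherwise the smallest power of two >= x. */
        return (x <= 1) ? 1 : 1u << generic_fls(x - 1);
}
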
/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

static __inline__ int get_bitmask_order(unsigned int count)
{
        int order;

        order = fls(count);
        return order;   /* We could be slightly more clever with -1 here... */
}

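/*
 * Editor's illustrative sketch, not part of the original header:
 * get_bitmask_order(count) is simply fls(count), i.e. the number of bits
 * needed to hold the value count itself: get_bitmask_order(5) == 3 and
 * get_bitmask_order(0x10) == 5.  The hypothetical helper below uses it to
 * build a mask covering 0..count (valid while the order is below
 * BITS_PER_LONG).
 */
static inline unsigned long example_mask_for_count(unsigned int count)
{
        /* (1 << order) - 1 sets the low 'order' bits; count == 0 gives 0. */
        return (1ul << get_bitmask_order(count)) - 1;
}
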
/*
 * hweightN: returns the Hamming weight (i.e. the number
 * of bits set) of an N-bit word
 */

static inline unsigned int generic_hweight32(unsigned int w)
{
        unsigned int res = (w & 0x55555555) + ((w >> 1) & 0x55555555);
        res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
        res = (res & 0x0F0F0F0F) + ((res >> 4) & 0x0F0F0F0F);
        res = (res & 0x00FF00FF) + ((res >> 8) & 0x00FF00FF);
        return (res & 0x0000FFFF) + ((res >> 16) & 0x0000FFFF);
}

static inline unsigned int generic_hweight16(unsigned int w)
{
        unsigned int res = (w & 0x5555) + ((w >> 1) & 0x5555);
        res = (res & 0x3333) + ((res >> 2) & 0x3333);
        res = (res & 0x0F0F) + ((res >> 4) & 0x0F0F);
        return (res & 0x00FF) + ((res >> 8) & 0x00FF);
}

static inline unsigned int generic_hweight8(unsigned int w)
{
        unsigned int res = (w & 0x55) + ((w >> 1) & 0x55);
        res = (res & 0x33) + ((res >> 2) & 0x33);
        return (res & 0x0F) + ((res >> 4) & 0x0F);
}

static inline unsigned long generic_hweight64(__u64 w)
{
#if BITS_PER_LONG < 64
        return generic_hweight32((unsigned int)(w >> 32)) +
                generic_hweight32((unsigned int)w);
#else
        u64 res;
        res = (w & 0x5555555555555555ul) + ((w >> 1) & 0x5555555555555555ul);
        res = (res & 0x3333333333333333ul) + ((res >> 2) & 0x3333333333333333ul);
        res = (res & 0x0F0F0F0F0F0F0F0Ful) + ((res >> 4) & 0x0F0F0F0F0F0F0F0Ful);
        res = (res & 0x00FF00FF00FF00FFul) + ((res >> 8) & 0x00FF00FF00FF00FFul);
        res = (res & 0x0000FFFF0000FFFFul) + ((res >> 16) & 0x0000FFFF0000FFFFul);
        return (res & 0x00000000FFFFFFFFul) + ((res >> 32) & 0x00000000FFFFFFFFul);
#endif
}

static inline unsigned long hweight_long(unsigned long w)
{
        return sizeof(w) == 4 ? generic_hweight32(w) : generic_hweight64(w);
}

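/*
 * Editor's illustrative sketch, not part of the original header: the
 * hweight helpers count set bits, e.g. generic_hweight8(0xA5) == 4,
 * generic_hweight32(0xFFFF0000) == 16 and hweight_long(0) == 0.  One
 * typical use is computing parity, as in the hypothetical helper below.
 */
static inline int example_parity32(unsigned int w)
{
        /* 1 if an odd number of bits is set in w, 0 otherwise. */
        return generic_hweight32(w) & 1;
}
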
/*
 * rol32 - rotate a 32-bit value left
 *
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
        return (word << shift) | (word >> (32 - shift));
}

/*
 * ror32 - rotate a 32-bit value right
 *
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
        return (word >> shift) | (word << (32 - shift));
}

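/*
 * Editor's illustrative sketch, not part of the original header: rol32()
 * and ror32() are inverses for the same shift, e.g.
 * rol32(0x80000001, 1) == 0x00000003 and ror32(0x00000003, 1) == 0x80000001.
 * Note the shift should stay in 1..31: a shift of 0 (or 32) would evaluate
 * word >> 32 or word << 32, which is undefined for a 32-bit type in C.
 * The hypothetical helper below shows a toy mixing step built on rol32().
 */
static inline __u32 example_rotl_mix(__u32 x)
{
        /* XOR a left-rotated copy of x back into x (shift fixed at 7). */
        return x ^ rol32(x, 7);
}
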
#endif