aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-x86
diff options
context:
space:
mode:
authorLinus Torvalds <torvalds@linux-foundation.org>2008-04-26 16:46:11 -0400
committerLinus Torvalds <torvalds@linux-foundation.org>2008-04-26 16:46:11 -0400
commit9b79ed952bd7344d40152f8a560ad8a0d93f3886 (patch)
tree0cdf72321a9eeb2a766b7b98d5a87ad3d46ad620 /include/asm-x86
parenta52b0d25a722e84da999005b75f972aa4824253c (diff)
parent19870def587554c4055df3e74a21508e3647fb7e (diff)
Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/x86/linux-2.6-generic-bitops-v3
* 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/x86/linux-2.6-generic-bitops-v3: x86, bitops: select the generic bitmap search functions x86: include/asm-x86/pgalloc.h/bitops.h: checkpatch cleanups - formatting only x86: finalize bitops unification x86, UML: remove x86-specific implementations of find_first_bit x86: optimize find_first_bit for small bitmaps x86: switch 64-bit to generic find_first_bit x86: generic versions of find_first_(zero_)bit, convert i386 bitops: use __fls for fls64 on 64-bit archs generic: implement __fls on all 64-bit archs generic: introduce a generic __fls implementation x86: merge the simple bitops and move them to bitops.h x86, generic: optimize find_next_(zero_)bit for small constant-size bitmaps x86, uml: fix uml with generic find_next_bit for x86 x86: change x86 to use generic find_next_bit uml: Kconfig cleanup uml: fix build error
Diffstat (limited to 'include/asm-x86')
-rw-r--r--include/asm-x86/bitops.h149
-rw-r--r--include/asm-x86/bitops_32.h166
-rw-r--r--include/asm-x86/bitops_64.h162
3 files changed, 136 insertions, 341 deletions
diff --git a/include/asm-x86/bitops.h b/include/asm-x86/bitops.h
index 1ae7b270a1ef..b81a4d4d3337 100644
--- a/include/asm-x86/bitops.h
+++ b/include/asm-x86/bitops.h
@@ -62,12 +62,9 @@ static inline void set_bit(int nr, volatile void *addr)
62 */ 62 */
63static inline void __set_bit(int nr, volatile void *addr) 63static inline void __set_bit(int nr, volatile void *addr)
64{ 64{
65 asm volatile("bts %1,%0" 65 asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
66 : ADDR
67 : "Ir" (nr) : "memory");
68} 66}
69 67
70
71/** 68/**
72 * clear_bit - Clears a bit in memory 69 * clear_bit - Clears a bit in memory
73 * @nr: Bit to clear 70 * @nr: Bit to clear
@@ -297,19 +294,145 @@ static inline int variable_test_bit(int nr, volatile const void *addr)
297static int test_bit(int nr, const volatile unsigned long *addr); 294static int test_bit(int nr, const volatile unsigned long *addr);
298#endif 295#endif
299 296
300#define test_bit(nr,addr) \ 297#define test_bit(nr, addr) \
301 (__builtin_constant_p(nr) ? \ 298 (__builtin_constant_p((nr)) \
302 constant_test_bit((nr),(addr)) : \ 299 ? constant_test_bit((nr), (addr)) \
303 variable_test_bit((nr),(addr))) 300 : variable_test_bit((nr), (addr)))
301
302/**
303 * __ffs - find first set bit in word
304 * @word: The word to search
305 *
306 * Undefined if no bit exists, so code should check against 0 first.
307 */
308static inline unsigned long __ffs(unsigned long word)
309{
310 asm("bsf %1,%0"
311 : "=r" (word)
312 : "rm" (word));
313 return word;
314}
315
316/**
317 * ffz - find first zero bit in word
318 * @word: The word to search
319 *
320 * Undefined if no zero exists, so code should check against ~0UL first.
321 */
322static inline unsigned long ffz(unsigned long word)
323{
324 asm("bsf %1,%0"
325 : "=r" (word)
326 : "r" (~word));
327 return word;
328}
329
330/*
331 * __fls: find last set bit in word
332 * @word: The word to search
333 *
 334 * Undefined if no bit exists, so code should check against 0 first.
335 */
336static inline unsigned long __fls(unsigned long word)
337{
338 asm("bsr %1,%0"
339 : "=r" (word)
340 : "rm" (word));
341 return word;
342}
343
344#ifdef __KERNEL__
345/**
346 * ffs - find first set bit in word
347 * @x: the word to search
348 *
349 * This is defined the same way as the libc and compiler builtin ffs
350 * routines, therefore differs in spirit from the other bitops.
351 *
352 * ffs(value) returns 0 if value is 0 or the position of the first
353 * set bit if value is nonzero. The first (least significant) bit
354 * is at position 1.
355 */
356static inline int ffs(int x)
357{
358 int r;
359#ifdef CONFIG_X86_CMOV
360 asm("bsfl %1,%0\n\t"
361 "cmovzl %2,%0"
362 : "=r" (r) : "rm" (x), "r" (-1));
363#else
364 asm("bsfl %1,%0\n\t"
365 "jnz 1f\n\t"
366 "movl $-1,%0\n"
367 "1:" : "=r" (r) : "rm" (x));
368#endif
369 return r + 1;
370}
371
372/**
373 * fls - find last set bit in word
374 * @x: the word to search
375 *
376 * This is defined in a similar way as the libc and compiler builtin
377 * ffs, but returns the position of the most significant set bit.
378 *
379 * fls(value) returns 0 if value is 0 or the position of the last
380 * set bit if value is nonzero. The last (most significant) bit is
381 * at position 32.
382 */
383static inline int fls(int x)
384{
385 int r;
386#ifdef CONFIG_X86_CMOV
387 asm("bsrl %1,%0\n\t"
388 "cmovzl %2,%0"
389 : "=&r" (r) : "rm" (x), "rm" (-1));
390#else
391 asm("bsrl %1,%0\n\t"
392 "jnz 1f\n\t"
393 "movl $-1,%0\n"
394 "1:" : "=r" (r) : "rm" (x));
395#endif
396 return r + 1;
397}
398#endif /* __KERNEL__ */
304 399
305#undef BASE_ADDR 400#undef BASE_ADDR
306#undef BIT_ADDR 401#undef BIT_ADDR
307#undef ADDR 402#undef ADDR
308 403
309#ifdef CONFIG_X86_32 404static inline void set_bit_string(unsigned long *bitmap,
310# include "bitops_32.h" 405 unsigned long i, int len)
311#else 406{
312# include "bitops_64.h" 407 unsigned long end = i + len;
313#endif 408 while (i < end) {
409 __set_bit(i, bitmap);
410 i++;
411 }
412}
413
414#ifdef __KERNEL__
415
416#include <asm-generic/bitops/sched.h>
417
418#define ARCH_HAS_FAST_MULTIPLIER 1
419
420#include <asm-generic/bitops/hweight.h>
421
422#endif /* __KERNEL__ */
423
424#include <asm-generic/bitops/fls64.h>
425
426#ifdef __KERNEL__
427
428#include <asm-generic/bitops/ext2-non-atomic.h>
429
430#define ext2_set_bit_atomic(lock, nr, addr) \
431 test_and_set_bit((nr), (unsigned long *)(addr))
432#define ext2_clear_bit_atomic(lock, nr, addr) \
433 test_and_clear_bit((nr), (unsigned long *)(addr))
434
435#include <asm-generic/bitops/minix.h>
314 436
437#endif /* __KERNEL__ */
315#endif /* _ASM_X86_BITOPS_H */ 438#endif /* _ASM_X86_BITOPS_H */
diff --git a/include/asm-x86/bitops_32.h b/include/asm-x86/bitops_32.h
deleted file mode 100644
index 2513a81f82aa..000000000000
--- a/include/asm-x86/bitops_32.h
+++ /dev/null
@@ -1,166 +0,0 @@
1#ifndef _I386_BITOPS_H
2#define _I386_BITOPS_H
3
4/*
5 * Copyright 1992, Linus Torvalds.
6 */
7
8/**
9 * find_first_zero_bit - find the first zero bit in a memory region
10 * @addr: The address to start the search at
11 * @size: The maximum size to search
12 *
13 * Returns the bit number of the first zero bit, not the number of the byte
14 * containing a bit.
15 */
16static inline int find_first_zero_bit(const unsigned long *addr, unsigned size)
17{
18 int d0, d1, d2;
19 int res;
20
21 if (!size)
22 return 0;
23 /* This looks at memory.
24 * Mark it volatile to tell gcc not to move it around
25 */
26 asm volatile("movl $-1,%%eax\n\t"
27 "xorl %%edx,%%edx\n\t"
28 "repe; scasl\n\t"
29 "je 1f\n\t"
30 "xorl -4(%%edi),%%eax\n\t"
31 "subl $4,%%edi\n\t"
32 "bsfl %%eax,%%edx\n"
33 "1:\tsubl %%ebx,%%edi\n\t"
34 "shll $3,%%edi\n\t"
35 "addl %%edi,%%edx"
36 : "=d" (res), "=&c" (d0), "=&D" (d1), "=&a" (d2)
37 : "1" ((size + 31) >> 5), "2" (addr),
38 "b" (addr) : "memory");
39 return res;
40}
41
42/**
43 * find_next_zero_bit - find the first zero bit in a memory region
44 * @addr: The address to base the search on
45 * @offset: The bit number to start searching at
46 * @size: The maximum size to search
47 */
48int find_next_zero_bit(const unsigned long *addr, int size, int offset);
49
50/**
51 * __ffs - find first bit in word.
52 * @word: The word to search
53 *
54 * Undefined if no bit exists, so code should check against 0 first.
55 */
56static inline unsigned long __ffs(unsigned long word)
57{
58 __asm__("bsfl %1,%0"
59 :"=r" (word)
60 :"rm" (word));
61 return word;
62}
63
64/**
65 * find_first_bit - find the first set bit in a memory region
66 * @addr: The address to start the search at
67 * @size: The maximum size to search
68 *
69 * Returns the bit number of the first set bit, not the number of the byte
70 * containing a bit.
71 */
72static inline unsigned find_first_bit(const unsigned long *addr, unsigned size)
73{
74 unsigned x = 0;
75
76 while (x < size) {
77 unsigned long val = *addr++;
78 if (val)
79 return __ffs(val) + x;
80 x += sizeof(*addr) << 3;
81 }
82 return x;
83}
84
85/**
86 * find_next_bit - find the first set bit in a memory region
87 * @addr: The address to base the search on
88 * @offset: The bit number to start searching at
89 * @size: The maximum size to search
90 */
91int find_next_bit(const unsigned long *addr, int size, int offset);
92
93/**
94 * ffz - find first zero in word.
95 * @word: The word to search
96 *
97 * Undefined if no zero exists, so code should check against ~0UL first.
98 */
99static inline unsigned long ffz(unsigned long word)
100{
101 __asm__("bsfl %1,%0"
102 :"=r" (word)
103 :"r" (~word));
104 return word;
105}
106
107#ifdef __KERNEL__
108
109#include <asm-generic/bitops/sched.h>
110
111/**
112 * ffs - find first bit set
113 * @x: the word to search
114 *
115 * This is defined the same way as
116 * the libc and compiler builtin ffs routines, therefore
117 * differs in spirit from the above ffz() (man ffs).
118 */
119static inline int ffs(int x)
120{
121 int r;
122
123 __asm__("bsfl %1,%0\n\t"
124 "jnz 1f\n\t"
125 "movl $-1,%0\n"
126 "1:" : "=r" (r) : "rm" (x));
127 return r+1;
128}
129
130/**
131 * fls - find last bit set
132 * @x: the word to search
133 *
134 * This is defined the same way as ffs().
135 */
136static inline int fls(int x)
137{
138 int r;
139
140 __asm__("bsrl %1,%0\n\t"
141 "jnz 1f\n\t"
142 "movl $-1,%0\n"
143 "1:" : "=r" (r) : "rm" (x));
144 return r+1;
145}
146
147#include <asm-generic/bitops/hweight.h>
148
149#endif /* __KERNEL__ */
150
151#include <asm-generic/bitops/fls64.h>
152
153#ifdef __KERNEL__
154
155#include <asm-generic/bitops/ext2-non-atomic.h>
156
157#define ext2_set_bit_atomic(lock, nr, addr) \
158 test_and_set_bit((nr), (unsigned long *)(addr))
159#define ext2_clear_bit_atomic(lock, nr, addr) \
160 test_and_clear_bit((nr), (unsigned long *)(addr))
161
162#include <asm-generic/bitops/minix.h>
163
164#endif /* __KERNEL__ */
165
166#endif /* _I386_BITOPS_H */
diff --git a/include/asm-x86/bitops_64.h b/include/asm-x86/bitops_64.h
deleted file mode 100644
index 365f8207ea59..000000000000
--- a/include/asm-x86/bitops_64.h
+++ /dev/null
@@ -1,162 +0,0 @@
1#ifndef _X86_64_BITOPS_H
2#define _X86_64_BITOPS_H
3
4/*
5 * Copyright 1992, Linus Torvalds.
6 */
7
8extern long find_first_zero_bit(const unsigned long *addr, unsigned long size);
9extern long find_next_zero_bit(const unsigned long *addr, long size, long offset);
10extern long find_first_bit(const unsigned long *addr, unsigned long size);
11extern long find_next_bit(const unsigned long *addr, long size, long offset);
12
 13 /* return index of first bit set in val or max when no bit is set */
14static inline long __scanbit(unsigned long val, unsigned long max)
15{
16 asm("bsfq %1,%0 ; cmovz %2,%0" : "=&r" (val) : "r" (val), "r" (max));
17 return val;
18}
19
20#define find_next_bit(addr,size,off) \
21((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
22 ((off) + (__scanbit((*(unsigned long *)addr) >> (off),(size)-(off)))) : \
23 find_next_bit(addr,size,off)))
24
25#define find_next_zero_bit(addr,size,off) \
26((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
27 ((off)+(__scanbit(~(((*(unsigned long *)addr)) >> (off)),(size)-(off)))) : \
28 find_next_zero_bit(addr,size,off)))
29
30#define find_first_bit(addr, size) \
31 ((__builtin_constant_p((size)) && (size) <= BITS_PER_LONG \
32 ? (__scanbit(*(unsigned long *)(addr), (size))) \
33 : find_first_bit((addr), (size))))
34
35#define find_first_zero_bit(addr, size) \
36 ((__builtin_constant_p((size)) && (size) <= BITS_PER_LONG \
37 ? (__scanbit(~*(unsigned long *)(addr), (size))) \
38 : find_first_zero_bit((addr), (size))))
39
40static inline void set_bit_string(unsigned long *bitmap, unsigned long i,
41 int len)
42{
43 unsigned long end = i + len;
44 while (i < end) {
45 __set_bit(i, bitmap);
46 i++;
47 }
48}
49
50/**
51 * ffz - find first zero in word.
52 * @word: The word to search
53 *
54 * Undefined if no zero exists, so code should check against ~0UL first.
55 */
56static inline unsigned long ffz(unsigned long word)
57{
58 __asm__("bsfq %1,%0"
59 :"=r" (word)
60 :"r" (~word));
61 return word;
62}
63
64/**
65 * __ffs - find first bit in word.
66 * @word: The word to search
67 *
68 * Undefined if no bit exists, so code should check against 0 first.
69 */
70static inline unsigned long __ffs(unsigned long word)
71{
72 __asm__("bsfq %1,%0"
73 :"=r" (word)
74 :"rm" (word));
75 return word;
76}
77
78/*
79 * __fls: find last bit set.
80 * @word: The word to search
81 *
 82 * Undefined if no bit exists, so code should check against 0 first.
83 */
84static inline unsigned long __fls(unsigned long word)
85{
86 __asm__("bsrq %1,%0"
87 :"=r" (word)
88 :"rm" (word));
89 return word;
90}
91
92#ifdef __KERNEL__
93
94#include <asm-generic/bitops/sched.h>
95
96/**
97 * ffs - find first bit set
98 * @x: the word to search
99 *
100 * This is defined the same way as
101 * the libc and compiler builtin ffs routines, therefore
102 * differs in spirit from the above ffz (man ffs).
103 */
104static inline int ffs(int x)
105{
106 int r;
107
108 __asm__("bsfl %1,%0\n\t"
109 "cmovzl %2,%0"
110 : "=r" (r) : "rm" (x), "r" (-1));
111 return r+1;
112}
113
114/**
115 * fls64 - find last bit set in 64 bit word
116 * @x: the word to search
117 *
118 * This is defined the same way as fls.
119 */
120static inline int fls64(__u64 x)
121{
122 if (x == 0)
123 return 0;
124 return __fls(x) + 1;
125}
126
127/**
128 * fls - find last bit set
129 * @x: the word to search
130 *
131 * This is defined the same way as ffs.
132 */
133static inline int fls(int x)
134{
135 int r;
136
137 __asm__("bsrl %1,%0\n\t"
138 "cmovzl %2,%0"
139 : "=&r" (r) : "rm" (x), "rm" (-1));
140 return r+1;
141}
142
143#define ARCH_HAS_FAST_MULTIPLIER 1
144
145#include <asm-generic/bitops/hweight.h>
146
147#endif /* __KERNEL__ */
148
149#ifdef __KERNEL__
150
151#include <asm-generic/bitops/ext2-non-atomic.h>
152
153#define ext2_set_bit_atomic(lock, nr, addr) \
154 test_and_set_bit((nr), (unsigned long *)(addr))
155#define ext2_clear_bit_atomic(lock, nr, addr) \
156 test_and_clear_bit((nr), (unsigned long *)(addr))
157
158#include <asm-generic/bitops/minix.h>
159
160#endif /* __KERNEL__ */
161
162#endif /* _X86_64_BITOPS_H */