path: root/include/asm-x86/bitops.h
Diffstat (limited to 'include/asm-x86/bitops.h')
-rw-r--r--  include/asm-x86/bitops.h | 149
 1 file changed, 136 insertions(+), 13 deletions(-)
diff --git a/include/asm-x86/bitops.h b/include/asm-x86/bitops.h
index 1ae7b270a1ef..b81a4d4d3337 100644
--- a/include/asm-x86/bitops.h
+++ b/include/asm-x86/bitops.h
@@ -62,12 +62,9 @@ static inline void set_bit(int nr, volatile void *addr)
  */
 static inline void __set_bit(int nr, volatile void *addr)
 {
-	asm volatile("bts %1,%0"
-		     : ADDR
-		     : "Ir" (nr) : "memory");
+	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
 }
 
-
 /**
  * clear_bit - Clears a bit in memory
  * @nr: Bit to clear
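
For readers less familiar with the inline assembly being compacted here: "bts %1,%0" is the x86 bit-test-and-set instruction, and __set_bit() is the non-atomic variant of set_bit(). A rough, portable C sketch of the same operation (illustrative only, not part of the patch, and using a hypothetical name) would be:

/* Hypothetical, illustrative-only C equivalent of the non-atomic
 * __set_bit() above; the real helper uses the bts instruction.
 * Bits are numbered across an array of unsigned longs. */
static inline void sketch_set_bit(int nr, volatile unsigned long *addr)
{
	addr[nr / (8 * sizeof(unsigned long))] |=
		1UL << (nr % (8 * sizeof(unsigned long)));
}
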
@@ -297,19 +294,145 @@ static inline int variable_test_bit(int nr, volatile const void *addr)
 static int test_bit(int nr, const volatile unsigned long *addr);
 #endif
 
-#define test_bit(nr,addr) \
-	(__builtin_constant_p(nr) ? \
-	 constant_test_bit((nr),(addr)) : \
-	 variable_test_bit((nr),(addr)))
+#define test_bit(nr, addr)			\
+	(__builtin_constant_p((nr))		\
+	 ? constant_test_bit((nr), (addr))	\
+	 : variable_test_bit((nr), (addr)))
+
+/**
+ * __ffs - find first set bit in word
+ * @word: The word to search
+ *
+ * Undefined if no bit exists, so code should check against 0 first.
+ */
+static inline unsigned long __ffs(unsigned long word)
+{
+	asm("bsf %1,%0"
+	    : "=r" (word)
+	    : "rm" (word));
+	return word;
+}
+
+/**
+ * ffz - find first zero bit in word
+ * @word: The word to search
+ *
+ * Undefined if no zero exists, so code should check against ~0UL first.
+ */
+static inline unsigned long ffz(unsigned long word)
+{
+	asm("bsf %1,%0"
+	    : "=r" (word)
+	    : "r" (~word));
+	return word;
+}
+
+/**
+ * __fls - find last set bit in word
+ * @word: The word to search
+ *
+ * Undefined if no set bit exists, so code should check against 0 first.
+ */
+static inline unsigned long __fls(unsigned long word)
+{
+	asm("bsr %1,%0"
+	    : "=r" (word)
+	    : "rm" (word));
+	return word;
+}
+
+#ifdef __KERNEL__
+/**
+ * ffs - find first set bit in word
+ * @x: the word to search
+ *
+ * This is defined the same way as the libc and compiler builtin ffs
+ * routines, therefore differs in spirit from the other bitops.
+ *
+ * ffs(value) returns 0 if value is 0 or the position of the first
+ * set bit if value is nonzero. The first (least significant) bit
+ * is at position 1.
+ */
+static inline int ffs(int x)
+{
+	int r;
+#ifdef CONFIG_X86_CMOV
+	asm("bsfl %1,%0\n\t"
+	    "cmovzl %2,%0"
+	    : "=r" (r) : "rm" (x), "r" (-1));
+#else
+	asm("bsfl %1,%0\n\t"
+	    "jnz 1f\n\t"
+	    "movl $-1,%0\n"
+	    "1:" : "=r" (r) : "rm" (x));
+#endif
+	return r + 1;
+}
+
+/**
+ * fls - find last set bit in word
+ * @x: the word to search
+ *
+ * This is defined in a similar way as the libc and compiler builtin
+ * ffs, but returns the position of the most significant set bit.
+ *
+ * fls(value) returns 0 if value is 0 or the position of the last
+ * set bit if value is nonzero. The last (most significant) bit is
+ * at position 32.
+ */
+static inline int fls(int x)
+{
+	int r;
+#ifdef CONFIG_X86_CMOV
+	asm("bsrl %1,%0\n\t"
+	    "cmovzl %2,%0"
+	    : "=&r" (r) : "rm" (x), "rm" (-1));
+#else
+	asm("bsrl %1,%0\n\t"
+	    "jnz 1f\n\t"
+	    "movl $-1,%0\n"
+	    "1:" : "=r" (r) : "rm" (x));
+#endif
+	return r + 1;
+}
+#endif /* __KERNEL__ */
 
 #undef BASE_ADDR
 #undef BIT_ADDR
 #undef ADDR
 
-#ifdef CONFIG_X86_32
-# include "bitops_32.h"
-#else
-# include "bitops_64.h"
-#endif
+static inline void set_bit_string(unsigned long *bitmap,
+				  unsigned long i, int len)
+{
+	unsigned long end = i + len;
+	while (i < end) {
+		__set_bit(i, bitmap);
+		i++;
+	}
+}
+
+#ifdef __KERNEL__
+
+#include <asm-generic/bitops/sched.h>
+
+#define ARCH_HAS_FAST_MULTIPLIER 1
+
+#include <asm-generic/bitops/hweight.h>
+
+#endif /* __KERNEL__ */
+
+#include <asm-generic/bitops/fls64.h>
+
+#ifdef __KERNEL__
+
+#include <asm-generic/bitops/ext2-non-atomic.h>
+
+#define ext2_set_bit_atomic(lock, nr, addr)			\
+	test_and_set_bit((nr), (unsigned long *)(addr))
+#define ext2_clear_bit_atomic(lock, nr, addr)			\
+	test_and_clear_bit((nr), (unsigned long *)(addr))
+
+#include <asm-generic/bitops/minix.h>
 
+#endif /* __KERNEL__ */
 #endif /* _ASM_X86_BITOPS_H */
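
To make the return conventions documented above concrete: ffs() and fls() are 1-based and return 0 for a zero argument, while __ffs(), ffz() and __fls() are 0-based and undefined when no matching bit exists. The standalone userspace sketch below (illustrative only; it relies on GCC builtins as stand-ins rather than this header) checks a few representative values:

/* Standalone sketch, not part of the patch: sanity-checks the
 * documented return conventions using GCC builtins as stand-ins. */
#include <assert.h>

int main(void)
{
	assert(__builtin_ffs(0) == 0);		/* ffs(0) == 0, 1-based otherwise */
	assert(__builtin_ffs(0x10) == 5);	/* ffs(0x10) == 5 (bit 4 is set)  */

	/* fls(x): 1-based position of the most significant set bit. */
	assert(32 - __builtin_clz(0x10) == 5);

	/* __ffs(x): 0-based, undefined for x == 0. */
	assert(__builtin_ctzl(0x10UL) == 4);

	/* ffz(x) == __ffs(~x): first zero bit, undefined for x == ~0UL. */
	assert(__builtin_ctzl(~0xfUL) == 4);

	return 0;
}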