Diffstat (limited to 'include/asm-alpha')
-rw-r--r--  include/asm-alpha/bitops.h  |  123
1 file changed, 7 insertions, 116 deletions
diff --git a/include/asm-alpha/bitops.h b/include/asm-alpha/bitops.h
index e3e602fe5060..3f88715e811e 100644
--- a/include/asm-alpha/bitops.h
+++ b/include/asm-alpha/bitops.h
@@ -319,9 +319,9 @@ static inline int fls(int word)
 	return 64 - __kernel_ctlz(word & 0xffffffff);
 }
 #else
-#define fls generic_fls
+#include <asm-generic/bitops/fls.h>
 #endif
-#define fls64 generic_fls64
+#include <asm-generic/bitops/fls64.h>
 
 /* Compute powers of two for the given integer. */
 static inline long floor_log2(unsigned long word)
@@ -358,112 +358,12 @@ static inline unsigned long hweight64(unsigned long w)
 #define hweight16(x) (unsigned int) hweight64((x) & 0xfffful)
 #define hweight8(x) (unsigned int) hweight64((x) & 0xfful)
 #else
-static inline unsigned long hweight64(unsigned long w)
-{
-	unsigned long result;
-	for (result = 0; w ; w >>= 1)
-		result += (w & 1);
-	return result;
-}
-
-#define hweight32(x) generic_hweight32(x)
-#define hweight16(x) generic_hweight16(x)
-#define hweight8(x) generic_hweight8(x)
+#include <asm-generic/bitops/hweight.h>
 #endif
 
 #endif /* __KERNEL__ */
 
-/*
- * Find next zero bit in a bitmap reasonably efficiently..
- */
-static inline unsigned long
-find_next_zero_bit(const void *addr, unsigned long size, unsigned long offset)
-{
-	const unsigned long *p = addr;
-	unsigned long result = offset & ~63UL;
-	unsigned long tmp;
-
-	p += offset >> 6;
-	if (offset >= size)
-		return size;
-	size -= result;
-	offset &= 63UL;
-	if (offset) {
-		tmp = *(p++);
-		tmp |= ~0UL >> (64-offset);
-		if (size < 64)
-			goto found_first;
-		if (~tmp)
-			goto found_middle;
-		size -= 64;
-		result += 64;
-	}
-	while (size & ~63UL) {
-		if (~(tmp = *(p++)))
-			goto found_middle;
-		result += 64;
-		size -= 64;
-	}
-	if (!size)
-		return result;
-	tmp = *p;
- found_first:
-	tmp |= ~0UL << size;
-	if (tmp == ~0UL)	/* Are any bits zero? */
-		return result + size; /* Nope. */
- found_middle:
-	return result + ffz(tmp);
-}
-
-/*
- * Find next one bit in a bitmap reasonably efficiently.
- */
-static inline unsigned long
-find_next_bit(const void * addr, unsigned long size, unsigned long offset)
-{
-	const unsigned long *p = addr;
-	unsigned long result = offset & ~63UL;
-	unsigned long tmp;
-
-	p += offset >> 6;
-	if (offset >= size)
-		return size;
-	size -= result;
-	offset &= 63UL;
-	if (offset) {
-		tmp = *(p++);
-		tmp &= ~0UL << offset;
-		if (size < 64)
-			goto found_first;
-		if (tmp)
-			goto found_middle;
-		size -= 64;
-		result += 64;
-	}
-	while (size & ~63UL) {
-		if ((tmp = *(p++)))
-			goto found_middle;
-		result += 64;
-		size -= 64;
-	}
-	if (!size)
-		return result;
-	tmp = *p;
- found_first:
-	tmp &= ~0UL >> (64 - size);
-	if (!tmp)
-		return result + size;
- found_middle:
-	return result + __ffs(tmp);
-}
-
-/*
- * The optimizer actually does good code for this case.
- */
-#define find_first_zero_bit(addr, size) \
-	find_next_zero_bit((addr), (size), 0)
-#define find_first_bit(addr, size) \
-	find_next_bit((addr), (size), 0)
+#include <asm-generic/bitops/find.h>
 
 #ifdef __KERNEL__
 
@@ -487,21 +387,12 @@ sched_find_first_bit(unsigned long b[3])
 	return __ffs(b0) + ofs;
 }
 
+#include <asm-generic/bitops/ext2-non-atomic.h>
 
-#define ext2_set_bit __test_and_set_bit
 #define ext2_set_bit_atomic(l,n,a) test_and_set_bit(n,a)
-#define ext2_clear_bit __test_and_clear_bit
 #define ext2_clear_bit_atomic(l,n,a) test_and_clear_bit(n,a)
-#define ext2_test_bit test_bit
-#define ext2_find_first_zero_bit find_first_zero_bit
-#define ext2_find_next_zero_bit find_next_zero_bit
-
-/* Bitmap functions for the minix filesystem. */
-#define minix_test_and_set_bit(nr,addr) __test_and_set_bit(nr,addr)
-#define minix_set_bit(nr,addr) __set_bit(nr,addr)
-#define minix_test_and_clear_bit(nr,addr) __test_and_clear_bit(nr,addr)
-#define minix_test_bit(nr,addr) test_bit(nr,addr)
-#define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size)
+
+#include <asm-generic/bitops/minix.h>
 
 #endif /* __KERNEL__ */
 
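For reference, the word-at-a-time scan that the deleted Alpha-specific find_next_zero_bit() performed, and that the shared <asm-generic/bitops/find.h> code now supplies once for all architectures, can be exercised outside the kernel. The sketch below is lifted from the removed hunk and adapted to userspace; it assumes a 64-bit unsigned long and a GCC/Clang-style __builtin_ctzl(), and its ffz() helper is a local stand-in, not the kernel's Alpha implementation.

#include <stdio.h>

/* Local stand-in for the kernel's ffz(): index of the first zero bit.
 * Only meaningful when at least one bit of 'word' is clear. */
static unsigned long ffz(unsigned long word)
{
	return (unsigned long)__builtin_ctzl(~word);
}

/* Same logic as the removed Alpha find_next_zero_bit(): scan 64-bit
 * words, mask off bits below 'offset' in the first word, and return
 * 'size' when no zero bit is found. */
static unsigned long
find_next_zero_bit(const void *addr, unsigned long size, unsigned long offset)
{
	const unsigned long *p = addr;
	unsigned long result = offset & ~63UL;
	unsigned long tmp;

	p += offset >> 6;
	if (offset >= size)
		return size;
	size -= result;
	offset &= 63UL;
	if (offset) {
		tmp = *(p++);
		tmp |= ~0UL >> (64 - offset);	/* treat skipped bits as set */
		if (size < 64)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 64;
		result += 64;
	}
	while (size & ~63UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 64;
		size -= 64;
	}
	if (!size)
		return result;
	tmp = *p;
found_first:
	tmp |= ~0UL << size;		/* treat bits past 'size' as set */
	if (tmp == ~0UL)		/* any zero bits at all? */
		return result + size;	/* no: report "not found" */
found_middle:
	return result + ffz(tmp);
}

int main(void)
{
	/* Word 0 is all ones; word 1 has bits 64 and 66 set, bit 65 clear. */
	unsigned long bitmap[2] = { ~0UL, 0x5UL };

	printf("%lu\n", find_next_zero_bit(bitmap, 128, 3));	/* prints 65 */
	return 0;
}

Built with a plain cc on a 64-bit host, the call prints 65, the first clear bit at or after offset 3, which is the same answer the generic helper is expected to give for this bitmap.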