Diffstat (limited to 'arch/s390/include/asm/bitops.h')
 arch/s390/include/asm/bitops.h | 81 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 81 insertions(+)
diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index 6f573890fb28..15422933c60b 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -640,6 +640,87 @@ static inline unsigned long find_first_bit(const unsigned long * addr,
 }
 #define find_first_bit find_first_bit
 
+/*
+ * Big endian variant which starts bit counting from left using
+ * the flogr (find leftmost one) instruction.
+ */
+static inline unsigned long __flo_word(unsigned long nr, unsigned long val)
+{
+	register unsigned long bit asm("2") = val;
+	register unsigned long out asm("3");
+
+	asm volatile (
+		"	.insn	rre,0xb9830000,%[bit],%[bit]\n"
+		: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
+	return nr + bit;
+}
+
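For reference: flogr scans its 64-bit operand starting at the most
significant bit (bit 0 in s390 numbering) and returns the position of the
leftmost one, or 64 if the operand is zero. It writes an even/odd register
pair, which is why bit is pinned to r2 and out to r3; out exists only to
tell the compiler that r3 is clobbered. A minimal portable sketch of what
__flo_word computes, using GCC's __builtin_clzl as a stand-in for the
instruction (flo_word_model is a hypothetical name, not part of the patch):

static inline unsigned long flo_word_model(unsigned long nr, unsigned long val)
{
	/* __builtin_clzl is undefined for 0, so model flogr's
	 * zero-operand result (64) explicitly */
	if (!val)
		return nr + 8 * sizeof(unsigned long);
	/* leading-zero count == position of the leftmost one,
	 * counting from the MSB as bit 0 */
	return nr + (unsigned long)__builtin_clzl(val);
}
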
+/*
+ * 64 bit special left bitops format:
+ * order in memory:
+ *    00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f
+ *    10 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f
+ *    20 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f
+ *    30 31 32 33 34 35 36 37 38 39 3a 3b 3c 3d 3e 3f
+ * after that follows the next long with bit numbers
+ *    40 41 42 43 44 45 46 47 48 49 4a 4b 4c 4d 4e 4f
+ *    50 51 52 53 54 55 56 57 58 59 5a 5b 5c 5d 5e 5f
+ *    60 61 62 63 64 65 66 67 68 69 6a 6b 6c 6d 6e 6f
+ *    70 71 72 73 74 75 76 77 78 79 7a 7b 7c 7d 7e 7f
+ * The reason for this bit ordering is the fact that
+ * the hardware sets bits in a bitmap starting at bit 0
+ * and we don't want to scan the bitmap from the 'wrong
+ * end'.
+ */
+static inline unsigned long find_first_bit_left(const unsigned long *addr,
+						unsigned long size)
+{
+	unsigned long bytes, bits;
+
+	if (!size)
+		return 0;
+	bytes = __ffs_word_loop(addr, size);
+	bits = __flo_word(bytes * 8, __load_ulong_be(addr, bytes));
+	return (bits < size) ? bits : size;
+}
+
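Concretely, in this left numbering bit 0 is the most significant bit of
byte 0, bit 7 is its least significant bit, and bit 8 moves on to byte 1.
A hedged sketch of that mapping with the bitmap viewed as raw bytes
(test_bit_left_model is hypothetical, for illustration only):

static inline int test_bit_left_model(const unsigned char *bytes,
				      unsigned long nr)
{
	/* left bit 'nr' lives in byte nr / 8, at the big-endian
	 * bit position 0x80 >> (nr % 8) within that byte */
	return (bytes[nr >> 3] & (0x80 >> (nr & 7))) != 0;
}

For example, test_bit_left_model(bytes, 0) inspects the 0x80 bit of the
first byte, which is exactly where the hardware deposits bit 0.
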
+static inline int find_next_bit_left(const unsigned long *addr,
+				     unsigned long size,
+				     unsigned long offset)
+{
+	const unsigned long *p;
+	unsigned long bit, set;
+
+	if (offset >= size)
+		return size;
+	bit = offset & (__BITOPS_WORDSIZE - 1);
+	offset -= bit;
+	size -= offset;
+	p = addr + offset / __BITOPS_WORDSIZE;
+	if (bit) {
+		set = __flo_word(0, *p & (~0UL >> bit));
+		if (set >= size)
+			return size + offset;
+		if (set < __BITOPS_WORDSIZE)
+			return set + offset;
+		offset += __BITOPS_WORDSIZE;
+		size -= __BITOPS_WORDSIZE;
+		p++;
+	}
+	return offset + find_first_bit_left(p, size);
+}
+
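The ~0UL >> bit mask clears left positions 0 .. bit-1, i.e. the high-order
bits of the word, so the flogr-based scan cannot report a bit before the
requested offset. A standalone model of that partial-word step, again with
__builtin_clzl standing in for flogr (next_bit_in_word_model is a
hypothetical name):

static inline unsigned long next_bit_in_word_model(unsigned long word,
						   unsigned long bit)
{
	/* ~0UL >> bit has zeros in left positions 0 .. bit-1, so the
	 * AND discards every bit before the search offset */
	word &= ~0UL >> bit;
	if (!word)
		return 8 * sizeof(unsigned long);  /* nothing left in word */
	return (unsigned long)__builtin_clzl(word);  /* leftmost remaining one */
}

For example, with word = 0xc000000000000000UL (left bits 0 and 1 set) and
bit = 1, the mask removes left bit 0 and the scan returns 1.
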
+#define for_each_set_bit_left(bit, addr, size)				\
+	for ((bit) = find_first_bit_left((addr), (size));		\
+	     (bit) < (size);						\
+	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))
+
+/* same as for_each_set_bit_left() but start at the current value of bit */
+#define for_each_set_bit_left_cont(bit, addr, size)			\
+	for ((bit) = find_next_bit_left((addr), (size), (bit));	\
+	     (bit) < (size);						\
+	     (bit) = find_next_bit_left((addr), (size), (bit) + 1))
+
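A hedged usage sketch of the two iterators, assuming a caller that keeps a
bitmap in the left format described above (the function and variable names
are hypothetical, not part of the patch):

static void walk_left_bitmap(const unsigned long *bitmap, unsigned long size,
			     unsigned long resume)
{
	unsigned long bit;

	/* visit every set bit, scanning from the 'left' end of memory */
	for_each_set_bit_left(bit, bitmap, size)
		pr_debug("left bit %lu is set\n", bit);

	/* continue an earlier scan: the first bit visited is the first
	 * set bit at position >= resume */
	bit = resume;
	for_each_set_bit_left_cont(bit, bitmap, size)
		pr_debug("left bit %lu is set\n", bit);
}
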
 /**
  * find_next_zero_bit - find the first zero bit in a memory region
  * @addr: The address to base the search on