aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
author Akinobu Mita <akinobu.mita@gmail.com> 2013-03-23 10:05:29 -0400
committer Martin Schwidefsky <schwidefsky@de.ibm.com> 2013-04-17 08:07:29 -0400
commit 01c2475f6d959ed3beb9ce1b0bc6f8108179af65 (patch)
tree 044c092597a2154d2fc8aa642f611f254ef891c2
parent f7f8d7e51d3c31426ee006c38d5b0ae3c9b8733e (diff)
s390/bitops: remove unnecessary macro definitions in asm/bitops.h
Remove unused __BITOPS_ALIGN, and replace __BITOPS_WORDSIZE with BITS_PER_LONG.

Signed-off-by: Akinobu Mita <akinobu.mita@gmail.com>
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
-rw-r--r-- arch/s390/include/asm/bitops.h | 110
1 file changed, 53 insertions(+), 57 deletions(-)
diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index 15422933c60b..2bc357408f43 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -61,8 +61,6 @@ extern const char _sb_findmap[];
61 61
62#ifndef CONFIG_64BIT 62#ifndef CONFIG_64BIT
63 63
64#define __BITOPS_ALIGN 3
65#define __BITOPS_WORDSIZE 32
66#define __BITOPS_OR "or" 64#define __BITOPS_OR "or"
67#define __BITOPS_AND "nr" 65#define __BITOPS_AND "nr"
68#define __BITOPS_XOR "xr" 66#define __BITOPS_XOR "xr"
@@ -81,8 +79,6 @@ extern const char _sb_findmap[];
81 79
82#else /* CONFIG_64BIT */ 80#else /* CONFIG_64BIT */
83 81
84#define __BITOPS_ALIGN 7
85#define __BITOPS_WORDSIZE 64
86#define __BITOPS_OR "ogr" 82#define __BITOPS_OR "ogr"
87#define __BITOPS_AND "ngr" 83#define __BITOPS_AND "ngr"
88#define __BITOPS_XOR "xgr" 84#define __BITOPS_XOR "xgr"
@@ -101,7 +97,7 @@ extern const char _sb_findmap[];
101 97
102#endif /* CONFIG_64BIT */ 98#endif /* CONFIG_64BIT */
103 99
104#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE) 100#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)
105#define __BITOPS_BARRIER() asm volatile("" : : : "memory") 101#define __BITOPS_BARRIER() asm volatile("" : : : "memory")
106 102
107#ifdef CONFIG_SMP 103#ifdef CONFIG_SMP
@@ -114,9 +110,9 @@ static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
114 110
115 addr = (unsigned long) ptr; 111 addr = (unsigned long) ptr;
116 /* calculate address for CS */ 112 /* calculate address for CS */
117 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 113 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
118 /* make OR mask */ 114 /* make OR mask */
119 mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1)); 115 mask = 1UL << (nr & (BITS_PER_LONG - 1));
120 /* Do the atomic update. */ 116 /* Do the atomic update. */
121 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR); 117 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
122} 118}
@@ -130,9 +126,9 @@ static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
130 126
131 addr = (unsigned long) ptr; 127 addr = (unsigned long) ptr;
132 /* calculate address for CS */ 128 /* calculate address for CS */
133 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 129 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
134 /* make AND mask */ 130 /* make AND mask */
135 mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1))); 131 mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
136 /* Do the atomic update. */ 132 /* Do the atomic update. */
137 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND); 133 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
138} 134}
@@ -146,9 +142,9 @@ static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
146 142
147 addr = (unsigned long) ptr; 143 addr = (unsigned long) ptr;
148 /* calculate address for CS */ 144 /* calculate address for CS */
149 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 145 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
150 /* make XOR mask */ 146 /* make XOR mask */
151 mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1)); 147 mask = 1UL << (nr & (BITS_PER_LONG - 1));
152 /* Do the atomic update. */ 148 /* Do the atomic update. */
153 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR); 149 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
154} 150}
@@ -163,9 +159,9 @@ test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
163 159
164 addr = (unsigned long) ptr; 160 addr = (unsigned long) ptr;
165 /* calculate address for CS */ 161 /* calculate address for CS */
166 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 162 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
167 /* make OR/test mask */ 163 /* make OR/test mask */
168 mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1)); 164 mask = 1UL << (nr & (BITS_PER_LONG - 1));
169 /* Do the atomic update. */ 165 /* Do the atomic update. */
170 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR); 166 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
171 __BITOPS_BARRIER(); 167 __BITOPS_BARRIER();
@@ -182,9 +178,9 @@ test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
182 178
183 addr = (unsigned long) ptr; 179 addr = (unsigned long) ptr;
184 /* calculate address for CS */ 180 /* calculate address for CS */
185 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 181 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
186 /* make AND/test mask */ 182 /* make AND/test mask */
187 mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1))); 183 mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
188 /* Do the atomic update. */ 184 /* Do the atomic update. */
189 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND); 185 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
190 __BITOPS_BARRIER(); 186 __BITOPS_BARRIER();
@@ -201,9 +197,9 @@ test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
201 197
202 addr = (unsigned long) ptr; 198 addr = (unsigned long) ptr;
203 /* calculate address for CS */ 199 /* calculate address for CS */
204 addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3; 200 addr += (nr ^ (nr & (BITS_PER_LONG - 1))) >> 3;
205 /* make XOR/test mask */ 201 /* make XOR/test mask */
206 mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1)); 202 mask = 1UL << (nr & (BITS_PER_LONG - 1));
207 /* Do the atomic update. */ 203 /* Do the atomic update. */
208 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR); 204 __BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
209 __BITOPS_BARRIER(); 205 __BITOPS_BARRIER();
@@ -218,7 +214,7 @@ static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
218{ 214{
219 unsigned long addr; 215 unsigned long addr;
220 216
221 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 217 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
222 asm volatile( 218 asm volatile(
223 " oc %O0(1,%R0),%1" 219 " oc %O0(1,%R0),%1"
224 : "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc" ); 220 : "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc" );
@@ -229,7 +225,7 @@ __constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
229{ 225{
230 unsigned long addr; 226 unsigned long addr;
231 227
232 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 228 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
233 *(unsigned char *) addr |= 1 << (nr & 7); 229 *(unsigned char *) addr |= 1 << (nr & 7);
234} 230}
235 231
@@ -246,7 +242,7 @@ __clear_bit(unsigned long nr, volatile unsigned long *ptr)
246{ 242{
247 unsigned long addr; 243 unsigned long addr;
248 244
249 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 245 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
250 asm volatile( 246 asm volatile(
251 " nc %O0(1,%R0),%1" 247 " nc %O0(1,%R0),%1"
252 : "=Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7]) : "cc" ); 248 : "=Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7]) : "cc" );
@@ -257,7 +253,7 @@ __constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
257{ 253{
258 unsigned long addr; 254 unsigned long addr;
259 255
260 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 256 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
261 *(unsigned char *) addr &= ~(1 << (nr & 7)); 257 *(unsigned char *) addr &= ~(1 << (nr & 7));
262} 258}
263 259
@@ -273,7 +269,7 @@ static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
273{ 269{
274 unsigned long addr; 270 unsigned long addr;
275 271
276 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 272 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
277 asm volatile( 273 asm volatile(
278 " xc %O0(1,%R0),%1" 274 " xc %O0(1,%R0),%1"
279 : "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc" ); 275 : "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc" );
@@ -284,7 +280,7 @@ __constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
284{ 280{
285 unsigned long addr; 281 unsigned long addr;
286 282
287 addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 283 addr = ((unsigned long) ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
288 *(unsigned char *) addr ^= 1 << (nr & 7); 284 *(unsigned char *) addr ^= 1 << (nr & 7);
289} 285}
290 286
@@ -302,7 +298,7 @@ test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
302 unsigned long addr; 298 unsigned long addr;
303 unsigned char ch; 299 unsigned char ch;
304 300
305 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 301 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
306 ch = *(unsigned char *) addr; 302 ch = *(unsigned char *) addr;
307 asm volatile( 303 asm volatile(
308 " oc %O0(1,%R0),%1" 304 " oc %O0(1,%R0),%1"
@@ -321,7 +317,7 @@ test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
321 unsigned long addr; 317 unsigned long addr;
322 unsigned char ch; 318 unsigned char ch;
323 319
324 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 320 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
325 ch = *(unsigned char *) addr; 321 ch = *(unsigned char *) addr;
326 asm volatile( 322 asm volatile(
327 " nc %O0(1,%R0),%1" 323 " nc %O0(1,%R0),%1"
@@ -340,7 +336,7 @@ test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
340 unsigned long addr; 336 unsigned long addr;
341 unsigned char ch; 337 unsigned char ch;
342 338
343 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 339 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
344 ch = *(unsigned char *) addr; 340 ch = *(unsigned char *) addr;
345 asm volatile( 341 asm volatile(
346 " xc %O0(1,%R0),%1" 342 " xc %O0(1,%R0),%1"
@@ -376,7 +372,7 @@ static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr
376 unsigned long addr; 372 unsigned long addr;
377 unsigned char ch; 373 unsigned char ch;
378 374
379 addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3); 375 addr = (unsigned long) ptr + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
380 ch = *(volatile unsigned char *) addr; 376 ch = *(volatile unsigned char *) addr;
381 return (ch >> (nr & 7)) & 1; 377 return (ch >> (nr & 7)) & 1;
382} 378}
@@ -384,7 +380,7 @@ static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr
384static inline int 380static inline int
385__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) { 381__constant_test_bit(unsigned long nr, const volatile unsigned long *addr) {
386 return (((volatile char *) addr) 382 return (((volatile char *) addr)
387 [(nr^(__BITOPS_WORDSIZE-8))>>3] & (1<<(nr&7))) != 0; 383 [(nr^(BITS_PER_LONG-8))>>3] & (1<<(nr&7))) != 0;
388} 384}
389 385
390#define test_bit(nr,addr) \ 386#define test_bit(nr,addr) \
@@ -693,18 +689,18 @@ static inline int find_next_bit_left(const unsigned long *addr,
693 689
694 if (offset >= size) 690 if (offset >= size)
695 return size; 691 return size;
696 bit = offset & (__BITOPS_WORDSIZE - 1); 692 bit = offset & (BITS_PER_LONG - 1);
697 offset -= bit; 693 offset -= bit;
698 size -= offset; 694 size -= offset;
699 p = addr + offset / __BITOPS_WORDSIZE; 695 p = addr + offset / BITS_PER_LONG;
700 if (bit) { 696 if (bit) {
701 set = __flo_word(0, *p & (~0UL << bit)); 697 set = __flo_word(0, *p & (~0UL << bit));
702 if (set >= size) 698 if (set >= size)
703 return size + offset; 699 return size + offset;
704 if (set < __BITOPS_WORDSIZE) 700 if (set < BITS_PER_LONG)
705 return set + offset; 701 return set + offset;
706 offset += __BITOPS_WORDSIZE; 702 offset += BITS_PER_LONG;
707 size -= __BITOPS_WORDSIZE; 703 size -= BITS_PER_LONG;
708 p++; 704 p++;
709 } 705 }
710 return offset + find_first_bit_left(p, size); 706 return offset + find_first_bit_left(p, size);
@@ -736,22 +732,22 @@ static inline int find_next_zero_bit (const unsigned long * addr,
736 732
737 if (offset >= size) 733 if (offset >= size)
738 return size; 734 return size;
739 bit = offset & (__BITOPS_WORDSIZE - 1); 735 bit = offset & (BITS_PER_LONG - 1);
740 offset -= bit; 736 offset -= bit;
741 size -= offset; 737 size -= offset;
742 p = addr + offset / __BITOPS_WORDSIZE; 738 p = addr + offset / BITS_PER_LONG;
743 if (bit) { 739 if (bit) {
744 /* 740 /*
745 * __ffz_word returns __BITOPS_WORDSIZE 741 * __ffz_word returns BITS_PER_LONG
746 * if no zero bit is present in the word. 742 * if no zero bit is present in the word.
747 */ 743 */
748 set = __ffz_word(bit, *p >> bit); 744 set = __ffz_word(bit, *p >> bit);
749 if (set >= size) 745 if (set >= size)
750 return size + offset; 746 return size + offset;
751 if (set < __BITOPS_WORDSIZE) 747 if (set < BITS_PER_LONG)
752 return set + offset; 748 return set + offset;
753 offset += __BITOPS_WORDSIZE; 749 offset += BITS_PER_LONG;
754 size -= __BITOPS_WORDSIZE; 750 size -= BITS_PER_LONG;
755 p++; 751 p++;
756 } 752 }
757 return offset + find_first_zero_bit(p, size); 753 return offset + find_first_zero_bit(p, size);
@@ -773,22 +769,22 @@ static inline int find_next_bit (const unsigned long * addr,
773 769
774 if (offset >= size) 770 if (offset >= size)
775 return size; 771 return size;
776 bit = offset & (__BITOPS_WORDSIZE - 1); 772 bit = offset & (BITS_PER_LONG - 1);
777 offset -= bit; 773 offset -= bit;
778 size -= offset; 774 size -= offset;
779 p = addr + offset / __BITOPS_WORDSIZE; 775 p = addr + offset / BITS_PER_LONG;
780 if (bit) { 776 if (bit) {
781 /* 777 /*
782 * __ffs_word returns __BITOPS_WORDSIZE 778 * __ffs_word returns BITS_PER_LONG
783 * if no one bit is present in the word. 779 * if no one bit is present in the word.
784 */ 780 */
785 set = __ffs_word(0, *p & (~0UL << bit)); 781 set = __ffs_word(0, *p & (~0UL << bit));
786 if (set >= size) 782 if (set >= size)
787 return size + offset; 783 return size + offset;
788 if (set < __BITOPS_WORDSIZE) 784 if (set < BITS_PER_LONG)
789 return set + offset; 785 return set + offset;
790 offset += __BITOPS_WORDSIZE; 786 offset += BITS_PER_LONG;
791 size -= __BITOPS_WORDSIZE; 787 size -= BITS_PER_LONG;
792 p++; 788 p++;
793 } 789 }
794 return offset + find_first_bit(p, size); 790 return offset + find_first_bit(p, size);
@@ -843,22 +839,22 @@ static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
843 839
844 if (offset >= size) 840 if (offset >= size)
845 return size; 841 return size;
846 bit = offset & (__BITOPS_WORDSIZE - 1); 842 bit = offset & (BITS_PER_LONG - 1);
847 offset -= bit; 843 offset -= bit;
848 size -= offset; 844 size -= offset;
849 p = addr + offset / __BITOPS_WORDSIZE; 845 p = addr + offset / BITS_PER_LONG;
850 if (bit) { 846 if (bit) {
851 /* 847 /*
852 * s390 version of ffz returns __BITOPS_WORDSIZE 848 * s390 version of ffz returns BITS_PER_LONG
853 * if no zero bit is present in the word. 849 * if no zero bit is present in the word.
854 */ 850 */
855 set = __ffz_word(bit, __load_ulong_le(p, 0) >> bit); 851 set = __ffz_word(bit, __load_ulong_le(p, 0) >> bit);
856 if (set >= size) 852 if (set >= size)
857 return size + offset; 853 return size + offset;
858 if (set < __BITOPS_WORDSIZE) 854 if (set < BITS_PER_LONG)
859 return set + offset; 855 return set + offset;
860 offset += __BITOPS_WORDSIZE; 856 offset += BITS_PER_LONG;
861 size -= __BITOPS_WORDSIZE; 857 size -= BITS_PER_LONG;
862 p++; 858 p++;
863 } 859 }
864 return offset + find_first_zero_bit_le(p, size); 860 return offset + find_first_zero_bit_le(p, size);
@@ -885,22 +881,22 @@ static inline int find_next_bit_le(void *vaddr, unsigned long size,
885 881
886 if (offset >= size) 882 if (offset >= size)
887 return size; 883 return size;
888 bit = offset & (__BITOPS_WORDSIZE - 1); 884 bit = offset & (BITS_PER_LONG - 1);
889 offset -= bit; 885 offset -= bit;
890 size -= offset; 886 size -= offset;
891 p = addr + offset / __BITOPS_WORDSIZE; 887 p = addr + offset / BITS_PER_LONG;
892 if (bit) { 888 if (bit) {
893 /* 889 /*
894 * s390 version of ffz returns __BITOPS_WORDSIZE 890 * s390 version of ffz returns BITS_PER_LONG
895 * if no zero bit is present in the word. 891 * if no zero bit is present in the word.
896 */ 892 */
897 set = __ffs_word(0, __load_ulong_le(p, 0) & (~0UL << bit)); 893 set = __ffs_word(0, __load_ulong_le(p, 0) & (~0UL << bit));
898 if (set >= size) 894 if (set >= size)
899 return size + offset; 895 return size + offset;
900 if (set < __BITOPS_WORDSIZE) 896 if (set < BITS_PER_LONG)
901 return set + offset; 897 return set + offset;
902 offset += __BITOPS_WORDSIZE; 898 offset += BITS_PER_LONG;
903 size -= __BITOPS_WORDSIZE; 899 size -= BITS_PER_LONG;
904 p++; 900 p++;
905 } 901 }
906 return offset + find_first_bit_le(p, size); 902 return offset + find_first_bit_le(p, size);