author		Heiko Carstens <heiko.carstens@de.ibm.com>	2006-06-29 08:56:13 -0400
committer	Martin Schwidefsky <schwidefsky@de.ibm.com>	2006-06-29 08:56:13 -0400
commit		c406abd3a6d0cf5ce8db4db155a729a28fb98c4f (patch)
tree		3b837f54a28a9f9754f8b34fd9cc4847557025e9 /include/asm-s390/bitops.h
parent		0a6047eef1c465c38aacfbdab193161b3f0cd144 (diff)
[S390] cleanup bitops.h.
Encapsulate complete bitops.h with #ifdef __KERNEL__ and remove the now
superfluous ALIGN_CS define and its users.
This patch is needed for compiling klibc.
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
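
For orientation, a minimal sketch of the structure the patch moves the header to (illustrative only, not the real file contents): everything userspace must not see, including the CS-based atomic bitops, ends up behind a single __KERNEL__ guard, so a userspace build such as klibc never reaches the old ALIGN_CS/CONFIG_SMP logic.

	#ifdef __KERNEL__

	#include <linux/compiler.h>

	/* ... all bitops definitions, including the *_bit_cs() atomic ops ... */

	#endif /* __KERNEL__ */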
Diffstat (limited to 'include/asm-s390/bitops.h')
-rw-r--r--	include/asm-s390/bitops.h	42
1 file changed, 3 insertions(+), 39 deletions(-)
diff --git a/include/asm-s390/bitops.h b/include/asm-s390/bitops.h
index 4d2b126ba159..0ddcdba79e4a 100644
--- a/include/asm-s390/bitops.h
+++ b/include/asm-s390/bitops.h
@@ -12,6 +12,9 @@
  * Copyright (C) 1992, Linus Torvalds
  *
  */
+
+#ifdef __KERNEL__
+
 #include <linux/compiler.h>
 
 /*
@@ -50,19 +53,6 @@
  * with operation of the form "set_bit(bitnr, flags)".
  */
 
-/* set ALIGN_CS to 1 if the SMP safe bit operations should
- * align the address to 4 byte boundary. It seems to work
- * without the alignment.
- */
-#ifdef __KERNEL__
-#define ALIGN_CS 0
-#else
-#define ALIGN_CS 1
-#ifndef CONFIG_SMP
-#error "bitops won't work without CONFIG_SMP"
-#endif
-#endif
-
 /* bitmap tables from arch/S390/kernel/bitmap.S */
 extern const char _oi_bitmap[];
 extern const char _ni_bitmap[];
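
(For context on the block removed above: when ALIGN_CS was 1, every CS-based operation first folded the low address bits into the bit number and rounded the address down, roughly as sketched below. __BITOPS_ALIGN is assumed here to be the low-bits mask for 8-byte alignment, i.e. 7 on a 64-bit build, matching the removed "align address to 8" comment.)

	/* illustrative sketch of what the removed "#if ALIGN_CS == 1" blocks did */
	nr += (addr & __BITOPS_ALIGN) << 3;	/* stray low bytes become extra bits */
	addr ^= addr & __BITOPS_ALIGN;		/* round the address down to an 8-byte boundary */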
@@ -121,10 +111,6 @@ static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make OR mask */
@@ -141,10 +127,6 @@ static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make AND mask */
@@ -161,10 +143,6 @@ static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make XOR mask */
@@ -182,10 +160,6 @@ test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make OR/test mask */
@@ -205,10 +179,6 @@ test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make AND/test mask */
@@ -228,10 +198,6 @@ test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long addr, old, new, mask;
 
 	addr = (unsigned long) ptr;
-#if ALIGN_CS == 1
-	nr += (addr & __BITOPS_ALIGN) << 3;	/* add alignment to bit number */
-	addr ^= addr & __BITOPS_ALIGN;		/* align address to 8 */
-#endif
 	/* calculate address for CS */
 	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
 	/* make XOR/test mask */
@@ -834,8 +800,6 @@ static inline int sched_find_first_bit(unsigned long *b)
 
 #include <asm-generic/bitops/hweight.h>
 
-#ifdef __KERNEL__
-
 /*
  * ATTENTION: intel byte ordering convention for ext2 and minix !!
  * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
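
As background for the context lines kept above: set_bit_cs() and its relatives locate the word containing bit nr with "addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3", which rounds nr down to a word boundary and converts bits to bytes. The standalone snippet below (not kernel code) walks through that arithmetic; the WORDSIZE value and the mask expression are assumptions for a 64-bit build, suggested by the "make OR mask" comment.

	#include <stdio.h>

	#define WORDSIZE 64UL	/* stand-in for __BITOPS_WORDSIZE on 64-bit s390 */

	int main(void)
	{
		unsigned long nr = 200;	/* example bit number */
		/* byte offset of the unsigned long that contains bit nr */
		unsigned long byte_off = (nr ^ (nr & (WORDSIZE - 1))) >> 3;
		/* OR mask for the bit within that word (assumed form) */
		unsigned long mask = 1UL << (nr & (WORDSIZE - 1));

		/* bit 200 sits in the fourth word: byte offset 24, bit 8 -> mask 0x100 */
		printf("byte offset %lu, mask %#lx\n", byte_off, mask);
		return 0;
	}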