aboutsummaryrefslogtreecommitdiffstats
path: root/arch/s390/include
diff options
context:
space:
mode:
authorHeiko Carstens <heiko.carstens@de.ibm.com>2013-09-17 03:48:44 -0400
committerMartin Schwidefsky <schwidefsky@de.ibm.com>2013-10-24 11:16:53 -0400
commit4ae803253e4649803dc6e972bb5e59fc107cf474 (patch)
tree62aa7337c3b93b3e548557ac0202d072a598f460 /arch/s390/include
parent370b0b5f7744d62a9ba6c25fd6b0dcba84419443 (diff)
s390/bitops: optimize set_bit() for constant values
Since zEC12 we have the interlocked-access facility 2 which allows to use the instructions ni/oi/xi to update a single byte in storage with compare-and-swap semantics. So change set_bit(), clear_bit() and change_bit() to generate such code instead of a compare-and-swap loop (or using the load-and-* instruction family), if possible. This reduces the text segment by yet another 8KB (defconfig). Alternatively the long displacement variants niy/oiy/xiy could have been used, but the extended displacement field is usually not needed and therefore would only increase the size of the text segment again. Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com> Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Diffstat (limited to 'arch/s390/include')
-rw-r--r--arch/s390/include/asm/bitops.h36
1 file changed, 36 insertions, 0 deletions
diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index 6038349c8410..16df62dde094 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -151,6 +151,18 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
+#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
+	if (__builtin_constant_p(nr)) {
+		unsigned char *caddr = __bitops_byte(nr, ptr);
+
+		asm volatile(
+			"oi	%0,%b1\n"
+			: "+Q" (*caddr)
+			: "i" (1 << (nr & 7))
+			: "cc");
+		return;
+	}
+#endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
 	__BITOPS_LOOP(addr, mask, __BITOPS_OR);
 }
@@ -160,6 +172,18 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
+#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
+	if (__builtin_constant_p(nr)) {
+		unsigned char *caddr = __bitops_byte(nr, ptr);
+
+		asm volatile(
+			"ni	%0,%b1\n"
+			: "+Q" (*caddr)
+			: "i" (~(1 << (nr & 7)))
+			: "cc");
+		return;
+	}
+#endif
 	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
 	__BITOPS_LOOP(addr, mask, __BITOPS_AND);
 }
@@ -169,6 +193,18 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
 	unsigned long *addr = __bitops_word(nr, ptr);
 	unsigned long mask;
 
+#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
+	if (__builtin_constant_p(nr)) {
+		unsigned char *caddr = __bitops_byte(nr, ptr);
+
+		asm volatile(
+			"xi	%0,%b1\n"
+			: "+Q" (*caddr)
+			: "i" (1 << (nr & 7))
+			: "cc");
+		return;
+	}
+#endif
 	mask = 1UL << (nr & (BITS_PER_LONG - 1));
 	__BITOPS_LOOP(addr, mask, __BITOPS_XOR);
 }