about | summary | refs | log | tree | commit | diff | stats
path: root/arch/s390
diff options
context:
space:
mode:
authorMartin Schwidefsky <schwidefsky@de.ibm.com>2009-12-07 06:52:05 -0500
committerMartin Schwidefsky <sky@mschwide.boeblingen.de.ibm.com>2009-12-07 06:51:36 -0500
commit39475179d40996b4efa662e3825735a84d2526d1 (patch)
tree272e70b862bac408de84541d17adde0cd42d51ac /arch/s390
parent369a46325d07061e0f66e16a1f59ef4f526a6464 (diff)
[S390] Improve code generated by atomic operations.
Git commit ea435467500612636f8f4fb639ff6e76b2496e4b changed the definition of atomic_t and atomic64_t for s390 by adding the volatile modifier to the counter field. This has an unfortunate side effect with newer versions of the gcc. The typeof operator now picks up the volatile modifier from the expression. This causes the compiler to think that it has to store the two temporary variables old_val and new_val in the __CS_LOOP for the different atomic operations to the stack, as the variables are now volatile. Both stores are superfluous. The hack to replace typeof(ptr->counter) with int in __CS_LOOP and long long in __CSG_LOOP avoids the two stores. A better solution would be to drop the volatile from the counter field of the atomic_t and atomic64_t definition. But that is a touchy subject ... Cc: Matthew Wilcox <matthew@wil.cx> Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Diffstat (limited to 'arch/s390')
-rw-r--r--arch/s390/include/asm/atomic.h8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index ae7c8f9f94a5..2a113d6a7dfd 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -21,7 +21,7 @@
 #if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
 
 #define __CS_LOOP(ptr, op_val, op_string) ({			\
-	typeof(ptr->counter) old_val, new_val;			\
+	int old_val, new_val;					\
 	asm volatile(						\
 		"	l	%0,%2\n"			\
 		"0:	lr	%1,%0\n"			\
@@ -38,7 +38,7 @@
 #else /* __GNUC__ */
 
 #define __CS_LOOP(ptr, op_val, op_string) ({			\
-	typeof(ptr->counter) old_val, new_val;			\
+	int old_val, new_val;					\
 	asm volatile(						\
 		"	l	%0,0(%3)\n"			\
 		"0:	lr	%1,%0\n"			\
@@ -143,7 +143,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
 
 #define __CSG_LOOP(ptr, op_val, op_string) ({			\
-	typeof(ptr->counter) old_val, new_val;			\
+	long long old_val, new_val;				\
 	asm volatile(						\
 		"	lg	%0,%2\n"			\
 		"0:	lgr	%1,%0\n"			\
@@ -160,7 +160,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #else /* __GNUC__ */
 
 #define __CSG_LOOP(ptr, op_val, op_string) ({			\
-	typeof(ptr->counter) old_val, new_val;			\
+	long long old_val, new_val;				\
 	asm volatile(						\
 		"	lg	%0,0(%3)\n"			\
 		"0:	lgr	%1,%0\n"			\