Diffstat (limited to 'arch/ia64/include/asm/atomic.h')
-rw-r--r--  arch/ia64/include/asm/atomic.h | 69
1 file changed, 23 insertions, 46 deletions
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 762eeb0fcc1d..2524fb60fbc2 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -66,38 +66,35 @@ ATOMIC_OPS(add, +)
 ATOMIC_OPS(sub, -)
 
 #ifdef __OPTIMIZE__
-#define __ia64_atomic_const(i)	__builtin_constant_p(i) ?		\
+#define __ia64_atomic_const(i)						\
+	static const int __ia64_atomic_p = __builtin_constant_p(i) ?	\
 		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
-		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0
+		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0;\
+	__ia64_atomic_p
+#else
+#define __ia64_atomic_const(i)	0
+#endif
 
-#define atomic_add_return(i, v)						\
+#define atomic_add_return(i,v)						\
 ({									\
-	int __i = (i);							\
-	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
-	__ia64_atomic_p ? ia64_fetch_and_add(__i, &(v)->counter) :	\
-				ia64_atomic_add(__i, v);		\
+	int __ia64_aar_i = (i);						\
+	__ia64_atomic_const(i)						\
+		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
+		: ia64_atomic_add(__ia64_aar_i, v);			\
 })
 
-#define atomic_sub_return(i, v)						\
+#define atomic_sub_return(i,v)						\
 ({									\
-	int __i = (i);							\
-	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
-	__ia64_atomic_p ? ia64_fetch_and_add(-__i, &(v)->counter) :	\
-				ia64_atomic_sub(__i, v);		\
+	int __ia64_asr_i = (i);						\
+	__ia64_atomic_const(i)						\
+		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
+		: ia64_atomic_sub(__ia64_asr_i, v);			\
 })
-#else
-#define atomic_add_return(i, v)	ia64_atomic_add(i, v)
-#define atomic_sub_return(i, v)	ia64_atomic_sub(i, v)
-#endif
 
 #define atomic_fetch_add(i,v)						\
 ({									\
 	int __ia64_aar_i = (i);						\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
-	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
-	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
-	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)		\
 	: ia64_atomic_fetch_add(__ia64_aar_i, v);			\
 })
@@ -105,11 +102,7 @@ ATOMIC_OPS(sub, -)
 #define atomic_fetch_sub(i,v)						\
 ({									\
 	int __ia64_asr_i = (i);						\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
-	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
-	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
-	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)		\
 	: ia64_atomic_fetch_sub(__ia64_asr_i, v);			\
 })
@@ -170,11 +163,7 @@ ATOMIC64_OPS(sub, -)
 #define atomic64_add_return(i,v)					\
 ({									\
 	long __ia64_aar_i = (i);					\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
-	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
-	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
-	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)		\
 	: ia64_atomic64_add(__ia64_aar_i, v);				\
 })
@@ -182,11 +171,7 @@ ATOMIC64_OPS(sub, -)
 #define atomic64_sub_return(i,v)					\
 ({									\
 	long __ia64_asr_i = (i);					\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
-	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
-	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
-	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)		\
 	: ia64_atomic64_sub(__ia64_asr_i, v);				\
 })
@@ -194,11 +179,7 @@ ATOMIC64_OPS(sub, -)
 #define atomic64_fetch_add(i,v)						\
 ({									\
 	long __ia64_aar_i = (i);					\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
-	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
-	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
-	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)		\
 	: ia64_atomic64_fetch_add(__ia64_aar_i, v);			\
 })
@@ -206,11 +187,7 @@ ATOMIC64_OPS(sub, -)
 #define atomic64_fetch_sub(i,v)						\
 ({									\
 	long __ia64_asr_i = (i);					\
-	(__builtin_constant_p(i)					\
-	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
-	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
-	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
-	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
+	__ia64_atomic_const(i)						\
 	? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)		\
 	: ia64_atomic64_fetch_sub(__ia64_asr_i, v);			\
 })
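
Note on the change: the diff replaces the per-macro, open-coded __builtin_constant_p() checks with a single __ia64_atomic_const() helper. That helper expands to a static const int declaration followed by the variable's name, so it only works inside a GNU C statement expression ({ ... }); the values it accepts mirror the immediates the ia64 fetchadd instruction can encode (+/-1, 4, 8, 16). A minimal sketch of that pattern is shown below, assuming GCC; fast_path(), slow_path() and demo_add_return() are hypothetical stand-ins for ia64_fetch_and_add()/ia64_atomic_add(), not kernel code (the kernel also guards the helper with #ifdef __OPTIMIZE__ because its callers may pass non-constant arguments).

/*
 * Sketch of the reworked __ia64_atomic_const(): it expands to a
 * declaration of __ia64_atomic_p plus the variable name itself, so it
 * is only usable inside a statement expression ({ ... }).
 */
#include <stdio.h>

#define __ia64_atomic_const(i)						\
	static const int __ia64_atomic_p = __builtin_constant_p(i) ?	\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0;\
	__ia64_atomic_p

/* Hypothetical stand-ins for ia64_fetch_and_add()/ia64_atomic_add(). */
static int fast_path(int i, int *p) { *p += i; printf("fast "); return *p; }
static int slow_path(int i, int *p) { *p += i; printf("slow "); return *p; }

#define demo_add_return(i, p)						\
({									\
	int __i = (i);							\
	__ia64_atomic_const(i)						\
		? fast_path(__i, p)					\
		: slow_path(__i, p);					\
})

int main(void)
{
	int counter = 0;

	printf("%d\n", demo_add_return(4, &counter));	/* 4 is encodable: fast path */
	printf("%d\n", demo_add_return(3, &counter));	/* 3 is not: slow path */
	return 0;
}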