Diffstat (limited to 'arch/alpha/include/asm/atomic.h')
 arch/alpha/include/asm/atomic.h | 88 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-----------------------
 1 file changed, 65 insertions(+), 23 deletions(-)
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index c2cbe4fc391c..78b03ef39f6f 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -186,17 +186,24 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  */
 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
+	int c, new, old;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldl_l	%[old],%[mem]\n"
+	"	cmpeq	%[old],%[u],%[c]\n"
+	"	addl	%[old],%[a],%[new]\n"
+	"	bne	%[c],2f\n"
+	"	stl_c	%[new],%[mem]\n"
+	"	beq	%[new],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
+	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
+	: "memory");
+	smp_mb();
+	return old;
 }
 
 
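For readers unfamiliar with Alpha's LL/SC primitives: the rewritten __atomic_add_unless replaces the atomic_cmpxchg() retry loop with one ldl_l/stl_c critical section. cmpeq writes a flag into %[c] when the loaded value equals @u, bne then jumps past the stl_c so nothing is stored, and a failed stl_c (store-conditional leaves 0 in its register) branches to an out-of-line retry stub in .subsection 2 so the hot path contains no backward branch. The following userspace model, built on GCC's __atomic builtins, is only a semantic sketch of the same contract: the function and variable names are illustrative, the CAS loop stands in for the LL/SC sequence, and seq_cst ordering stands in for the bracketing smp_mb() calls.

/* add_unless_model: userspace sketch of the __atomic_add_unless contract.
 * Assumes GCC/Clang __atomic builtins; not the kernel implementation. */
#include <stdio.h>

static int add_unless_model(int *v, int a, int u)
{
	int old = __atomic_load_n(v, __ATOMIC_RELAXED);

	do {
		if (old == u)		/* cmpeq + bne: skip the store */
			return old;
		/* on CAS failure 'old' is refreshed with the current value,
		 * like the stl_c failure branch looping back to label 1: */
	} while (!__atomic_compare_exchange_n(v, &old, old + a, 0,
					      __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));
	return old;			/* value observed before the add */
}

int main(void)
{
	int v = 5;
	printf("%d\n", add_unless_model(&v, 1, 5));	/* prints 5, v stays 5 */
	printf("%d\n", add_unless_model(&v, 1, 0));	/* prints 5, v becomes 6 */
	return 0;
}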
@@ -207,21 +214,56 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
+ * Returns true iff @v was not @u.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
-	long c, old;
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == (u)))
-			break;
-		old = atomic64_cmpxchg((v), c, c + (a));
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != (u);
+	long c, tmp;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldq_l	%[tmp],%[mem]\n"
+	"	cmpeq	%[tmp],%[u],%[c]\n"
+	"	addq	%[tmp],%[a],%[tmp]\n"
+	"	bne	%[c],2f\n"
+	"	stq_c	%[tmp],%[mem]\n"
+	"	beq	%[tmp],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [tmp] "=&r"(tmp), [c] "=&r"(c)
+	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
+	: "memory");
+	smp_mb();
+	return !c;
+}
+
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long old, tmp;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldq_l	%[old],%[mem]\n"
+	"	subq	%[old],1,%[tmp]\n"
+	"	ble	%[old],2f\n"
+	"	stq_c	%[tmp],%[mem]\n"
+	"	beq	%[tmp],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [old] "=&r"(old), [tmp] "=&r"(tmp)
+	: [mem] "m"(*v)
+	: "memory");
+	smp_mb();
+	return old - 1;
 }
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
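Two return conventions in this hunk are easy to misread: atomic64_add_unless (and therefore atomic64_inc_not_zero) now returns a boolean, true iff the add was performed, while atomic64_dec_if_positive returns old - 1 even when it stored nothing, so a negative result tells the caller the value was already non-positive. A hypothetical refcount-style caller, to make the conventions concrete (struct obj and free_object() are illustrative, not part of this patch):

/* Hypothetical caller-side sketch; 'struct obj', 'refs', and
 * free_object() are invented for illustration. */
static int try_get(struct obj *o)
{
	/* true iff the count was nonzero and we took a reference */
	return atomic64_inc_not_zero(&o->refs);
}

static void put(struct obj *o)
{
	/* old - 1 == 0 means we just dropped the last reference;
	 * a negative result means refs was already <= 0 and untouched. */
	if (atomic64_dec_if_positive(&o->refs) == 0)
		free_object(o);
}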