path: root/include/asm-powerpc/atomic.h
author		Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>	2007-01-25 11:15:52 -0500
committer	Paul Mackerras <paulus@samba.org>	2007-02-15 22:00:20 -0500
commit		41806ef4bfacbe5c4e520d8da2fcedcda335c922 (patch)
tree		1443a7e100c5f5a4715d604b8a9de8715ca15afb	/include/asm-powerpc/atomic.h
parent		8c0238b3f1a7849b89707ac6b7b0c84e1ed2df70 (diff)
[POWERPC] atomic.h: Add atomic64 cmpxchg, xchg and add_unless to powerpc
atomic.h : Add atomic64 cmpxchg, xchg and add_unless to powerpc

Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'include/asm-powerpc/atomic.h')
-rw-r--r--	include/asm-powerpc/atomic.h | 40
1 file changed, 39 insertions(+), 1 deletion(-)
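Before the diff itself, note why the one-line change to atomic_cmpxchg() replaces the hard-coded (int) cast with __typeof__((v)->counter): the identical macro shape is reused below for atomic64_cmpxchg(), whose counter is a long, and an (int) cast there would truncate the returned old value on 64-bit. A minimal stand-alone sketch of the cast-width issue (hypothetical user-space code on an LP64 machine, using GCC's __sync builtin in place of the kernel's cmpxchg()):

#include <stdio.h>

/* Hypothetical illustration of the macro pattern from the patch: the
 * result of the compare-and-exchange is cast back to the type of the
 * counter field, so one macro shape serves both the 32-bit atomic_t
 * and the 64-bit atomic64_t cases.
 */
typedef struct { long counter; } atomic64_sketch_t;

#define sketch_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))__sync_val_compare_and_swap(&(v)->counter, (o), (n)))

int main(void)
{
	atomic64_sketch_t v = { .counter = 0x100000000L };

	/* The exchange fails (current value is not 0), so the full
	 * 64-bit old value comes back. With a hard-coded (int) cast,
	 * 0x100000000 would truncate to 0 and the call would falsely
	 * appear to have succeeded. */
	long old = sketch_cmpxchg(&v, 0L, 1L);
	printf("old=%#lx counter=%#lx\n", old, v.counter);
	return 0;
}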
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index f038e33e6d48..2ce4b6b7b348 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -165,7 +165,8 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	return t;
 }
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
@@ -413,6 +414,43 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return t;
 }
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long t;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
+	cmpd	0,%0,%3 \n\
+	beq-	2f \n\
+	add	%0,%2,%0 \n"
+"	stdcx.	%0,0,%1 \n\
+	bne-	1b \n"
+	ISYNC_ON_SMP
+"	subf	%0,%2,%0 \n\
+2:"
+	: "=&r" (t)
+	: "r" (&v->counter), "r" (a), "r" (u)
+	: "cc", "memory");
+
+	return t != u;
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #endif /* __powerpc64__ */
 
 #include <asm-generic/atomic.h>
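For readers who don't speak PowerPC assembly: the ldarx/stdcx. pair is a load-linked/store-conditional loop, with LWSYNC_ON_SMP and ISYNC_ON_SMP providing the acquire/release barriers on SMP. A rough C-level equivalent of atomic64_add_unless(), expressed in terms of the atomic64_cmpxchg() this same patch adds (a sketch of the semantics only, assuming the definitions in this header; the real asm version does the whole thing in one reservation loop rather than layering it on cmpxchg):

/* Semantic sketch: keep trying to replace the observed value c with
 * c + a, giving up as soon as the observed value equals u. Returns
 * non-zero if the add was performed, zero if v was found equal to u.
 */
static __inline__ int atomic64_add_unless_sketch(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	while (c != u && (old = atomic64_cmpxchg(v, c, c + a)) != c)
		c = old;	/* lost a race: retry with the fresh value */
	return c != u;
}

atomic64_inc_not_zero(v) then falls out as atomic64_add_unless(v, 1, 0): increment the counter only if it has not already dropped to zero, the usual pattern for safely taking a reference on an object whose last reference may be concurrently released.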