diff options
author    Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>    2007-05-08 03:34:34 -0400
committer Linus Torvalds <torvalds@woody.linux-foundation.org>    2007-05-08 14:15:19 -0400
commit    2549c8589cc0550f0714d32720877d7af133ae40 (patch)
tree      e481403722dab01dc61e724c5c1669af60fc4eda /include/asm-sparc64
parent    f46e477ed94f6407982690ef53dab7898834268f (diff)
atomic.h: add atomic64 cmpxchg, xchg and add_unless to sparc64
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Cc: "David S. Miller" <davem@davemloft.net>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-sparc64')
-rw-r--r--  include/asm-sparc64/atomic.h | 24 ++++++++++++++++++++++--
1 file changed, 22 insertions(+), 2 deletions(-)
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 2f0bec26a695..c3feb3af2cfe 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -70,12 +70,12 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
 #define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
 
-#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 #define atomic_add_unless(v, a, u) \
 ({ \
-	int c, old; \
+	__typeof__((v)->counter) c, old; \
 	c = atomic_read(v); \
 	for (;;) { \
 		if (unlikely(c == (u))) \
@@ -89,6 +89,26 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic64_add_unless(v, a, u) \
+({ \
+	__typeof__((v)->counter) c, old; \
+	c = atomic64_read(v); \
+	for (;;) { \
+		if (unlikely(c == (u))) \
+			break; \
+		old = atomic64_cmpxchg((v), c, c + (a)); \
+		if (likely(old == c)) \
+			break; \
+		c = old; \
+	} \
+	likely(c != (u)); \
+})
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 /* Atomic operations are already serializing */
 #ifdef CONFIG_SMP
 #define smp_mb__before_atomic_dec() membar_storeload_loadload();