author     Nick Piggin <npiggin@suse.de>         2006-02-20 04:41:40 -0500
committer  Paul Mackerras <paulus@samba.org>     2006-02-23 22:06:02 -0500
commit     f055affb89f587a03f3411c3fd49ef31295c3d48
tree       1c40a29191eba8267390fe397f8108ffa8d9ddb5 /include/asm-powerpc/atomic.h
parent     4f629d7db32decbadaab2abfa4d021fee94990ef
[PATCH] powerpc: native atomic_add_unless
Implement atomic_add_unless natively instead of building it from cmpxchg.
The improved register allocation idea is from Joel Schopp.
Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Paul Mackerras <paulus@samba.org>
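
The usual consumer of this primitive is atomic_inc_not_zero(), which takes a
reference only if the refcount has not already dropped to zero. Below is a
minimal kernel-style sketch of that lookup-side pattern; struct obj and
obj_get() are hypothetical names for illustration, not part of this patch.

	#include <asm/atomic.h>

	struct obj {
		atomic_t refcount;
		/* ... payload ... */
	};

	/* Take a reference only if the object is still live. */
	static struct obj *obj_get(struct obj *o)
	{
		/* atomic_inc_not_zero(v) is atomic_add_unless(v, 1, 0) */
		if (atomic_inc_not_zero(&o->refcount))
			return o;	/* got a reference */
		return NULL;		/* refcount already zero: object is dying */
	}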
Diffstat (limited to 'include/asm-powerpc/atomic.h')
 include/asm-powerpc/atomic.h | 38
 1 file changed, 24 insertions(+), 14 deletions(-)
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index 147a38dcc766..bb3c0ab7e667 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -8,6 +8,7 @@
 typedef struct { volatile int counter; } atomic_t;
 
 #ifdef __KERNEL__
+#include <linux/compiler.h>
 #include <asm/synch.h>
 #include <asm/asm-compat.h>
 
@@ -176,20 +177,29 @@ static __inline__ int atomic_dec_return(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	int c, old;					\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int t;
+
+	__asm__ __volatile__ (
+	LWSYNC_ON_SMP
+"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
+	cmpw	0,%0,%3 \n\
+	beq-	2f \n\
+	add	%0,%2,%0 \n"
+	PPC405_ERR77(0,%2)
+"	stwcx.	%0,0,%1 \n\
+	bne-	1b \n"
+	ISYNC_ON_SMP
+"	subf	%0,%2,%0 \n\
+2:"
+	: "=&r" (t)
+	: "r" (&v->counter), "r" (a), "r" (u)
+	: "cc", "memory");
+
+	return t != u;
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
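
For readers without a powerpc toolchain at hand, the observable semantics of
the new assembly can be modelled in portable C. The sketch below uses the
GCC/Clang __atomic builtins to mirror the cmpxchg loop this patch removes;
model_add_unless() is a hypothetical name and the memory orderings are an
assumption for illustration, not a statement about the kernel's barriers.

	#include <stdio.h>

	/* Add `a` to *v unless *v == u; return non-zero iff the add happened. */
	static int model_add_unless(int *v, int a, int u)
	{
		int c = __atomic_load_n(v, __ATOMIC_RELAXED);

		for (;;) {
			if (c == u)
				break;	/* matches the beq- 2f early exit */
			/* On failure the builtin reloads the current value into c,
			 * just as a failed stwcx. sends the asm back to the lwarx. */
			if (__atomic_compare_exchange_n(v, &c, c + a, 0,
							__ATOMIC_SEQ_CST,
							__ATOMIC_RELAXED))
				break;
		}
		return c != u;	/* compares the OLD value, like subf then t != u */
	}

	int main(void)
	{
		int v = 1;

		printf("%d\n", model_add_unless(&v, 1, 0));	/* 1: add happened */
		printf("%d\n", v);				/* 2 */
		v = 0;
		printf("%d\n", model_add_unless(&v, 1, 0));	/* 0: v stays 0 */
		return 0;
	}

Note the role of the trailing subf in the patch: after a successful stwcx.,
register %0 holds old+a, so subtracting a recovers the old value and the
function returns whether that old value differed from u, exactly like the
removed macro's `c != (u)`. Doing the loop natively with lwarx/stwcx. also
avoids nesting one reservation loop inside another, since atomic_cmpxchg()
on powerpc is itself an lwarx/stwcx. loop.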