Diffstat (limited to 'include/asm-powerpc/atomic.h')
 include/asm-powerpc/atomic.h | 48 ++++++++++++++++++------------------
 1 file changed, 25 insertions(+), 23 deletions(-)
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ec4b14468959..147a38dcc766 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -36,7 +36,7 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_add_return\n\
 	add	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
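Every hunk in this diff makes the same substitution: the eieio barrier before the load-reserve/store-conditional sequence becomes lwsync. eieio only orders stores to cacheable memory, so prior loads were not ordered before the atomic update; lwsync orders every pair except store-to-load and is cheaper than a full sync. For context, atomic_add_return() after this hunk reads roughly as follows; the stwcx. retry tail and the trailing ISYNC_ON_SMP fall outside the hunk, so they are reconstructed from the surrounding file rather than shown by this diff:

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP			/* release-side barrier before the update */
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)		/* workaround for PPC405 erratum #77 */
"	stwcx.	%0,0,%2 \n\
	bne-	1b"			/* retry if the reservation was lost */
	ISYNC_ON_SMP			/* acquire-side barrier after the update */
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}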
@@ -72,7 +72,7 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%2		# atomic_sub_return\n\
 	subf	%0,%1,%0\n"
 	PPC405_ERR77(0,%2)
@@ -106,7 +106,7 @@ static __inline__ int atomic_inc_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_inc_return\n\
 	addic	%0,%0,1\n"
 	PPC405_ERR77(0,%1)
@@ -150,7 +150,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_return\n\
 	addic	%0,%0,-1\n"
 	PPC405_ERR77(0,%1)
@@ -165,6 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
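The hunk above also picks up the new atomic_xchg() wrapper alongside the existing atomic_cmpxchg(). A minimal usage sketch (the state variable and the values are illustrative, not from this file):

	atomic_t state = ATOMIC_INIT(0);

	int prev = atomic_xchg(&state, 2);	/* unconditionally store 2, return the old value (0) */
	int seen = atomic_cmpxchg(&state, 2, 3);	/* store 3 only if the current value is 2 */
	/* seen == 2 means the compare-and-exchange succeeded and state is now 3. */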
@@ -175,19 +176,19 @@ static __inline__ int atomic_dec_return(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
 #define atomic_add_unless(v, a, u)			\
 ({							\
 	int c, old;					\
 	c = atomic_read(v);				\
 	for (;;) {					\
 		if (unlikely(c == (u)))			\
 			break;				\
 		old = atomic_cmpxchg((v), c, c + (a));	\
 		if (likely(old == c))			\
 			break;				\
 		c = old;				\
 	}						\
 	c != (u);					\
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
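atomic_add_unless() is a standard compare-and-swap retry loop over atomic_cmpxchg(), and atomic_inc_not_zero() specializes it into the usual refcount-revival guard. A hedged sketch of the common calling pattern (struct object and get_object are hypothetical names, not from this file):

	struct object {
		atomic_t refcount;
	};

	/* Take a reference only if the count has not already dropped to zero. */
	static struct object *get_object(struct object *obj)
	{
		if (!atomic_inc_not_zero(&obj->refcount))
			return NULL;	/* lost the race with the final put: treat obj as freed */
		return obj;
	}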
@@ -203,7 +204,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
 	int t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n"
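atomic_dec_if_positive() receives the same barrier change. It returns the decremented value but only stores it when the result stays non-negative, which makes it a natural fit for counted resources. An illustrative consumer (try_take_token and tokens are hypothetical names):

	/* Returns non-zero if a token was taken, zero if the pool was empty. */
	static int try_take_token(atomic_t *tokens)
	{
		return atomic_dec_if_positive(tokens) >= 0;
	}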
@@ -252,7 +253,7 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_add_return\n\
 	add	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -286,7 +287,7 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
 	subf	%0,%1,%0\n\
 	stdcx.	%0,0,%2 \n\
@@ -318,7 +319,7 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
 	addic	%0,%0,1\n\
 	stdcx.	%0,0,%1 \n\
@@ -360,7 +361,7 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
 	addic	%0,%0,-1\n\
 	stdcx.	%0,0,%1\n\
@@ -385,7 +386,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	long t;
 
 	__asm__ __volatile__(
-	EIEIO_ON_SMP
+	LWSYNC_ON_SMP
 "1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
 	addic.	%0,%0,-1\n\
 	blt-	2f\n\
@@ -402,5 +403,6 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 
 #endif /* __powerpc64__ */
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
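For reference, the barrier helpers this diff swaps live in <asm-powerpc/synch.h>, not in this header. A rough sketch of their shape in this era of the tree, reconstructed from memory rather than from this diff, so treat it as approximate:

	#ifdef CONFIG_SMP
	#define EIEIO_ON_SMP	"eieio\n"	/* orders cacheable stores only */
	#define LWSYNC_ON_SMP	"lwsync\n"	/* orders all pairs except store->load */
	#define ISYNC_ON_SMP	"\n\tisync"	/* acquire side: squashes speculation after the ll/sc */
	#else
	#define EIEIO_ON_SMP
	#define LWSYNC_ON_SMP
	#define ISYNC_ON_SMP
	#endif

On uniprocessor builds the macros expand to nothing, so the atomics cost no barriers there; on SMP the lwsync/isync pair gives each value-returning atomic release/acquire-style ordering at lower cost than a full sync.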