Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
 arch/powerpc/include/asm/atomic.h | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index b2bcbee622ea..952e161fbb89 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -181,21 +181,21 @@ static __inline__ int atomic_dec_return(atomic_t *v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int t;
 
 	__asm__ __volatile__ (
 	PPC_RELEASE_BARRIER
-"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
+"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
 	cmpw	0,%0,%3 \n\
 	beq-	2f \n\
 	add	%0,%2,%0 \n"
@@ -209,7 +209,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 	: "r" (&v->counter), "r" (a), "r" (u)
 	: "cc", "memory");
 
-	return t != u;
+	return t;
 }
 
 
@@ -443,7 +443,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
@@ -451,7 +451,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 	__asm__ __volatile__ (
 	PPC_RELEASE_BARRIER
-"1:	ldarx	%0,0,%1		# atomic_add_unless\n\
+"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
 	cmpd	0,%0,%3 \n\
 	beq-	2f \n\
 	add	%0,%2,%0 \n"
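The 64-bit routine keeps its name and, as far as this diff shows, its boolean return; only its kernel-doc and the comment inside the ll/sc loop are touched. For context, a typical consumer of the add-unless primitive is an inc-not-zero helper; a sketch of the usual form (hypothetical here, not part of this diff):

/* Hypothetical helper, not from this patch: increment @v unless it is
 * zero, returning non-zero if the increment happened.
 */
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)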