summaryrefslogtreecommitdiffstats
path: root/arch/alpha
diff options
context:
space:
mode:
Diffstat (limited to 'arch/alpha')
-rw-r--r--  arch/alpha/include/asm/atomic.h | 10
 1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 88b7491490bc..3d6704910268 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -176,15 +176,15 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
@@ -196,7 +196,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 			break;
 		c = old;
 	}
-	return c != (u);
+	return c;
 }
 
 
@@ -207,7 +207,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {