author     Arun Sharma <asharma@fb.com>                          2011-07-26 19:09:07 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>        2011-07-26 19:49:47 -0400
commit     f24219b4e90cf70ec4a211b17fbabc725a0ddf3c (patch)
tree       c1c753bd425d61a5094995d9835b23b46383d9b2 /arch/tile
parent     60063497a95e716c9a689af3be2687d261f115b4 (diff)
atomic: move atomic_add_unless to generic code
This is in preparation for more generic atomic primitives based on
__atomic_add_unless.

Signed-off-by: Arun Sharma <asharma@fb.com>
Signed-off-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Reviewed-by: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
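To illustrate the direction of the series (a sketch, not code taken from this patch): once each architecture's __atomic_add_unless() returns the old value, the boolean-flavoured atomic_add_unless() only needs to exist once, in generic code, along these lines:

	/* Illustrative only: generic wrapper layered on __atomic_add_unless(). */
	static inline int atomic_add_unless(atomic_t *v, int a, int u)
	{
		/* non-zero iff the add happened, i.e. @v was not already @u */
		return __atomic_add_unless(v, a, u) != u;
	}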
Diffstat (limited to 'arch/tile')
-rw-r--r--  arch/tile/include/asm/atomic_32.h  10
-rw-r--r--  arch/tile/include/asm/atomic_64.h   4
2 files changed, 7 insertions, 7 deletions
diff --git a/arch/tile/include/asm/atomic_32.h b/arch/tile/include/asm/atomic_32.h
index 246feed4794d..c03349e0ca9f 100644
--- a/arch/tile/include/asm/atomic_32.h
+++ b/arch/tile/include/asm/atomic_32.h
@@ -81,18 +81,18 @@ static inline int atomic_add_return(int i, atomic_t *v)
 }
 
 /**
- * atomic_add_unless - add unless the number is already a given value
+ * __atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	smp_mb(); /* barrier for proper semantics */
-	return _atomic_xchg_add_unless(v, a, u) != u;
+	return _atomic_xchg_add_unless(v, a, u);
 }
 
 /**
@@ -199,7 +199,7 @@ static inline u64 atomic64_add_return(u64 i, atomic64_t *v)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as @v was not already @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
 static inline u64 atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
 {
diff --git a/arch/tile/include/asm/atomic_64.h b/arch/tile/include/asm/atomic_64.h
index a48dda30cbcc..27fe667fddfe 100644
--- a/arch/tile/include/asm/atomic_64.h
+++ b/arch/tile/include/asm/atomic_64.h
@@ -64,7 +64,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
 	return val;
 }
 
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int guess, oldval = v->counter;
 	do {
@@ -73,7 +73,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 		guess = oldval;
 		oldval = atomic_cmpxchg(v, guess, guess + a);
 	} while (guess != oldval);
-	return oldval != u;
+	return oldval;
 }
 
 /* Now the true 64-bit operations. */
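For readers outside the kernel tree, the atomic_64.h routine above is a plain compare-and-swap retry loop. Below is a self-contained userspace sketch of the same pattern, using C11 atomics as a hypothetical stand-in for atomic_t and atomic_cmpxchg(); the early-exit test on @u is assumed to sit in the unchanged lines between the two hunks, and all names here are illustrative:

	#include <stdatomic.h>
	#include <stdio.h>

	/* Userspace sketch of the add-unless loop: add @a to *v unless *v == @u,
	 * returning the old value, as the reworked __atomic_add_unless() does. */
	static int add_unless_sketch(atomic_int *v, int a, int u)
	{
		int guess, oldval = atomic_load(v);

		do {
			if (oldval == u)
				break;		/* already at @u: do not add */
			guess = oldval;
			/* On failure this refreshes oldval with the current value,
			 * mirroring atomic_cmpxchg() returning what it found. */
			atomic_compare_exchange_strong(v, &oldval, guess + a);
		} while (guess != oldval);

		return oldval;			/* old value; callers may test != u */
	}

	int main(void)
	{
		atomic_int v = 5;

		printf("%d\n", add_unless_sketch(&v, 1, 0));	/* 5: add happens, v becomes 6 */
		printf("%d\n", add_unless_sketch(&v, 1, 6));	/* 6: v == u, nothing added */
		printf("%d\n", atomic_load(&v));		/* 6 */
		return 0;
	}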