author		Arun Sharma <asharma@fb.com>	2011-07-26 19:09:07 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2011-07-26 19:49:47 -0400
commit		f24219b4e90cf70ec4a211b17fbabc725a0ddf3c (patch)
tree		c1c753bd425d61a5094995d9835b23b46383d9b2 /arch/avr32
parent		60063497a95e716c9a689af3be2687d261f115b4 (diff)
atomic: move atomic_add_unless to generic code
This is in preparation for more generic atomic primitives based on
__atomic_add_unless.

Signed-off-by: Arun Sharma <asharma@fb.com>
Signed-off-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Reviewed-by: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
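Only the arch/avr32 half of the commit is shown below; the generic side (outside this path-limited diffstat) rebuilds the old atomic_add_unless() semantics on top of each architecture's __atomic_add_unless(). A minimal sketch of that wrapper, assuming it lands in include/linux/atomic.h as the series intends:

/*
 * Sketch of the generic wrapper this change prepares for (assumed to
 * live in include/linux/atomic.h): __atomic_add_unless() returns the
 * old value of @v, so the boolean "did we add?" variant is a compare.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

Returning the old value from the arch-level primitive is the design point: a single asm implementation per architecture can then back atomic_add_unless() and helpers built on it (for example atomic_inc_not_zero()) without further per-arch code.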
Diffstat (limited to 'arch/avr32')
-rw-r--r--	arch/avr32/include/asm/atomic.h	57
1 file changed, 25 insertions(+), 32 deletions(-)
diff --git a/arch/avr32/include/asm/atomic.h b/arch/avr32/include/asm/atomic.h
index f229c3849f03..dc6c3a41a2d7 100644
--- a/arch/avr32/include/asm/atomic.h
+++ b/arch/avr32/include/asm/atomic.h
@@ -78,70 +78,63 @@ static inline int atomic_add_return(int i, atomic_t *v)
 /*
  * atomic_sub_unless - sub unless the number is a given value
  * @v: pointer of type atomic_t
- * @a: the amount to add to v...
+ * @a: the amount to subtract from v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function subtracts a
- * from v, and returns non zero. If v is equal to u then it returns
- * zero. This is done as an atomic operation.
+ * Atomically subtract @a from @v, so long as it was not @u.
+ * Returns the old value of @v.
  */
-static inline int atomic_sub_unless(atomic_t *v, int a, int u)
+static inline void atomic_sub_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result = 0;
+	int tmp;
 
 	asm volatile(
 		"/* atomic_sub_unless */\n"
 		"1:	ssrf	5\n"
-		"	ld.w	%0, %3\n"
-		"	cp.w	%0, %5\n"
+		"	ld.w	%0, %2\n"
+		"	cp.w	%0, %4\n"
 		"	breq	1f\n"
-		"	sub	%0, %4\n"
-		"	stcond	%2, %0\n"
+		"	sub	%0, %3\n"
+		"	stcond	%1, %0\n"
 		"	brne	1b\n"
-		"	mov	%1, 1\n"
 		"1:"
-		: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-		: "m"(v->counter), "rKs21"(a), "rKs21"(u), "1"(result)
+		: "=&r"(tmp), "=o"(v->counter)
+		: "m"(v->counter), "rKs21"(a), "rKs21"(u)
 		: "cc", "memory");
-
-	return result;
 }
 
 /*
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function adds a to v,
- * and returns non zero. If v is equal to u then it returns zero. This
- * is done as an atomic operation.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns the old value of @v.
  */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result;
+	int tmp, old = atomic_read(v);
 
 	if (__builtin_constant_p(a) && (a >= -1048575) && (a <= 1048576))
-		result = atomic_sub_unless(v, -a, u);
+		atomic_sub_unless(v, -a, u);
 	else {
-		result = 0;
 		asm volatile(
-			"/* atomic_add_unless */\n"
+			"/* __atomic_add_unless */\n"
 			"1:	ssrf	5\n"
-			"	ld.w	%0, %3\n"
-			"	cp.w	%0, %5\n"
+			"	ld.w	%0, %2\n"
+			"	cp.w	%0, %4\n"
 			"	breq	1f\n"
-			"	add	%0, %4\n"
-			"	stcond	%2, %0\n"
+			"	add	%0, %3\n"
+			"	stcond	%1, %0\n"
 			"	brne	1b\n"
-			"	mov	%1, 1\n"
 			"1:"
-			: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-			: "m"(v->counter), "r"(a), "ir"(u), "1"(result)
+			: "=&r"(tmp), "=o"(v->counter)
+			: "m"(v->counter), "r"(a), "ir"(u)
 			: "cc", "memory");
 	}
 
-	return result;
+	return old;
 }
 
 /*