path: root/include/asm-i386/atomic.h
Diffstat (limited to 'include/asm-i386/atomic.h')
-rw-r--r--  include/asm-i386/atomic.h  47
1 file changed, 28 insertions(+), 19 deletions(-)
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index 4dd272331361..437aac801711 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -3,6 +3,7 @@
 
 #include <linux/compiler.h>
 #include <asm/processor.h>
+#include <asm/cmpxchg.h>
 
 /*
  * Atomic operations that C can't guarantee us. Useful for
@@ -51,7 +52,7 @@ static __inline__ void atomic_add(int i, atomic_t *v)
 }
 
 /**
- * atomic_sub - subtract the atomic variable
+ * atomic_sub - subtract integer from atomic variable
  * @i: integer value to subtract
  * @v: pointer of type atomic_t
  *
@@ -170,7 +171,7 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 }
 
 /**
- * atomic_add_return - add and return
+ * atomic_add_return - add integer and return
  * @v: pointer of type atomic_t
  * @i: integer value to add
  *
@@ -181,7 +182,7 @@ static __inline__ int atomic_add_return(int i, atomic_t *v)
 	int __i;
 #ifdef CONFIG_M386
 	unsigned long flags;
-	if(unlikely(boot_cpu_data.x86==3))
+	if(unlikely(boot_cpu_data.x86 <= 3))
 		goto no_xadd;
 #endif
 	/* Modern 486+ processor */
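
The guard above widens from family == 3 to family <= 3: boot_cpu_data.x86 holds the CPU family, and anything below a 486 lacks the XADD instruction, so those parts must take the no_xadd fallback, which does the update under local_irq_save() instead. On 486 and later the whole function boils down to one lock; xaddl, which atomically adds a register into memory and leaves the old memory value in the register. A minimal userspace sketch of that semantics, assuming GCC on an x86 target (add_return_sketch is a made-up name for illustration, not the kernel function):

#include <stdio.h>

/* Sketch only: mirrors what the 486+ path of atomic_add_return()
 * computes.  lock; xaddl adds the register into memory atomically
 * and leaves the *old* memory value in the register, so the
 * post-add value is old + inc. */
static inline int add_return_sketch(int inc, int *p)
{
	int old = inc;	/* xadd overwrites this with the old *p */
	__asm__ __volatile__("lock; xaddl %0, %1"
			     : "+r" (old), "+m" (*p)
			     : : "memory");
	return old + inc;
}

int main(void)
{
	int counter = 40;
	printf("%d\n", add_return_sketch(2, &counter));	/* prints 42 */
	return 0;
}
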
@@ -202,13 +203,20 @@ no_xadd: /* Legacy 386 processor */
 #endif
 }
 
+/**
+ * atomic_sub_return - subtract integer and return
+ * @v: pointer of type atomic_t
+ * @i: integer value to subtract
+ *
+ * Atomically subtracts @i from @v and returns @v - @i
+ */
 static __inline__ int atomic_sub_return(int i, atomic_t *v)
 {
 	return atomic_add_return(-i,v);
 }
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
 /**
  * atomic_add_unless - add unless the number is already a given value
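
Besides the new kernel-doc for the atomic_sub_return() one-liner (which simply delegates to atomic_add_return(-i,v)), this hunk makes two small hardening changes: the (int) cast on cmpxchg() is dropped (cmpxchg() already yields the type of the pointed-to counter, an int), and both the old and new arguments now get their own parentheses in the expansion. The latter is ordinary macro hygiene: an argument that is itself an expression must not be able to reassociate with operators inside the expansion. A hypothetical two-macro illustration of the general rule (BAD_DOUBLE and GOOD_DOUBLE are made-up names, not kernel macros):

/* Made-up example of why macro arguments are parenthesized. */
#define BAD_DOUBLE(x)	(x * 2)
#define GOOD_DOUBLE(x)	((x) * 2)

/* BAD_DOUBLE(a + 1)  expands to (a + 1 * 2)   == a + 2     -- wrong   */
/* GOOD_DOUBLE(a + 1) expands to ((a + 1) * 2) == 2*a + 2   -- correct */
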
@@ -219,20 +227,21 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
  * Atomically adds @a to @v, so long as @v was not already @u.
  * Returns non-zero if @v was not @u, and zero otherwise.
  */
-#define atomic_add_unless(v, a, u)			\
-({							\
-	int c, old;					\
-	c = atomic_read(v);				\
-	for (;;) {					\
-		if (unlikely(c == (u)))			\
-			break;				\
-		old = atomic_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))			\
-			break;				\
-		c = old;				\
-	}						\
-	c != (u);					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic_inc_return(v)  (atomic_add_return(1,v))
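
Rewriting atomic_add_unless() as a static inline gives it typed parameters, a real int return value, and exactly-once evaluation of its arguments, while the body stays the same optimistic cmpxchg loop: read the counter, stop if it already equals @u, otherwise try to install c + a, and on a lost race retry from the value the winning CPU left behind. Its usual consumer is the atomic_inc_not_zero() wrapper defined right below, the lookup-side "take a reference only if the object is still live" idiom. A hedged usage sketch in kernel style (struct obj and try_get_obj are hypothetical names for illustration; assumes this header is in scope):

#include <asm/atomic.h>

/* Hypothetical refcounted object, for illustration only. */
struct obj {
	atomic_t refcnt;	/* 0 means the object is being torn down */
};

/* Take a reference only if the object is not already dead.
 * Returns non-zero on success; on success the caller must drop
 * the reference again when done. */
static inline int try_get_obj(struct obj *o)
{
	return atomic_inc_not_zero(&o->refcnt);
}
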