Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--  include/asm-x86_64/atomic.h  70
1 file changed, 58 insertions(+), 12 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index fc4c5956e1ea..50db9f39274f 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -160,8 +160,8 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
 
 /**
  * atomic_add_negative - add and test if negative
- * @v: pointer of type atomic_t
  * @i: integer value to add
+ * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
@@ -178,6 +178,31 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 	return c;
 }
 
+/**
+ * atomic_add_return - add and return
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static __inline__ int atomic_add_return(int i, atomic_t *v)
+{
+	int __i = i;
+	__asm__ __volatile__(
+		LOCK "xaddl %0, %1;"
+		:"=r"(i)
+		:"m"(v->counter), "0"(i));
+	return i + __i;
+}
+
+static __inline__ int atomic_sub_return(int i, atomic_t *v)
+{
+	return atomic_add_return(-i,v);
+}
+
+#define atomic_inc_return(v)  (atomic_add_return(1,v))
+#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+
 /* An 64bit atomic type */
 
 typedef struct { volatile long counter; } atomic64_t;
@@ -320,14 +345,14 @@ static __inline__ int atomic64_inc_and_test(atomic64_t *v)
 
 /**
  * atomic64_add_negative - add and test if negative
- * @v: pointer to atomic64_t
  * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
+static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
 {
 	unsigned char c;
 
@@ -339,29 +364,50 @@ static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
 }
 
 /**
- * atomic_add_return - add and return
- * @v: pointer of type atomic_t
+ * atomic64_add_return - add and return
  * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
  * Atomically adds @i to @v and returns @i + @v
  */
-static __inline__ int atomic_add_return(int i, atomic_t *v)
+static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 {
-	int __i = i;
+	long __i = i;
 	__asm__ __volatile__(
-		LOCK "xaddl %0, %1;"
+		LOCK "xaddq %0, %1;"
 		:"=r"(i)
 		:"m"(v->counter), "0"(i));
 	return i + __i;
 }
 
-static __inline__ int atomic_sub_return(int i, atomic_t *v)
+static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 {
-	return atomic_add_return(-i,v);
+	return atomic64_add_return(-i,v);
 }
 
-#define atomic_inc_return(v)  (atomic_add_return(1,v))
-#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+#define atomic64_inc_return(v)  (atomic64_add_return(1,v))
+#define atomic64_dec_return(v)  (atomic64_sub_return(1,v))
+
+#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)				\
+({								\
+	int c, old;						\
+	c = atomic_read(v);					\
+	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
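
For reference, the patch adds two distinct patterns: a fetch-and-add (LOCK xaddl/xaddq, which returns the counter's old value, so the __i copy is needed to recover the new value) and a cmpxchg retry loop for atomic_add_unless(). The following is a minimal userspace sketch of the same logic, not part of the patch: the my_atomic_* names are hypothetical, and GCC's __sync builtins stand in for the kernel's LOCK-prefixed instructions so it can be compiled and run standalone.

#include <stdio.h>

typedef struct { volatile int counter; } my_atomic_t;

/* Analogous to the patch's atomic_add_return(): the atomic
 * fetch-and-add returns the old value, so the new value is old + i. */
static int my_atomic_add_return(int i, my_atomic_t *v)
{
	return __sync_fetch_and_add(&v->counter, i) + i;
}

/* Analogous to the patch's atomic_add_unless(): retry the
 * compare-and-swap until it succeeds or the counter equals u.
 * Returns non-zero if the add was performed. */
static int my_atomic_add_unless(my_atomic_t *v, int a, int u)
{
	int c = v->counter, old;

	while (c != u &&
	       (old = __sync_val_compare_and_swap(&v->counter, c, c + a)) != c)
		c = old;
	return c != u;
}

int main(void)
{
	my_atomic_t refs = { .counter = 1 };

	printf("add_return -> %d\n", my_atomic_add_return(1, &refs));    /* 2 */
	printf("add_unless -> %d\n", my_atomic_add_unless(&refs, 1, 0)); /* 1 */
	refs.counter = 0;
	/* The atomic_inc_not_zero() case: refuses to revive a zero count. */
	printf("inc_not_zero -> %d\n", my_atomic_add_unless(&refs, 1, 0)); /* 0 */
	return 0;
}

The retry loop is why atomic_add_unless() is a macro built on atomic_cmpxchg(): if another CPU changes the counter between the read and the cmpxchg, the cmpxchg returns the observed value, which becomes the next comparison baseline instead of forcing a fresh atomic_read().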