Diffstat (limited to 'include/asm-x86_64/atomic.h')
-rw-r--r--  include/asm-x86_64/atomic.h | 73
1 file changed, 61 insertions(+), 12 deletions(-)
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index fc4c5956e1ea..4b5cd553e772 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -2,6 +2,7 @@
 #define __ARCH_X86_64_ATOMIC__
 
 #include <linux/config.h>
+#include <asm/types.h>
 
 /* atomic_t should be 32 bit signed type */
 
@@ -160,8 +161,8 @@ static __inline__ int atomic_inc_and_test(atomic_t *v)
 
 /**
  * atomic_add_negative - add and test if negative
- * @v: pointer of type atomic_t
  * @i: integer value to add
+ * @v: pointer of type atomic_t
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
@@ -178,6 +179,31 @@ static __inline__ int atomic_add_negative(int i, atomic_t *v)
 	return c;
 }
 
+/**
+ * atomic_add_return - add and return
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static __inline__ int atomic_add_return(int i, atomic_t *v)
+{
+	int __i = i;
+	__asm__ __volatile__(
+		LOCK "xaddl %0, %1;"
+		:"=r"(i)
+		:"m"(v->counter), "0"(i));
+	return i + __i;
+}
+
+static __inline__ int atomic_sub_return(int i, atomic_t *v)
+{
+	return atomic_add_return(-i,v);
+}
+
+#define atomic_inc_return(v)  (atomic_add_return(1,v))
+#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+
 /* An 64bit atomic type */
 
 typedef struct { volatile long counter; } atomic64_t;
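
Why the new atomic_add_return() returns `i + __i`: `xadd` atomically exchanges its register operand with the memory operand and stores their sum, so after the asm the output `i` holds the counter's *old* value; adding back the saved addend `__i` reconstructs the post-add result. A minimal user-space sketch of the same add-and-return semantics (not the kernel's implementation), assuming GCC's `__atomic` builtins; the function and variable names are illustrative only:

	/* add_return_demo.c - user-space analogue of atomic_add_return().
	 * __atomic_add_fetch() returns the *new* value, matching the
	 * "old value from xadd, plus the saved addend" result above. */
	#include <stdio.h>

	static int demo_counter = 40;          /* stands in for v->counter */

	static int add_return_demo(int i)
	{
		/* one atomic read-modify-write, like LOCK xaddl + (i + __i) */
		return __atomic_add_fetch(&demo_counter, i, __ATOMIC_SEQ_CST);
	}

	int main(void)
	{
		printf("%d\n", add_return_demo(2));    /* prints 42 */
		return 0;
	}
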
@@ -320,14 +346,14 @@ static __inline__ int atomic64_inc_and_test(atomic64_t *v)
 
 /**
  * atomic64_add_negative - add and test if negative
- * @v: pointer to atomic64_t
  * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
  * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
+static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
 {
 	unsigned char c;
 
@@ -339,29 +365,51 @@ static __inline__ long atomic64_add_negative(long i, atomic64_t *v)
 }
 
 /**
- * atomic_add_return - add and return
- * @v: pointer of type atomic_t
+ * atomic64_add_return - add and return
  * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
  * Atomically adds @i to @v and returns @i + @v
  */
-static __inline__ int atomic_add_return(int i, atomic_t *v)
+static __inline__ long atomic64_add_return(long i, atomic64_t *v)
 {
-	int __i = i;
+	long __i = i;
 	__asm__ __volatile__(
-		LOCK "xaddl %0, %1;"
+		LOCK "xaddq %0, %1;"
 		:"=r"(i)
 		:"m"(v->counter), "0"(i));
 	return i + __i;
 }
 
-static __inline__ int atomic_sub_return(int i, atomic_t *v)
+static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
 {
-	return atomic_add_return(-i,v);
+	return atomic64_add_return(-i,v);
 }
 
-#define atomic_inc_return(v)  (atomic_add_return(1,v))
-#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+#define atomic64_inc_return(v)  (atomic64_add_return(1,v))
+#define atomic64_dec_return(v)  (atomic64_sub_return(1,v))
+
+#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+#define atomic_add_unless(v, a, u)				\
+({								\
+	int c, old;						\
+	c = atomic_read(v);					\
+	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
+		c = old;					\
+	c != (u);						\
+})
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 /* These are x86-specific, used by some header files */
 #define atomic_clear_mask(mask, addr) \
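
The atomic_add_unless() macro added above is the classic compare-and-swap retry loop: read the counter, then keep attempting cmpxchg until either the counter equals @u (give up) or the swap succeeds because no other CPU raced in between; a failed cmpxchg returns the fresh value, which feeds the next iteration. A user-space sketch of the same loop, assuming GCC's `__atomic` builtins (the names here are illustrative, not kernel API):

	/* add_unless_demo.c - the same cmpxchg retry loop as
	 * atomic_add_unless(), written with GCC __atomic builtins. */
	#include <stdio.h>

	static int add_unless_demo(int *p, int a, int u)
	{
		int c = __atomic_load_n(p, __ATOMIC_SEQ_CST);

		while (c != u) {
			/* on failure, c is refreshed with the current value,
			 * just as 'old' feeds back into the kernel's loop */
			if (__atomic_compare_exchange_n(p, &c, c + a, 0,
							__ATOMIC_SEQ_CST,
							__ATOMIC_SEQ_CST))
				return 1;	/* swap succeeded: add done */
		}
		return 0;			/* counter was 'u': no add */
	}

	int main(void)
	{
		int v = 1;
		printf("%d v=%d\n", add_unless_demo(&v, 1, 0), v); /* 1 v=2 */
		printf("%d v=%d\n", add_unless_demo(&v, 1, 2), v); /* 0 v=2 */
		return 0;
	}
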
@@ -378,4 +426,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
+#include <asm-generic/atomic.h>
 #endif
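
atomic_inc_not_zero() is the building block for "take a reference only if the object is still live" lookups: the increment is refused once a refcount has already reached zero, so a racing free cannot be resurrected. A hypothetical caller-side sketch, using only the primitives defined in this header (struct obj and obj_get() are invented for illustration):

	struct obj {
		atomic_t refcount;
	};

	static struct obj *obj_get(struct obj *o)
	{
		if (o && atomic_inc_not_zero(&o->refcount))
			return o;	/* reference taken; counter was > 0 */
		return NULL;		/* already hit zero: being torn down */
	}
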