diff options
author | Mathieu Desnoyers <compudj@krystal.dyndns.org> | 2007-05-08 03:34:38 -0400 |
---|---|---|
committer | Linus Torvalds <torvalds@woody.linux-foundation.org> | 2007-05-08 14:15:20 -0400 |
commit | 2856f5e31c1413bf6e4f1371e07e17078a5fee5e (patch) | |
tree | 587dfe584f0913813d0cf2414a9378618143db15 /include/asm-mips | |
parent | 79d365a306c3af53d8a732fec79b76c0b285d816 (diff) |
atomic.h: atomic_add_unless as inline. Remove system.h atomic.h circular dependency
atomic_add_unless as inline. Remove system.h atomic.h circular dependency.
I agree (with Andi Kleen) that this typeof is not needed and is more
error-prone. All the original atomic.h code that uses cmpxchg (which includes
the atomic_add_unless) uses defines instead of inline functions,
probably to circumvent a circular dependency between system.h and
atomic.h on powerpc (which my patch addresses). Therefore, it makes
sense to use inline functions that will provide type checking.
atomic_add_unless as inline. Remove system.h atomic.h circular dependency.
Digging into the FRV architecture shows me that it is also affected by
such a circular dependency. Here is the diff applying this against the
rest of my atomic.h patches.
It applies over the atomic.h standardization patches.
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Diffstat (limited to 'include/asm-mips')
-rw-r--r-- | include/asm-mips/atomic.h | 46 |
1 file changed, 30 insertions, 16 deletions
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h index 6423ffa195a4..62daa746a9c9 100644 --- a/include/asm-mips/atomic.h +++ b/include/asm-mips/atomic.h | |||
@@ -18,6 +18,7 @@ | |||
18 | #include <asm/barrier.h> | 18 | #include <asm/barrier.h> |
19 | #include <asm/cpu-features.h> | 19 | #include <asm/cpu-features.h> |
20 | #include <asm/war.h> | 20 | #include <asm/war.h> |
21 | #include <asm/system.h> | ||
21 | 22 | ||
22 | typedef struct { volatile int counter; } atomic_t; | 23 | typedef struct { volatile int counter; } atomic_t; |
23 | 24 | ||
@@ -318,14 +319,20 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |||
318 | * Atomically adds @a to @v, so long as it was not @u. | 319 | * Atomically adds @a to @v, so long as it was not @u. |
319 | * Returns non-zero if @v was not @u, and zero otherwise. | 320 | * Returns non-zero if @v was not @u, and zero otherwise. |
320 | */ | 321 | */ |
321 | #define atomic_add_unless(v, a, u) \ | 322 | static __inline__ int atomic_add_unless(atomic_t *v, int a, int u) |
322 | ({ \ | 323 | { |
323 | __typeof__((v)->counter) c, old; \ | 324 | int c, old; |
324 | c = atomic_read(v); \ | 325 | c = atomic_read(v); |
325 | while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \ | 326 | for (;;) { |
326 | c = old; \ | 327 | if (unlikely(c == (u))) |
327 | c != (u); \ | 328 | break; |
328 | }) | 329 | old = atomic_cmpxchg((v), c, c + (a)); |
330 | if (likely(old == c)) | ||
331 | break; | ||
332 | c = old; | ||
333 | } | ||
334 | return c != (u); | ||
335 | } | ||
329 | #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) | 336 | #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0) |
330 | 337 | ||
331 | #define atomic_dec_return(v) atomic_sub_return(1,(v)) | 338 | #define atomic_dec_return(v) atomic_sub_return(1,(v)) |
@@ -694,14 +701,21 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |||
694 | * Atomically adds @a to @v, so long as it was not @u. | 701 | * Atomically adds @a to @v, so long as it was not @u. |
695 | * Returns non-zero if @v was not @u, and zero otherwise. | 702 | * Returns non-zero if @v was not @u, and zero otherwise. |
696 | */ | 703 | */ |
697 | #define atomic64_add_unless(v, a, u) \ | 704 | static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) |
698 | ({ \ | 705 | { |
699 | __typeof__((v)->counter) c, old; \ | 706 | long c, old; |
700 | c = atomic_read(v); \ | 707 | c = atomic64_read(v); |
701 | while (c != (u) && (old = atomic64_cmpxchg((v), c, c + (a))) != c) \ | 708 | for (;;) { |
702 | c = old; \ | 709 | if (unlikely(c == (u))) |
703 | c != (u); \ | 710 | break; |
704 | }) | 711 | old = atomic64_cmpxchg((v), c, c + (a)); |
712 | if (likely(old == c)) | ||
713 | break; | ||
714 | c = old; | ||
715 | } | ||
716 | return c != (u); | ||
717 | } | ||
718 | |||
705 | #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) | 719 | #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) |
706 | 720 | ||
707 | #define atomic64_dec_return(v) atomic64_sub_return(1,(v)) | 721 | #define atomic64_dec_return(v) atomic64_sub_return(1,(v)) |