Diffstat (limited to 'include/asm-sparc64/atomic.h')
-rw-r--r--	include/asm-sparc64/atomic.h	59
1 file changed, 31 insertions(+), 28 deletions(-)
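
Before the diff itself, a note on what is being replaced: the old definitions are GNU C statement-expression macros, where a ({ ... }) block is an expression whose value is that of its last statement; that is how the macro form "returns" likely(c != (u)). A minimal sketch of the idiom, using a hypothetical macro that is not part of the patch:

	/* A ({ ... }) statement expression evaluates to its last statement,
	 * letting a macro body compute and "return" a value.
	 * Hypothetical example, for illustration only: */
	#define clamp_to_zero(x)		\
	({					\
		int __v = (x);			\
		if (__v < 0)			\
			__v = 0;		\
		__v;	/* value of the whole expression */	\
	})

The drawback is that such a macro never type-checks its arguments; that weakness is what the conversion below to static inline functions removes.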
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index c3feb3af2cfe..3fb4e1f7f186 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -9,6 +9,7 @@
 #define __ARCH_SPARC64_ATOMIC__
 
 #include <linux/types.h>
+#include <asm/system.h>
 
 typedef struct { volatile int counter; } atomic_t;
 typedef struct { volatile __s64 counter; } atomic64_t;
@@ -73,40 +74,42 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic_read(v);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = atomic_cmpxchg((v), c, c + (a));		\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	likely(c != (u));					\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
 #define atomic64_cmpxchg(v, o, n) \
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic64_add_unless(v, a, u)				\
-({								\
-	__typeof__((v)->counter) c, old;			\
-	c = atomic64_read(v);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = atomic64_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	likely(c != (u));					\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 /* Atomic operations are already serializing */
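
Two notes on the change above. The new #include <asm/system.h> presumably makes the header self-contained with respect to the cmpxchg()/xchg() primitives that atomic_cmpxchg()/atomic_xchg() expand to, which at this point in the tree live in asm-sparc64/system.h. And converting atomic_add_unless()/atomic64_add_unless() from macros to static inline functions gives both a fixed prototype (int arguments for atomic_t, long for atomic64_t) and a plain int result, so argument type mismatches now fail at compile time. A hedged usage sketch of the converted interface follows; the struct and obj_get() are hypothetical, and only atomic_t, atomic_add_unless() and atomic_inc_not_zero() come from the header being patched. atomic_add_unless(v, a, u) retries its cmpxchg() loop until it either observes the value u (and gives up) or successfully adds a, returning nonzero if and only if the add happened:

	#include <asm/atomic.h>

	struct obj {
		atomic_t refcnt;
	};

	/* Take a reference only while the object is still live: once
	 * refcnt has dropped to zero, atomic_inc_not_zero() refuses to
	 * resurrect it, and the caller must treat the object as dead. */
	static inline int obj_get(struct obj *p)
	{
		return atomic_inc_not_zero(&p->refcnt);	/* 0 => already dead */
	}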