aboutsummaryrefslogtreecommitdiffstats
path: root/include/asm-ia64
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-ia64')
-rw-r--r-- include/asm-ia64/atomic.h | 59
1 file changed, 31 insertions(+), 28 deletions(-)
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index b16ad235c7ee..1fc3b83325da 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -15,6 +15,7 @@
 #include <linux/types.h>
 
 #include <asm/intrinsics.h>
+#include <asm/system.h>
 
 /*
  * On IA-64, counter must always be volatile to ensure that that the
@@ -95,36 +96,38 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
 	(cmpxchg(&((v)->counter), old, new))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	__typeof__(v->counter) c, old;				\
-	c = atomic_read(v);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = atomic_cmpxchg((v), c, c + (a));		\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
-#define atomic64_add_unless(v, a, u)				\
-({								\
-	__typeof__(v->counter) c, old;				\
-	c = atomic64_read(v);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = atomic64_cmpxchg((v), c, c + (a));	\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 #define atomic_add_return(i,v)					\