author		Peter Zijlstra <peterz@infradead.org>	2016-04-17 18:54:38 -0400
committer	Ingo Molnar <mingo@kernel.org>	2016-06-16 04:48:32 -0400
commit		28aa2bda2211f4327d83b44a4f917b4a061b1c56 (patch)
tree		e47ab8a7c49f19bd42722dcc2b8cf083e08cf270 /include/asm-generic
parent		e12133324b7daaa176bb687c1eb59e1a6b203da4 (diff)
locking/atomic: Implement atomic{,64,_long}_fetch_{add,sub,and,andnot,or,xor}{,_relaxed,_acquire,_release}()
Now that all the architectures have implemented support for these new
atomic primitives, add the generic infrastructure to expose and use
them.
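
For readers new to these primitives: atomic_fetch_##op() returns the
value the variable held *before* the operation, while the existing
atomic_##op##_return() family returns the value *after* it. A minimal
user-space sketch of the same contract (illustration only, using
GCC/Clang __atomic builtins rather than the kernel API):

/* Illustration only: compiler builtins, not the kernel's atomic_t API. */
#include <stdio.h>

int main(void)
{
	int v = 10;

	/* fetch-op: returns the old value (10); v becomes 15 */
	int old = __atomic_fetch_add(&v, 5, __ATOMIC_RELAXED);

	/* op-return equivalent: returns the new value (20) */
	int now = __atomic_add_fetch(&v, 5, __ATOMIC_RELAXED);

	printf("old=%d now=%d v=%d\n", old, now, v);	/* old=10 now=20 v=20 */
	return 0;
}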
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Borislav Petkov <bp@suse.de>
Cc: Davidlohr Bueso <dave@stgolabs.net>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'include/asm-generic')
-rw-r--r--	include/asm-generic/atomic-long.h	36
-rw-r--r--	include/asm-generic/atomic.h	49
-rw-r--r--	include/asm-generic/atomic64.h	15

3 files changed, 95 insertions, 5 deletions
diff --git a/include/asm-generic/atomic-long.h b/include/asm-generic/atomic-long.h
index 5e1f345b58dd..2d0d3cf791ab 100644
--- a/include/asm-generic/atomic-long.h
+++ b/include/asm-generic/atomic-long.h
@@ -112,6 +112,40 @@ static __always_inline void atomic_long_dec(atomic_long_t *l)
 	ATOMIC_LONG_PFX(_dec)(v);
 }
 
+#define ATOMIC_LONG_FETCH_OP(op, mo)					\
+static inline long							\
+atomic_long_fetch_##op##mo(long i, atomic_long_t *l)			\
+{									\
+	ATOMIC_LONG_PFX(_t) *v = (ATOMIC_LONG_PFX(_t) *)l;		\
+									\
+	return (long)ATOMIC_LONG_PFX(_fetch_##op##mo)(i, v);		\
+}
+
+ATOMIC_LONG_FETCH_OP(add, )
+ATOMIC_LONG_FETCH_OP(add, _relaxed)
+ATOMIC_LONG_FETCH_OP(add, _acquire)
+ATOMIC_LONG_FETCH_OP(add, _release)
+ATOMIC_LONG_FETCH_OP(sub, )
+ATOMIC_LONG_FETCH_OP(sub, _relaxed)
+ATOMIC_LONG_FETCH_OP(sub, _acquire)
+ATOMIC_LONG_FETCH_OP(sub, _release)
+ATOMIC_LONG_FETCH_OP(and, )
+ATOMIC_LONG_FETCH_OP(and, _relaxed)
+ATOMIC_LONG_FETCH_OP(and, _acquire)
+ATOMIC_LONG_FETCH_OP(and, _release)
+ATOMIC_LONG_FETCH_OP(andnot, )
+ATOMIC_LONG_FETCH_OP(andnot, _relaxed)
+ATOMIC_LONG_FETCH_OP(andnot, _acquire)
+ATOMIC_LONG_FETCH_OP(andnot, _release)
+ATOMIC_LONG_FETCH_OP(or, )
+ATOMIC_LONG_FETCH_OP(or, _relaxed)
+ATOMIC_LONG_FETCH_OP(or, _acquire)
+ATOMIC_LONG_FETCH_OP(or, _release)
+ATOMIC_LONG_FETCH_OP(xor, )
+ATOMIC_LONG_FETCH_OP(xor, _relaxed)
+ATOMIC_LONG_FETCH_OP(xor, _acquire)
+ATOMIC_LONG_FETCH_OP(xor, _release)
+
 #define ATOMIC_LONG_OP(op)						\
 static __always_inline void						\
 atomic_long_##op(long i, atomic_long_t *l)				\
@@ -124,9 +158,9 @@ atomic_long_##op(long i, atomic_long_t *l)			\
 ATOMIC_LONG_OP(add)
 ATOMIC_LONG_OP(sub)
 ATOMIC_LONG_OP(and)
+ATOMIC_LONG_OP(andnot)
 ATOMIC_LONG_OP(or)
 ATOMIC_LONG_OP(xor)
-ATOMIC_LONG_OP(andnot)
 
 #undef ATOMIC_LONG_OP
 
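As a reading aid (an expansion sketch, not part of the patch): on
64-bit kernels ATOMIC_LONG_PFX(x) pastes to atomic64##x, so
ATOMIC_LONG_FETCH_OP(add, _acquire) above generates roughly:

/* Sketch of the preprocessor expansion on a 64-bit build. */
static inline long atomic_long_fetch_add_acquire(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_fetch_add_acquire(i, v);
}

On 32-bit configurations the same macro delegates to the
atomic_fetch_*() family instead.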
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 74f1a3704d7a..a2304ccf4ed0 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -61,6 +61,18 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 	return c c_op i;						\
 }
 
+#define ATOMIC_FETCH_OP(op, c_op)					\
+static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+{									\
+	int c, old;							\
+									\
+	c = v->counter;							\
+	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
+		c = old;						\
+									\
+	return c;							\
+}
+
 #else
 
 #include <linux/irqflags.h>
@@ -88,6 +100,20 @@ static inline int atomic_##op##_return(int i, atomic_t *v)	\
 	return ret;							\
 }
 
+#define ATOMIC_FETCH_OP(op, c_op)					\
+static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+{									\
+	unsigned long flags;						\
+	int ret;							\
+									\
+	raw_local_irq_save(flags);					\
+	ret = v->counter;						\
+	v->counter = v->counter c_op i;					\
+	raw_local_irq_restore(flags);					\
+									\
+	return ret;							\
+}
+
 #endif /* CONFIG_SMP */
 
 #ifndef atomic_add_return
@@ -98,6 +124,28 @@ ATOMIC_OP_RETURN(add, +)
 ATOMIC_OP_RETURN(sub, -)
 #endif
 
+#ifndef atomic_fetch_add
+ATOMIC_FETCH_OP(add, +)
+#endif
+
+#ifndef atomic_fetch_sub
+ATOMIC_FETCH_OP(sub, -)
+#endif
+
+#ifndef atomic_fetch_and
+ATOMIC_FETCH_OP(and, &)
+#endif
+
+#ifndef atomic_fetch_or
+#define atomic_fetch_or atomic_fetch_or
+
+ATOMIC_FETCH_OP(or, |)
+#endif
+
+#ifndef atomic_fetch_xor
+ATOMIC_FETCH_OP(xor, ^)
+#endif
+
 #ifndef atomic_and
 ATOMIC_OP(and, &)
 #endif
@@ -110,6 +158,7 @@ ATOMIC_OP(or, |)
 ATOMIC_OP(xor, ^)
 #endif
 
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
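For illustration (a sketch of the macro expansion on the CONFIG_SMP
path, not part of the diff): ATOMIC_FETCH_OP(and, &) produces a
cmpxchg() retry loop that re-reads the counter whenever another CPU
raced in between the load and the cmpxchg(), and returns the
pre-operation value:

/* Expansion sketch of ATOMIC_FETCH_OP(and, &) under CONFIG_SMP. */
static inline int atomic_fetch_and(int i, atomic_t *v)
{
	int c, old;

	c = v->counter;
	/* cmpxchg() only stores c & i if the counter still equals c;
	 * otherwise it returns the current value, and we retry with it. */
	while ((old = cmpxchg(&v->counter, c, c & i)) != c)
		c = old;

	return c;	/* the value observed before the AND */
}

The UP variant instead brackets a plain read-modify-write with
raw_local_irq_save()/raw_local_irq_restore(), which suffices when
interrupts are the only source of concurrency.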
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
index d48e78ccad3d..dad68bf46c77 100644
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -27,16 +27,23 @@ extern void	 atomic64_##op(long long a, atomic64_t *v);
 #define ATOMIC64_OP_RETURN(op)						\
 extern long long atomic64_##op##_return(long long a, atomic64_t *v);
 
-#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
+#define ATOMIC64_FETCH_OP(op)						\
+extern long long atomic64_fetch_##op(long long a, atomic64_t *v);
+
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)
 
 ATOMIC64_OPS(add)
 ATOMIC64_OPS(sub)
 
-ATOMIC64_OP(and)
-ATOMIC64_OP(or)
-ATOMIC64_OP(xor)
+#undef ATOMIC64_OPS
+#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)
+
+ATOMIC64_OPS(and)
+ATOMIC64_OPS(or)
+ATOMIC64_OPS(xor)
 
 #undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
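As a reading aid (an expansion sketch, not part of the diff): after
this change ATOMIC64_OPS(add) declares all three prototypes, while
the redefined ATOMIC64_OPS(and) omits the _return form, since
and/or/xor never had one here:

/* ATOMIC64_OPS(add) now expands to: */
extern void atomic64_add(long long a, atomic64_t *v);
extern long long atomic64_add_return(long long a, atomic64_t *v);
extern long long atomic64_fetch_add(long long a, atomic64_t *v);

/* After the #undef/#define, ATOMIC64_OPS(and) expands to: */
extern void atomic64_and(long long a, atomic64_t *v);
extern long long atomic64_fetch_and(long long a, atomic64_t *v);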