aboutsummaryrefslogtreecommitdiffstats
path: root/arch/parisc/include
diff options
context:
space:
mode:
authorPeter Zijlstra <peterz@infradead.org>2016-04-17 19:16:05 -0400
committerIngo Molnar <mingo@kernel.org>2016-06-16 04:48:28 -0400
commite5857a6ed6004cac5273b8cdc189ab4b6363cfaf (patch)
tree78b096aecfe6a57233a32f4fd3e0f73d2469f032 /arch/parisc/include
parentf8d638e28d7cc858066d2de484d9719dc181593a (diff)
locking/atomic, arch/parisc: Implement atomic{,64}_fetch_{add,sub,and,or,xor}()
Implement FETCH-OP atomic primitives, these are very similar to the existing OP-RETURN primitives we already have, except they return the value of the atomic variable _before_ modification. This is especially useful for irreversible operations -- such as bitops (because it becomes impossible to reconstruct the state prior to modification). Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org> Cc: Andrew Morton <akpm@linux-foundation.org> Cc: Helge Deller <deller@gmx.de> Cc: James E.J. Bottomley <jejb@parisc-linux.org> Cc: Linus Torvalds <torvalds@linux-foundation.org> Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com> Cc: Peter Zijlstra <peterz@infradead.org> Cc: Thomas Gleixner <tglx@linutronix.de> Cc: linux-arch@vger.kernel.org Cc: linux-kernel@vger.kernel.org Cc: linux-parisc@vger.kernel.org Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/parisc/include')
-rw-r--r--arch/parisc/include/asm/atomic.h65
1 file changed, 57 insertions, 8 deletions
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 1d109990a022..29df1f871910 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -121,16 +121,41 @@ static __inline__ int atomic_##op##_return(int i, atomic_t *v) \
121 return ret; \ 121 return ret; \
122} 122}
123 123
124#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op) 124#define ATOMIC_FETCH_OP(op, c_op) \
125static __inline__ int atomic_fetch_##op(int i, atomic_t *v) \
126{ \
127 unsigned long flags; \
128 int ret; \
129 \
130 _atomic_spin_lock_irqsave(v, flags); \
131 ret = v->counter; \
132 v->counter c_op i; \
133 _atomic_spin_unlock_irqrestore(v, flags); \
134 \
135 return ret; \
136}
137
138#define ATOMIC_OPS(op, c_op) \
139 ATOMIC_OP(op, c_op) \
140 ATOMIC_OP_RETURN(op, c_op) \
141 ATOMIC_FETCH_OP(op, c_op)
125 142
126ATOMIC_OPS(add, +=) 143ATOMIC_OPS(add, +=)
127ATOMIC_OPS(sub, -=) 144ATOMIC_OPS(sub, -=)
128 145
129ATOMIC_OP(and, &=) 146#undef ATOMIC_OPS
130ATOMIC_OP(or, |=) 147#define ATOMIC_OPS(op, c_op) \
131ATOMIC_OP(xor, ^=) 148 ATOMIC_OP(op, c_op) \
149 ATOMIC_FETCH_OP(op, c_op)
150
151#define atomic_fetch_or atomic_fetch_or
152
153ATOMIC_OPS(and, &=)
154ATOMIC_OPS(or, |=)
155ATOMIC_OPS(xor, ^=)
132 156
133#undef ATOMIC_OPS 157#undef ATOMIC_OPS
158#undef ATOMIC_FETCH_OP
134#undef ATOMIC_OP_RETURN 159#undef ATOMIC_OP_RETURN
135#undef ATOMIC_OP 160#undef ATOMIC_OP
136 161
@@ -185,15 +210,39 @@ static __inline__ s64 atomic64_##op##_return(s64 i, atomic64_t *v) \
185 return ret; \ 210 return ret; \
186} 211}
187 212
188#define ATOMIC64_OPS(op, c_op) ATOMIC64_OP(op, c_op) ATOMIC64_OP_RETURN(op, c_op) 213#define ATOMIC64_FETCH_OP(op, c_op) \
214static __inline__ s64 atomic64_fetch_##op(s64 i, atomic64_t *v) \
215{ \
216 unsigned long flags; \
217 s64 ret; \
218 \
219 _atomic_spin_lock_irqsave(v, flags); \
220 ret = v->counter; \
221 v->counter c_op i; \
222 _atomic_spin_unlock_irqrestore(v, flags); \
223 \
224 return ret; \
225}
226
227#define ATOMIC64_OPS(op, c_op) \
228 ATOMIC64_OP(op, c_op) \
229 ATOMIC64_OP_RETURN(op, c_op) \
230 ATOMIC64_FETCH_OP(op, c_op)
189 231
190ATOMIC64_OPS(add, +=) 232ATOMIC64_OPS(add, +=)
191ATOMIC64_OPS(sub, -=) 233ATOMIC64_OPS(sub, -=)
192ATOMIC64_OP(and, &=)
193ATOMIC64_OP(or, |=)
194ATOMIC64_OP(xor, ^=)
195 234
196#undef ATOMIC64_OPS 235#undef ATOMIC64_OPS
236#define ATOMIC64_OPS(op, c_op) \
237 ATOMIC64_OP(op, c_op) \
238 ATOMIC64_FETCH_OP(op, c_op)
239
240ATOMIC64_OPS(and, &=)
241ATOMIC64_OPS(or, |=)
242ATOMIC64_OPS(xor, ^=)
243
244#undef ATOMIC64_OPS
245#undef ATOMIC64_FETCH_OP
197#undef ATOMIC64_OP_RETURN 246#undef ATOMIC64_OP_RETURN
198#undef ATOMIC64_OP 247#undef ATOMIC64_OP
199 248