author		Mark Rutland <mark.rutland@arm.com>	2018-06-21 08:13:21 -0400
committer	Ingo Molnar <mingo@kernel.org>		2018-06-21 08:25:24 -0400
commit		7cc7eaad49c30ac165ecf84d95b26f7e0d53bd97 (patch)
tree		0b1476f680f75f9e8e704e2d86195db23a082887
parent		b3a2a05f9111de0b79312e577608a27b0318c0a1 (diff)
atomics/treewide: Clean up '*_andnot()' ifdeffery
The ifdeffery for atomic*_{fetch_,}andnot() is unlike that for all the
other atomics. If atomic*_andnot() is not defined, the corresponding
atomic*_fetch_andnot() is assumed to not be defined. Additionally, the
fallbacks for the various ordering cases are written much later in
atomic.h as static inlines.

This isn't problematic today, but gets in the way of scripting the
generation of atomics. To prepare for scripting, this patch:

* Switches to separate ifdefs for atomic*_andnot() and
  atomic*_fetch_andnot(), updating implementations as appropriate.

* Moves the fallbacks into the standard ifdefs, as macro expansions
  rather than static inlines.

* Removes trivial andnot implementations from architectures, where
  these are superseded by core code.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-19-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
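For context, atomic*_andnot(i, v) atomically clears from *v the bits set
in i, i.e. it is equivalent to atomic*_and(~i, v); the fetch_ variants
additionally return the value the variable held before the update. A
minimal usage sketch follows; it is not part of this patch, and the
MY_FLAG_* names and helper functions are invented for illustration:

	#include <linux/atomic.h>

	/* Hypothetical flag bits, for illustration only. */
	#define MY_FLAG_BUSY	0x1
	#define MY_FLAG_DIRTY	0x2

	static atomic_t my_flags = ATOMIC_INIT(MY_FLAG_BUSY | MY_FLAG_DIRTY);

	static void my_clear_busy(void)
	{
		/* Atomically clear BUSY; same as atomic_and(~MY_FLAG_BUSY, &my_flags). */
		atomic_andnot(MY_FLAG_BUSY, &my_flags);
	}

	static int my_test_and_clear_busy(void)
	{
		/* The fetch_ variant returns the old value, so the old bit can be tested. */
		return atomic_fetch_andnot(MY_FLAG_BUSY, &my_flags) & MY_FLAG_BUSY;
	}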
-rw-r--r--	arch/arc/include/asm/atomic.h	8
-rw-r--r--	arch/arm/include/asm/atomic.h	2
-rw-r--r--	include/linux/atomic.h		96
3 files changed, 36 insertions, 70 deletions
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 8f64f3b79b8a..4e0072730241 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -187,7 +187,8 @@ static inline int atomic_fetch_##op(int i, atomic_t *v) \
 ATOMIC_OPS(add, +=, add)
 ATOMIC_OPS(sub, -=, sub)
 
 #define atomic_andnot atomic_andnot
+#define atomic_fetch_andnot atomic_fetch_andnot
 
 #undef ATOMIC_OPS
 #define ATOMIC_OPS(op, c_op, asm_op)					\
@@ -296,8 +297,6 @@ ATOMIC_OPS(add, +=, CTOP_INST_AADD_DI_R2_R2_R3)
 	ATOMIC_FETCH_OP(op, c_op, asm_op)
 
 ATOMIC_OPS(and, &=, CTOP_INST_AAND_DI_R2_R2_R3)
-#define atomic_andnot(mask, v) atomic_and(~(mask), (v))
-#define atomic_fetch_andnot(mask, v) atomic_fetch_and(~(mask), (v))
 ATOMIC_OPS(or, |=, CTOP_INST_AOR_DI_R2_R2_R3)
 ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 
@@ -430,7 +429,8 @@ static inline long long atomic64_fetch_##op(long long a, atomic64_t *v) \
 	ATOMIC64_OP_RETURN(op, op1, op2)				\
 	ATOMIC64_FETCH_OP(op, op1, op2)
 
 #define atomic64_andnot atomic64_andnot
+#define atomic64_fetch_andnot atomic64_fetch_andnot
 
 ATOMIC64_OPS(add, add.f, adc)
 ATOMIC64_OPS(sub, sub.f, sbc)
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 884c241424fe..f74756641410 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -216,6 +216,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
+#define atomic_fetch_andnot atomic_fetch_andnot
+
 #endif /* __LINUX_ARM_ARCH__ */
 
 #define ATOMIC_OPS(op, c_op, asm_op)					\
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 93fe5b4041e1..8e04f1f69bd9 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -354,12 +354,22 @@
 #endif
 #endif /* atomic_fetch_and_relaxed */
 
-#ifdef atomic_andnot
-/* atomic_fetch_andnot_relaxed */
+#ifndef atomic_andnot
+#define atomic_andnot(i, v)			atomic_and(~(int)(i), (v))
+#endif
+
 #ifndef atomic_fetch_andnot_relaxed
-#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
-#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
-#define atomic_fetch_andnot_release	atomic_fetch_andnot
+
+#ifndef atomic_fetch_andnot
+#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
+#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
+#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
+#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
+#else /* atomic_fetch_andnot */
+#define atomic_fetch_andnot_relaxed		atomic_fetch_andnot
+#define atomic_fetch_andnot_acquire		atomic_fetch_andnot
+#define atomic_fetch_andnot_release		atomic_fetch_andnot
+#endif /* atomic_fetch_andnot */
 
 #else /* atomic_fetch_andnot_relaxed */
 
@@ -378,7 +388,6 @@
 	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic_fetch_andnot_relaxed */
-#endif /* atomic_andnot */
 
 /* atomic_fetch_xor_relaxed */
 #ifndef atomic_fetch_xor_relaxed
@@ -655,33 +664,6 @@ static inline bool atomic_add_negative(int i, atomic_t *v)
 }
 #endif
 
-#ifndef atomic_andnot
-static inline void atomic_andnot(int i, atomic_t *v)
-{
-	atomic_and(~i, v);
-}
-
-static inline int atomic_fetch_andnot(int i, atomic_t *v)
-{
-	return atomic_fetch_and(~i, v);
-}
-
-static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
-{
-	return atomic_fetch_and_relaxed(~i, v);
-}
-
-static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
-{
-	return atomic_fetch_and_acquire(~i, v);
-}
-
-static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
-{
-	return atomic_fetch_and_release(~i, v);
-}
-#endif
-
 #ifndef atomic_inc_unless_negative
 static inline bool atomic_inc_unless_negative(atomic_t *v)
 {
@@ -1029,12 +1011,22 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_fetch_and_relaxed */
 
-#ifdef atomic64_andnot
-/* atomic64_fetch_andnot_relaxed */
+#ifndef atomic64_andnot
+#define atomic64_andnot(i, v)			atomic64_and(~(long long)(i), (v))
+#endif
+
 #ifndef atomic64_fetch_andnot_relaxed
-#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
-#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
-#define atomic64_fetch_andnot_release	atomic64_fetch_andnot
+
+#ifndef atomic64_fetch_andnot
+#define atomic64_fetch_andnot(i, v)		atomic64_fetch_and(~(long long)(i), (v))
+#define atomic64_fetch_andnot_relaxed(i, v)	atomic64_fetch_and_relaxed(~(long long)(i), (v))
+#define atomic64_fetch_andnot_acquire(i, v)	atomic64_fetch_and_acquire(~(long long)(i), (v))
+#define atomic64_fetch_andnot_release(i, v)	atomic64_fetch_and_release(~(long long)(i), (v))
+#else /* atomic64_fetch_andnot */
+#define atomic64_fetch_andnot_relaxed		atomic64_fetch_andnot
+#define atomic64_fetch_andnot_acquire		atomic64_fetch_andnot
+#define atomic64_fetch_andnot_release		atomic64_fetch_andnot
+#endif /* atomic64_fetch_andnot */
 
 #else /* atomic64_fetch_andnot_relaxed */
 
@@ -1053,7 +1045,6 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
 #endif
 #endif /* atomic64_fetch_andnot_relaxed */
-#endif /* atomic64_andnot */
 
 /* atomic64_fetch_xor_relaxed */
 #ifndef atomic64_fetch_xor_relaxed
@@ -1262,33 +1253,6 @@ static inline bool atomic64_add_negative(long long i, atomic64_t *v)
 }
 #endif
 
-#ifndef atomic64_andnot
-static inline void atomic64_andnot(long long i, atomic64_t *v)
-{
-	atomic64_and(~i, v);
-}
-
-static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and(~i, v);
-}
-
-static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_relaxed(~i, v);
-}
-
-static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_acquire(~i, v);
-}
-
-static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
-{
-	return atomic64_fetch_and_release(~i, v);
-}
-#endif
-
 #ifndef atomic64_inc_unless_negative
 static inline bool atomic64_inc_unless_negative(atomic64_t *v)
 {
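A note on the opt-in idiom visible in the arch hunks above: an
architecture that implements its own atomic_fetch_andnot() also defines
the macro to its own name, which turns the generic header's #ifndef
fallback into a no-op. A simplified sketch of that pattern, not taken
verbatim from any arch header:

	/* In some <asm/atomic.h>: a native implementation ... */
	static inline int atomic_fetch_andnot(int i, atomic_t *v)
	{
		/* Arch-specific code would live here; this body is a placeholder. */
		return atomic_fetch_and(~i, v);
	}

	/*
	 * ... announced by defining the name to itself, so that the
	 * "#ifndef atomic_fetch_andnot" fallback in include/linux/atomic.h
	 * is skipped.
	 */
	#define atomic_fetch_andnot atomic_fetch_andnot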