author:    Mark Rutland <mark.rutland@arm.com>  2018-06-21 08:13:19 -0400
committer: Ingo Molnar <mingo@kernel.org>       2018-06-21 08:25:24 -0400
commit:    9837559d8eb01ce834e56fc9a567c1d94ebd3698
tree:      bb475ca639022bddfc8bbdc9c1b963ab0ff30def
parent:    18cc1814d4e7560412c9c8c6d28f9d6782c8b402
atomics/treewide: Make unconditional inc/dec ops optional
Many of the inc/dec ops are mandatory, but for most architectures inc/dec are
simply trivial wrappers around their corresponding add/sub ops.
Let's make all the inc/dec ops optional, so that we can get rid of these
boilerplate wrappers.
The instrumented atomics are updated accordingly.
There should be no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-17-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
24 files changed, 86 insertions(+), 296 deletions(-)
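
Before the per-architecture diffs, the idiom the whole patch leans on is worth seeing in isolation: an architecture that implements an op also defines a same-named macro beside it, and the generic header supplies the add/sub-based fallback only when that macro is absent. Below is a condensed, compilable sketch of the idiom in plain C; the HAVE_ARCH_INC switch and the one-field atomic_t stand-in are illustrative assumptions, not kernel code.

    #include <stdio.h>

    /* Stand-in for the kernel's atomic_t; illustration only. */
    typedef struct { int counter; } atomic_t;

    static void atomic_add(int i, atomic_t *v) { v->counter += i; }

    /* "Architecture" side: an arch with a dedicated increment defines the
     * function AND a same-named macro, exactly as the m68k/powerpc/x86
     * hunks below do with "#define atomic_inc atomic_inc". */
    #define HAVE_ARCH_INC 1
    #if HAVE_ARCH_INC
    static void atomic_inc(atomic_t *v) { v->counter += 1; }
    #define atomic_inc atomic_inc
    #endif

    /* Generic side: <linux/atomic.h> only supplies the trivial wrapper
     * when no same-named macro was detected. */
    #ifndef atomic_inc
    #define atomic_inc(v) atomic_add(1, (v))
    #endif

    int main(void)
    {
        atomic_t v = { .counter = 0 };

        atomic_add(41, &v);   /* always available */
        atomic_inc(&v);       /* arch version or generic fallback */
        printf("%d\n", v.counter);   /* prints 42 either way */
        return 0;
    }

Flipping HAVE_ARCH_INC to 0 routes atomic_inc through the #ifndef fallback instead, which is precisely the mechanism the <linux/atomic.h> hunks at the end of this patch add for every architecture that drops its boilerplate wrappers.
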
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 25f8693c5a42..f6410cb68058 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -297,16 +297,4 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return old - 1;
 }
 
-#define atomic_dec_return(v)	atomic_sub_return(1,(v))
-#define atomic64_dec_return(v)	atomic64_sub_return(1,(v))
-
-#define atomic_inc_return(v)	atomic_add_return(1,(v))
-#define atomic64_inc_return(v)	atomic64_add_return(1,(v))
-
-#define atomic_inc(v)		atomic_add(1,(v))
-#define atomic64_inc(v)		atomic64_add(1,(v))
-
-#define atomic_dec(v)		atomic_sub(1,(v))
-#define atomic64_dec(v)		atomic64_sub(1,(v))
-
 #endif /* _ALPHA_ATOMIC_H */
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 4222e726f84c..27b95a928c1e 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -308,12 +308,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_inc(v)		atomic_add(1, v)
-#define atomic_dec(v)		atomic_sub(1, v)
-
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-
 #ifdef CONFIG_GENERIC_ATOMIC64
 
 #include <asm-generic/atomic64.h>
@@ -560,11 +554,6 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_inc(v)		atomic64_add(1LL, (v))
-#define atomic64_inc_return(v)	atomic64_add_return(1LL, (v))
-#define atomic64_dec(v)		atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v)	atomic64_sub_return(1LL, (v))
-
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 
 #endif /* !__ASSEMBLY__ */
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 35fb7f504daa..5a58d061d3d2 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -245,12 +245,6 @@ ATOMIC_OPS(xor, ^=, eor)
 
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic_inc(v)		atomic_add(1, v)
-#define atomic_dec(v)		atomic_sub(1, v)
-
-#define atomic_inc_return_relaxed(v)	(atomic_add_return_relaxed(1, v))
-#define atomic_dec_return_relaxed(v)	(atomic_sub_return_relaxed(1, v))
-
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
 	long long counter;
@@ -512,11 +506,6 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_inc(v)			atomic64_add(1LL, (v))
-#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1LL, (v))
-#define atomic64_dec(v)			atomic64_sub(1LL, (v))
-#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1LL, (v))
-
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
 #endif
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index edbe53fa3106..078f785cd97f 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -50,21 +50,11 @@
 #define atomic_add_return_release	atomic_add_return_release
 #define atomic_add_return		atomic_add_return
 
-#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
-#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
-#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
-#define atomic_inc_return(v)		atomic_add_return(1, (v))
-
 #define atomic_sub_return_relaxed	atomic_sub_return_relaxed
 #define atomic_sub_return_acquire	atomic_sub_return_acquire
 #define atomic_sub_return_release	atomic_sub_return_release
 #define atomic_sub_return		atomic_sub_return
 
-#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
-#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
-#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
-#define atomic_dec_return(v)		atomic_sub_return(1, (v))
-
 #define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
 #define atomic_fetch_add_acquire	atomic_fetch_add_acquire
 #define atomic_fetch_add_release	atomic_fetch_add_release
@@ -108,8 +98,6 @@
 	cmpxchg_release(&((v)->counter), (old), (new))
 #define atomic_cmpxchg(v, old, new)	cmpxchg(&((v)->counter), (old), (new))
 
-#define atomic_inc(v)			atomic_add(1, (v))
-#define atomic_dec(v)			atomic_sub(1, (v))
 #define atomic_andnot			atomic_andnot
 
 /*
@@ -124,21 +112,11 @@
 #define atomic64_add_return_release	atomic64_add_return_release
 #define atomic64_add_return		atomic64_add_return
 
-#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
-#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
-#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
-#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
-
 #define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
 #define atomic64_sub_return_acquire	atomic64_sub_return_acquire
 #define atomic64_sub_return_release	atomic64_sub_return_release
 #define atomic64_sub_return		atomic64_sub_return
 
-#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
-#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
-#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
-#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
-
 #define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
 #define atomic64_fetch_add_acquire	atomic64_fetch_add_acquire
 #define atomic64_fetch_add_release	atomic64_fetch_add_release
@@ -179,8 +157,6 @@
 #define atomic64_cmpxchg_release	atomic_cmpxchg_release
 #define atomic64_cmpxchg		atomic_cmpxchg
 
-#define atomic64_inc(v)			atomic64_add(1, (v))
-#define atomic64_dec(v)			atomic64_sub(1, (v))
 #define atomic64_andnot			atomic64_andnot
 
 #endif
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index 8977b5157c8f..c6b6a06231b2 100644
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -69,13 +69,6 @@ ATOMIC_OPS(sub, -=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_inc_return(v)	atomic_add_return(1, v)
-#define atomic_dec_return(v)	atomic_sub_return(1, v)
-
-#define atomic_inc(v)		(void)atomic_inc_return(v)
-
-#define atomic_dec(v)		(void)atomic_dec_return(v)
-
 static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	int ret;
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index 31638f511674..311b9894ccc8 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -198,10 +198,4 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 }
 #define atomic_fetch_add_unless atomic_fetch_add_unless
 
-#define atomic_inc(v) atomic_add(1, (v))
-#define atomic_dec(v) atomic_sub(1, (v))
-
-#define atomic_inc_return(v) (atomic_add_return(1, v))
-#define atomic_dec_return(v) (atomic_sub_return(1, v))
-
 #endif
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index e4143c462e65..46a15a974bed 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -231,19 +231,10 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }
 
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-#define atomic64_dec_return(v)	atomic64_sub_return(1, (v))
-#define atomic64_inc_return(v)	atomic64_add_return(1, (v))
-
 #define atomic_add(i,v)		(void)atomic_add_return((i), (v))
 #define atomic_sub(i,v)		(void)atomic_sub_return((i), (v))
-#define atomic_inc(v)		atomic_add(1, (v))
-#define atomic_dec(v)		atomic_sub(1, (v))
 
 #define atomic64_add(i,v)	(void)atomic64_add_return((i), (v))
 #define atomic64_sub(i,v)	(void)atomic64_sub_return((i), (v))
-#define atomic64_inc(v)		atomic64_add(1, (v))
-#define atomic64_dec(v)		atomic64_sub(1, (v))
 
 #endif /* _ASM_IA64_ATOMIC_H */
diff --git a/arch/m68k/include/asm/atomic.h b/arch/m68k/include/asm/atomic.h
index 9df09c876fa2..47228b0d4163 100644
--- a/arch/m68k/include/asm/atomic.h
+++ b/arch/m68k/include/asm/atomic.h
@@ -126,11 +126,13 @@ static inline void atomic_inc(atomic_t *v)
 {
 	__asm__ __volatile__("addql #1,%0" : "+m" (*v));
 }
+#define atomic_inc atomic_inc
 
 static inline void atomic_dec(atomic_t *v)
 {
 	__asm__ __volatile__("subql #1,%0" : "+m" (*v));
 }
+#define atomic_dec atomic_dec
 
 static inline int atomic_dec_and_test(atomic_t *v)
 {
@@ -192,9 +194,6 @@ static inline int atomic_xchg(atomic_t *v, int new)
 
 #endif /* !CONFIG_RMW_INSNS */
 
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-
 static inline int atomic_sub_and_test(int i, atomic_t *v)
 {
 	char c;
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index fd3008ae164c..79be687de4ab 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -274,31 +274,12 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
-#define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_inc_return(v) atomic_add_return(1, (v))
-
 /*
  * atomic_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
  */
 #define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
 
-/*
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc(v) atomic_add(1, (v))
-
-/*
- * atomic_dec - decrement and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec(v) atomic_sub(1, (v))
-
 #ifdef CONFIG_64BIT
 
 #define ATOMIC64_INIT(i)	{ (i) }
@@ -554,31 +535,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
 
-#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
-#define atomic64_inc_return(v) atomic64_add_return(1, (v))
-
 /*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic64_t
  */
 #define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
 
-/*
- * atomic64_inc - increment atomic variable
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic64_inc(v) atomic64_add(1, (v))
-
-/*
- * atomic64_dec - decrement and test
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic64_dec(v) atomic64_sub(1, (v))
-
 #endif /* CONFIG_64BIT */
 
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index f85844ff6336..10bc490327c1 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -136,12 +136,6 @@ ATOMIC_OPS(xor, ^=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_inc(v)	(atomic_add(   1,(v)))
-#define atomic_dec(v)	(atomic_add(  -1,(v)))
-
-#define atomic_inc_return(v)	(atomic_add_return(   1,(v)))
-#define atomic_dec_return(v)	(atomic_add_return(  -1,(v)))
-
 #define ATOMIC_INIT(i)	{ (i) }
 
 #ifdef CONFIG_64BIT
@@ -224,12 +218,6 @@ atomic64_read(const atomic64_t *v)
 	return READ_ONCE((v)->counter);
 }
 
-#define atomic64_inc(v)		(atomic64_add(   1,(v)))
-#define atomic64_dec(v)		(atomic64_add(  -1,(v)))
-
-#define atomic64_inc_return(v)		(atomic64_add_return(   1,(v)))
-#define atomic64_dec_return(v)		(atomic64_add_return(  -1,(v)))
-
 /* exported interface */
 #define atomic64_cmpxchg(v, o, n) \
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 5d76f05d2be3..ebaefdee4a57 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -143,6 +143,7 @@ static __inline__ void atomic_inc(atomic_t *v)
 		: "r" (&v->counter)
 		: "cc", "xer");
 }
+#define atomic_inc atomic_inc
 
 static __inline__ int atomic_inc_return_relaxed(atomic_t *v)
 {
@@ -175,6 +176,7 @@ static __inline__ void atomic_dec(atomic_t *v)
 		: "r" (&v->counter)
 		: "cc", "xer");
 }
+#define atomic_dec atomic_dec
 
 static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 {
@@ -411,6 +413,7 @@ static __inline__ void atomic64_inc(atomic64_t *v)
 		: "r" (&v->counter)
 		: "cc", "xer");
 }
+#define atomic64_inc atomic64_inc
 
 static __inline__ long atomic64_inc_return_relaxed(atomic64_t *v)
 {
@@ -441,6 +444,7 @@ static __inline__ void atomic64_dec(atomic64_t *v)
 		: "r" (&v->counter)
 		: "cc", "xer");
 }
+#define atomic64_dec atomic64_dec
 
 static __inline__ long atomic64_dec_return_relaxed(atomic64_t *v)
 {
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
index 68eef0a805ca..512b89485790 100644
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -209,82 +209,6 @@ ATOMIC_OPS(xor, xor, i)
 #undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 
-#define ATOMIC_OP(op, func_op, I, c_type, prefix)			\
-static __always_inline							\
-void atomic##prefix##_##op(atomic##prefix##_t *v)			\
-{									\
-	atomic##prefix##_##func_op(I, v);				\
-}
-
-#define ATOMIC_FETCH_OP(op, func_op, I, c_type, prefix)			\
-static __always_inline							\
-c_type atomic##prefix##_fetch_##op##_relaxed(atomic##prefix##_t *v)	\
-{									\
-	return atomic##prefix##_fetch_##func_op##_relaxed(I, v);	\
-}									\
-static __always_inline							\
-c_type atomic##prefix##_fetch_##op(atomic##prefix##_t *v)		\
-{									\
-	return atomic##prefix##_fetch_##func_op(I, v);			\
-}
-
-#define ATOMIC_OP_RETURN(op, asm_op, c_op, I, c_type, prefix)		\
-static __always_inline							\
-c_type atomic##prefix##_##op##_return_relaxed(atomic##prefix##_t *v)	\
-{									\
-	return atomic##prefix##_fetch_##op##_relaxed(v) c_op I;	\
-}									\
-static __always_inline							\
-c_type atomic##prefix##_##op##_return(atomic##prefix##_t *v)		\
-{									\
-	return atomic##prefix##_fetch_##op(v) c_op I;			\
-}
-
-#ifdef CONFIG_GENERIC_ATOMIC64
-#define ATOMIC_OPS(op, asm_op, c_op, I)					\
-	ATOMIC_OP(       op, asm_op,       I, int,   )			\
-	ATOMIC_FETCH_OP( op, asm_op,       I, int,   )			\
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, int,   )
-#else
-#define ATOMIC_OPS(op, asm_op, c_op, I)					\
-	ATOMIC_OP(       op, asm_op,       I, int,   )			\
-	ATOMIC_FETCH_OP( op, asm_op,       I, int,   )			\
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, int,   )			\
-	ATOMIC_OP(       op, asm_op,       I, long, 64)			\
-	ATOMIC_FETCH_OP( op, asm_op,       I, long, 64)			\
-	ATOMIC_OP_RETURN(op, asm_op, c_op, I, long, 64)
-#endif
-
-ATOMIC_OPS(inc, add, +,  1)
-ATOMIC_OPS(dec, add, +, -1)
-
-#define atomic_inc_return_relaxed	atomic_inc_return_relaxed
-#define atomic_dec_return_relaxed	atomic_dec_return_relaxed
-#define atomic_inc_return		atomic_inc_return
-#define atomic_dec_return		atomic_dec_return
-
-#define atomic_fetch_inc_relaxed	atomic_fetch_inc_relaxed
-#define atomic_fetch_dec_relaxed	atomic_fetch_dec_relaxed
-#define atomic_fetch_inc		atomic_fetch_inc
-#define atomic_fetch_dec		atomic_fetch_dec
-
-#ifndef CONFIG_GENERIC_ATOMIC64
-#define atomic64_inc_return_relaxed	atomic64_inc_return_relaxed
-#define atomic64_dec_return_relaxed	atomic64_dec_return_relaxed
-#define atomic64_inc_return		atomic64_inc_return
-#define atomic64_dec_return		atomic64_dec_return
-
-#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc_relaxed
-#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec_relaxed
-#define atomic64_fetch_inc		atomic64_fetch_inc
-#define atomic64_fetch_dec		atomic64_fetch_dec
-#endif
-
-#undef ATOMIC_OPS
-#undef ATOMIC_OP
-#undef ATOMIC_FETCH_OP
-#undef ATOMIC_OP_RETURN
-
 /* This is required to provide a full barrier on success. */
 static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index 7f5fbd595f01..376e64af951f 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -55,13 +55,9 @@ static inline void atomic_add(int i, atomic_t *v)
 	__atomic_add(i, &v->counter);
 }
 
-#define atomic_inc(_v)			atomic_add(1, _v)
-#define atomic_inc_return(_v)		atomic_add_return(1, _v)
 #define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
 #define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
 #define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
-#define atomic_dec(_v)			atomic_sub(1, _v)
-#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
 
 #define ATOMIC_OPS(op)							\
 static inline void atomic_##op(int i, atomic_t *v)			\
@@ -166,12 +162,8 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }
 
-#define atomic64_inc(_v)		atomic64_add(1, _v)
-#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
 #define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long)(_i), _v)
 #define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long)(_i), _v)
 #define atomic64_sub(_i, _v)		atomic64_add(-(long)(_i), _v)
-#define atomic64_dec(_v)		atomic64_sub(1, _v)
-#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
 
 #endif /* __ARCH_S390_ATOMIC__ */
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index d438494fa112..f37b95a80232 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -32,12 +32,6 @@
 #include <asm/atomic-irq.h>
 #endif
 
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-
-#define atomic_inc(v)		atomic_add(1, (v))
-#define atomic_dec(v)		atomic_sub(1, (v))
-
 #define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
 #define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))
 
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index 3a26573790c6..94c930f0bc62 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -38,8 +38,6 @@ void atomic_set(atomic_t *, int);
 
 #define atomic_add(i, v)	((void)atomic_add_return( (int)(i), (v)))
 #define atomic_sub(i, v)	((void)atomic_add_return(-(int)(i), (v)))
-#define atomic_inc(v)		((void)atomic_add_return( 1, (v)))
-#define atomic_dec(v)		((void)atomic_add_return( -1, (v)))
 
 #define atomic_and(i, v)	((void)atomic_fetch_and((i), (v)))
 #define atomic_or(i, v)		((void)atomic_fetch_or((i), (v)))
@@ -48,7 +46,4 @@ void atomic_set(atomic_t *, int);
 #define atomic_sub_return(i, v)	(atomic_add_return(-(int)(i), (v)))
 #define atomic_fetch_sub(i, v)	(atomic_fetch_add (-(int)(i), (v)))
 
-#define atomic_inc_return(v)	(atomic_add_return( 1, (v)))
-#define atomic_dec_return(v)	(atomic_add_return( -1, (v)))
-
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index 634508282aea..304865c7cdbb 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -50,18 +50,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_dec_return(v)	atomic_sub_return(1, v)
-#define atomic64_dec_return(v)	atomic64_sub_return(1, v)
-
-#define atomic_inc_return(v)	atomic_add_return(1, v)
-#define atomic64_inc_return(v)	atomic64_add_return(1, v)
-
-#define atomic_inc(v)		atomic_add(1, v)
-#define atomic64_inc(v)		atomic64_add(1, v)
-
-#define atomic_dec(v)		atomic_sub(1, v)
-#define atomic64_dec(v)		atomic64_sub(1, v)
-
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 
 static inline int atomic_xchg(atomic_t *v, int new)
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 73bda4abe180..823fd2f320cf 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -92,6 +92,7 @@ static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic_inc arch_atomic_inc
 static __always_inline void arch_atomic_inc(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "incl %0"
@@ -104,6 +105,7 @@ static __always_inline void arch_atomic_inc(atomic_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic_dec arch_atomic_dec
 static __always_inline void arch_atomic_dec(atomic_t *v)
 {
 	asm volatile(LOCK_PREFIX "decl %0"
@@ -177,9 +179,6 @@ static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
 	return arch_atomic_add_return(-i, v);
 }
 
-#define arch_atomic_inc_return(v)	(arch_atomic_add_return(1, v))
-#define arch_atomic_dec_return(v)	(arch_atomic_sub_return(1, v))
-
 static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
 {
 	return xadd(&v->counter, i);
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index a26810d005e0..472c7af0ed48 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -158,6 +158,7 @@ static inline long long arch_atomic64_inc_return(atomic64_t *v)
 		     "S" (v) : "memory", "ecx");
 	return a;
 }
+#define arch_atomic64_inc_return arch_atomic64_inc_return
 
 static inline long long arch_atomic64_dec_return(atomic64_t *v)
 {
@@ -166,6 +167,7 @@ static inline long long arch_atomic64_dec_return(atomic64_t *v)
 		     "S" (v) : "memory", "ecx");
 	return a;
 }
+#define arch_atomic64_dec_return arch_atomic64_dec_return
 
 /**
  * arch_atomic64_add - add integer to atomic64 variable
@@ -203,6 +205,7 @@ static inline long long arch_atomic64_sub(long long i, atomic64_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic64_inc arch_atomic64_inc
 static inline void arch_atomic64_inc(atomic64_t *v)
 {
 	__alternative_atomic64(inc, inc_return, /* no output */,
@@ -215,6 +218,7 @@ static inline void arch_atomic64_inc(atomic64_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic64_dec arch_atomic64_dec
 static inline void arch_atomic64_dec(atomic64_t *v)
 {
 	__alternative_atomic64(dec, dec_return, /* no output */,
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 6a65228a3db6..1b282272a801 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -83,6 +83,7 @@ static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
  *
  * Atomically increments @v by 1.
  */
+#define arch_atomic64_inc arch_atomic64_inc
 static __always_inline void arch_atomic64_inc(atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "incq %0"
@@ -96,6 +97,7 @@ static __always_inline void arch_atomic64_inc(atomic64_t *v)
  *
  * Atomically decrements @v by 1.
  */
+#define arch_atomic64_dec arch_atomic64_dec
 static __always_inline void arch_atomic64_dec(atomic64_t *v)
 {
 	asm volatile(LOCK_PREFIX "decq %0"
@@ -173,9 +175,6 @@ static inline long arch_atomic64_fetch_sub(long i, atomic64_t *v)
 	return xadd(&v->counter, -i);
 }
 
-#define arch_atomic64_inc_return(v)	(arch_atomic64_add_return(1, (v)))
-#define arch_atomic64_dec_return(v)	(arch_atomic64_sub_return(1, (v)))
-
 static inline long arch_atomic64_cmpxchg(atomic64_t *v, long old, long new)
 {
 	return arch_cmpxchg(&v->counter, old, new);
diff --git a/arch/xtensa/include/asm/atomic.h b/arch/xtensa/include/asm/atomic.h
index 332ae4eca737..7de0149e1cf7 100644
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -197,38 +197,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-/**
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc(v) atomic_add(1,(v))
-
-/**
- * atomic_inc - increment atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1.
- */
-#define atomic_inc_return(v) atomic_add_return(1,(v))
-
-/**
- * atomic_dec - decrement atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec(v) atomic_sub(1,(v))
-
-/**
- * atomic_dec_return - decrement atomic variable
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1.
- */
-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index 6b64c200de73..12f9634750d7 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -102,29 +102,41 @@ static __always_inline s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u
 }
 #endif
 
+#ifdef arch_atomic_inc
+#define atomic_inc atomic_inc
 static __always_inline void atomic_inc(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic_inc(v);
 }
+#endif
 
+#ifdef arch_atomic64_inc
+#define atomic64_inc atomic64_inc
 static __always_inline void atomic64_inc(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_inc(v);
 }
+#endif
 
+#ifdef arch_atomic_dec
+#define atomic_dec atomic_dec
 static __always_inline void atomic_dec(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic_dec(v);
 }
+#endif
 
+#ifdef arch_atomic64_dec
+#define atomic64_dec atomic64_dec
 static __always_inline void atomic64_dec(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	arch_atomic64_dec(v);
 }
+#endif
 
 static __always_inline void atomic_add(int i, atomic_t *v)
 {
@@ -186,29 +198,41 @@ static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
 	arch_atomic64_xor(i, v);
 }
 
+#ifdef arch_atomic_inc_return
+#define atomic_inc_return atomic_inc_return
 static __always_inline int atomic_inc_return(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_inc_return(v);
 }
+#endif
 
+#ifdef arch_atomic64_inc_return
+#define atomic64_inc_return atomic64_inc_return
 static __always_inline s64 atomic64_inc_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_return(v);
 }
+#endif
 
+#ifdef arch_atomic_dec_return
+#define atomic_dec_return atomic_dec_return
 static __always_inline int atomic_dec_return(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_dec_return(v);
}
+#endif
 
+#ifdef arch_atomic64_dec_return
+#define atomic64_dec_return atomic64_dec_return
 static __always_inline s64 atomic64_dec_return(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_return(v);
 }
+#endif
 
 #ifdef arch_atomic64_inc_not_zero
 #define atomic64_inc_not_zero atomic64_inc_not_zero
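
The instrumented wrappers above have to mirror that optionality: each wrapper is compiled only when the corresponding arch_ op exists, and it republishes the same-named macro so the fallbacks in <linux/atomic.h> stay disabled. A runnable toy version of that three-layer handshake follows; the plain int counter and the comment standing in for the kasan_check_write() call are illustrative assumptions, not kernel code.

    #include <stdio.h>

    /* Layer 1 - "arch" header: op plus same-named detection macro. */
    #define arch_atomic_inc arch_atomic_inc
    static void arch_atomic_inc(int *v) { ++*v; }

    /* Layer 2 - instrumented header: wrap only what the arch provides,
     * and republish the macro under the un-prefixed name. */
    #ifdef arch_atomic_inc
    #define atomic_inc atomic_inc
    static void atomic_inc(int *v)
    {
        /* kasan_check_write(v, sizeof(*v)) would go here in the kernel */
        arch_atomic_inc(v);
    }
    #endif

    /* Layer 3 - generic header: the fallback stays dormant because
     * layer 2 already defined atomic_inc. */
    #ifndef atomic_inc
    #error "fallback would be selected here"
    #endif

    int main(void)
    {
        int v = 41;
        atomic_inc(&v);
        printf("%d\n", v);   /* 42, via the instrumented wrapper */
        return 0;
    }
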
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 40cab858aaaa..13324aa828eb 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -196,19 +196,6 @@ static inline void atomic_sub(int i, atomic_t *v)
 	atomic_sub_return(i, v);
 }
 
-static inline void atomic_inc(atomic_t *v)
-{
-	atomic_add_return(1, v);
-}
-
-static inline void atomic_dec(atomic_t *v)
-{
-	atomic_sub_return(1, v);
-}
-
-#define atomic_dec_return(v)	atomic_sub_return(1, (v))
-#define atomic_inc_return(v)	atomic_add_return(1, (v))
-
 #define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
 
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
index d3827ab97aa4..242b79ae0b57 100644
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -56,9 +56,4 @@ extern long long atomic64_xchg(atomic64_t *v, long long new);
 extern long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u);
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_inc(v)		atomic64_add(1LL, (v))
-#define atomic64_inc_return(v)	atomic64_add_return(1LL, (v))
-#define atomic64_dec(v)		atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v)	atomic64_sub_return(1LL, (v))
-
 #endif /* _ASM_GENERIC_ATOMIC64_H */
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 3ee8da9023cd..24f345df7ba6 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -97,11 +97,23 @@
 #endif
 #endif /* atomic_add_return_relaxed */
 
+#ifndef atomic_inc
+#define atomic_inc(v)			atomic_add(1, (v))
+#endif
+
 /* atomic_inc_return_relaxed */
 #ifndef atomic_inc_return_relaxed
+
+#ifndef atomic_inc_return
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
+#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
+#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
+#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
+#else /* atomic_inc_return */
 #define atomic_inc_return_relaxed	atomic_inc_return
 #define atomic_inc_return_acquire	atomic_inc_return
 #define atomic_inc_return_release	atomic_inc_return
+#endif /* atomic_inc_return */
 
 #else /* atomic_inc_return_relaxed */
 
@@ -145,11 +157,23 @@
 #endif
 #endif /* atomic_sub_return_relaxed */
 
+#ifndef atomic_dec
+#define atomic_dec(v)			atomic_sub(1, (v))
+#endif
+
 /* atomic_dec_return_relaxed */
 #ifndef atomic_dec_return_relaxed
+
+#ifndef atomic_dec_return
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
+#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
+#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
+#else /* atomic_dec_return */
 #define atomic_dec_return_relaxed	atomic_dec_return
 #define atomic_dec_return_acquire	atomic_dec_return
 #define atomic_dec_return_release	atomic_dec_return
+#endif /* atomic_dec_return */
 
 #else /* atomic_dec_return_relaxed */
 
@@ -748,11 +772,23 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_add_return_relaxed */
 
+#ifndef atomic64_inc
+#define atomic64_inc(v)			atomic64_add(1, (v))
+#endif
+
 /* atomic64_inc_return_relaxed */
 #ifndef atomic64_inc_return_relaxed
+
+#ifndef atomic64_inc_return
+#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
+#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
+#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
+#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
+#else /* atomic64_inc_return */
 #define atomic64_inc_return_relaxed	atomic64_inc_return
 #define atomic64_inc_return_acquire	atomic64_inc_return
 #define atomic64_inc_return_release	atomic64_inc_return
+#endif /* atomic64_inc_return */
 
 #else /* atomic64_inc_return_relaxed */
 
@@ -797,11 +833,23 @@ static inline int atomic_dec_if_positive(atomic_t *v)
 #endif
 #endif /* atomic64_sub_return_relaxed */
 
+#ifndef atomic64_dec
+#define atomic64_dec(v)			atomic64_sub(1, (v))
+#endif
+
 /* atomic64_dec_return_relaxed */
 #ifndef atomic64_dec_return_relaxed
+
+#ifndef atomic64_dec_return
+#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
+#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
+#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
+#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
+#else /* atomic64_dec_return */
 #define atomic64_dec_return_relaxed	atomic64_dec_return
 #define atomic64_dec_return_acquire	atomic64_dec_return
 #define atomic64_dec_return_release	atomic64_dec_return
+#endif /* atomic64_dec_return */
 
 #else /* atomic64_dec_return_relaxed */
 
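
The nested #ifndef ladders in the four <linux/atomic.h> hunks above resolve each op in one of three ways: the arch defined nothing (every variant maps onto add/sub), the arch defined only the fully-ordered op (the _relaxed/_acquire/_release names alias it), or the arch defined a _relaxed op (the pre-existing #else branch, unchanged by this patch, builds the ordered forms from it). A minimal compilable sketch of the middle case; my_arch_inc_return is a hypothetical arch implementation, and the acquire/release aliases are elided.

    #include <stdio.h>

    /* The "arch" supplied only a fully-ordered atomic_inc_return. */
    static int my_arch_inc_return(int *v) { return ++*v; }
    #define atomic_inc_return(v) my_arch_inc_return(v)

    /* The <linux/atomic.h> ladder: the outer test asks "is there a
     * relaxed variant?", the inner test asks "is there the op at all?". */
    #ifndef atomic_inc_return_relaxed

    #ifndef atomic_inc_return
    #define atomic_inc_return(v)         atomic_add_return(1, (v))
    #define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
    #else /* atomic_inc_return */
    /* Op exists but has no relaxed form: alias the ordered one. */
    #define atomic_inc_return_relaxed    atomic_inc_return
    #endif /* atomic_inc_return */

    #endif /* atomic_inc_return_relaxed */

    int main(void)
    {
        int v = 0;
        printf("%d\n", atomic_inc_return_relaxed(&v));   /* prints 1 */
        return 0;
    }
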