author	Mark Rutland <mark.rutland@arm.com>	2018-06-21 08:13:18 -0400
committer	Ingo Molnar <mingo@kernel.org>	2018-06-21 08:25:24 -0400
commit	18cc1814d4e7560412c9c8c6d28f9d6782c8b402 (patch)
tree	7295996dcedd3f3d90da1e570a1b459499a0d54a
parent	356701329fb391184618eda7b7fb68cb35271506 (diff)
atomics/treewide: Make test ops optional
Some of the atomics return the result of a test applied after the atomic
operation, and almost all architectures implement these as trivial
wrappers around the underlying atomic. Specifically:

* <atomic>_inc_and_test(v) is (<atomic>_inc_return(v) == 0)
* <atomic>_dec_and_test(v) is (<atomic>_dec_return(v) == 0)
* <atomic>_sub_and_test(i, v) is (<atomic>_sub_return(i, v) == 0)
* <atomic>_add_negative(i, v) is (<atomic>_add_return(i, v) < 0)

Rather than have these definitions duplicated in all architectures, with
minor inconsistencies in formatting and documentation, let's make these
operations optional, with default fallbacks as above. Implementations
must now provide a preprocessor symbol.

The instrumented atomics are updated accordingly.

Both x86 and m68k have custom implementations, which are left as-is,
given preprocessor symbols to avoid being overridden.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-16-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
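[Editor's note: to make the "optional op with default fallback" mechanism concrete, here is a self-contained sketch. It is illustrative only: atomic_t is simplified and the core op is stubbed with the GCC __atomic_add_fetch() builtin; the real definitions live in the per-arch <asm/atomic.h> headers and include/linux/atomic.h.]

	#include <stdbool.h>

	typedef struct { int counter; } atomic_t;

	/* Stand-in for an architecture's core op (sketch only). */
	static inline int atomic_inc_return(atomic_t *v)
	{
		return __atomic_add_fetch(&v->counter, 1, __ATOMIC_SEQ_CST);
	}

	/*
	 * An architecture with its own atomic_inc_and_test() defines the
	 * function and a same-named preprocessor symbol:
	 */
	static inline bool atomic_inc_and_test(atomic_t *v)
	{
		return atomic_inc_return(v) == 0;	/* arch fast path would go here */
	}
	#define atomic_inc_and_test atomic_inc_and_test

	/*
	 * The generic header, included afterwards, supplies the default
	 * only when no such symbol is visible:
	 */
	#ifndef atomic_inc_and_test
	static inline bool atomic_inc_and_test(atomic_t *v)
	{
		return atomic_inc_return(v) == 0;
	}
	#endif

Both halves can coexist in one translation unit: the object-like macro makes the #ifndef fail, so only the architecture's version is compiled.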
-rw-r--r--	arch/alpha/include/asm/atomic.h	12
-rw-r--r--	arch/arc/include/asm/atomic.h	10
-rw-r--r--	arch/arm/include/asm/atomic.h	9
-rw-r--r--	arch/arm64/include/asm/atomic.h	8
-rw-r--r--	arch/h8300/include/asm/atomic.h	5
-rw-r--r--	arch/hexagon/include/asm/atomic.h	5
-rw-r--r--	arch/ia64/include/asm/atomic.h	23
-rw-r--r--	arch/m68k/include/asm/atomic.h	4
-rw-r--r--	arch/mips/include/asm/atomic.h	84
-rw-r--r--	arch/parisc/include/asm/atomic.h	22
-rw-r--r--	arch/powerpc/include/asm/atomic.h	30
-rw-r--r--	arch/riscv/include/asm/atomic.h	46
-rw-r--r--	arch/s390/include/asm/atomic.h	8
-rw-r--r--	arch/sh/include/asm/atomic.h	4
-rw-r--r--	arch/sparc/include/asm/atomic_32.h	15
-rw-r--r--	arch/sparc/include/asm/atomic_64.h	20
-rw-r--r--	arch/x86/include/asm/atomic.h	4
-rw-r--r--	arch/x86/include/asm/atomic64_32.h	54
-rw-r--r--	arch/x86/include/asm/atomic64_64.h	4
-rw-r--r--	arch/xtensa/include/asm/atomic.h	42
-rw-r--r--	include/asm-generic/atomic-instrumented.h	24
-rw-r--r--	include/asm-generic/atomic.h	9
-rw-r--r--	include/asm-generic/atomic64.h	4
-rw-r--r--	include/linux/atomic.h	124
24 files changed, 160 insertions, 410 deletions
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index cc486dbb3837..25f8693c5a42 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -297,24 +297,12 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return old - 1;
 }
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
-
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic64_dec_return(v) atomic64_sub_return(1,(v))
 
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 #define atomic64_inc_return(v) atomic64_add_return(1,(v))
 
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
-
-#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
-#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)
-
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
-
 #define atomic_inc(v) atomic_add(1,(v))
 #define atomic64_inc(v) atomic64_add(1,(v))
 
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 4917ffa61579..4222e726f84c 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -311,14 +311,8 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #define atomic_inc(v) atomic_add(1, v)
 #define atomic_dec(v) atomic_sub(1, v)
 
-#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
-#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
-
 
 #ifdef CONFIG_GENERIC_ATOMIC64
 
@@ -566,14 +560,10 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v) atomic64_add(1LL, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
 #define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
-#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
 
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 852e1fee72b0..35fb7f504daa 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -248,13 +248,8 @@ ATOMIC_OPS(xor, ^=, eor)
 #define atomic_inc(v) atomic_add(1, v)
 #define atomic_dec(v) atomic_sub(1, v)
 
-#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
 #define atomic_inc_return_relaxed(v) (atomic_add_return_relaxed(1, v))
 #define atomic_dec_return_relaxed(v) (atomic_sub_return_relaxed(1, v))
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
-#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
 
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
@@ -517,14 +512,10 @@ static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
 }
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v) atomic64_add(1LL, (v))
 #define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1LL, (v))
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
 #define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1LL, (v))
-#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
 
 #endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 82db0e4febd4..edbe53fa3106 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -110,10 +110,6 @@
 
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
-#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_add_negative(i, v) (atomic_add_return((i), (v)) < 0)
 #define atomic_andnot atomic_andnot
 
 /*
@@ -185,10 +181,6 @@
 
 #define atomic64_inc(v) atomic64_add(1, (v))
 #define atomic64_dec(v) atomic64_sub(1, (v))
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-#define atomic64_dec_and_test(v) (atomic64_dec_return(v) == 0)
-#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
-#define atomic64_add_negative(i, v) (atomic64_add_return((i), (v)) < 0)
 #define atomic64_andnot atomic64_andnot
 
 #endif
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index 710364946308..8977b5157c8f 100644
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -69,17 +69,12 @@ ATOMIC_OPS(sub, -=)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
 #define atomic_inc_return(v) atomic_add_return(1, v)
 #define atomic_dec_return(v) atomic_sub_return(1, v)
 
 #define atomic_inc(v) (void)atomic_inc_return(v)
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
 
 #define atomic_dec(v) (void)atomic_dec_return(v)
-#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
 
 static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index 86c67e9adbfa..31638f511674 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -201,11 +201,6 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
 
-#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, (v)) == 0)
-#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
-
 #define atomic_inc_return(v) (atomic_add_return(1, v))
 #define atomic_dec_return(v) (atomic_sub_return(1, v))
 
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 0f80a3eafaba..e4143c462e65 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -231,34 +231,11 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }
 
-/*
- * Atomically add I to V and return TRUE if the resulting value is
- * negative.
- */
-static __inline__ int
-atomic_add_negative (int i, atomic_t *v)
-{
-	return atomic_add_return(i, v) < 0;
-}
-
-static __inline__ long
-atomic64_add_negative (__s64 i, atomic64_t *v)
-{
-	return atomic64_add_return(i, v) < 0;
-}
-
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
-#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
-#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
-#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)
-
 #define atomic_add(i,v) (void)atomic_add_return((i), (v))
 #define atomic_sub(i,v) (void)atomic_sub_return((i), (v))
 #define atomic_inc(v) atomic_add(1, (v))
diff --git a/arch/m68k/include/asm/atomic.h b/arch/m68k/include/asm/atomic.h
index 596882cda224..9df09c876fa2 100644
--- a/arch/m68k/include/asm/atomic.h
+++ b/arch/m68k/include/asm/atomic.h
@@ -138,6 +138,7 @@ static inline int atomic_dec_and_test(atomic_t *v)
 	__asm__ __volatile__("subql #1,%1; seq %0" : "=d" (c), "+m" (*v));
 	return c != 0;
 }
+#define atomic_dec_and_test atomic_dec_and_test
 
 static inline int atomic_dec_and_test_lt(atomic_t *v)
 {
@@ -155,6 +156,7 @@ static inline int atomic_inc_and_test(atomic_t *v)
 	__asm__ __volatile__("addql #1,%1; seq %0" : "=d" (c), "+m" (*v));
 	return c != 0;
 }
+#define atomic_inc_and_test atomic_inc_and_test
 
 #ifdef CONFIG_RMW_INSNS
 
@@ -201,6 +203,7 @@ static inline int atomic_sub_and_test(int i, atomic_t *v)
 		: ASM_DI (i));
 	return c != 0;
 }
+#define atomic_sub_and_test atomic_sub_and_test
 
 static inline int atomic_add_negative(int i, atomic_t *v)
 {
@@ -210,5 +213,6 @@ static inline int atomic_add_negative(int i, atomic_t *v)
 		: ASM_DI (i));
 	return c != 0;
 }
+#define atomic_add_negative atomic_add_negative
 
 #endif /* __ARCH_M68K_ATOMIC __ */
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index d42b27df1548..fd3008ae164c 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -278,37 +278,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 
 /*
- * atomic_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
-
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-/*
- * atomic_dec_and_test - decrement by 1 and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-
-/*
  * atomic_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
  */
@@ -330,17 +299,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  */
 #define atomic_dec(v) atomic_sub(1, (v))
 
-/*
- * atomic_add_negative - add and test if negative
- * @v: pointer of type atomic_t
- * @i: integer value to add
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
-
 #ifdef CONFIG_64BIT
 
 #define ATOMIC64_INIT(i) { (i) }
@@ -600,37 +558,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
 
 /*
- * atomic64_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic64_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
-
-/*
- * atomic64_inc_and_test - increment and test
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-
-/*
- * atomic64_dec_and_test - decrement by 1 and test
- * @v: pointer of type atomic64_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
-
-/*
  * atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic64_t
  */
@@ -652,17 +579,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
  */
 #define atomic64_dec(v) atomic64_sub(1, (v))
 
-/*
- * atomic64_add_negative - add and test if negative
- * @v: pointer of type atomic64_t
- * @i: integer value to add
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
-
 #endif /* CONFIG_64BIT */
 
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index f53ba2d6ff67..f85844ff6336 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -142,22 +142,6 @@ ATOMIC_OPS(xor, ^=)
 #define atomic_inc_return(v) (atomic_add_return( 1,(v)))
 #define atomic_dec_return(v) (atomic_add_return( -1,(v)))
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
-
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i),(v)) == 0)
-
 #define ATOMIC_INIT(i) { (i) }
 
 #ifdef CONFIG_64BIT
@@ -246,12 +230,6 @@ atomic64_read(const atomic64_t *v)
 #define atomic64_inc_return(v) (atomic64_add_return( 1,(v)))
 #define atomic64_dec_return(v) (atomic64_add_return( -1,(v)))
 
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
-
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-#define atomic64_dec_and_test(v) (atomic64_dec_return(v) == 0)
-#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i),(v)) == 0)
-
 /* exported interface */
 #define atomic64_cmpxchg(v, o, n) \
 	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 233dbf31911c..5d76f05d2be3 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -129,8 +129,6 @@ ATOMIC_OPS(xor, xor)
 #undef ATOMIC_OP_RETURN_RELAXED
 #undef ATOMIC_OP
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-
 static __inline__ void atomic_inc(atomic_t *v)
 {
 	int t;
@@ -163,16 +161,6 @@ static __inline__ int atomic_inc_return_relaxed(atomic_t *v)
 	return t;
 }
 
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
 static __inline__ void atomic_dec(atomic_t *v)
 {
 	int t;
@@ -281,9 +269,6 @@ static __inline__ int atomic_inc_not_zero(atomic_t *v)
 }
 #define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
 
-#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
-
 /*
  * Atomically test *v and decrement if it is greater than 0.
  * The function returns the old value of *v minus 1, even if
@@ -413,8 +398,6 @@ ATOMIC64_OPS(xor, xor)
 #undef ATOMIC64_OP_RETURN_RELAXED
 #undef ATOMIC64_OP
 
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
-
 static __inline__ void atomic64_inc(atomic64_t *v)
 {
 	long t;
@@ -445,16 +428,6 @@ static __inline__ long atomic64_inc_return_relaxed(atomic64_t *v)
 	return t;
 }
 
-/*
- * atomic64_inc_and_test - increment and test
- * @v: pointer of type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-
 static __inline__ void atomic64_dec(atomic64_t *v)
 {
 	long t;
@@ -488,9 +461,6 @@ static __inline__ long atomic64_dec_return_relaxed(atomic64_t *v)
 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
 
-#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
-#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
-
 /*
  * Atomically test *v and decrement if it is greater than 0.
  * The function returns the old value of *v minus 1.
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
index d959bbaaad41..68eef0a805ca 100644
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -209,36 +209,6 @@ ATOMIC_OPS(xor, xor, i)
 #undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 
-/*
- * The extra atomic operations that are constructed from one of the core
- * AMO-based operations above (aside from sub, which is easier to fit above).
- * These are required to perform a full barrier, but they're OK this way
- * because atomic_*_return is also required to perform a full barrier.
- *
- */
-#define ATOMIC_OP(op, func_op, comp_op, I, c_type, prefix)	\
-static __always_inline	\
-bool atomic##prefix##_##op(c_type i, atomic##prefix##_t *v)	\
-{	\
-	return atomic##prefix##_##func_op##_return(i, v) comp_op I;	\
-}
-
-#ifdef CONFIG_GENERIC_ATOMIC64
-#define ATOMIC_OPS(op, func_op, comp_op, I)	\
-	ATOMIC_OP(op, func_op, comp_op, I, int, )
-#else
-#define ATOMIC_OPS(op, func_op, comp_op, I)	\
-	ATOMIC_OP(op, func_op, comp_op, I, int, )	\
-	ATOMIC_OP(op, func_op, comp_op, I, long, 64)
-#endif
-
-ATOMIC_OPS(add_and_test, add, ==, 0)
-ATOMIC_OPS(sub_and_test, sub, ==, 0)
-ATOMIC_OPS(add_negative, add, <, 0)
-
-#undef ATOMIC_OP
-#undef ATOMIC_OPS
-
 #define ATOMIC_OP(op, func_op, I, c_type, prefix)	\
 static __always_inline	\
 void atomic##prefix##_##op(atomic##prefix##_t *v)	\
@@ -315,22 +285,6 @@ ATOMIC_OPS(dec, add, +, -1)
 #undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 
-#define ATOMIC_OP(op, func_op, comp_op, I, prefix)	\
-static __always_inline	\
-bool atomic##prefix##_##op(atomic##prefix##_t *v)	\
-{	\
-	return atomic##prefix##_##func_op##_return(v) comp_op I;	\
-}
-
-ATOMIC_OP(inc_and_test, inc, ==, 0, )
-ATOMIC_OP(dec_and_test, dec, ==, 0, )
-#ifndef CONFIG_GENERIC_ATOMIC64
-ATOMIC_OP(inc_and_test, inc, ==, 0, 64)
-ATOMIC_OP(dec_and_test, dec, ==, 0, 64)
-#endif
-
-#undef ATOMIC_OP
-
 /* This is required to provide a full barrier on success. */
 static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index eb9329741bad..7f5fbd595f01 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -55,17 +55,13 @@ static inline void atomic_add(int i, atomic_t *v)
 	__atomic_add(i, &v->counter);
 }
 
-#define atomic_add_negative(_i, _v) (atomic_add_return(_i, _v) < 0)
 #define atomic_inc(_v) atomic_add(1, _v)
 #define atomic_inc_return(_v) atomic_add_return(1, _v)
-#define atomic_inc_and_test(_v) (atomic_add_return(1, _v) == 0)
 #define atomic_sub(_i, _v) atomic_add(-(int)(_i), _v)
 #define atomic_sub_return(_i, _v) atomic_add_return(-(int)(_i), _v)
 #define atomic_fetch_sub(_i, _v) atomic_fetch_add(-(int)(_i), _v)
-#define atomic_sub_and_test(_i, _v) (atomic_sub_return(_i, _v) == 0)
 #define atomic_dec(_v) atomic_sub(1, _v)
 #define atomic_dec_return(_v) atomic_sub_return(1, _v)
-#define atomic_dec_and_test(_v) (atomic_sub_return(1, _v) == 0)
 
 #define ATOMIC_OPS(op) \
 static inline void atomic_##op(int i, atomic_t *v) \
@@ -170,16 +166,12 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }
 
-#define atomic64_add_negative(_i, _v) (atomic64_add_return(_i, _v) < 0)
 #define atomic64_inc(_v) atomic64_add(1, _v)
 #define atomic64_inc_return(_v) atomic64_add_return(1, _v)
-#define atomic64_inc_and_test(_v) (atomic64_add_return(1, _v) == 0)
 #define atomic64_sub_return(_i, _v) atomic64_add_return(-(long)(_i), _v)
 #define atomic64_fetch_sub(_i, _v) atomic64_fetch_add(-(long)(_i), _v)
 #define atomic64_sub(_i, _v) atomic64_add(-(long)(_i), _v)
-#define atomic64_sub_and_test(_i, _v) (atomic64_sub_return(_i, _v) == 0)
 #define atomic64_dec(_v) atomic64_sub(1, _v)
 #define atomic64_dec_return(_v) atomic64_sub_return(1, _v)
-#define atomic64_dec_and_test(_v) (atomic64_sub_return(1, _v) == 0)
 
 #endif /* __ARCH_S390_ATOMIC__ */
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index 422fac764ca1..d438494fa112 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -32,12 +32,8 @@
 #include <asm/atomic-irq.h>
 #endif
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
 
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index 9d7a15acc0c5..3a26573790c6 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -51,19 +51,4 @@ void atomic_set(atomic_t *, int);
 #define atomic_inc_return(v) (atomic_add_return( 1, (v)))
 #define atomic_dec_return(v) (atomic_add_return( -1, (v)))
 
-#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
-
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index 458783e99997..634508282aea 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -56,32 +56,12 @@ ATOMIC_OPS(xor)
 #define atomic_inc_return(v) atomic_add_return(1, v)
 #define atomic64_inc_return(v) atomic64_add_return(1, v)
 
-/*
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-#define atomic64_sub_and_test(i, v) (atomic64_sub_return(i, v) == 0)
-
-#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
-#define atomic64_dec_and_test(v) (atomic64_sub_return(1, v) == 0)
-
 #define atomic_inc(v) atomic_add(1, v)
 #define atomic64_inc(v) atomic64_add(1, v)
 
 #define atomic_dec(v) atomic_sub(1, v)
 #define atomic64_dec(v) atomic64_sub(1, v)
 
-#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)
-#define atomic64_add_negative(i, v) (atomic64_add_return(i, v) < 0)
-
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 
 static inline int atomic_xchg(atomic_t *v, int new)
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 616327ac9d39..73bda4abe180 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -80,6 +80,7 @@ static __always_inline void arch_atomic_sub(int i, atomic_t *v)
  * true if the result is zero, or false for all
  * other cases.
  */
+#define arch_atomic_sub_and_test arch_atomic_sub_and_test
 static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
@@ -117,6 +118,7 @@ static __always_inline void arch_atomic_dec(atomic_t *v)
  * returns true if the result is 0, or false for all other
  * cases.
  */
+#define arch_atomic_dec_and_test arch_atomic_dec_and_test
 static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
@@ -130,6 +132,7 @@ static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
  * and returns true if the result is zero, or false for all
  * other cases.
  */
+#define arch_atomic_inc_and_test arch_atomic_inc_and_test
 static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
@@ -144,6 +147,7 @@ static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
+#define arch_atomic_add_negative arch_atomic_add_negative
 static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
diff --git a/arch/x86/include/asm/atomic64_32.h b/arch/x86/include/asm/atomic64_32.h
index 2a33cc17801b..a26810d005e0 100644
--- a/arch/x86/include/asm/atomic64_32.h
+++ b/arch/x86/include/asm/atomic64_32.h
@@ -198,20 +198,6 @@ static inline long long arch_atomic64_sub(long long i, atomic64_t *v)
 }
 
 /**
- * arch_atomic64_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer to type atomic64_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-static inline int arch_atomic64_sub_and_test(long long i, atomic64_t *v)
-{
-	return arch_atomic64_sub_return(i, v) == 0;
-}
-
-/**
  * arch_atomic64_inc - increment atomic64 variable
  * @v: pointer to type atomic64_t
  *
@@ -236,46 +222,6 @@ static inline void arch_atomic64_dec(atomic64_t *v)
 }
 
 /**
- * arch_atomic64_dec_and_test - decrement and test
- * @v: pointer to type atomic64_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-static inline int arch_atomic64_dec_and_test(atomic64_t *v)
-{
-	return arch_atomic64_dec_return(v) == 0;
-}
-
-/**
- * atomic64_inc_and_test - increment and test
- * @v: pointer to type atomic64_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-static inline int arch_atomic64_inc_and_test(atomic64_t *v)
-{
-	return arch_atomic64_inc_return(v) == 0;
-}
-
-/**
- * arch_atomic64_add_negative - add and test if negative
- * @i: integer value to add
- * @v: pointer to type atomic64_t
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-static inline int arch_atomic64_add_negative(long long i, atomic64_t *v)
-{
-	return arch_atomic64_add_return(i, v) < 0;
-}
-
-/**
  * arch_atomic64_add_unless - add unless the number is a given value
  * @v: pointer of type atomic64_t
  * @a: the amount to add to v...
diff --git a/arch/x86/include/asm/atomic64_64.h b/arch/x86/include/asm/atomic64_64.h
index 7e04b294e6eb..6a65228a3db6 100644
--- a/arch/x86/include/asm/atomic64_64.h
+++ b/arch/x86/include/asm/atomic64_64.h
@@ -71,6 +71,7 @@ static inline void arch_atomic64_sub(long i, atomic64_t *v)
  * true if the result is zero, or false for all
  * other cases.
  */
+#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
 static inline bool arch_atomic64_sub_and_test(long i, atomic64_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
@@ -110,6 +111,7 @@ static __always_inline void arch_atomic64_dec(atomic64_t *v)
  * returns true if the result is 0, or false for all other
  * cases.
  */
+#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
 static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
@@ -123,6 +125,7 @@ static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
  * and returns true if the result is zero, or false for all
  * other cases.
  */
+#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
 static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
 {
 	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
@@ -137,6 +140,7 @@ static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
+#define arch_atomic64_add_negative arch_atomic64_add_negative
 static inline bool arch_atomic64_add_negative(long i, atomic64_t *v)
 {
 	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
diff --git a/arch/xtensa/include/asm/atomic.h b/arch/xtensa/include/asm/atomic.h
index f4c9f82c40c6..332ae4eca737 100644
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -198,17 +198,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP
 
 /**
- * atomic_sub_and_test - subtract value from variable and test result
- * @i: integer value to subtract
- * @v: pointer of type atomic_t
- *
- * Atomically subtracts @i from @v and returns
- * true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i),(v)) == 0)
-
-/**
  * atomic_inc - increment atomic variable
  * @v: pointer of type atomic_t
  *
@@ -240,37 +229,6 @@ ATOMIC_OPS(xor)
  */
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 
-/**
- * atomic_dec_and_test - decrement and test
- * @v: pointer of type atomic_t
- *
- * Atomically decrements @v by 1 and
- * returns true if the result is 0, or false for all other
- * cases.
- */
-#define atomic_dec_and_test(v) (atomic_sub_return(1,(v)) == 0)
-
-/**
- * atomic_inc_and_test - increment and test
- * @v: pointer of type atomic_t
- *
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
- */
-#define atomic_inc_and_test(v) (atomic_add_return(1,(v)) == 0)
-
-/**
- * atomic_add_negative - add and test if negative
- * @v: pointer of type atomic_t
- * @i: integer value to add
- *
- * Atomically adds @i to @v and returns true
- * if the result is negative, or false when
- * result is greater than or equal to zero.
- */
-#define atomic_add_negative(i,v) (atomic_add_return((i),(v)) < 0)
-
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index 2b487f28ef35..6b64c200de73 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -225,29 +225,41 @@ static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
 	return arch_atomic64_dec_if_positive(v);
 }
 
+#ifdef arch_atomic_dec_and_test
+#define atomic_dec_and_test atomic_dec_and_test
 static __always_inline bool atomic_dec_and_test(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_dec_and_test(v);
 }
+#endif
 
+#ifdef arch_atomic64_dec_and_test
+#define atomic64_dec_and_test atomic64_dec_and_test
 static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_dec_and_test(v);
 }
+#endif
 
+#ifdef arch_atomic_inc_and_test
+#define atomic_inc_and_test atomic_inc_and_test
 static __always_inline bool atomic_inc_and_test(atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_inc_and_test(v);
 }
+#endif
 
+#ifdef arch_atomic64_inc_and_test
+#define atomic64_inc_and_test atomic64_inc_and_test
 static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_inc_and_test(v);
 }
+#endif
 
 static __always_inline int atomic_add_return(int i, atomic_t *v)
 {
@@ -333,29 +345,41 @@ static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
 	return arch_atomic64_fetch_xor(i, v);
 }
 
+#ifdef arch_atomic_sub_and_test
+#define atomic_sub_and_test atomic_sub_and_test
 static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_sub_and_test(i, v);
 }
+#endif
 
+#ifdef arch_atomic64_sub_and_test
+#define atomic64_sub_and_test atomic64_sub_and_test
 static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_sub_and_test(i, v);
 }
+#endif
 
+#ifdef arch_atomic_add_negative
+#define atomic_add_negative atomic_add_negative
 static __always_inline bool atomic_add_negative(int i, atomic_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic_add_negative(i, v);
 }
+#endif
 
+#ifdef arch_atomic64_add_negative
+#define atomic64_add_negative atomic64_add_negative
 static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
 {
 	kasan_check_write(v, sizeof(*v));
 	return arch_atomic64_add_negative(i, v);
 }
+#endif
 
 static __always_inline unsigned long
 cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new, int size)
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index 757e45821220..40cab858aaaa 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -186,11 +186,6 @@ ATOMIC_OP(xor, ^)
 
 #include <linux/irqflags.h>
 
-static inline int atomic_add_negative(int i, atomic_t *v)
-{
-	return atomic_add_return(i, v) < 0;
-}
-
 static inline void atomic_add(int i, atomic_t *v)
 {
 	atomic_add_return(i, v);
@@ -214,10 +209,6 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
 
-#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
 #define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
 
diff --git a/include/asm-generic/atomic64.h b/include/asm-generic/atomic64.h
index 49460107b29a..d3827ab97aa4 100644
--- a/include/asm-generic/atomic64.h
+++ b/include/asm-generic/atomic64.h
@@ -56,13 +56,9 @@ extern long long atomic64_xchg(atomic64_t *v, long long new);
 extern long long atomic64_fetch_add_unless(atomic64_t *v, long long a, long long u);
 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
 
-#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
 #define atomic64_inc(v) atomic64_add(1LL, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
-#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
-#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
 #define atomic64_dec(v) atomic64_sub(1LL, (v))
 #define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
-#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
 
 #endif /* _ASM_GENERIC_ATOMIC64_H */
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 530562ac7909..3ee8da9023cd 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -569,6 +569,68 @@ static inline bool atomic_add_unless(atomic_t *v, int a, int u)
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 #endif
 
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic_inc_and_test
+static inline bool atomic_inc_and_test(atomic_t *v)
+{
+	return atomic_inc_return(v) == 0;
+}
+#endif
+
+/**
+ * atomic_dec_and_test - decrement and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#ifndef atomic_dec_and_test
+static inline bool atomic_dec_and_test(atomic_t *v)
+{
+	return atomic_dec_return(v) == 0;
+}
+#endif
+
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic_sub_and_test
+static inline bool atomic_sub_and_test(int i, atomic_t *v)
+{
+	return atomic_sub_return(i, v) == 0;
+}
+#endif
+
+/**
+ * atomic_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#ifndef atomic_add_negative
+static inline bool atomic_add_negative(int i, atomic_t *v)
+{
+	return atomic_add_return(i, v) < 0;
+}
+#endif
+
 #ifndef atomic_andnot
 static inline void atomic_andnot(int i, atomic_t *v)
 {
@@ -1091,6 +1153,68 @@ static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 #endif
 
+/**
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic64_inc_and_test
+static inline bool atomic64_inc_and_test(atomic64_t *v)
+{
+	return atomic64_inc_return(v) == 0;
+}
+#endif
+
+/**
+ * atomic64_dec_and_test - decrement and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#ifndef atomic64_dec_and_test
+static inline bool atomic64_dec_and_test(atomic64_t *v)
+{
+	return atomic64_dec_return(v) == 0;
+}
+#endif
+
+/**
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#ifndef atomic64_sub_and_test
+static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
+{
+	return atomic64_sub_return(i, v) == 0;
+}
+#endif
+
+/**
+ * atomic64_add_negative - add and test if negative
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#ifndef atomic64_add_negative
+static inline bool atomic64_add_negative(long long i, atomic64_t *v)
+{
+	return atomic64_add_return(i, v) < 0;
+}
+#endif
+
 #ifndef atomic64_andnot
 static inline void atomic64_andnot(long long i, atomic64_t *v)
 {