 arch/alpha/include/asm/atomic.h           | 4 ++--
 arch/arc/include/asm/atomic.h             | 4 ++--
 arch/arm/include/asm/atomic.h             | 4 ++--
 arch/arm64/include/asm/atomic.h           | 2 +-
 arch/h8300/include/asm/atomic.h           | 2 +-
 arch/hexagon/include/asm/atomic.h         | 4 ++--
 arch/ia64/include/asm/atomic.h            | 2 +-
 arch/m68k/include/asm/atomic.h            | 2 +-
 arch/mips/include/asm/atomic.h            | 4 ++--
 arch/openrisc/include/asm/atomic.h        | 4 ++--
 arch/parisc/include/asm/atomic.h          | 4 ++--
 arch/powerpc/include/asm/atomic.h         | 8 ++++----
 arch/riscv/include/asm/atomic.h           | 4 ++--
 arch/s390/include/asm/atomic.h            | 2 +-
 arch/sh/include/asm/atomic.h              | 4 ++--
 arch/sparc/include/asm/atomic_32.h        | 2 +-
 arch/sparc/include/asm/atomic_64.h        | 2 +-
 arch/sparc/lib/atomic32.c                 | 4 ++--
 arch/x86/include/asm/atomic.h             | 4 ++--
 arch/xtensa/include/asm/atomic.h          | 4 ++--
 drivers/block/rbd.c                       | 2 +-
 drivers/infiniband/core/rdma_core.c       | 2 +-
 fs/afs/rxrpc.c                            | 2 +-
 include/asm-generic/atomic-instrumented.h | 4 ++--
 include/asm-generic/atomic.h              | 4 ++--
 include/linux/atomic.h                    | 2 +-
 kernel/bpf/syscall.c                      | 4 ++--
 net/rxrpc/call_object.c                   | 2 +-
 net/rxrpc/conn_object.c                   | 4 ++--
 net/rxrpc/local_object.c                  | 2 +-
 net/rxrpc/peer_object.c                   | 2 +-
 31 files changed, 50 insertions(+), 50 deletions(-)
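
Every hunk below is the same mechanical substitution, and the kernel-doc it touches spells out the contract: atomic_fetch_add_unless() adds @a to @v unless @v equals @u, and returns the value @v held before the operation. The dominant calling idiom is inc-not-zero, visible in the riscv and rxrpc hunks. A minimal sketch of that idiom follows; the obj_get_maybe()/usage names are illustrative, not taken from any of the touched files:

#include <linux/atomic.h>
#include <linux/types.h>

/*
 * Illustrative sketch: take a reference on a hypothetical object unless
 * its refcount has already dropped to zero. Because the fetch variant
 * returns the old value, old == 0 means the object is already dying and
 * must not be resurrected.
 */
static bool obj_get_maybe(atomic_t *usage)
{
	return atomic_fetch_add_unless(usage, 1, 0) != 0;
}
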
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 767bfdd42992..392b15a4dd4f 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -206,7 +206,7 @@ ATOMIC_OPS(xor, xor)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -214,7 +214,7 @@ ATOMIC_OPS(xor, xor)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, new, old;
 	smp_mb();
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 11859287c52a..67121b5ff3a3 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -309,7 +309,7 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
 #undef ATOMIC_OP
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -317,7 +317,7 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v
  */
-#define __atomic_add_unless(v, a, u)				\
+#define atomic_fetch_add_unless(v, a, u)			\
 ({								\
 	int c, old;						\
 								\
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 66d0e215a773..9d56d0727c9b 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -130,7 +130,7 @@ static inline int atomic_cmpxchg_relaxed(atomic_t *ptr, int old, int new)
 }
 #define atomic_cmpxchg_relaxed		atomic_cmpxchg_relaxed
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int oldval, newval;
 	unsigned long tmp;
@@ -215,7 +215,7 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index c0235e0ff849..264d20339f74 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -125,7 +125,7 @@
 #define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
 #define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
 #define atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
-#define __atomic_add_unless(v, a, u)	___atomic_add_unless(v, a, u,)
+#define atomic_fetch_add_unless(v, a, u)	___atomic_add_unless(v, a, u,)
 #define atomic_andnot			atomic_andnot
 
 /*
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index b174dec099bf..5c856887fdf2 100644
--- a/arch/h8300/include/asm/atomic.h
+++ b/arch/h8300/include/asm/atomic.h
@@ -94,7 +94,7 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return ret;
 }
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
 	h8300flags flags;
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index fb3dfb2a667e..287aa9f394f3 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -164,7 +164,7 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer to value
  * @a: amount to add
  * @u: unless value is equal to u
@@ -173,7 +173,7 @@ ATOMIC_OPS(xor)
  *
  */
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int __oldval;
 	register int tmp;
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 2524fb60fbc2..9d2ddde5f9d5 100644
--- a/arch/ia64/include/asm/atomic.h
+++ b/arch/ia64/include/asm/atomic.h
@@ -215,7 +215,7 @@ ATOMIC64_FETCH_OP(xor, ^)
 	(cmpxchg(&((v)->counter), old, new))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
 
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/m68k/include/asm/atomic.h b/arch/m68k/include/asm/atomic.h
index e993e2860ee1..8022d9ea1213 100644
--- a/arch/m68k/include/asm/atomic.h
+++ b/arch/m68k/include/asm/atomic.h
@@ -211,7 +211,7 @@ static inline int atomic_add_negative(int i, atomic_t *v)
 	return c != 0;
 }
 
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 0ab176bdb8e8..02fc1553cf9b 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -275,7 +275,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -283,7 +283,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/openrisc/include/asm/atomic.h b/arch/openrisc/include/asm/atomic.h
index 146e1660f00e..b589fac39b92 100644
--- a/arch/openrisc/include/asm/atomic.h
+++ b/arch/openrisc/include/asm/atomic.h
@@ -100,7 +100,7 @@ ATOMIC_OP(xor)
  *
  * This is often used through atomic_inc_not_zero()
  */
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int old, tmp;
 
@@ -119,7 +119,7 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 
 	return old;
 }
-#define __atomic_add_unless	__atomic_add_unless
+#define atomic_fetch_add_unless	atomic_fetch_add_unless
 
 #include <asm-generic/atomic.h>
 
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 88bae6676c9b..7748abced766 100644
--- a/arch/parisc/include/asm/atomic.h
+++ b/arch/parisc/include/asm/atomic.h
@@ -78,7 +78,7 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -86,7 +86,7 @@ static __inline__ int atomic_read(const atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 682b3e6a1e21..1483261080a1 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -218,7 +218,7 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 #define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -226,13 +226,13 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int t;
 
 	__asm__ __volatile__ (
 	PPC_ATOMIC_ENTRY_BARRIER
-"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
+"1:	lwarx	%0,0,%1		# atomic_fetch_add_unless\n\
 	cmpw	0,%0,%3 \n\
 	beq	2f \n\
 	add	%0,%2,%0 \n"
@@ -538,7 +538,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 	__asm__ __volatile__ (
 	PPC_ATOMIC_ENTRY_BARRIER
-"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
+"1:	ldarx	%0,0,%1		# atomic_fetch_add_unless\n\
 	cmpd	0,%0,%3 \n\
 	beq	2f \n\
 	add	%0,%2,%0 \n"
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
index 855115ace98c..739e810c857e 100644
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -332,7 +332,7 @@ ATOMIC_OP(dec_and_test, dec, ==, 0, 64)
 #undef ATOMIC_OP
 
 /* This is required to provide a full barrier on success. */
-static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int prev, rc;
 
@@ -381,7 +381,7 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
  */
 static __always_inline int atomic_inc_not_zero(atomic_t *v)
 {
-	return __atomic_add_unless(v, 1, 0);
+	return atomic_fetch_add_unless(v, 1, 0);
 }
 
 #ifndef CONFIG_GENERIC_ATOMIC64
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index 4b55532f15c4..c2858cdd8c29 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -90,7 +90,7 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 	return __atomic_cmpxchg(&v->counter, old, new);
 }
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index 0fd0099f43cc..ef45931ebac5 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -46,7 +46,7 @@
 #define atomic_cmpxchg(v, o, n)		(cmpxchg(&((v)->counter), (o), (n)))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -54,7 +54,7 @@
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index d13ce517f4b9..a58f4b43bcc7 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -27,7 +27,7 @@ int atomic_fetch_or(int, atomic_t *);
 int atomic_fetch_xor(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
 int atomic_xchg(atomic_t *, int);
-int __atomic_add_unless(atomic_t *, int, int);
+int atomic_fetch_add_unless(atomic_t *, int, int);
 void atomic_set(atomic_t *, int);
 
 #define atomic_set_release(v, i)	atomic_set((v), (i))
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index 28db058d471b..f416fd3d2708 100644
--- a/arch/sparc/include/asm/atomic_64.h
+++ b/arch/sparc/include/asm/atomic_64.h
@@ -89,7 +89,7 @@ static inline int atomic_xchg(atomic_t *v, int new)
 	return xchg(&v->counter, new);
 }
 
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index 465a901a0ada..281fa634bb1a 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -95,7 +95,7 @@ int atomic_cmpxchg(atomic_t *v, int old, int new)
 }
 EXPORT_SYMBOL(atomic_cmpxchg);
 
-int __atomic_add_unless(atomic_t *v, int a, int u)
+int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
 	unsigned long flags;
@@ -107,7 +107,7 @@ int __atomic_add_unless(atomic_t *v, int a, int u)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 	return ret;
 }
-EXPORT_SYMBOL(__atomic_add_unless);
+EXPORT_SYMBOL(atomic_fetch_add_unless);
 
 /* Atomic operations are already serializing */
 void atomic_set(atomic_t *v, int i)
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 0db6bec95489..84ed0bd76aef 100644
--- a/arch/x86/include/asm/atomic.h
+++ b/arch/x86/include/asm/atomic.h
@@ -254,7 +254,7 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
 }
 
 /**
- * __arch_atomic_add_unless - add unless the number is already a given value
+ * arch_atomic_fetch_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -262,7 +262,7 @@ static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
  * Atomically adds @a to @v, so long as @v was not already @u.
  * Returns the old value of @v.
  */
-static __always_inline int __arch_atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c = arch_atomic_read(v);
 
diff --git a/arch/xtensa/include/asm/atomic.h b/arch/xtensa/include/asm/atomic.h
index e7a23f2a519a..4188e56c06c9 100644
--- a/arch/xtensa/include/asm/atomic.h
+++ b/arch/xtensa/include/asm/atomic.h
@@ -275,7 +275,7 @@ ATOMIC_OPS(xor)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * __atomic_add_unless - add unless the number is a given value
+ * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -283,7 +283,7 @@ ATOMIC_OPS(xor)
  * Atomically adds @a to @v, so long as it was not @u.
  * Returns the old value of @v.
  */
-static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
diff --git a/drivers/block/rbd.c b/drivers/block/rbd.c
index fa0729c1e776..d81c653b9bf6 100644
--- a/drivers/block/rbd.c
+++ b/drivers/block/rbd.c
@@ -61,7 +61,7 @@ static int atomic_inc_return_safe(atomic_t *v)
 {
 	unsigned int counter;
 
-	counter = (unsigned int)__atomic_add_unless(v, 1, 0);
+	counter = (unsigned int)atomic_fetch_add_unless(v, 1, 0);
 	if (counter <= (unsigned int)INT_MAX)
 		return (int)counter;
 
diff --git a/drivers/infiniband/core/rdma_core.c b/drivers/infiniband/core/rdma_core.c
index a6e904973ba8..475910ffbcb6 100644
--- a/drivers/infiniband/core/rdma_core.c
+++ b/drivers/infiniband/core/rdma_core.c
@@ -121,7 +121,7 @@ static int uverbs_try_lock_object(struct ib_uobject *uobj, bool exclusive)
 	 * this lock.
 	 */
 	if (!exclusive)
-		return __atomic_add_unless(&uobj->usecnt, 1, -1) == -1 ?
+		return atomic_fetch_add_unless(&uobj->usecnt, 1, -1) == -1 ?
 			-EBUSY : 0;
 
 	/* lock is either WRITE or DESTROY - should be exclusive */
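
The rdma_core hunk above is the one caller that keys on a sentinel other than zero: usecnt == -1 marks a uobject that is exclusively locked, so a single atomic_fetch_add_unless() both takes a shared reference and reports the exclusive case. A sketch of that pattern under assumed names (cnt and shared_lock_try() are not from the driver):

#include <linux/atomic.h>
#include <linux/errno.h>

/* -1 means "exclusively held": only add a shared holder when it is absent. */
static int shared_lock_try(atomic_t *cnt)
{
	return atomic_fetch_add_unless(cnt, 1, -1) == -1 ? -EBUSY : 0;
}
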
diff --git a/fs/afs/rxrpc.c b/fs/afs/rxrpc.c
index a1b18082991b..183cc5418722 100644
--- a/fs/afs/rxrpc.c
+++ b/fs/afs/rxrpc.c
@@ -648,7 +648,7 @@ static void afs_wake_up_async_call(struct sock *sk, struct rxrpc_call *rxcall,
 	trace_afs_notify_call(rxcall, call);
 	call->need_attention = true;
 
-	u = __atomic_add_unless(&call->usage, 1, 0);
+	u = atomic_fetch_add_unless(&call->usage, 1, 0);
 	if (u != 0) {
 		trace_afs_call(call, afs_call_trace_wake, u,
 			       atomic_read(&call->net->nr_outstanding_calls),
diff --git a/include/asm-generic/atomic-instrumented.h b/include/asm-generic/atomic-instrumented.h
index ec07f23678ea..b8b14cc2df6c 100644
--- a/include/asm-generic/atomic-instrumented.h
+++ b/include/asm-generic/atomic-instrumented.h
@@ -84,10 +84,10 @@ static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 ne
 }
 #endif
 
-static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	kasan_check_write(v, sizeof(*v));
-	return __arch_atomic_add_unless(v, a, u);
+	return arch_atomic_fetch_add_unless(v, a, u);
 }
 
 
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index abe6dd9ca2a8..10051ed6d088 100644
--- a/include/asm-generic/atomic.h
+++ b/include/asm-generic/atomic.h
@@ -221,8 +221,8 @@ static inline void atomic_dec(atomic_t *v)
 #define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
 #define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
 
-#ifndef __atomic_add_unless
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+#ifndef atomic_fetch_add_unless
+static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
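
The three-line diff context cuts the generic fallback short right after the initial read. For orientation, here is the shape such a cmpxchg retry loop takes; this is a sketch consistent with the `int c, old;` declarations above, not a verbatim quote of the file:

#include <linux/atomic.h>

/* Sketch: retry until either v holds u or our cmpxchg lands. */
static inline int fetch_add_unless_sketch(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;	/* lost a race; retry with the fresh value */
	return c;		/* the old value; equals u iff nothing was added */
}
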
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index 01ce3997cb42..9cc982936675 100644
--- a/include/linux/atomic.h
+++ b/include/linux/atomic.h
@@ -530,7 +530,7 @@
  */
 static inline int atomic_add_unless(atomic_t *v, int a, int u)
 {
-	return __atomic_add_unless(v, a, u) != u;
+	return atomic_fetch_add_unless(v, a, u) != u;
 }
 
 /**
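
This wrapper is why the double-underscore name could go away cleanly: atomic_add_unless() keeps its boolean contract (nonzero iff the add happened) while atomic_fetch_add_unless() exposes the old value. An illustrative contrast on a hypothetical refcount:

#include <linux/atomic.h>
#include <linux/printk.h>

static void contract_demo(atomic_t *refs)
{
	/* boolean contract: true iff the add happened */
	if (atomic_add_unless(refs, 1, 0))
		pr_debug("took a reference\n");

	/* fetch contract: the value held before the (possible) add */
	if (atomic_fetch_add_unless(refs, 1, 0) == 0)
		pr_debug("refcount had already hit zero\n");
}
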
diff --git a/kernel/bpf/syscall.c b/kernel/bpf/syscall.c
index 35dc466641f2..f12db70d3bf3 100644
--- a/kernel/bpf/syscall.c
+++ b/kernel/bpf/syscall.c
@@ -575,7 +575,7 @@ static struct bpf_map *bpf_map_inc_not_zero(struct bpf_map *map,
 {
 	int refold;
 
-	refold = __atomic_add_unless(&map->refcnt, 1, 0);
+	refold = atomic_fetch_add_unless(&map->refcnt, 1, 0);
 
 	if (refold >= BPF_MAX_REFCNT) {
 		__bpf_map_put(map, false);
@@ -1142,7 +1142,7 @@ struct bpf_prog *bpf_prog_inc_not_zero(struct bpf_prog *prog)
 {
 	int refold;
 
-	refold = __atomic_add_unless(&prog->aux->refcnt, 1, 0);
+	refold = atomic_fetch_add_unless(&prog->aux->refcnt, 1, 0);
 
 	if (refold >= BPF_MAX_REFCNT) {
 		__bpf_prog_put(prog, false);
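
The bpf hunks layer a ceiling on top of the zero sentinel: the fetch result both refuses to resurrect a dying map or prog (old == 0) and lets the caller back out when the count nears overflow (old >= BPF_MAX_REFCNT). A simplified sketch of that bounded idiom; ref_get_bounded() and its error codes are placeholders, not the bpf helpers, and the real code drops the whole reference via __bpf_map_put()/__bpf_prog_put():

#include <linux/atomic.h>
#include <linux/errno.h>

static int ref_get_bounded(atomic_t *cnt, int max)
{
	int old = atomic_fetch_add_unless(cnt, 1, 0);

	if (old == 0)		/* dying object: the add never happened */
		return -ENOENT;
	if (old >= max) {	/* too many holders: undo our add */
		atomic_dec(cnt);
		return -EBUSY;
	}
	return 0;
}
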
diff --git a/net/rxrpc/call_object.c b/net/rxrpc/call_object.c
index f6734d8cb01a..9486293fef5c 100644
--- a/net/rxrpc/call_object.c
+++ b/net/rxrpc/call_object.c
@@ -415,7 +415,7 @@ void rxrpc_incoming_call(struct rxrpc_sock *rx,
 bool rxrpc_queue_call(struct rxrpc_call *call)
 {
 	const void *here = __builtin_return_address(0);
-	int n = __atomic_add_unless(&call->usage, 1, 0);
+	int n = atomic_fetch_add_unless(&call->usage, 1, 0);
 	if (n == 0)
 		return false;
 	if (rxrpc_queue_work(&call->processor))
diff --git a/net/rxrpc/conn_object.c b/net/rxrpc/conn_object.c
index 4c77a78a252a..77440a356b14 100644
--- a/net/rxrpc/conn_object.c
+++ b/net/rxrpc/conn_object.c
@@ -266,7 +266,7 @@ void rxrpc_kill_connection(struct rxrpc_connection *conn)
 bool rxrpc_queue_conn(struct rxrpc_connection *conn)
 {
 	const void *here = __builtin_return_address(0);
-	int n = __atomic_add_unless(&conn->usage, 1, 0);
+	int n = atomic_fetch_add_unless(&conn->usage, 1, 0);
 	if (n == 0)
 		return false;
 	if (rxrpc_queue_work(&conn->processor))
@@ -309,7 +309,7 @@ rxrpc_get_connection_maybe(struct rxrpc_connection *conn)
 	const void *here = __builtin_return_address(0);
 
 	if (conn) {
-		int n = __atomic_add_unless(&conn->usage, 1, 0);
+		int n = atomic_fetch_add_unless(&conn->usage, 1, 0);
 		if (n > 0)
 			trace_rxrpc_conn(conn, rxrpc_conn_got, n + 1, here);
 		else
diff --git a/net/rxrpc/local_object.c b/net/rxrpc/local_object.c
index b493e6b62740..777c3ed4cfc0 100644
--- a/net/rxrpc/local_object.c
+++ b/net/rxrpc/local_object.c
@@ -305,7 +305,7 @@ struct rxrpc_local *rxrpc_get_local_maybe(struct rxrpc_local *local)
 	const void *here = __builtin_return_address(0);
 
 	if (local) {
-		int n = __atomic_add_unless(&local->usage, 1, 0);
+		int n = atomic_fetch_add_unless(&local->usage, 1, 0);
 		if (n > 0)
 			trace_rxrpc_local(local, rxrpc_local_got, n + 1, here);
 		else
diff --git a/net/rxrpc/peer_object.c b/net/rxrpc/peer_object.c
index 1b7e8107b3ae..1cf3b408017a 100644
--- a/net/rxrpc/peer_object.c
+++ b/net/rxrpc/peer_object.c
@@ -406,7 +406,7 @@ struct rxrpc_peer *rxrpc_get_peer_maybe(struct rxrpc_peer *peer)
 	const void *here = __builtin_return_address(0);
 
 	if (peer) {
-		int n = __atomic_add_unless(&peer->usage, 1, 0);
+		int n = atomic_fetch_add_unless(&peer->usage, 1, 0);
 		if (n > 0)
 			trace_rxrpc_peer(peer, rxrpc_peer_got, n + 1, here);
 		else