author		Will Deacon <will.deacon@arm.com>	2015-06-04 11:41:36 -0400
committer	Will Deacon <will.deacon@arm.com>	2015-07-27 10:28:53 -0400
commit		a82e62382fcbbf5c3348e802af73583e0cac39c0 (patch)
tree		01e8ff2e9c9ab4b937ef15de980a31cdcee2db27 /arch
parent		4e39715f4b5cb3b44576fedb2d38aca87de3cf48 (diff)
arm64: atomics: tidy up common atomic{,64}_* macros
The common (i.e. identical for ll/sc and lse) atomic macros in atomic.h
are needlessly different for atomic_t and atomic64_t.
This patch tidies up the definitions to make them consistent across the
two atomic types and factors out common code such as the add_unless
implementation based on cmpxchg.
Reviewed-by: Steve Capper <steve.capper@arm.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
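
The add_unless implementation mentioned above is the classic cmpxchg retry loop: read the counter, and keep attempting a compare-and-swap with c + a until it either succeeds or the observed value equals u. As a rough stand-alone sketch of that pattern only (user-space C11 <stdatomic.h>, not the kernel primitives; the function and variable names are made up for illustration):

#include <stdatomic.h>
#include <stdio.h>

/* Illustration of the add_unless pattern: add 'a' to '*v' unless it
 * currently holds 'u'; return the value observed before the attempt,
 * mirroring what __atomic_add_unless() returns. */
static int add_unless_demo(atomic_int *v, int a, int u)
{
	int c = atomic_load(v);

	while (c != u) {
		/* On failure, compare_exchange refreshes 'c' with the current
		 * value, so the loop retries against the latest counter. */
		if (atomic_compare_exchange_strong(v, &c, c + a))
			break;
	}
	return c;
}

int main(void)
{
	atomic_int counter = 3;

	printf("%d\n", add_unless_demo(&counter, 1, 0));	/* prints 3, counter becomes 4 */
	printf("%d\n", add_unless_demo(&counter, 1, 4));	/* prints 4, counter unchanged  */
	return 0;
}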
Diffstat (limited to 'arch')
-rw-r--r--	arch/arm64/include/asm/atomic.h	99
1 file changed, 40 insertions(+), 59 deletions(-)
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 1fe8f209aeb4..0b26da365f3b 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -26,8 +26,6 @@
 #include <asm/barrier.h>
 #include <asm/lse.h>
 
-#define ATOMIC_INIT(i)	{ (i) }
-
 #ifdef __KERNEL__
 
 #define __ARM64_IN_ATOMIC_IMPL
@@ -42,71 +40,54 @@
 
 #include <asm/cmpxchg.h>
 
-/*
- * On ARM, ordinary assignment (str instruction) doesn't clear the local
- * strex/ldrex monitor on some implementations. The reason we can use it for
- * atomic_set() is the clrex or dummy strex done on every exception return.
- */
-#define atomic_read(v)	ACCESS_ONCE((v)->counter)
-#define atomic_set(v,i)	(((v)->counter) = (i))
-
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-
-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
-{
-	int c, old;
-
-	c = atomic_read(v);
-	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
-		c = old;
-	return c;
-}
-
-#define atomic_inc(v)		atomic_add(1, v)
-#define atomic_dec(v)		atomic_sub(1, v)
+#define ___atomic_add_unless(v, a, u, sfx)				\
+({									\
+	typeof((v)->counter) c, old;					\
+									\
+	c = atomic##sfx##_read(v);					\
+	while (c != (u) &&						\
+	       (old = atomic##sfx##_cmpxchg((v), c, c + (a))) != c)	\
+		c = old;						\
+	c;								\
+})
 
-#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
-#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
-#define atomic_inc_return(v)    (atomic_add_return(1, v))
-#define atomic_dec_return(v)    (atomic_sub_return(1, v))
-#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
-
-#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
+#define ATOMIC_INIT(i)	{ (i) }
 
-#define atomic_andnot atomic_andnot
+#define atomic_read(v)			READ_ONCE((v)->counter)
+#define atomic_set(v, i)		(((v)->counter) = (i))
+#define atomic_xchg(v, new)		xchg(&((v)->counter), (new))
+
+#define atomic_inc(v)			atomic_add(1, (v))
+#define atomic_dec(v)			atomic_sub(1, (v))
+#define atomic_inc_return(v)		atomic_add_return(1, (v))
+#define atomic_dec_return(v)		atomic_sub_return(1, (v))
+#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
+#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
+#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
+#define atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
+#define __atomic_add_unless(v, a, u)	___atomic_add_unless(v, a, u,)
+#define atomic_andnot			atomic_andnot
 
 /*
  * 64-bit atomic operations.
  */
-#define ATOMIC64_INIT(i) { (i) }
-
-#define atomic64_read(v)	ACCESS_ONCE((v)->counter)
-#define atomic64_set(v,i)	(((v)->counter) = (i))
+#define ATOMIC64_INIT			ATOMIC_INIT
+#define atomic64_read			atomic_read
+#define atomic64_set			atomic_set
+#define atomic64_xchg			atomic_xchg
 
-#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
-
-static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
-{
-	long c, old;
-
-	c = atomic64_read(v);
-	while (c != u && (old = atomic64_cmpxchg((v), c, c + a)) != c)
-		c = old;
-
-	return c != u;
-}
-
-#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
-#define atomic64_inc(v)			atomic64_add(1LL, (v))
-#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
+#define atomic64_inc(v)			atomic64_add(1, (v))
+#define atomic64_dec(v)			atomic64_sub(1, (v))
+#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
+#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
 #define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
-#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
-#define atomic64_dec(v)			atomic64_sub(1LL, (v))
-#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
-#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
+#define atomic64_dec_and_test(v)	(atomic64_dec_return(v) == 0)
+#define atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
+#define atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
+#define atomic64_add_unless(v, a, u)	(___atomic_add_unless(v, a, u, 64) != u)
+#define atomic64_andnot			atomic64_andnot
 
-#define atomic64_andnot atomic64_andnot
+#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
 
 #endif
 #endif
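
The ##sfx token pasting is what lets one macro body serve both types: expanded with an empty suffix, ___atomic_add_unless() calls atomic_read()/atomic_cmpxchg(), and with 64 it calls atomic64_read()/atomic64_cmpxchg(). A small stand-alone sketch of that preprocessor trick (demo-only names, not kernel code):

#include <stdio.h>

static int  demo_read(int *v)    { return *v; }
static long demo64_read(long *v) { return *v; }

/* One macro, two helpers: pasting an empty suffix yields demo_read(),
 * pasting "64" yields demo64_read() -- the same trick the patch uses to
 * cover atomic_t and atomic64_t with a single macro body. */
#define DEMO_READ(v, sfx)	demo##sfx##_read(v)

int main(void)
{
	int a = 1;
	long b = 2;

	printf("%d\n", DEMO_READ(&a, ));	/* expands to demo_read(&a)    -> 1 */
	printf("%ld\n", DEMO_READ(&b, 64));	/* expands to demo64_read(&b)  -> 2 */
	return 0;
}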