Diffstat (limited to 'arch/arm64/include/asm/atomic.h')
-rw-r--r--	arch/arm64/include/asm/atomic.h | 201
1 file changed, 82 insertions(+), 119 deletions(-)
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 65f1569ac96e..7047051ded40 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -35,7 +35,7 @@
  * strex/ldrex monitor on some implementations. The reason we can use it for
  * atomic_set() is the clrex or dummy strex done on every exception return.
  */
-#define atomic_read(v)	(*(volatile int *)&(v)->counter)
+#define atomic_read(v)	ACCESS_ONCE((v)->counter)
 #define atomic_set(v,i)	(((v)->counter) = (i))
 
 /*
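Note that the atomic_read() change here (and the matching atomic64_read() change further down) is not a functional change: ACCESS_ONCE(), as defined in include/linux/compiler.h at the time of this patch, performs the same volatile cast the old code spelled out by hand, so the read still compiles to a single load of ->counter. A minimal sketch of what the new form expands to:

	/* include/linux/compiler.h (contemporary definition) */
	#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))

	/* so atomic_read(v) is still (*(volatile int *)&(v)->counter) */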
@@ -43,69 +43,51 @@
  * store exclusive to ensure that these are atomic. We may loop
  * to ensure that the update happens.
  */
-static inline void atomic_add(int i, atomic_t *v)
-{
-	unsigned long tmp;
-	int result;
-
-	asm volatile("// atomic_add\n"
-"1:	ldxr	%w0, %2\n"
-"	add	%w0, %w0, %w3\n"
-"	stxr	%w1, %w0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i));
-}
-
-static inline int atomic_add_return(int i, atomic_t *v)
-{
-	unsigned long tmp;
-	int result;
-
-	asm volatile("// atomic_add_return\n"
-"1:	ldxr	%w0, %2\n"
-"	add	%w0, %w0, %w3\n"
-"	stlxr	%w1, %w0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i)
-	: "memory");
-
-	smp_mb();
-	return result;
-}
-
-static inline void atomic_sub(int i, atomic_t *v)
-{
-	unsigned long tmp;
-	int result;
 
-	asm volatile("// atomic_sub\n"
-"1:	ldxr	%w0, %2\n"
-"	sub	%w0, %w0, %w3\n"
-"	stxr	%w1, %w0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i));
+#define ATOMIC_OP(op, asm_op)						\
+static inline void atomic_##op(int i, atomic_t *v)			\
+{									\
+	unsigned long tmp;						\
+	int result;							\
+									\
+	asm volatile("// atomic_" #op "\n"				\
+"1:	ldxr	%w0, %2\n"						\
+"	" #asm_op "	%w0, %w0, %w3\n"				\
+"	stxr	%w1, %w0, %2\n"						\
+"	cbnz	%w1, 1b"						\
+	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)		\
+	: "Ir" (i));							\
+}									\
+
+#define ATOMIC_OP_RETURN(op, asm_op)					\
+static inline int atomic_##op##_return(int i, atomic_t *v)		\
+{									\
+	unsigned long tmp;						\
+	int result;							\
+									\
+	asm volatile("// atomic_" #op "_return\n"			\
+"1:	ldxr	%w0, %2\n"						\
+"	" #asm_op "	%w0, %w0, %w3\n"				\
+"	stlxr	%w1, %w0, %2\n"						\
+"	cbnz	%w1, 1b"						\
+	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)		\
+	: "Ir" (i)							\
+	: "memory");							\
+									\
+	smp_mb();							\
+	return result;							\
 }
 
-static inline int atomic_sub_return(int i, atomic_t *v)
-{
-	unsigned long tmp;
-	int result;
+#define ATOMIC_OPS(op, asm_op)						\
+	ATOMIC_OP(op, asm_op)						\
+	ATOMIC_OP_RETURN(op, asm_op)
 
-	asm volatile("// atomic_sub_return\n"
-"1:	ldxr	%w0, %2\n"
-"	sub	%w0, %w0, %w3\n"
-"	stlxr	%w1, %w0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i)
-	: "memory");
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, sub)
 
-	smp_mb();
-	return result;
-}
+#undef ATOMIC_OPS
+#undef ATOMIC_OP_RETURN
+#undef ATOMIC_OP
 
 static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
 {
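To see that the macro conversion above is purely mechanical, expand ATOMIC_OPS(add, add): ATOMIC_OP(add, add) regenerates, modulo whitespace, exactly the atomic_add() that the patch deletes. A sketch of the preprocessor output (the comment is added here for clarity and is not in the generated code):

	static inline void atomic_add(int i, atomic_t *v)
	{
		unsigned long tmp;
		int result;

		/* LL/SC loop: load-exclusive, add, store-exclusive, retry on failure */
		asm volatile("// atomic_add\n"
	"1:	ldxr	%w0, %2\n"
	"	add	%w0, %w0, %w3\n"
	"	stxr	%w1, %w0, %2\n"
	"	cbnz	%w1, 1b"
		: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
		: "Ir" (i));
	}

ATOMIC_OP_RETURN(add, add) likewise regenerates atomic_add_return(); the only differences from the void variant are the release store-exclusive (stlxr), the "memory" clobber, and the trailing smp_mb() that give the *_return operations full barrier semantics.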
@@ -157,72 +139,53 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
  */
 #define ATOMIC64_INIT(i)	{ (i) }
 
-#define atomic64_read(v)	(*(volatile long *)&(v)->counter)
+#define atomic64_read(v)	ACCESS_ONCE((v)->counter)
 #define atomic64_set(v,i)	(((v)->counter) = (i))
 
-static inline void atomic64_add(u64 i, atomic64_t *v)
-{
-	long result;
-	unsigned long tmp;
-
-	asm volatile("// atomic64_add\n"
-"1:	ldxr	%0, %2\n"
-"	add	%0, %0, %3\n"
-"	stxr	%w1, %0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i));
+#define ATOMIC64_OP(op, asm_op)						\
+static inline void atomic64_##op(long i, atomic64_t *v)		\
+{									\
+	long result;							\
+	unsigned long tmp;						\
+									\
+	asm volatile("// atomic64_" #op "\n"				\
+"1:	ldxr	%0, %2\n"						\
+"	" #asm_op "	%0, %0, %3\n"					\
+"	stxr	%w1, %0, %2\n"						\
+"	cbnz	%w1, 1b"						\
+	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)		\
+	: "Ir" (i));							\
+}									\
+
+#define ATOMIC64_OP_RETURN(op, asm_op)					\
+static inline long atomic64_##op##_return(long i, atomic64_t *v)	\
+{									\
+	long result;							\
+	unsigned long tmp;						\
+									\
+	asm volatile("// atomic64_" #op "_return\n"			\
+"1:	ldxr	%0, %2\n"						\
+"	" #asm_op "	%0, %0, %3\n"					\
+"	stlxr	%w1, %0, %2\n"						\
+"	cbnz	%w1, 1b"						\
+	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)		\
+	: "Ir" (i)							\
+	: "memory");							\
+									\
+	smp_mb();							\
+	return result;							\
 }
 
-static inline long atomic64_add_return(long i, atomic64_t *v)
-{
-	long result;
-	unsigned long tmp;
+#define ATOMIC64_OPS(op, asm_op)					\
+	ATOMIC64_OP(op, asm_op)						\
+	ATOMIC64_OP_RETURN(op, asm_op)
 
-	asm volatile("// atomic64_add_return\n"
-"1:	ldxr	%0, %2\n"
-"	add	%0, %0, %3\n"
-"	stlxr	%w1, %0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i)
-	: "memory");
+ATOMIC64_OPS(add, add)
+ATOMIC64_OPS(sub, sub)
 
-	smp_mb();
-	return result;
-}
-
-static inline void atomic64_sub(u64 i, atomic64_t *v)
-{
-	long result;
-	unsigned long tmp;
-
-	asm volatile("// atomic64_sub\n"
-"1:	ldxr	%0, %2\n"
-"	sub	%0, %0, %3\n"
-"	stxr	%w1, %0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i));
-}
-
-static inline long atomic64_sub_return(long i, atomic64_t *v)
-{
-	long result;
-	unsigned long tmp;
-
-	asm volatile("// atomic64_sub_return\n"
-"1:	ldxr	%0, %2\n"
-"	sub	%0, %0, %3\n"
-"	stlxr	%w1, %0, %2\n"
-"	cbnz	%w1, 1b"
-	: "=&r" (result), "=&r" (tmp), "+Q" (v->counter)
-	: "Ir" (i)
-	: "memory");
-
-	smp_mb();
-	return result;
-}
+#undef ATOMIC64_OPS
+#undef ATOMIC64_OP_RETURN
+#undef ATOMIC64_OP
 
 static inline long atomic64_cmpxchg(atomic64_t *ptr, long old, long new)
 {
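The 64-bit macros mirror the 32-bit ones, using X registers (%0 rather than %w0) and long operands, and the same ordering split carries over: atomic64_add()/atomic64_sub() use a plain stxr and provide no ordering, while atomic64_add_return()/atomic64_sub_return() use stlxr followed by smp_mb() and therefore act as full memory barriers. A hypothetical caller (publish_batch and its arguments are illustrative, not from this patch) showing why the _return form is picked when ordering matters:

	/* Hypothetical example, not part of the patch. */
	static long publish_batch(atomic64_t *nr_events, long batch)
	{
		/*
		 * atomic64_add_return() is fully ordered on arm64 (stlxr plus
		 * smp_mb()), so stores made while preparing the batch are
		 * ordered before the updated count becomes visible.
		 * atomic64_add() would be cheaper but gives no such guarantee.
		 */
		return atomic64_add_return(batch, nr_events);
	}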