diff options
Diffstat (limited to 'arch/arc/include/asm/atomic.h')
-rw-r--r-- | arch/arc/include/asm/atomic.h | 83 |
1 file changed, 80 insertions, 3 deletions
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h index 7730d302cadb..5f3dcbbc0cc9 100644 --- a/arch/arc/include/asm/atomic.h +++ b/arch/arc/include/asm/atomic.h | |||
@@ -17,6 +17,8 @@ | |||
17 | #include <asm/barrier.h> | 17 | #include <asm/barrier.h> |
18 | #include <asm/smp.h> | 18 | #include <asm/smp.h> |
19 | 19 | ||
20 | #ifndef CONFIG_ARC_PLAT_EZNPS | ||
21 | |||
20 | #define atomic_read(v) READ_ONCE((v)->counter) | 22 | #define atomic_read(v) READ_ONCE((v)->counter) |
21 | 23 | ||
22 | #ifdef CONFIG_ARC_HAS_LLSC | 24 | #ifdef CONFIG_ARC_HAS_LLSC |
@@ -180,13 +182,88 @@ ATOMIC_OP(andnot, &= ~, bic) | |||
180 | ATOMIC_OP(or, |=, or) | 182 | ATOMIC_OP(or, |=, or) |
181 | ATOMIC_OP(xor, ^=, xor) | 183 | ATOMIC_OP(xor, ^=, xor) |
182 | 184 | ||
183 | #undef ATOMIC_OPS | ||
184 | #undef ATOMIC_OP_RETURN | ||
185 | #undef ATOMIC_OP | ||
186 | #undef SCOND_FAIL_RETRY_VAR_DEF | 185 | #undef SCOND_FAIL_RETRY_VAR_DEF |
187 | #undef SCOND_FAIL_RETRY_ASM | 186 | #undef SCOND_FAIL_RETRY_ASM |
188 | #undef SCOND_FAIL_RETRY_VARS | 187 | #undef SCOND_FAIL_RETRY_VARS |
189 | 188 | ||
189 | #else /* CONFIG_ARC_PLAT_EZNPS */ | ||
190 | |||
191 | static inline int atomic_read(const atomic_t *v) | ||
192 | { | ||
193 | int temp; | ||
194 | |||
195 | __asm__ __volatile__( | ||
196 | " ld.di %0, [%1]" | ||
197 | : "=r"(temp) | ||
198 | : "r"(&v->counter) | ||
199 | : "memory"); | ||
200 | return temp; | ||
201 | } | ||
202 | |||
203 | static inline void atomic_set(atomic_t *v, int i) | ||
204 | { | ||
205 | __asm__ __volatile__( | ||
206 | " st.di %0,[%1]" | ||
207 | : | ||
208 | : "r"(i), "r"(&v->counter) | ||
209 | : "memory"); | ||
210 | } | ||
211 | |||
212 | #define ATOMIC_OP(op, c_op, asm_op) \ | ||
213 | static inline void atomic_##op(int i, atomic_t *v) \ | ||
214 | { \ | ||
215 | __asm__ __volatile__( \ | ||
216 | " mov r2, %0\n" \ | ||
217 | " mov r3, %1\n" \ | ||
218 | " .word %2\n" \ | ||
219 | : \ | ||
220 | : "r"(i), "r"(&v->counter), "i"(asm_op) \ | ||
221 | : "r2", "r3", "memory"); \ | ||
222 | } \ | ||
223 | |||
224 | #define ATOMIC_OP_RETURN(op, c_op, asm_op) \ | ||
225 | static inline int atomic_##op##_return(int i, atomic_t *v) \ | ||
226 | { \ | ||
227 | unsigned int temp = i; \ | ||
228 | \ | ||
229 | /* Explicit full memory barrier needed before/after */ \ | ||
230 | smp_mb(); \ | ||
231 | \ | ||
232 | __asm__ __volatile__( \ | ||
233 | " mov r2, %0\n" \ | ||
234 | " mov r3, %1\n" \ | ||
235 | " .word %2\n" \ | ||
236 | " mov %0, r2" \ | ||
237 | : "+r"(temp) \ | ||
238 | : "r"(&v->counter), "i"(asm_op) \ | ||
239 | : "r2", "r3", "memory"); \ | ||
240 | \ | ||
241 | smp_mb(); \ | ||
242 | \ | ||
243 | temp c_op i; \ | ||
244 | \ | ||
245 | return temp; \ | ||
246 | } | ||
247 | |||
248 | #define ATOMIC_OPS(op, c_op, asm_op) \ | ||
249 | ATOMIC_OP(op, c_op, asm_op) \ | ||
250 | ATOMIC_OP_RETURN(op, c_op, asm_op) | ||
251 | |||
252 | ATOMIC_OPS(add, +=, CTOP_INST_AADD_DI_R2_R2_R3) | ||
253 | #define atomic_sub(i, v) atomic_add(-(i), (v)) | ||
254 | #define atomic_sub_return(i, v) atomic_add_return(-(i), (v)) | ||
255 | |||
256 | ATOMIC_OP(and, &=, CTOP_INST_AAND_DI_R2_R2_R3) | ||
257 | #define atomic_andnot(mask, v) atomic_and(~(mask), (v)) | ||
258 | ATOMIC_OP(or, |=, CTOP_INST_AOR_DI_R2_R2_R3) | ||
259 | ATOMIC_OP(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3) | ||
260 | |||
261 | #endif /* CONFIG_ARC_PLAT_EZNPS */ | ||
262 | |||
263 | #undef ATOMIC_OPS | ||
264 | #undef ATOMIC_OP_RETURN | ||
265 | #undef ATOMIC_OP | ||
266 | |||
190 | /** | 267 | /** |
191 | * __atomic_add_unless - add unless the number is a given value | 268 | * __atomic_add_unless - add unless the number is a given value |
192 | * @v: pointer of type atomic_t | 269 | * @v: pointer of type atomic_t |