Diffstat (limited to 'arch/arc/include/asm/atomic.h'):
 arch/arc/include/asm/atomic.h | 45
 1 file changed, 4 insertions(+), 41 deletions(-)
diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 5f3dcbbc0cc9..dd683995bc9d 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -25,50 +25,17 @@
 
 #define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
 
-#ifdef CONFIG_ARC_STAR_9000923308
-
-#define SCOND_FAIL_RETRY_VAR_DEF                                        \
-        unsigned int delay = 1, tmp;                                    \
-
-#define SCOND_FAIL_RETRY_ASM                                            \
-        "       bz      4f                      \n"                     \
-        "       ; --- scond fail delay ---      \n"                     \
-        "       mov     %[tmp], %[delay]        \n" /* tmp = delay */   \
-        "2:     brne.d  %[tmp], 0, 2b           \n" /* while (tmp != 0) */ \
-        "       sub     %[tmp], %[tmp], 1       \n" /* tmp-- */         \
-        "       rol     %[delay], %[delay]      \n" /* delay *= 2 */    \
-        "       b       1b                      \n" /* start over */    \
-        "4: ; --- success ---                   \n"                     \
-
-#define SCOND_FAIL_RETRY_VARS                                           \
-          ,[delay] "+&r" (delay),[tmp] "=&r" (tmp)                      \
-
-#else   /* !CONFIG_ARC_STAR_9000923308 */
-
-#define SCOND_FAIL_RETRY_VAR_DEF
-
-#define SCOND_FAIL_RETRY_ASM                                            \
-        "       bnz     1b                      \n"                     \
-
-#define SCOND_FAIL_RETRY_VARS
-
-#endif
-
 #define ATOMIC_OP(op, c_op, asm_op)                                     \
 static inline void atomic_##op(int i, atomic_t *v)                      \
 {                                                                       \
         unsigned int val;                                               \
-        SCOND_FAIL_RETRY_VAR_DEF                                        \
                                                                         \
         __asm__ __volatile__(                                           \
         "1:     llock   %[val], [%[ctr]]                \n"             \
         "       " #asm_op " %[val], %[val], %[i]        \n"             \
         "       scond   %[val], [%[ctr]]                \n"             \
-        "                                               \n"             \
-        SCOND_FAIL_RETRY_ASM                                            \
-                                                                        \
+        "       bnz     1b                              \n"             \
         : [val] "=&r"   (val) /* Early clobber to prevent reg reuse */  \
-          SCOND_FAIL_RETRY_VARS                                         \
         : [ctr] "r"     (&v->counter), /* Not "m": llock only supports reg direct addr mode */ \
           [i]   "ir"    (i)                                             \
         : "cc");                                                        \
@@ -77,8 +44,7 @@ static inline void atomic_##op(int i, atomic_t *v) \
 #define ATOMIC_OP_RETURN(op, c_op, asm_op)                              \
 static inline int atomic_##op##_return(int i, atomic_t *v)              \
 {                                                                       \
         unsigned int val;                                               \
-        SCOND_FAIL_RETRY_VAR_DEF                                        \
                                                                         \
         /*                                                              \
          * Explicit full memory barrier needed before/after as          \
@@ -90,11 +56,8 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
90 "1: llock %[val], [%[ctr]] \n" \ 56 "1: llock %[val], [%[ctr]] \n" \
91 " " #asm_op " %[val], %[val], %[i] \n" \ 57 " " #asm_op " %[val], %[val], %[i] \n" \
92 " scond %[val], [%[ctr]] \n" \ 58 " scond %[val], [%[ctr]] \n" \
93 " \n" \ 59 " bnz 1b \n" \
94 SCOND_FAIL_RETRY_ASM \
95 \
96 : [val] "=&r" (val) \ 60 : [val] "=&r" (val) \
97 SCOND_FAIL_RETRY_VARS \
98 : [ctr] "r" (&v->counter), \ 61 : [ctr] "r" (&v->counter), \
99 [i] "ir" (i) \ 62 [i] "ir" (i) \
100 : "cc"); \ 63 : "cc"); \
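
For context, the CONFIG_ARC_STAR_9000923308 block deleted above implemented an exponential backoff whenever scond failed. The following is a minimal userspace C sketch of that retry policy, not kernel code: __atomic_compare_exchange_n is only a stand-in for the llock/scond pair, the function name atomic_add_with_backoff is hypothetical, and the delay handling simply mirrors the comments embedded in the removed asm (tmp = delay; while (tmp != 0) tmp--; delay *= 2; start over).

/*
 * Userspace sketch of the removed SCOND_FAIL_RETRY_* retry policy.
 * Assumes GCC/Clang __atomic builtins; the CAS stands in for llock/scond.
 */
#include <stdbool.h>

static void atomic_add_with_backoff(int i, int *counter)   /* hypothetical name */
{
        unsigned int delay = 1, tmp;
        int old;

        for (;;) {
                old = __atomic_load_n(counter, __ATOMIC_RELAXED);

                /* Stand-in for "llock ... add ... scond"; true on success. */
                if (__atomic_compare_exchange_n(counter, &old, old + i, false,
                                                __ATOMIC_RELAXED, __ATOMIC_RELAXED))
                        break;                          /* "4: ; --- success ---" */

                /* scond failed: busy-wait 'delay' iterations, then double it. */
                for (tmp = delay; tmp != 0; tmp--)      /* "2: brne.d ... sub" */
                        __asm__ __volatile__("");       /* keep the busy-wait from being optimized out */
                delay *= 2;                             /* "rol %[delay], %[delay]" */
        }                                               /* "b 1b" - start over */
}

With the workaround removed, the ATOMIC_OP/ATOMIC_OP_RETURN macros above drop this policy entirely and simply branch back to the llock ("bnz 1b") when scond fails.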