Diffstat (limited to 'arch/arc/include/asm/cmpxchg.h')
-rw-r--r--  arch/arc/include/asm/cmpxchg.h  76
1 file changed, 68 insertions(+), 8 deletions(-)
diff --git a/arch/arc/include/asm/cmpxchg.h b/arch/arc/include/asm/cmpxchg.h
index a444be67cd53..d819de1c5d10 100644
--- a/arch/arc/include/asm/cmpxchg.h
+++ b/arch/arc/include/asm/cmpxchg.h
@@ -44,7 +44,7 @@ __cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
 	return prev;
 }
 
-#else
+#elif !defined(CONFIG_ARC_PLAT_EZNPS)
 
 static inline unsigned long
 __cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
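The one-line change above, together with the additions in the next hunk, turns the existing two-way #ifdef into a three-way ladder selecting a __cmpxchg() implementation. A rough outline of the resulting structure (the opening #ifdef lies outside the context shown here, so this is a sketch rather than literal file contents):

#ifdef CONFIG_ARC_HAS_LLSC
/* LLOCK/SCOND based __cmpxchg() */
#elif !defined(CONFIG_ARC_PLAT_EZNPS)
/* atomic_ops_lock (spinlock) based __cmpxchg() */
#else /* CONFIG_ARC_PLAT_EZNPS */
/* EZchip NPS __cmpxchg() built on a dedicated exchange instruction, added below */
#endif /* CONFIG_ARC_HAS_LLSC */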
@@ -64,23 +64,48 @@ __cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
 	return prev;
 }
 
+#else /* CONFIG_ARC_PLAT_EZNPS */
+
+static inline unsigned long
+__cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
+{
+	/*
+	 * Explicit full memory barrier needed before/after
+	 */
+	smp_mb();
+
+	write_aux_reg(CTOP_AUX_GPA1, expected);
+
+	__asm__ __volatile__(
+	"	mov r2, %0\n"
+	"	mov r3, %1\n"
+	"	.word %2\n"
+	"	mov %0, r2"
+	: "+r"(new)
+	: "r"(ptr), "i"(CTOP_INST_EXC_DI_R2_R2_R3)
+	: "r2", "r3", "memory");
+
+	smp_mb();
+
+	return new;
+}
+
 #endif /* CONFIG_ARC_HAS_LLSC */
 
 #define cmpxchg(ptr, o, n) ((typeof(*(ptr)))__cmpxchg((ptr), \
 				(unsigned long)(o), (unsigned long)(n)))
 
 /*
- * Since not supported natively, ARC cmpxchg() uses atomic_ops_lock (UP/SMP)
- * just to gaurantee semantics.
- * atomic_cmpxchg() needs to use the same locks as it's other atomic siblings
- * which also happens to be atomic_ops_lock.
- *
- * Thus despite semantically being different, implementation of atomic_cmpxchg()
- * is same as cmpxchg().
+ * atomic_cmpxchg is same as cmpxchg
+ *   LLSC: only different in data-type, semantics are exactly same
+ *  !LLSC: cmpxchg() has to use an external lock atomic_ops_lock to guarantee
+ *         semantics, and this lock also happens to be used by atomic_*()
  */
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 
 
+#ifndef CONFIG_ARC_PLAT_EZNPS
+
 /*
  * xchg (reg with memory) based on "Native atomic" EX insn
  */
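In the EZNPS variant added above, the expected value is parked in the CTOP_AUX_GPA1 auxiliary register, the new value and the target pointer travel in r2/r3, and the .word-encoded CTOP_INST_EXC_DI_R2_R2_R3 instruction leaves whatever was previously in memory back in r2, which __cmpxchg() then returns; full barriers before and after keep the usual cmpxchg() ordering guarantees. Callers never see any of this, only the generic macros. A minimal usage sketch of the cmpxchg() contract that all three implementations must honour (hypothetical caller, not part of this patch; my_counter and add_to_counter are made-up names):

static int my_counter;	/* hypothetical example variable */

static void add_to_counter(int delta)
{
	int old, seen;

	do {
		old = READ_ONCE(my_counter);
		/*
		 * cmpxchg() returns the value it found at the address:
		 * equal to 'old' only if the store actually happened.
		 */
		seen = cmpxchg(&my_counter, old, old + delta);
	} while (seen != old);
}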
@@ -143,6 +168,41 @@ static inline unsigned long __xchg(unsigned long val, volatile void *ptr,
 
 #endif
 
+#else /* CONFIG_ARC_PLAT_EZNPS */
+
+static inline unsigned long __xchg(unsigned long val, volatile void *ptr,
+				   int size)
+{
+	extern unsigned long __xchg_bad_pointer(void);
+
+	switch (size) {
+	case 4:
+		/*
+		 * Explicit full memory barrier needed before/after
+		 */
+		smp_mb();
+
+		__asm__ __volatile__(
+		"	mov r2, %0\n"
+		"	mov r3, %1\n"
+		"	.word %2\n"
+		"	mov %0, r2\n"
+		: "+r"(val)
+		: "r"(ptr), "i"(CTOP_INST_XEX_DI_R2_R2_R3)
+		: "r2", "r3", "memory");
+
+		smp_mb();
+
+		return val;
+	}
+	return __xchg_bad_pointer();
+}
+
+#define xchg(ptr, with) ((typeof(*(ptr)))__xchg((unsigned long)(with), (ptr), \
+						 sizeof(*(ptr))))
+
+#endif /* CONFIG_ARC_PLAT_EZNPS */
+
 /*
  * "atomic" variant of xchg()
  * REQ: It needs to follow the same serialization rules as other atomic_xxx()
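The EZNPS __xchg() above handles only 4-byte operands; any other size falls through to __xchg_bad_pointer(), the usual kernel idiom of calling a function that is declared but never defined so that unsupported sizes fail at link time. As a small, hypothetical illustration of the xchg() contract (not from this patch; pending_work and take_pending_work are invented names):

static void *pending_work;	/* hypothetical hand-off slot */

static void *take_pending_work(void)
{
	/* atomically clear the slot and return whatever pointer was there */
	return xchg(&pending_work, NULL);
}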