about summary refs log tree commit diff stats
path: root/arch/arm
diff options
context:
space:
mode:
authorWill Deacon <will.deacon@arm.com>2013-10-09 12:01:21 -0400
committerRussell King <rmk+kernel@arm.linux.org.uk>2013-10-29 07:06:06 -0400
commit2523c67bb6962f98193dce1c73b6efb65a6ea92c (patch)
tree0c23d146be95ffcb82405c6b6770ae52421ba19e /arch/arm
parent494e492dd88d36cd201eae99873492450d1e9b4f (diff)
ARM: 7852/1: cmpxchg: implement barrier-less cmpxchg64_local
Our cmpxchg64 macros are wrappers around atomic64_cmpxchg. Whilst this is great for code re-use, there is a case for barrier-less cmpxchg where it is known to be safe (for example cmpxchg64_local and cmpxchg-based lockrefs). This patch introduces a 64-bit cmpxchg implementation specifically for the cmpxchg64_* macros, so that it can be later used by the lockref code. Signed-off-by: Will Deacon <will.deacon@arm.com> Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Diffstat (limited to 'arch/arm')
-rw-r--r--arch/arm/include/asm/cmpxchg.h52
1 file changed, 42 insertions(+), 10 deletions(-)
diff --git a/arch/arm/include/asm/cmpxchg.h b/arch/arm/include/asm/cmpxchg.h
index 4f009c10540d..fbd978fc248f 100644
--- a/arch/arm/include/asm/cmpxchg.h
+++ b/arch/arm/include/asm/cmpxchg.h
@@ -223,6 +223,42 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
223 return ret; 223 return ret;
224} 224}
225 225
/*
 * Barrier-less 64-bit compare-and-exchange using an LDREXD/STREXD
 * exclusive-monitor loop.
 *
 * @ptr: location to update (must be naturally aligned for LDREXD/STREXD
 *       — TODO confirm callers guarantee 8-byte alignment)
 * @old: expected current value
 * @new: value to store if *ptr == @old
 *
 * Returns the value observed at @ptr before the store attempt; callers
 * compare it against @old to see whether the exchange happened.
 *
 * No memory barriers are implied — use __cmpxchg64_mb() when ordering
 * against other CPUs is required (e.g. the cmpxchg64() macro).
 */
static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	unsigned long long oldval;
	unsigned long res;	/* STREXD status: 0 on success, 1 if the
				 * exclusive monitor was lost — retry. */

	__asm__ __volatile__(
"1:	ldrexd		%1, %H1, [%3]\n"	/* load-exclusive 64-bit pair */
"	teq		%1, %4\n"		/* compare low words... */
"	teqeq		%H1, %H4\n"		/* ...and high words if equal */
"	bne		2f\n"			/* mismatch: bail, no store */
"	strexd		%0, %5, %H5, [%3]\n"	/* conditional 64-bit store */
"	teq		%0, #0\n"
"	bne		1b\n"			/* store failed: retry loop */
"2:"
	: "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
	: "r" (ptr), "r" (old), "r" (new)
	: "cc");

	return oldval;
}
248
/*
 * Fully-ordered 64-bit compare-and-exchange: __cmpxchg64() bracketed by
 * smp_mb() so the operation is a full barrier on both sides, matching
 * the ordering guarantees callers of cmpxchg64() expect.
 *
 * Returns the value observed at @ptr before the store attempt.
 */
static inline unsigned long long __cmpxchg64_mb(unsigned long long *ptr,
						unsigned long long old,
						unsigned long long new)
{
	unsigned long long ret;

	smp_mb();
	ret = __cmpxchg64(ptr, old, new);
	smp_mb();

	return ret;
}
261
226#define cmpxchg_local(ptr,o,n) \ 262#define cmpxchg_local(ptr,o,n) \
227 ((__typeof__(*(ptr)))__cmpxchg_local((ptr), \ 263 ((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
228 (unsigned long)(o), \ 264 (unsigned long)(o), \
@@ -230,18 +266,14 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
230 sizeof(*(ptr)))) 266 sizeof(*(ptr))))
231 267
232#define cmpxchg64(ptr, o, n) \ 268#define cmpxchg64(ptr, o, n) \
233 ((__typeof__(*(ptr)))atomic64_cmpxchg(container_of((ptr), \ 269 ((__typeof__(*(ptr)))__cmpxchg64_mb((ptr), \
234 atomic64_t, \ 270 (unsigned long long)(o), \
235 counter), \ 271 (unsigned long long)(n)))
236 (unsigned long long)(o), \
237 (unsigned long long)(n)))
238 272
239#define cmpxchg64_local(ptr, o, n) \ 273#define cmpxchg64_local(ptr, o, n) \
240 ((__typeof__(*(ptr)))local64_cmpxchg(container_of((ptr), \ 274 ((__typeof__(*(ptr)))__cmpxchg64((ptr), \
241 local64_t, \ 275 (unsigned long long)(o), \
242 a), \ 276 (unsigned long long)(n)))
243 (unsigned long long)(o), \
244 (unsigned long long)(n)))
245 277
246#endif /* __LINUX_ARM_ARCH__ >= 6 */ 278#endif /* __LINUX_ARM_ARCH__ >= 6 */
247 279