author     Andreas Larsson <andreas@gaisler.com>    2014-11-05 09:52:08 -0500
committer  David S. Miller <davem@davemloft.net>    2014-11-07 15:51:44 -0500
commit     1a17fdc4f4ed06b63fac1937470378a5441a663a (patch)
tree       1590a724b9a40458ba1a990ceb41fc51839196fc
parent     ab5c780913bca0a5763ca05dd5c2cb5cb08ccb26 (diff)
sparc32: Implement xchg and atomic_xchg using ATOMIC_HASH locks
Atomicity between xchg and cmpxchg cannot be guaranteed when xchg is
implemented with a swap and cmpxchg is implemented with locks.
Without this, e.g. mcs_spin_lock and mcs_spin_unlock are broken.

Signed-off-by: Andreas Larsson <andreas@gaisler.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
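For context, the sketch below illustrates the lost-update window the message describes, using a userspace analogue: cmpxchg takes a lock around its read-modify-write while the old xchg bypasses that lock entirely. The sketch_* helpers and the pthread mutex standing in for an ATOMIC_HASH bucket are illustrative assumptions, not the kernel's API.

/*
 * Minimal userspace sketch of the lost-update window described above.
 * Not kernel code: the sketch_* names are hypothetical and the pthread
 * mutex merely stands in for the ATOMIC_HASH(v) spinlock bucket.
 */
#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t hash_lock = PTHREAD_MUTEX_INITIALIZER;
static volatile int counter;

/* cmpxchg as sparc32 implements it: a read-modify-write under a lock. */
static int sketch_cmpxchg(volatile int *p, int old, int new)
{
	int ret;

	pthread_mutex_lock(&hash_lock);
	ret = *p;
	if (ret == old)
		*p = new;
	pthread_mutex_unlock(&hash_lock);
	return ret;
}

/*
 * The old xchg: a single hardware "swap" instruction, atomic on its own
 * but performed without hash_lock, so it can slip in between the read
 * and the write inside sketch_cmpxchg() and its store is then lost.
 */
static int sketch_xchg_swap(volatile int *p, int new)
{
	int ret = *p;
	*p = new;
	return ret;
}

int main(void)
{
	/*
	 * Racy interleaving with the old code:
	 *   CPU0: sketch_cmpxchg() reads counter == 0 under hash_lock
	 *   CPU1: sketch_xchg_swap() stores 1 without taking the lock
	 *   CPU0: 0 == 0 still holds, stores 2, so CPU1's store is lost
	 * Taking the same ATOMIC_HASH lock in xchg, as this patch does,
	 * closes that window.
	 */
	sketch_xchg_swap(&counter, 1);
	sketch_cmpxchg(&counter, 1, 2);
	printf("counter = %d\n", counter);
	return 0;
}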
-rw-r--r--  arch/sparc/include/asm/atomic_32.h   |  2
-rw-r--r--  arch/sparc/include/asm/cmpxchg_32.h  | 12
-rw-r--r--  arch/sparc/lib/atomic32.c            | 27
3 files changed, 30 insertions(+), 11 deletions(-)
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index 765c1776ec9f..0e69b7e7a439 100644
--- a/arch/sparc/include/asm/atomic_32.h
+++ b/arch/sparc/include/asm/atomic_32.h
@@ -22,7 +22,7 @@
 
 int atomic_add_return(int, atomic_t *);
 int atomic_cmpxchg(atomic_t *, int, int);
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+int atomic_xchg(atomic_t *, int);
 int __atomic_add_unless(atomic_t *, int, int);
 void atomic_set(atomic_t *, int);
 
diff --git a/arch/sparc/include/asm/cmpxchg_32.h b/arch/sparc/include/asm/cmpxchg_32.h
index 32c29a133f9d..d38b52dca216 100644
--- a/arch/sparc/include/asm/cmpxchg_32.h
+++ b/arch/sparc/include/asm/cmpxchg_32.h
@@ -11,22 +11,14 @@
 #ifndef __ARCH_SPARC_CMPXCHG__
 #define __ARCH_SPARC_CMPXCHG__
 
-static inline unsigned long xchg_u32(__volatile__ unsigned long *m, unsigned long val)
-{
-	__asm__ __volatile__("swap [%2], %0"
-			     : "=&r" (val)
-			     : "0" (val), "r" (m)
-			     : "memory");
-	return val;
-}
-
+unsigned long __xchg_u32(volatile u32 *m, u32 new);
 void __xchg_called_with_bad_pointer(void);
 
 static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr, int size)
 {
 	switch (size) {
 	case 4:
-		return xchg_u32(ptr, x);
+		return __xchg_u32(ptr, x);
 	}
 	__xchg_called_with_bad_pointer();
 	return x;
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index a7c418ac26af..71cd65ab200c 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -45,6 +45,19 @@ ATOMIC_OP(add, +=)
 
 #undef ATOMIC_OP
 
+int atomic_xchg(atomic_t *v, int new)
+{
+	int ret;
+	unsigned long flags;
+
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);
+	ret = v->counter;
+	v->counter = new;
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
+	return ret;
+}
+EXPORT_SYMBOL(atomic_xchg);
+
 int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	int ret;
@@ -137,3 +150,17 @@ unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
 	return (unsigned long)prev;
 }
 EXPORT_SYMBOL(__cmpxchg_u32);
+
+unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
+{
+	unsigned long flags;
+	u32 prev;
+
+	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
+	prev = *ptr;
+	*ptr = new;
+	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
+
+	return (unsigned long)prev;
+}
+EXPORT_SYMBOL(__xchg_u32);