author    Babu Moger <babu.moger@oracle.com>    2017-05-24 19:55:14 -0400
committer David S. Miller <davem@davemloft.net> 2017-05-25 15:06:51 -0400
commit    79d39e2bab60d18a68a5abc00be4506864397efc (patch)
tree      91e5e323937b20523c5d0f59e80fb81ecff74518 /arch/sparc
parent    a37594f198363fd9321ece54440336fd4b2a9c8e (diff)
arch/sparc: Introduce xchg16 for SPARC
SPARC currently supports 32-bit and 64-bit xchg. Add support for 16-bit
(2-byte) xchg, which is required by the queued spinlock feature, since it
performs a 2-byte xchg. This is achieved using the 4-byte cas instruction
with byte manipulation. Also rearrange the code so that __cmpxchg_u32 can
be called from inside xchg16.

Signed-off-by: Babu Moger <babu.moger@oracle.com>
Reviewed-by: Håkon Bugge <haakon.bugge@oracle.com>
Reviewed-by: Steven Sistare <steven.sistare@oracle.com>
Reviewed-by: Shannon Nelson <shannon.nelson@oracle.com>
Reviewed-by: Jane Chu <jane.chu@oracle.com>
Reviewed-by: Vijay Kumar <vijay.ac.kumar@oracle.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
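For readers outside the kernel tree, the following is a minimal sketch of the same technique in portable C. It assumes GCC's __atomic_compare_exchange_n builtin as a stand-in for the SPARC cas instruction and a big-endian target like SPARC; the names demo_cas32 and demo_xchg16 are hypothetical and not part of the patch.

#include <stdint.h>

/* Stand-in for SPARC cas: returns the value observed at *p; the swap
 * took effect iff that value equals old. */
static inline uint32_t demo_cas32(volatile uint32_t *p, uint32_t old,
                                  uint32_t new32)
{
        __atomic_compare_exchange_n(p, &old, new32, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
        return old;     /* updated to the observed value on failure */
}

static uint16_t demo_xchg16(volatile uint16_t *m, uint16_t val)
{
        uintptr_t addr = (uintptr_t)m;
        /* Halfword position inside the aligned 32-bit word.  On a
         * big-endian target the lower address holds the high-order
         * half, which is what the XOR with 2 accounts for. */
        int bit_shift = ((addr & 2) ^ 2) << 3;
        uint32_t mask = 0xffffu << bit_shift;
        volatile uint32_t *ptr = (volatile uint32_t *)(addr & ~(uintptr_t)2);
        uint32_t old32, new32, load32 = *ptr;

        do {
                old32 = load32;
                new32 = (load32 & ~mask) | ((uint32_t)val << bit_shift);
                load32 = demo_cas32(ptr, old32, new32);
        } while (load32 != old32);      /* retry if another CPU raced us */

        return (load32 & mask) >> bit_shift;
}

In the kernel itself this path is reached through the generic xchg() macro, which dispatches on sizeof(*ptr) and selects the new case 2 branch added by this patch.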
Diffstat (limited to 'arch/sparc')
-rw-r--r--  arch/sparc/include/asm/cmpxchg_64.h | 49
1 file changed, 39 insertions(+), 10 deletions(-)
diff --git a/arch/sparc/include/asm/cmpxchg_64.h b/arch/sparc/include/asm/cmpxchg_64.h
index 000f7d7a2d4a..4028f4f1e561 100644
--- a/arch/sparc/include/asm/cmpxchg_64.h
+++ b/arch/sparc/include/asm/cmpxchg_64.h
@@ -6,6 +6,17 @@
 #ifndef __ARCH_SPARC64_CMPXCHG__
 #define __ARCH_SPARC64_CMPXCHG__
 
+static inline unsigned long
+__cmpxchg_u32(volatile int *m, int old, int new)
+{
+	__asm__ __volatile__("cas [%2], %3, %0"
+			     : "=&r" (new)
+			     : "0" (new), "r" (m), "r" (old)
+			     : "memory");
+
+	return new;
+}
+
 static inline unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
 {
 	unsigned long tmp1, tmp2;
@@ -44,10 +55,38 @@ static inline unsigned long xchg64(__volatile__ unsigned long *m, unsigned long
 
 void __xchg_called_with_bad_pointer(void);
 
+/*
+ * Use 4 byte cas instruction to achieve 2 byte xchg. Main logic
+ * here is to get the bit shift of the byte we are interested in.
+ * The XOR is handy for reversing the bits for big-endian byte order.
+ */
+static inline unsigned long
+xchg16(__volatile__ unsigned short *m, unsigned short val)
+{
+	unsigned long maddr = (unsigned long)m;
+	int bit_shift = (((unsigned long)m & 2) ^ 2) << 3;
+	unsigned int mask = 0xffff << bit_shift;
+	unsigned int *ptr = (unsigned int *) (maddr & ~2);
+	unsigned int old32, new32, load32;
+
+	/* Read the old value */
+	load32 = *ptr;
+
+	do {
+		old32 = load32;
+		new32 = (load32 & (~mask)) | val << bit_shift;
+		load32 = __cmpxchg_u32(ptr, old32, new32);
+	} while (load32 != old32);
+
+	return (load32 & mask) >> bit_shift;
+}
+
 static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
 				   int size)
 {
 	switch (size) {
+	case 2:
+		return xchg16(ptr, x);
 	case 4:
 		return xchg32(ptr, x);
 	case 8:
@@ -65,16 +104,6 @@ static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
 
 #include <asm-generic/cmpxchg-local.h>
 
-static inline unsigned long
-__cmpxchg_u32(volatile int *m, int old, int new)
-{
-	__asm__ __volatile__("cas [%2], %3, %0"
-			     : "=&r" (new)
-			     : "0" (new), "r" (m), "r" (old)
-			     : "memory");
-
-	return new;
-}
 
 static inline unsigned long
 __cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
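To make the address arithmetic in xchg16 concrete, here is a worked example of the bit_shift and mask computation; the addresses are illustrative only.

/*
 * For a halfword at address 0x1000 ((addr & 2) == 0):
 *	bit_shift = ((0 ^ 2) << 3) = 16
 *	mask      = 0xffff << 16   = 0xffff0000   (high half of the word)
 *
 * For a halfword at address 0x1002 ((addr & 2) == 2):
 *	bit_shift = ((2 ^ 2) << 3) = 0
 *	mask      = 0xffff << 0    = 0x0000ffff   (low half of the word)
 *
 * On big-endian SPARC the halfword at the lower address lands in the
 * high-order bits of the containing 32-bit word, so the XOR with 2 maps
 * word offset 0 to shift 16 and offset 2 to shift 0.
 */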