author	Boqun Feng <boqun.feng@gmail.com>	2015-12-15 09:24:16 -0500
committer	Michael Ellerman <mpe@ellerman.id.au>	2016-02-17 08:11:31 -0500
commit	26760fc19a7e663e4f49d586aca6740fb21d887d (patch)
tree	8c73bf998f486b3d16129566cae55049e27c764f
parent	dc53617c4a3f6ca35641dfd4279720365ce9f4da (diff)
powerpc: atomic: Implement acquire/release/relaxed variants for xchg
Implement xchg{,64}_relaxed and atomic{,64}_xchg_relaxed. Based on these
_relaxed variants, release/acquire variants and fully ordered versions
can be built.

Note that xchg{,64}_relaxed and atomic_{,64}_xchg_relaxed are not
compiler barriers.

Signed-off-by: Boqun Feng <boqun.feng@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
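For context, a minimal sketch of how the ordered variants can be layered on a
_relaxed primitive, mirroring the __atomic_op_* wrapper convention used by
this series; the exact macro bodies and barrier choices below are illustrative
assumptions, not the merged code:

/* Illustrative sketch only: building acquire/release/fully-ordered forms
 * from a _relaxed operation.  Barrier placement here is an assumption. */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__asm__ __volatile__(PPC_ACQUIRE_BARRIER "" : : : "memory");	\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__asm__ __volatile__(PPC_RELEASE_BARRIER "" : : : "memory");	\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__asm__ __volatile__(PPC_ATOMIC_ENTRY_BARRIER "" : : : "memory"); \
	__ret = op##_relaxed(args);					\
	__asm__ __volatile__(PPC_ATOMIC_EXIT_BARRIER "" : : : "memory"); \
	__ret;								\
})

With wrappers of this shape, xchg_acquire(p, v) can expand to
__atomic_op_acquire(xchg, p, v), which is why only the _relaxed primitives
need per-architecture assembly.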
-rw-r--r--	arch/powerpc/include/asm/atomic.h	2
-rw-r--r--	arch/powerpc/include/asm/cmpxchg.h	69
2 files changed, 32 insertions(+), 39 deletions(-)
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index a35c27709e05..a19fcdc318ee 100644
--- a/arch/powerpc/include/asm/atomic.h
+++ b/arch/powerpc/include/asm/atomic.h
@@ -177,6 +177,7 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
  * __atomic_add_unless - add unless the number is a given value
@@ -444,6 +445,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 
 #define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic64_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
  * atomic64_add_unless - add unless the number is a given value
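A hedged usage sketch of the new atomic_xchg_relaxed() next to the existing
fully ordered atomic_xchg(); the variable and function names are invented for
illustration:

#include <linux/atomic.h>

static atomic_t example_state = ATOMIC_INIT(1);

static void example(void)
{
	/* Fully ordered exchange: acts as a full memory barrier on powerpc. */
	int old = atomic_xchg(&example_state, 0);

	/* Relaxed exchange: atomic, but no ordering guarantee and not a
	 * compiler barrier. */
	int old_relaxed = atomic_xchg_relaxed(&example_state, 2);

	(void)old;
	(void)old_relaxed;
}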
diff --git a/arch/powerpc/include/asm/cmpxchg.h b/arch/powerpc/include/asm/cmpxchg.h
index d1a8d93cccfd..17c7e14b37ca 100644
--- a/arch/powerpc/include/asm/cmpxchg.h
+++ b/arch/powerpc/include/asm/cmpxchg.h
@@ -9,21 +9,20 @@
 /*
  * Atomic exchange
  *
- * Changes the memory location '*ptr' to be val and returns
+ * Changes the memory location '*p' to be val and returns
  * the previous value stored there.
  */
+
 static __always_inline unsigned long
-__xchg_u32(volatile void *p, unsigned long val)
+__xchg_u32_local(volatile void *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -31,42 +30,34 @@ __xchg_u32(volatile void *p, unsigned long val)
 	return prev;
 }
 
-/*
- * Atomic exchange
- *
- * Changes the memory location '*ptr' to be val and returns
- * the previous value stored there.
- */
 static __always_inline unsigned long
-__xchg_u32_local(volatile void *p, unsigned long val)
+__xchg_u32_relaxed(u32 *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-"1:	lwarx	%0,0,%2 \n"
-	PPC405_ERR77(0,%2)
-"	stwcx.	%3,0,%2 \n\
-	bne-	1b"
-	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+"1:	lwarx	%0,0,%2\n"
+	PPC405_ERR77(0, %2)
+"	stwcx.	%3,0,%2\n"
+"	bne-	1b"
+	: "=&r" (prev), "+m" (*p)
 	: "r" (p), "r" (val)
-	: "cc", "memory");
+	: "cc");
 
 	return prev;
 }
 
 #ifdef CONFIG_PPC64
 static __always_inline unsigned long
-__xchg_u64(volatile void *p, unsigned long val)
+__xchg_u64_local(volatile void *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -75,18 +66,18 @@ __xchg_u64(volatile void *p, unsigned long val)
 }
 
 static __always_inline unsigned long
-__xchg_u64_local(volatile void *p, unsigned long val)
+__xchg_u64_relaxed(u64 *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-"1:	ldarx	%0,0,%2 \n"
-	PPC405_ERR77(0,%2)
-"	stdcx.	%3,0,%2 \n\
-	bne-	1b"
-	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+"1:	ldarx	%0,0,%2\n"
+	PPC405_ERR77(0, %2)
+"	stdcx.	%3,0,%2\n"
+"	bne-	1b"
+	: "=&r" (prev), "+m" (*p)
 	: "r" (p), "r" (val)
-	: "cc", "memory");
+	: "cc");
 
 	return prev;
 }
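As the commit message notes, these _relaxed helpers are not compiler
barriers: the asm above drops the "memory" clobber, so only *p itself is
constrained through the operand list. A hypothetical caller (names invented
here) showing what that means in practice:

/* Hypothetical example: the store to *ready may be reordered around the
 * exchange by the compiler or the CPU, because xchg_relaxed() implies no
 * barrier; use xchg() or an explicit smp_* barrier when ordering matters. */
static u32 example_publish(u32 *slot, u32 val, int *ready)
{
	*ready = 1;
	return xchg_relaxed(slot, val);
}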
@@ -99,14 +90,14 @@ __xchg_u64_local(volatile void *p, unsigned long val)
 extern void __xchg_called_with_bad_pointer(void);
 
 static __always_inline unsigned long
-__xchg(volatile void *ptr, unsigned long x, unsigned int size)
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
 {
 	switch (size) {
 	case 4:
-		return __xchg_u32(ptr, x);
+		return __xchg_u32_local(ptr, x);
 #ifdef CONFIG_PPC64
 	case 8:
-		return __xchg_u64(ptr, x);
+		return __xchg_u64_local(ptr, x);
 #endif
 	}
 	__xchg_called_with_bad_pointer();
@@ -114,25 +105,19 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
 }
 
 static __always_inline unsigned long
-__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
 {
 	switch (size) {
 	case 4:
-		return __xchg_u32_local(ptr, x);
+		return __xchg_u32_relaxed(ptr, x);
 #ifdef CONFIG_PPC64
 	case 8:
-		return __xchg_u64_local(ptr, x);
+		return __xchg_u64_relaxed(ptr, x);
 #endif
 	}
 	__xchg_called_with_bad_pointer();
 	return x;
 }
-#define xchg(ptr,x)							     \
-  ({									     \
-     __typeof__(*(ptr)) _x_ = (x);					     \
-     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
-  })
-
 #define xchg_local(ptr,x)						     \
   ({									     \
      __typeof__(*(ptr)) _x_ = (x);					     \
@@ -140,6 +125,12 @@ __xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
 	(unsigned long)_x_, sizeof(*(ptr)));				     \
   })
 
+#define xchg_relaxed(ptr, x)						\
+({									\
+	__typeof__(*(ptr)) _x_ = (x);					\
+	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
+			(unsigned long)_x_, sizeof(*(ptr)));		\
+})
 /*
  * Compare and exchange - if *p == old, set it to new,
  * and return the old value of *p.