path: root/include/asm-x86/cmpxchg_64.h
author      Joe Perches <joe@perches.com>    2008-03-23 04:01:52 -0400
committer   Ingo Molnar <mingo@elte.hu>      2008-04-17 11:41:22 -0400
commit      e52da357a15db9e12b96b4e40dffe6b9e54bb976 (patch)
tree        09f49b780e743fabbf2a4cf88f394975c0b5c325 /include/asm-x86/cmpxchg_64.h
parent      8121019cad7bfe61f8f626a85427aca66dfe0f1e (diff)
include/asm-x86/cmpxchg_64.h: checkpatch cleanups - formatting only
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86/cmpxchg_64.h')
-rw-r--r--   include/asm-x86/cmpxchg_64.h   134
1 file changed, 69 insertions(+), 65 deletions(-)
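The cleanup is purely mechanical: checkpatch.pl flags the old GNU-style `__asm__ __volatile__` spelling in favor of plain `asm volatile`, wants a space after each constraint-list colon, and wants over-long macro definitions and parameter lists wrapped. Condensed from the first hunk below, the before/after pattern applied to every asm statement in the file is:

	/* Before: GNU-style keywords, no space after the constraint colons. */
	__asm__ __volatile__("xchgb %b0,%1"
		:"=q" (x)
		:"m" (*__xg(ptr)), "0" (x)
		:"memory");

	/* After: checkpatch-preferred spelling, one space after each colon,
	 * operands aligned under the opening parenthesis. The generated
	 * code is identical; the change is formatting only. */
	asm volatile("xchgb %b0,%1"
		     : "=q" (x)
		     : "m" (*__xg(ptr)), "0" (x)
		     : "memory");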
diff --git a/include/asm-x86/cmpxchg_64.h b/include/asm-x86/cmpxchg_64.h
index 56f5b41e071c..d9b26b9a28cf 100644
--- a/include/asm-x86/cmpxchg_64.h
+++ b/include/asm-x86/cmpxchg_64.h
@@ -3,7 +3,8 @@
 
 #include <asm/alternative.h> /* Provides LOCK_PREFIX */
 
-#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
+#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
+						 (ptr), sizeof(*(ptr))))
 
 #define __xg(x) ((volatile long *)(x))
 
@@ -19,33 +20,34 @@ static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
  * Note 2: xchg has side effect, so that attribute volatile is necessary,
  *	  but generally the primitive is invalid, *ptr is output argument. --ANK
  */
-static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
+				   int size)
 {
 	switch (size) {
 	case 1:
-		__asm__ __volatile__("xchgb %b0,%1"
-			:"=q" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgb %b0,%1"
+			     : "=q" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	case 2:
-		__asm__ __volatile__("xchgw %w0,%1"
-			:"=r" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgw %w0,%1"
+			     : "=r" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	case 4:
-		__asm__ __volatile__("xchgl %k0,%1"
-			:"=r" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgl %k0,%1"
+			     : "=r" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	case 8:
-		__asm__ __volatile__("xchgq %0,%1"
-			:"=r" (x)
-			:"m" (*__xg(ptr)), "0" (x)
-			:"memory");
+		asm volatile("xchgq %0,%1"
+			     : "=r" (x)
+			     : "m" (*__xg(ptr)), "0" (x)
+			     : "memory");
 		break;
 	}
 	return x;
 }
@@ -64,61 +66,62 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 	unsigned long prev;
 	switch (size) {
 	case 1:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
+			     : "=a"(prev)
+			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 2:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 4:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 8:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	}
 	return old;
 }
 
 static inline unsigned long __cmpxchg_local(volatile void *ptr,
-		unsigned long old, unsigned long new, int size)
+					    unsigned long old,
+					    unsigned long new, int size)
 {
 	unsigned long prev;
 	switch (size) {
 	case 1:
-		__asm__ __volatile__("cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgb %b1,%2"
+			     : "=a"(prev)
+			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 2:
-		__asm__ __volatile__("cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgw %w1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 4:
-		__asm__ __volatile__("cmpxchgl %k1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgl %k1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	case 8:
-		__asm__ __volatile__("cmpxchgq %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
+		asm volatile("cmpxchgq %1,%2"
+			     : "=a"(prev)
+			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
+			     : "memory");
 		return prev;
 	}
 	return old;
@@ -126,19 +129,20 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 
 #define cmpxchg(ptr, o, n) \
 	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
 				       (unsigned long)(n), sizeof(*(ptr))))
 #define cmpxchg64(ptr, o, n) \
-	({ \
+({ \
 	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
 	cmpxchg((ptr), (o), (n)); \
-	})
+})
 #define cmpxchg_local(ptr, o, n) \
 	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
-				(unsigned long)(n), sizeof(*(ptr))))
+					     (unsigned long)(n), \
+					     sizeof(*(ptr))))
 #define cmpxchg64_local(ptr, o, n) \
-	({ \
+({ \
 	BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
 	cmpxchg_local((ptr), (o), (n)); \
-	})
+})
 
 #endif
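For context on what the touched primitives do: xchg() unconditionally swaps a value with memory, while cmpxchg() stores the new value only if the current contents equal the expected old value, returning the previous contents either way. Below is a minimal user-space sketch of just the 8-byte cases, mirroring the inline asm above; the my_xchg/my_cmpxchg names and the hard-coded "lock" prefix are illustrative stand-ins, not the kernel API (the kernel versions additionally dispatch on operand size and take LOCK_PREFIX from <asm/alternative.h>).

	/* Build on x86-64 with: gcc -O2 xchg_demo.c */
	#include <stdio.h>

	/* Unconditional atomic swap, as in the size-8 case of __xchg(). */
	static inline unsigned long my_xchg(volatile unsigned long *ptr,
					    unsigned long x)
	{
		asm volatile("xchgq %0,%1"
			     : "=r" (x)
			     : "m" (*ptr), "0" (x)
			     : "memory");
		return x;
	}

	/* Compare-and-swap, as in the size-8 case of __cmpxchg(): *ptr is
	 * set to new only if it currently holds old; the previous value is
	 * returned in rax either way. */
	static inline unsigned long my_cmpxchg(volatile unsigned long *ptr,
					       unsigned long old,
					       unsigned long new)
	{
		unsigned long prev;
		asm volatile("lock; cmpxchgq %1,%2"
			     : "=a" (prev)
			     : "r" (new), "m" (*ptr), "0" (old)
			     : "memory");
		return prev;
	}

	int main(void)
	{
		volatile unsigned long v = 1;
		unsigned long ret;

		ret = my_xchg(&v, 2);           /* returns 1, v becomes 2 */
		printf("xchg returned %lu, v is now %lu\n", ret, v);

		ret = my_cmpxchg(&v, 2, 3);     /* v holds 2, so the swap
						 * succeeds: returns 2,
						 * v becomes 3 */
		printf("cmpxchg returned %lu, v is now %lu\n", ret, v);
		return 0;
	}

A caller knows whether cmpxchg succeeded by comparing the returned value with the old value it passed in, which is exactly how the kernel's lock-free update loops are written.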