diff options
author | Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com> | 2011-08-18 14:48:06 -0400 |
---|---|---|
committer | H. Peter Anvin <hpa@linux.intel.com> | 2011-08-29 16:42:10 -0400 |
commit | e9826380d83d1bda3ee5663bf3fa4667a6fbe60a (patch) | |
tree | f7de160fc158e87f8bd78d67a4f0c43bf4356b4c /arch/x86/include/asm/cmpxchg.h | |
parent | 00a41546e8008b9944382eed1841c785f4fc8d9c (diff) |
x86, cmpxchg: Unify cmpxchg into cmpxchg.h
Everything that's actually common between 32 and 64-bit is moved into
cmpxchg.h.
xchg/cmpxchg will fail with a link error if they're passed an
unsupported size (which includes 64-bit args on 32-bit systems).
Signed-off-by: Jeremy Fitzhardinge <jeremy.fitzhardinge@citrix.com>
Link: http://lkml.kernel.org/r/4E5BCC40.3030501@goop.org
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Diffstat (limited to 'arch/x86/include/asm/cmpxchg.h')
-rw-r--r-- | arch/x86/include/asm/cmpxchg.h | 155 |
1 files changed, 155 insertions, 0 deletions
diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h index a460fa088d4c..efe3ec778a58 100644 --- a/arch/x86/include/asm/cmpxchg.h +++ b/arch/x86/include/asm/cmpxchg.h | |||
@@ -1,5 +1,160 @@ | |||
1 | #ifndef ASM_X86_CMPXCHG_H | ||
2 | #define ASM_X86_CMPXCHG_H | ||
3 | |||
4 | #include <asm/alternative.h> /* Provides LOCK_PREFIX */ | ||
5 | |||
6 | /* Non-existant functions to indicate usage errors at link time. */ | ||
7 | extern void __xchg_wrong_size(void); | ||
8 | extern void __cmpxchg_wrong_size(void); | ||
9 | |||
10 | /* | ||
11 | * Constants for operation sizes. On 32-bit, the 64-bit size it set to | ||
12 | * -1 because sizeof will never return -1, thereby making those switch | ||
13 | * case statements guaranteeed dead code which the compiler will | ||
14 | * eliminate, and allowing the "missing symbol in the default case" to | ||
15 | * indicate a usage error. | ||
16 | */ | ||
17 | #define __X86_CASE_B 1 | ||
18 | #define __X86_CASE_W 2 | ||
19 | #define __X86_CASE_L 4 | ||
20 | #ifdef CONFIG_64BIT | ||
21 | #define __X86_CASE_Q 8 | ||
22 | #else | ||
23 | #define __X86_CASE_Q -1 /* sizeof will never return -1 */ | ||
24 | #endif | ||
25 | |||
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 *
 * Dispatches on @size (in bytes) to the correctly-sized xchg
 * instruction; an unsupported size falls through to the undefined
 * __xchg_wrong_size() and fails at link time.  Returns the previous
 * value of *(ptr).
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile("xchgb %0,%1"				\
			     : "=q" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile("xchgw %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile("xchgl %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})
77 | |||
/*
 * xchg(): atomically store @v into *(ptr) and return the old value.
 * (ptr) is parenthesized inside sizeof so that an expression argument
 * such as xchg(p + 1, v) measures the pointed-to object rather than
 * expanding to sizeof(*p + 1).
 */
#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
80 | |||
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * @lock is the instruction prefix to emit ("" for the CPU-local
 * variant, an explicit or alternative-patched lock prefix otherwise).
 * Dispatches on @size to the correctly-sized cmpxchg instruction; an
 * unsupported size calls the undefined __cmpxchg_wrong_size() and
 * fails at link time.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})
133 | |||
/* cmpxchg prefixed with LOCK_PREFIX (patched away via alternatives on UP). */
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* cmpxchg with an unconditional "lock" prefix, even on UP builds. */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* cmpxchg with no lock prefix: atomic only w.r.t. the local CPU. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")
142 | |||
1 | #ifdef CONFIG_X86_32 | 143 | #ifdef CONFIG_X86_32 |
2 | # include "cmpxchg_32.h" | 144 | # include "cmpxchg_32.h" |
3 | #else | 145 | #else |
4 | # include "cmpxchg_64.h" | 146 | # include "cmpxchg_64.h" |
5 | #endif | 147 | #endif |
148 | |||
149 | #ifdef __HAVE_ARCH_CMPXCHG | ||
150 | #define cmpxchg(ptr, old, new) \ | ||
151 | __cmpxchg((ptr), (old), (new), sizeof(*ptr)) | ||
152 | |||
153 | #define sync_cmpxchg(ptr, old, new) \ | ||
154 | __sync_cmpxchg((ptr), (old), (new), sizeof(*ptr)) | ||
155 | |||
156 | #define cmpxchg_local(ptr, old, new) \ | ||
157 | __cmpxchg_local((ptr), (old), (new), sizeof(*ptr)) | ||
158 | #endif | ||
159 | |||
160 | #endif /* ASM_X86_CMPXCHG_H */ | ||