Diffstat (limited to 'include/asm-x86_64/system.h')
 include/asm-x86_64/system.h | 130 +-----------------------------------------
 1 file changed, 1 insertion(+), 129 deletions(-)
diff --git a/include/asm-x86_64/system.h b/include/asm-x86_64/system.h
index 5316f3cac230..b7b8021e8c43 100644
--- a/include/asm-x86_64/system.h
+++ b/include/asm-x86_64/system.h
@@ -3,7 +3,7 @@
 
 #include <linux/kernel.h>
 #include <asm/segment.h>
-#include <asm/alternative.h>
+#include <asm/cmpxchg.h>
 
 #ifdef __KERNEL__
 
@@ -124,134 +124,6 @@ static inline void sched_cacheflush(void)
 
 #define nop() __asm__ __volatile__ ("nop")
 
-#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
-
-#define __xg(x) ((volatile long *)(x))
-
-static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
-{
-	*ptr = val;
-}
-
-#define _set_64bit set_64bit
-
-/*
- * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
- * Note 2: xchg has side effect, so that attribute volatile is necessary,
- * but generally the primitive is invalid, *ptr is output argument. --ANK
- */
-static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
-{
-	switch (size) {
-		case 1:
-			__asm__ __volatile__("xchgb %b0,%1"
-				:"=q" (x)
-				:"m" (*__xg(ptr)), "0" (x)
-				:"memory");
-			break;
-		case 2:
-			__asm__ __volatile__("xchgw %w0,%1"
-				:"=r" (x)
-				:"m" (*__xg(ptr)), "0" (x)
-				:"memory");
-			break;
-		case 4:
-			__asm__ __volatile__("xchgl %k0,%1"
-				:"=r" (x)
-				:"m" (*__xg(ptr)), "0" (x)
-				:"memory");
-			break;
-		case 8:
-			__asm__ __volatile__("xchgq %0,%1"
-				:"=r" (x)
-				:"m" (*__xg(ptr)), "0" (x)
-				:"memory");
-			break;
-	}
-	return x;
-}
-
-/*
- * Atomic compare and exchange. Compare OLD with MEM, if identical,
- * store NEW in MEM. Return the initial value in MEM. Success is
- * indicated by comparing RETURN with OLD.
- */
-
-#define __HAVE_ARCH_CMPXCHG 1
-
-static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
-				      unsigned long new, int size)
-{
-	unsigned long prev;
-	switch (size) {
-	case 1:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 2:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 4:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 8:
-		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	}
-	return old;
-}
-
-static inline unsigned long __cmpxchg_local(volatile void *ptr,
-		unsigned long old, unsigned long new, int size)
-{
-	unsigned long prev;
-	switch (size) {
-	case 1:
-		__asm__ __volatile__("cmpxchgb %b1,%2"
-				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 2:
-		__asm__ __volatile__("cmpxchgw %w1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 4:
-		__asm__ __volatile__("cmpxchgl %k1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	case 8:
-		__asm__ __volatile__("cmpxchgq %1,%2"
-				     : "=a"(prev)
-				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
-				     : "memory");
-		return prev;
-	}
-	return old;
-}
-
-#define cmpxchg(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
-#define cmpxchg_local(ptr,o,n)\
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr),(unsigned long)(o),\
-					(unsigned long)(n),sizeof(*(ptr))))
-
 #ifdef CONFIG_SMP
 #define smp_mb() mb()
 #define smp_rmb() rmb()
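
The code removed above is the x86-64 xchg/cmpxchg machinery; the new #include suggests it now lives in asm/cmpxchg.h. As a usage note: xchg() atomically stores a new value and hands back the previous one, which makes it a test-and-set primitive. A minimal sketch, assuming the header is in scope; the names 'pending' and 'test_and_set_pending' are hypothetical, not part of this header:

static unsigned long pending;

/* Atomically set the flag and report its previous state: a non-zero
 * return means another path had already set it.  No explicit lock
 * prefix is needed because xchg implies it (per the removed comment). */
static inline int test_and_set_pending(void)
{
	return xchg(&pending, 1UL) != 0;
}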
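
cmpxchg() follows the convention spelled out in the removed comment block: it returns the value it actually found in memory, and equality with the expected old value signals success. That convention yields the classic lock-free read-modify-write retry loop. A sketch under the same assumptions; 'hypothetical_counter' and 'counter_add' are made-up names:

static unsigned long hypothetical_counter;

static inline unsigned long counter_add(unsigned long delta)
{
	unsigned long old, new;

	do {
		old = hypothetical_counter;	/* snapshot current value */
		new = old + delta;		/* compute the update */
		/* retry if another CPU changed the counter in between */
	} while (cmpxchg(&hypothetical_counter, old, new) != old);

	return new;
}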
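
__cmpxchg_local() is the same size dispatch minus the lock prefix: a single cmpxchg instruction is still atomic with respect to interrupts on the executing CPU, so cmpxchg_local() is a cheaper choice for data that only its owning CPU ever writes, such as per-CPU statistics. A hedged sketch with a hypothetical counter:

/* Only ever updated by the CPU that owns it, so no bus lock is needed;
 * the unlocked cmpxchg still cannot be torn by a local interrupt. */
static unsigned long local_events;

static inline void bump_local_events(void)
{
	unsigned long old;

	do {
		old = local_events;
	} while (cmpxchg_local(&local_events, old, old + 1) != old);
}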