Diffstat (limited to 'arch/x86/include/asm/cmpxchg_32.h')
-rw-r--r--	arch/x86/include/asm/cmpxchg_32.h	114
1 files changed, 0 insertions, 114 deletions
diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 3deb7250624c..fbebb07dd80b 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -1,61 +1,11 @@
 #ifndef _ASM_X86_CMPXCHG_32_H
 #define _ASM_X86_CMPXCHG_32_H
 
-#include <linux/bitops.h> /* for LOCK_PREFIX */
-
 /*
  * Note: if you use set64_bit(), __cmpxchg64(), or their variants, you
  * you need to test for the feature in boot_cpu_data.
  */
 
-extern void __xchg_wrong_size(void);
-
-/*
- * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
- * Since this is generally used to protect other memory information, we
- * use "asm volatile" and "memory" clobbers to prevent gcc from moving
- * information around.
- */
-#define __xchg(x, ptr, size) \
-({ \
-	__typeof(*(ptr)) __x = (x); \
-	switch (size) { \
-	case 1: \
-	{ \
-		volatile u8 *__ptr = (volatile u8 *)(ptr); \
-		asm volatile("xchgb %0,%1" \
-			     : "=q" (__x), "+m" (*__ptr) \
-			     : "0" (__x) \
-			     : "memory"); \
-		break; \
-	} \
-	case 2: \
-	{ \
-		volatile u16 *__ptr = (volatile u16 *)(ptr); \
-		asm volatile("xchgw %0,%1" \
-			     : "=r" (__x), "+m" (*__ptr) \
-			     : "0" (__x) \
-			     : "memory"); \
-		break; \
-	} \
-	case 4: \
-	{ \
-		volatile u32 *__ptr = (volatile u32 *)(ptr); \
-		asm volatile("xchgl %0,%1" \
-			     : "=r" (__x), "+m" (*__ptr) \
-			     : "0" (__x) \
-			     : "memory"); \
-		break; \
-	} \
-	default: \
-		__xchg_wrong_size(); \
-	} \
-	__x; \
-})
-
-#define xchg(ptr, v) \
-	__xchg((v), (ptr), sizeof(*ptr))
-
 /*
  * CMPXCHG8B only writes to the target if we had the previous
  * value in registers, otherwise it acts as a read and gives us the
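The __xchg()/xchg() helpers removed in the hunk above wrap the XCHG instruction, which is locked implicitly on x86 even without a LOCK prefix. A minimal usage sketch, assuming the xchg() macro as it was defined here; the function and variable names below are illustrative and not part of this patch:

	/* Hypothetical caller: atomically take and clear a pending-work word. */
	static unsigned long take_pending(unsigned long *pending)
	{
		/* xchg() stores the new value (0) and returns the previous contents. */
		return xchg(pending, 0UL);
	}

Because XCHG is always atomic, the returned value is the unique previous contents of *pending even if another CPU updates it concurrently.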
@@ -84,72 +34,8 @@ static inline void set_64bit(volatile u64 *ptr, u64 value)
 		     : "memory");
 }
 
-extern void __cmpxchg_wrong_size(void);
-
-/*
- * Atomic compare and exchange.  Compare OLD with MEM, if identical,
- * store NEW in MEM.  Return the initial value in MEM.  Success is
- * indicated by comparing RETURN with OLD.
- */
-#define __raw_cmpxchg(ptr, old, new, size, lock) \
-({ \
-	__typeof__(*(ptr)) __ret; \
-	__typeof__(*(ptr)) __old = (old); \
-	__typeof__(*(ptr)) __new = (new); \
-	switch (size) { \
-	case 1: \
-	{ \
-		volatile u8 *__ptr = (volatile u8 *)(ptr); \
-		asm volatile(lock "cmpxchgb %2,%1" \
-			     : "=a" (__ret), "+m" (*__ptr) \
-			     : "q" (__new), "0" (__old) \
-			     : "memory"); \
-		break; \
-	} \
-	case 2: \
-	{ \
-		volatile u16 *__ptr = (volatile u16 *)(ptr); \
-		asm volatile(lock "cmpxchgw %2,%1" \
-			     : "=a" (__ret), "+m" (*__ptr) \
-			     : "r" (__new), "0" (__old) \
-			     : "memory"); \
-		break; \
-	} \
-	case 4: \
-	{ \
-		volatile u32 *__ptr = (volatile u32 *)(ptr); \
-		asm volatile(lock "cmpxchgl %2,%1" \
-			     : "=a" (__ret), "+m" (*__ptr) \
-			     : "r" (__new), "0" (__old) \
-			     : "memory"); \
-		break; \
-	} \
-	default: \
-		__cmpxchg_wrong_size(); \
-	} \
-	__ret; \
-})
-
-#define __cmpxchg(ptr, old, new, size) \
-	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
-
-#define __sync_cmpxchg(ptr, old, new, size) \
-	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")
-
-#define __cmpxchg_local(ptr, old, new, size) \
-	__raw_cmpxchg((ptr), (old), (new), (size), "")
-
 #ifdef CONFIG_X86_CMPXCHG
 #define __HAVE_ARCH_CMPXCHG 1
-
-#define cmpxchg(ptr, old, new) \
-	__cmpxchg((ptr), (old), (new), sizeof(*ptr))
-
-#define sync_cmpxchg(ptr, old, new) \
-	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))
-
-#define cmpxchg_local(ptr, old, new) \
-	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
 #endif
 
 #ifdef CONFIG_X86_CMPXCHG64
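For reference, the cmpxchg() family removed in the hunk above is normally used in a compare-and-swap retry loop: read the old value, compute a new one, and retry if another CPU changed the location in between. A minimal sketch, assuming the removed cmpxchg() macro; the function and variable names are hypothetical and not taken from this patch:

	/* Hypothetical caller: lock-free increment built on cmpxchg(). */
	static void counter_inc(unsigned int *counter)
	{
		unsigned int old;

		do {
			old = *counter;	/* snapshot the current value */
			/* cmpxchg() returns the value it found in memory;
			 * it only stored old + 1 if that value was 'old'. */
		} while (cmpxchg(counter, old, old + 1) != old);
	}

Success is detected exactly as the removed comment describes: by comparing the returned value with the expected old value.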