 arch/s390/include/asm/cmpxchg.h | 205 ++++++++++++++++++++++++++++++++++++++
 arch/s390/include/asm/system.h  | 196 +-------------------------------------
 2 files changed, 206 insertions(+), 195 deletions(-)

diff --git a/arch/s390/include/asm/cmpxchg.h b/arch/s390/include/asm/cmpxchg.h
new file mode 100644
index 000000000000..4052df9030cb
--- /dev/null
+++ b/arch/s390/include/asm/cmpxchg.h
@@ -0,0 +1,205 @@
+/*
+ * Copyright IBM Corp. 1999, 2011
+ *
+ * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
+ */
+
+#ifndef __ASM_CMPXCHG_H
+#define __ASM_CMPXCHG_H
+
+#include <linux/types.h>
+
+extern void __xchg_called_with_bad_pointer(void);
+
+static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
+{
+	unsigned long addr, old;
+	int shift;
+
+	switch (size) {
+	case 1:
+		addr = (unsigned long) ptr;
+		shift = (3 ^ (addr & 3)) << 3;
+		addr ^= addr & 3;
+		asm volatile(
+			"	l	%0,%4\n"
+			"0:	lr	0,%0\n"
+			"	nr	0,%3\n"
+			"	or	0,%2\n"
+			"	cs	%0,0,%4\n"
+			"	jl	0b\n"
+			: "=&d" (old), "=Q" (*(int *) addr)
+			: "d" (x << shift), "d" (~(255 << shift)),
+			  "Q" (*(int *) addr) : "memory", "cc", "0");
+		return old >> shift;
+	case 2:
+		addr = (unsigned long) ptr;
+		shift = (2 ^ (addr & 2)) << 3;
+		addr ^= addr & 2;
+		asm volatile(
+			"	l	%0,%4\n"
+			"0:	lr	0,%0\n"
+			"	nr	0,%3\n"
+			"	or	0,%2\n"
+			"	cs	%0,0,%4\n"
+			"	jl	0b\n"
+			: "=&d" (old), "=Q" (*(int *) addr)
+			: "d" (x << shift), "d" (~(65535 << shift)),
+			  "Q" (*(int *) addr) : "memory", "cc", "0");
+		return old >> shift;
+	case 4:
+		asm volatile(
+			"	l	%0,%3\n"
+			"0:	cs	%0,%2,%3\n"
+			"	jl	0b\n"
+			: "=&d" (old), "=Q" (*(int *) ptr)
+			: "d" (x), "Q" (*(int *) ptr)
+			: "memory", "cc");
+		return old;
+#ifdef CONFIG_64BIT
+	case 8:
+		asm volatile(
+			"	lg	%0,%3\n"
+			"0:	csg	%0,%2,%3\n"
+			"	jl	0b\n"
+			: "=&d" (old), "=m" (*(long *) ptr)
+			: "d" (x), "Q" (*(long *) ptr)
+			: "memory", "cc");
+		return old;
+#endif /* CONFIG_64BIT */
+	}
+	__xchg_called_with_bad_pointer();
+	return x;
+}
+
+#define xchg(ptr, x)						  \
+({								  \
+	__typeof__(*(ptr)) __ret;				  \
+	__ret = (__typeof__(*(ptr)))				  \
+		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
+	__ret;							  \
+})
+
+/*
+ * Atomic compare and exchange. Compare OLD with MEM, if identical,
+ * store NEW in MEM. Return the initial value in MEM. Success is
+ * indicated by comparing RETURN with OLD.
+ */
+
+#define __HAVE_ARCH_CMPXCHG
+
+extern void __cmpxchg_called_with_bad_pointer(void);
+
+static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
+				      unsigned long new, int size)
+{
+	unsigned long addr, prev, tmp;
+	int shift;
+
+	switch (size) {
+	case 1:
+		addr = (unsigned long) ptr;
+		shift = (3 ^ (addr & 3)) << 3;
+		addr ^= addr & 3;
+		asm volatile(
+			"	l	%0,%2\n"
+			"0:	nr	%0,%5\n"
+			"	lr	%1,%0\n"
+			"	or	%0,%3\n"
+			"	or	%1,%4\n"
+			"	cs	%0,%1,%2\n"
+			"	jnl	1f\n"
+			"	xr	%1,%0\n"
+			"	nr	%1,%5\n"
+			"	jnz	0b\n"
+			"1:"
+			: "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
+			: "d" (old << shift), "d" (new << shift),
+			  "d" (~(255 << shift)), "Q" (*(int *) ptr)
+			: "memory", "cc");
+		return prev >> shift;
+	case 2:
+		addr = (unsigned long) ptr;
+		shift = (2 ^ (addr & 2)) << 3;
+		addr ^= addr & 2;
+		asm volatile(
+			"	l	%0,%2\n"
+			"0:	nr	%0,%5\n"
+			"	lr	%1,%0\n"
+			"	or	%0,%3\n"
+			"	or	%1,%4\n"
+			"	cs	%0,%1,%2\n"
+			"	jnl	1f\n"
+			"	xr	%1,%0\n"
+			"	nr	%1,%5\n"
+			"	jnz	0b\n"
+			"1:"
+			: "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
+			: "d" (old << shift), "d" (new << shift),
+			  "d" (~(65535 << shift)), "Q" (*(int *) ptr)
+			: "memory", "cc");
+		return prev >> shift;
+	case 4:
+		asm volatile(
+			"	cs	%0,%3,%1\n"
+			: "=&d" (prev), "=Q" (*(int *) ptr)
+			: "0" (old), "d" (new), "Q" (*(int *) ptr)
+			: "memory", "cc");
+		return prev;
+#ifdef CONFIG_64BIT
+	case 8:
+		asm volatile(
+			"	csg	%0,%3,%1\n"
+			: "=&d" (prev), "=Q" (*(long *) ptr)
+			: "0" (old), "d" (new), "Q" (*(long *) ptr)
+			: "memory", "cc");
+		return prev;
+#endif /* CONFIG_64BIT */
+	}
+	__cmpxchg_called_with_bad_pointer();
+	return old;
+}
+
+#define cmpxchg(ptr, o, n)						\
+	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
+				       (unsigned long)(n), sizeof(*(ptr))))
+
+#include <asm-generic/cmpxchg-local.h>
+
+static inline unsigned long __cmpxchg_local(void *ptr,
+					    unsigned long old,
+					    unsigned long new, int size)
+{
+	switch (size) {
+	case 1:
+	case 2:
+	case 4:
+#ifdef CONFIG_64BIT
+	case 8:
+#endif
+		return __cmpxchg(ptr, old, new, size);
+	default:
+		return __cmpxchg_local_generic(ptr, old, new, size);
+	}
+
+	return old;
+}
+
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
+			(unsigned long)(n), sizeof(*(ptr))))
+#ifdef CONFIG_64BIT
+#define cmpxchg64_local(ptr, o, n)					\
+({									\
+	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+	cmpxchg_local((ptr), (o), (n));					\
+})
+#else
+#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#endif
+
+#endif /* __ASM_CMPXCHG_H */
diff --git a/arch/s390/include/asm/system.h b/arch/s390/include/asm/system.h
index 8f8d759f6a7b..d382629a0172 100644
--- a/arch/s390/include/asm/system.h
+++ b/arch/s390/include/asm/system.h
@@ -14,6 +14,7 @@
 #include <asm/setup.h>
 #include <asm/processor.h>
 #include <asm/lowcore.h>
+#include <asm/cmpxchg.h>
 
 #ifdef __KERNEL__
 
@@ -120,161 +121,6 @@ extern int memcpy_real(void *, void *, size_t);
 
 #define nop() asm volatile("nop")
 
-#define xchg(ptr,x)						  \
-({								  \
-	__typeof__(*(ptr)) __ret;				  \
-	__ret = (__typeof__(*(ptr)))				  \
-		__xchg((unsigned long)(x), (void *)(ptr),sizeof(*(ptr))); \
-	__ret;							  \
-})
-
-extern void __xchg_called_with_bad_pointer(void);
-
-static inline unsigned long __xchg(unsigned long x, void * ptr, int size)
-{
-	unsigned long addr, old;
-	int shift;
-
-	switch (size) {
-	case 1:
-		addr = (unsigned long) ptr;
-		shift = (3 ^ (addr & 3)) << 3;
-		addr ^= addr & 3;
-		asm volatile(
-			"	l	%0,%4\n"
-			"0:	lr	0,%0\n"
-			"	nr	0,%3\n"
-			"	or	0,%2\n"
-			"	cs	%0,0,%4\n"
-			"	jl	0b\n"
-			: "=&d" (old), "=Q" (*(int *) addr)
-			: "d" (x << shift), "d" (~(255 << shift)),
-			  "Q" (*(int *) addr) : "memory", "cc", "0");
-		return old >> shift;
-	case 2:
-		addr = (unsigned long) ptr;
-		shift = (2 ^ (addr & 2)) << 3;
-		addr ^= addr & 2;
-		asm volatile(
-			"	l	%0,%4\n"
-			"0:	lr	0,%0\n"
-			"	nr	0,%3\n"
-			"	or	0,%2\n"
-			"	cs	%0,0,%4\n"
-			"	jl	0b\n"
-			: "=&d" (old), "=Q" (*(int *) addr)
-			: "d" (x << shift), "d" (~(65535 << shift)),
-			  "Q" (*(int *) addr) : "memory", "cc", "0");
-		return old >> shift;
-	case 4:
-		asm volatile(
-			"	l	%0,%3\n"
-			"0:	cs	%0,%2,%3\n"
-			"	jl	0b\n"
-			: "=&d" (old), "=Q" (*(int *) ptr)
-			: "d" (x), "Q" (*(int *) ptr)
-			: "memory", "cc");
-		return old;
-#ifdef __s390x__
-	case 8:
-		asm volatile(
-			"	lg	%0,%3\n"
-			"0:	csg	%0,%2,%3\n"
-			"	jl	0b\n"
-			: "=&d" (old), "=m" (*(long *) ptr)
-			: "d" (x), "Q" (*(long *) ptr)
-			: "memory", "cc");
-		return old;
-#endif /* __s390x__ */
-	}
-	__xchg_called_with_bad_pointer();
-	return x;
-}
-
-/*
- * Atomic compare and exchange. Compare OLD with MEM, if identical,
- * store NEW in MEM. Return the initial value in MEM. Success is
- * indicated by comparing RETURN with OLD.
- */
-
-#define __HAVE_ARCH_CMPXCHG 1
-
-#define cmpxchg(ptr, o, n)						\
-	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
-				       (unsigned long)(n), sizeof(*(ptr))))
-
-extern void __cmpxchg_called_with_bad_pointer(void);
-
-static inline unsigned long
-__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
-{
-	unsigned long addr, prev, tmp;
-	int shift;
-
-	switch (size) {
-	case 1:
-		addr = (unsigned long) ptr;
-		shift = (3 ^ (addr & 3)) << 3;
-		addr ^= addr & 3;
-		asm volatile(
-			"	l	%0,%2\n"
-			"0:	nr	%0,%5\n"
-			"	lr	%1,%0\n"
-			"	or	%0,%3\n"
-			"	or	%1,%4\n"
-			"	cs	%0,%1,%2\n"
-			"	jnl	1f\n"
-			"	xr	%1,%0\n"
-			"	nr	%1,%5\n"
-			"	jnz	0b\n"
-			"1:"
-			: "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
-			: "d" (old << shift), "d" (new << shift),
-			  "d" (~(255 << shift)), "Q" (*(int *) ptr)
-			: "memory", "cc");
-		return prev >> shift;
-	case 2:
-		addr = (unsigned long) ptr;
-		shift = (2 ^ (addr & 2)) << 3;
-		addr ^= addr & 2;
-		asm volatile(
-			"	l	%0,%2\n"
-			"0:	nr	%0,%5\n"
-			"	lr	%1,%0\n"
-			"	or	%0,%3\n"
-			"	or	%1,%4\n"
-			"	cs	%0,%1,%2\n"
-			"	jnl	1f\n"
-			"	xr	%1,%0\n"
-			"	nr	%1,%5\n"
-			"	jnz	0b\n"
-			"1:"
-			: "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr)
-			: "d" (old << shift), "d" (new << shift),
-			  "d" (~(65535 << shift)), "Q" (*(int *) ptr)
-			: "memory", "cc");
-		return prev >> shift;
-	case 4:
-		asm volatile(
-			"	cs	%0,%3,%1\n"
-			: "=&d" (prev), "=Q" (*(int *) ptr)
-			: "0" (old), "d" (new), "Q" (*(int *) ptr)
-			: "memory", "cc");
-		return prev;
-#ifdef __s390x__
-	case 8:
-		asm volatile(
-			"	csg	%0,%3,%1\n"
-			: "=&d" (prev), "=Q" (*(long *) ptr)
-			: "0" (old), "d" (new), "Q" (*(long *) ptr)
-			: "memory", "cc");
-		return prev;
-#endif /* __s390x__ */
-	}
-	__cmpxchg_called_with_bad_pointer();
-	return old;
-}
-
 /*
  * Force strict CPU ordering.
  * And yes, this is required on UP too when we're talking
@@ -353,46 +199,6 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 	__ctl_load(__dummy, cr, cr);	\
 })
 
-#include <linux/irqflags.h>
-
-#include <asm-generic/cmpxchg-local.h>
-
-static inline unsigned long __cmpxchg_local(volatile void *ptr,
-					    unsigned long old,
-					    unsigned long new, int size)
-{
-	switch (size) {
-	case 1:
-	case 2:
-	case 4:
-#ifdef __s390x__
-	case 8:
-#endif
-		return __cmpxchg(ptr, old, new, size);
-	default:
-		return __cmpxchg_local_generic(ptr, old, new, size);
-	}
-
-	return old;
-}
-
-/*
- * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
- * them available.
- */
-#define cmpxchg_local(ptr, o, n)					\
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
-			(unsigned long)(n), sizeof(*(ptr))))
-#ifdef __s390x__
-#define cmpxchg64_local(ptr, o, n)					\
-({									\
-	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
-	cmpxchg_local((ptr), (o), (n));					\
-})
-#else
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
-#endif
-
 /*
  * Use to set psw mask except for the first byte which
  * won't be changed by this function.
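
For reference, cmpxchg() is normally used in a read/compute/retry loop: the
caller snapshots the old value, computes the new one, and retries if another
CPU changed the location in between. Below is a minimal sketch of that
pattern, assuming a kernel context where the moved header is available; the
event_count variable and event_count_inc() helper are hypothetical and not
part of this patch.

#include <asm/cmpxchg.h>

static unsigned int event_count;	/* hypothetical shared counter */

static inline unsigned int event_count_inc(void)
{
	unsigned int old, new;

	do {
		old = event_count;	/* snapshot the current value */
		new = old + 1;		/* compute the desired update */
		/*
		 * cmpxchg() stores new only if the location still holds
		 * old, and returns the prior value either way; a mismatch
		 * means another CPU won the race, so retry.
		 */
	} while (cmpxchg(&event_count, old, new) != old);

	return new;
}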