Diffstat (limited to 'arch/powerpc/include/asm/atomic.h')
-rw-r--r--	arch/powerpc/include/asm/atomic.h	479
1 file changed, 479 insertions, 0 deletions

diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
new file mode 100644
index 000000000000..f3fc733758f5
--- /dev/null
+++ b/arch/powerpc/include/asm/atomic.h
@@ -0,0 +1,479 @@
#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
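
/*
 * Usage sketch (illustrative only, not part of this header; all names
 * below are hypothetical): atomic_read() and atomic_set() are single
 * atomic loads and stores with no implied memory ordering.
 *
 *	static atomic_t my_event_count = ATOMIC_INIT(0);
 *
 *	static void example(void)
 *	{
 *		atomic_set(&my_event_count, 10);
 *		if (atomic_read(&my_event_count) > 0)
 *			handle_events();
 *	}
 */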

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
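
/*
 * The loop above is the standard powerpc pattern: lwarx loads the word
 * and sets a reservation, stwcx. stores only if the reservation still
 * holds, and bne- retries otherwise.  The value-returning variants also
 * bracket the loop with LWSYNC_ON_SMP/ISYNC_ON_SMP to give the ordering
 * the kernel expects of value-returning atomics.  A rough C equivalent,
 * as a sketch only (atomic_cmpxchg() is defined later in this file):
 *
 *	static int atomic_add_return_sketch(int a, atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);		// lwarx
 *			new = old + a;
 *		} while (atomic_cmpxchg(v, old, new) != old); // stwcx. failed
 *		return new;
 *	}
 */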

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
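
/*
 * atomic_cmpxchg() is the building block for ad-hoc read-modify-write
 * operations.  Sketch of a hypothetical atomic maximum, not part of this
 * header:
 *
 *	static inline void atomic_max_sketch(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)
 *				break;		// swap succeeded
 *			old = seen;		// lost a race, retry
 *		}
 *	}
 */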

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
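
/*
 * atomic_inc_not_zero() and atomic_dec_and_test() together support the
 * classic reference-counting pattern.  Hypothetical sketch (struct and
 * function names are illustrative only):
 *
 *	struct my_obj {
 *		atomic_t refs;
 *	};
 *
 *	struct my_obj *my_obj_get(struct my_obj *obj)
 *	{
 *		if (obj && !atomic_inc_not_zero(&obj->refs))
 *			return NULL;	// already being torn down
 *		return obj;
 *	}
 *
 *	void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refs))
 *			kfree(obj);	// last reference dropped
 *	}
 */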

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
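
/*
 * atomic_dec_if_positive() suits semaphore-like counts that must never
 * drop below zero.  Hypothetical sketch:
 *
 *	static int my_try_acquire(atomic_t *avail)
 *	{
 *		// old value minus 1; negative means nothing was available
 *		return atomic_dec_if_positive(avail) >= 0;
 *	}
 */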

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
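
/*
 * atomic_inc()/atomic_dec() imply no memory barrier of their own, so the
 * macros above (each a full smp_mb() here) order them against nearby
 * accesses when required.  Illustrative sketch:
 *
 *	obj->data_ready = 1;		// must be visible before the dec
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */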

#ifdef __powerpc64__

typedef struct { long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
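
/*
 * The atomic64_* variants mirror the 32-bit ones, using ld/std and
 * ldarx/stdcx. on the full doubleword; the PPC405_ERR77 workaround is
 * absent because the 405 is a 32-bit core.  Hypothetical usage sketch:
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *	static void account(long n)
 *	{
 *		atomic64_add(n, &total_bytes);
 *	}
 */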

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */