Diffstat (limited to 'include/asm-mips/atomic.h')

 include/asm-mips/atomic.h | 49 +++++++++++++++++++++++----------------------
 1 file changed, 27 insertions(+), 22 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index e64abc0d8221..c1a2409bb52a 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -9,20 +9,13 @@
  * License. See the file "COPYING" in the main directory of this archive
  * for more details.
  *
- * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
+ * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
  */
-
-/*
- * As workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
- * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
- * main big wrapper ...
- */
-#include <linux/spinlock.h>
-
 #ifndef _ASM_ATOMIC_H
 #define _ASM_ATOMIC_H
 
 #include <linux/irqflags.h>
+#include <asm/barrier.h>
 #include <asm/cpu-features.h>
 #include <asm/war.h>
 
@@ -138,6 +131,8 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -148,7 +143,6 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		"	sc	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -163,7 +157,6 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		"	sc	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -178,6 +171,8 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 
@@ -185,6 +180,8 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -195,7 +192,6 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		"	sc	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -210,7 +206,6 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		"	sc	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -225,6 +220,8 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 
@@ -240,6 +237,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -253,7 +252,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	beqzl	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	.set	reorder					\n"
-		"	sync						\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -272,7 +270,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		"	beqz	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
 		"	.set	reorder					\n"
-		"	sync						\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -289,6 +286,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 
@@ -383,7 +382,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 
 #ifdef CONFIG_64BIT
 
-typedef struct { volatile __s64 counter; } atomic64_t;
+typedef struct { volatile long counter; } atomic64_t;
 
 #define ATOMIC64_INIT(i)	{ (i) }
 
@@ -492,6 +491,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -502,7 +503,6 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -517,7 +517,6 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	addu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -532,6 +531,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 
@@ -539,6 +540,8 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -549,7 +552,6 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		"	scd	%0, %2					\n"
 		"	beqzl	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -564,7 +566,6 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		"	scd	%0, %2					\n"
 		"	beqz	%0, 1b					\n"
 		"	subu	%0, %1, %3				\n"
-		"	sync						\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
@@ -579,6 +580,8 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 
@@ -594,6 +597,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	unsigned long result;
 
+	smp_mb();
+
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
@@ -607,7 +612,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	beqzl	%0, 1b					\n"
 		"	dsubu	%0, %1, %3				\n"
 		"	.set	reorder					\n"
-		"	sync						\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -626,7 +630,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		"	beqz	%0, 1b					\n"
 		"	dsubu	%0, %1, %3				\n"
 		"	.set	reorder					\n"
-		"	sync						\n"
 		"1:							\n"
 		"	.set	mips0					\n"
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -643,6 +646,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		local_irq_restore(flags);
 	}
 
+	smp_mb();
+
 	return result;
 }
 