about summary refs log tree commit diff stats
path: root/include/asm-mips/atomic.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/asm-mips/atomic.h')
-rw-r--r--  include/asm-mips/atomic.h  39
1 file changed, 26 insertions, 13 deletions
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 7978d8e11647..c1a2409bb52a 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -15,6 +15,7 @@
15#define _ASM_ATOMIC_H 15#define _ASM_ATOMIC_H
16 16
17#include <linux/irqflags.h> 17#include <linux/irqflags.h>
18#include <asm/barrier.h>
18#include <asm/cpu-features.h> 19#include <asm/cpu-features.h>
19#include <asm/war.h> 20#include <asm/war.h>
20 21
@@ -130,6 +131,8 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
130{ 131{
131 unsigned long result; 132 unsigned long result;
132 133
134 smp_mb();
135
133 if (cpu_has_llsc && R10000_LLSC_WAR) { 136 if (cpu_has_llsc && R10000_LLSC_WAR) {
134 unsigned long temp; 137 unsigned long temp;
135 138
@@ -140,7 +143,6 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
140 " sc %0, %2 \n" 143 " sc %0, %2 \n"
141 " beqzl %0, 1b \n" 144 " beqzl %0, 1b \n"
142 " addu %0, %1, %3 \n" 145 " addu %0, %1, %3 \n"
143 " sync \n"
144 " .set mips0 \n" 146 " .set mips0 \n"
145 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 147 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
146 : "Ir" (i), "m" (v->counter) 148 : "Ir" (i), "m" (v->counter)
@@ -155,7 +157,6 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
155 " sc %0, %2 \n" 157 " sc %0, %2 \n"
156 " beqz %0, 1b \n" 158 " beqz %0, 1b \n"
157 " addu %0, %1, %3 \n" 159 " addu %0, %1, %3 \n"
158 " sync \n"
159 " .set mips0 \n" 160 " .set mips0 \n"
160 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 161 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
161 : "Ir" (i), "m" (v->counter) 162 : "Ir" (i), "m" (v->counter)
@@ -170,6 +171,8 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
170 local_irq_restore(flags); 171 local_irq_restore(flags);
171 } 172 }
172 173
174 smp_mb();
175
173 return result; 176 return result;
174} 177}
175 178
@@ -177,6 +180,8 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
177{ 180{
178 unsigned long result; 181 unsigned long result;
179 182
183 smp_mb();
184
180 if (cpu_has_llsc && R10000_LLSC_WAR) { 185 if (cpu_has_llsc && R10000_LLSC_WAR) {
181 unsigned long temp; 186 unsigned long temp;
182 187
@@ -187,7 +192,6 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
187 " sc %0, %2 \n" 192 " sc %0, %2 \n"
188 " beqzl %0, 1b \n" 193 " beqzl %0, 1b \n"
189 " subu %0, %1, %3 \n" 194 " subu %0, %1, %3 \n"
190 " sync \n"
191 " .set mips0 \n" 195 " .set mips0 \n"
192 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 196 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
193 : "Ir" (i), "m" (v->counter) 197 : "Ir" (i), "m" (v->counter)
@@ -202,7 +206,6 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
202 " sc %0, %2 \n" 206 " sc %0, %2 \n"
203 " beqz %0, 1b \n" 207 " beqz %0, 1b \n"
204 " subu %0, %1, %3 \n" 208 " subu %0, %1, %3 \n"
205 " sync \n"
206 " .set mips0 \n" 209 " .set mips0 \n"
207 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 210 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
208 : "Ir" (i), "m" (v->counter) 211 : "Ir" (i), "m" (v->counter)
@@ -217,6 +220,8 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
217 local_irq_restore(flags); 220 local_irq_restore(flags);
218 } 221 }
219 222
223 smp_mb();
224
220 return result; 225 return result;
221} 226}
222 227
@@ -232,6 +237,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
232{ 237{
233 unsigned long result; 238 unsigned long result;
234 239
240 smp_mb();
241
235 if (cpu_has_llsc && R10000_LLSC_WAR) { 242 if (cpu_has_llsc && R10000_LLSC_WAR) {
236 unsigned long temp; 243 unsigned long temp;
237 244
@@ -245,7 +252,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
245 " beqzl %0, 1b \n" 252 " beqzl %0, 1b \n"
246 " subu %0, %1, %3 \n" 253 " subu %0, %1, %3 \n"
247 " .set reorder \n" 254 " .set reorder \n"
248 " sync \n"
249 "1: \n" 255 "1: \n"
250 " .set mips0 \n" 256 " .set mips0 \n"
251 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 257 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -264,7 +270,6 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
264 " beqz %0, 1b \n" 270 " beqz %0, 1b \n"
265 " subu %0, %1, %3 \n" 271 " subu %0, %1, %3 \n"
266 " .set reorder \n" 272 " .set reorder \n"
267 " sync \n"
268 "1: \n" 273 "1: \n"
269 " .set mips0 \n" 274 " .set mips0 \n"
270 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 275 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -281,6 +286,8 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
281 local_irq_restore(flags); 286 local_irq_restore(flags);
282 } 287 }
283 288
289 smp_mb();
290
284 return result; 291 return result;
285} 292}
286 293
@@ -375,7 +382,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
375 382
376#ifdef CONFIG_64BIT 383#ifdef CONFIG_64BIT
377 384
378typedef struct { volatile __s64 counter; } atomic64_t; 385typedef struct { volatile long counter; } atomic64_t;
379 386
380#define ATOMIC64_INIT(i) { (i) } 387#define ATOMIC64_INIT(i) { (i) }
381 388
@@ -484,6 +491,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
484{ 491{
485 unsigned long result; 492 unsigned long result;
486 493
494 smp_mb();
495
487 if (cpu_has_llsc && R10000_LLSC_WAR) { 496 if (cpu_has_llsc && R10000_LLSC_WAR) {
488 unsigned long temp; 497 unsigned long temp;
489 498
@@ -494,7 +503,6 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
494 " scd %0, %2 \n" 503 " scd %0, %2 \n"
495 " beqzl %0, 1b \n" 504 " beqzl %0, 1b \n"
496 " addu %0, %1, %3 \n" 505 " addu %0, %1, %3 \n"
497 " sync \n"
498 " .set mips0 \n" 506 " .set mips0 \n"
499 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 507 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
500 : "Ir" (i), "m" (v->counter) 508 : "Ir" (i), "m" (v->counter)
@@ -509,7 +517,6 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
509 " scd %0, %2 \n" 517 " scd %0, %2 \n"
510 " beqz %0, 1b \n" 518 " beqz %0, 1b \n"
511 " addu %0, %1, %3 \n" 519 " addu %0, %1, %3 \n"
512 " sync \n"
513 " .set mips0 \n" 520 " .set mips0 \n"
514 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 521 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
515 : "Ir" (i), "m" (v->counter) 522 : "Ir" (i), "m" (v->counter)
@@ -524,6 +531,8 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
524 local_irq_restore(flags); 531 local_irq_restore(flags);
525 } 532 }
526 533
534 smp_mb();
535
527 return result; 536 return result;
528} 537}
529 538
@@ -531,6 +540,8 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
531{ 540{
532 unsigned long result; 541 unsigned long result;
533 542
543 smp_mb();
544
534 if (cpu_has_llsc && R10000_LLSC_WAR) { 545 if (cpu_has_llsc && R10000_LLSC_WAR) {
535 unsigned long temp; 546 unsigned long temp;
536 547
@@ -541,7 +552,6 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
541 " scd %0, %2 \n" 552 " scd %0, %2 \n"
542 " beqzl %0, 1b \n" 553 " beqzl %0, 1b \n"
543 " subu %0, %1, %3 \n" 554 " subu %0, %1, %3 \n"
544 " sync \n"
545 " .set mips0 \n" 555 " .set mips0 \n"
546 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 556 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
547 : "Ir" (i), "m" (v->counter) 557 : "Ir" (i), "m" (v->counter)
@@ -556,7 +566,6 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
556 " scd %0, %2 \n" 566 " scd %0, %2 \n"
557 " beqz %0, 1b \n" 567 " beqz %0, 1b \n"
558 " subu %0, %1, %3 \n" 568 " subu %0, %1, %3 \n"
559 " sync \n"
560 " .set mips0 \n" 569 " .set mips0 \n"
561 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 570 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
562 : "Ir" (i), "m" (v->counter) 571 : "Ir" (i), "m" (v->counter)
@@ -571,6 +580,8 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
571 local_irq_restore(flags); 580 local_irq_restore(flags);
572 } 581 }
573 582
583 smp_mb();
584
574 return result; 585 return result;
575} 586}
576 587
@@ -586,6 +597,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
586{ 597{
587 unsigned long result; 598 unsigned long result;
588 599
600 smp_mb();
601
589 if (cpu_has_llsc && R10000_LLSC_WAR) { 602 if (cpu_has_llsc && R10000_LLSC_WAR) {
590 unsigned long temp; 603 unsigned long temp;
591 604
@@ -599,7 +612,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
599 " beqzl %0, 1b \n" 612 " beqzl %0, 1b \n"
600 " dsubu %0, %1, %3 \n" 613 " dsubu %0, %1, %3 \n"
601 " .set reorder \n" 614 " .set reorder \n"
602 " sync \n"
603 "1: \n" 615 "1: \n"
604 " .set mips0 \n" 616 " .set mips0 \n"
605 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 617 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -618,7 +630,6 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
618 " beqz %0, 1b \n" 630 " beqz %0, 1b \n"
619 " dsubu %0, %1, %3 \n" 631 " dsubu %0, %1, %3 \n"
620 " .set reorder \n" 632 " .set reorder \n"
621 " sync \n"
622 "1: \n" 633 "1: \n"
623 " .set mips0 \n" 634 " .set mips0 \n"
624 : "=&r" (result), "=&r" (temp), "=m" (v->counter) 635 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
@@ -635,6 +646,8 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
635 local_irq_restore(flags); 646 local_irq_restore(flags);
636 } 647 }
637 648
649 smp_mb();
650
638 return result; 651 return result;
639} 652}
640 653