author		Ralf Baechle <ralf@linux-mips.org>	2007-07-14 08:24:05 -0400
committer	Ralf Baechle <ralf@linux-mips.org>	2007-07-20 13:57:39 -0400
commit		17099b1142f6c0359fca60a3464dea8fb30badea (patch)
tree		26b9f3955dca84ccab594a76680c2a71e166768a /include/asm-mips/atomic.h
parent		ed203dadcd1373e80e95b04075e1eefc554a914b (diff)
[MIPS] Make support for weakly ordered LL/SC a config option.
None of the weakly ordered processors currently supported in-tree need this,
but it seems like this could change ...
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
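
For context: smp_llsc_mb() itself is defined outside this file, in the companion change that adds the config option the subject line refers to. A minimal sketch of the idea, assuming that companion hunk in include/asm-mips/barrier.h and the name CONFIG_WEAK_REORDERING_BEYOND_LLSC (neither appears in this section, so the exact spelling below is an approximation, not the verbatim patch):

/*
 * Sketch, not the verbatim companion hunk: smp_llsc_mb() is always a
 * compiler barrier, and expands to a real SYNC instruction only when
 * the kernel is configured for CPUs that reorder beyond LL/SC.
 */
#ifdef CONFIG_WEAK_REORDERING_BEYOND_LLSC
#define __WEAK_LLSC_MB		"	sync	\n"
#else
#define __WEAK_LLSC_MB		"		\n"
#endif

#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : : "memory")

On strongly ordered LL/SC implementations this keeps the atomics free of extra SYNCs, while a future weakly ordered part can select the option and get full barriers back.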
Diffstat (limited to 'include/asm-mips/atomic.h')
-rw-r--r--	include/asm-mips/atomic.h	33
1 file changed, 17 insertions(+), 16 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 1b60624dab7e..7d8003769a44 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -138,7 +138,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -181,7 +181,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -233,7 +233,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -250,7 +250,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -302,7 +302,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -519,7 +519,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -562,7 +562,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -571,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -614,7 +614,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -631,7 +631,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -683,7 +683,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -791,10 +791,11 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
+#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
 
 #include <asm-generic/atomic.h>
+
 #endif /* _ASM_ATOMIC_H */
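
To see what these barriers bracket, here is a condensed sketch of the LL/SC loop in atomic_add_return() that the hunks above modify, relying on the atomic_t and smp_llsc_mb() definitions of the surrounding headers. The real function in this file also contains an R10000_LLSC_WAR variant (using branch-likely beqzl) and an irq-disabling fallback for CPUs without LL/SC; both are omitted, so treat this as an illustration of the pattern rather than the verbatim source:

/*
 * Condensed sketch of the pattern changed above: a MIPS LL/SC
 * read-modify-write bracketed by smp_llsc_mb().  The WAR variant and
 * the non-LL/SC fallback under raw_local_irq_save() are omitted.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result, temp;

	smp_llsc_mb();	/* order earlier accesses before the LL/SC */

	__asm__ __volatile__(
	"	.set	mips3					\n"
	"1:	ll	%1, %2		# temp = v->counter	\n"
	"	addu	%0, %1, %3	# result = temp + i	\n"
	"	sc	%0, %2		# try to store result	\n"
	"	beqz	%0, 1b		# retry if sc failed	\n"
	"	addu	%0, %1, %3	# recompute return value\n"
	"	.set	mips0					\n"
	: "=&r" (result), "=&r" (temp), "=m" (v->counter)
	: "Ir" (i), "m" (v->counter)
	: "memory");

	smp_llsc_mb();	/* order the LL/SC before later accesses */

	return result;
}

The change is aimed precisely at these two bracketing barriers: on the MIPS implementations supported in-tree the LL/SC sequence itself is ordered strongly enough, so smp_llsc_mb() can compile to nothing, while a weakly ordered implementation can select the new config option and turn both barriers back into SYNCs.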