 include/asm-mips/atomic.h | 43 +++++++++++++++++++++----------------------
 1 file changed, 21 insertions(+), 22 deletions(-)
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 94a95872d727..654b97d3e13a 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -24,10 +24,9 @@
 #define _ASM_ATOMIC_H
 
 #include <asm/cpu-features.h>
+#include <asm/interrupt.h>
 #include <asm/war.h>
 
-extern spinlock_t atomic_lock;
-
 typedef struct { volatile int counter; } atomic_t;
 
 #define ATOMIC_INIT(i) { (i) }
@@ -85,9 +84,9 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter += i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -127,9 +126,9 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter -= i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -173,11 +172,11 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result += i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -220,11 +219,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -277,12 +276,12 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		if (result >= 0)
 			v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -433,9 +432,9 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter += i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -475,9 +474,9 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter -= i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -521,11 +520,11 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result += i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -568,11 +567,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -625,12 +624,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		if (result >= 0)
 			v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
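
Note: every hunk above makes the same substitution in the non-LL/SC fallback path. The plain read-modify-write on v->counter is now serialized by masking interrupts on the local CPU (local_irq_save()/local_irq_restore(), presumably why the patch also adds #include <asm/interrupt.h>) instead of by the removed global atomic_lock spinlock. As a rough illustration only, here is a minimal standalone C sketch of the resulting pattern; the two IRQ macros below are hypothetical stand-ins so the file compiles outside the kernel and are not the kernel's real definitions.

/*
 * Standalone sketch of the post-patch fallback pattern.
 * NOTE: local_irq_save()/local_irq_restore() are stubbed here purely so
 * this compiles outside the kernel; the real macros mask interrupts on
 * the local CPU.
 */
#include <stdio.h>

typedef struct { volatile int counter; } atomic_t;

#define local_irq_save(flags)		((void)((flags) = 0))
#define local_irq_restore(flags)	((void)(flags))

static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	/* Non-LL/SC fallback: protect the read-modify-write by masking
	 * local interrupts instead of taking a global spinlock. */
	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

int main(void)
{
	atomic_t a = { 0 };

	atomic_add(5, &a);
	printf("counter = %d\n", a.counter);	/* prints "counter = 5" */
	return 0;
}

Compiled with any C compiler this prints 5; in the kernel the same three-line shape relies on the architecture's real IRQ-masking macros, which is sufficient on uniprocessor systems where no other CPU can touch the counter concurrently.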