Diffstat (limited to 'include/asm-mips/atomic.h')
-rw-r--r--	include/asm-mips/atomic.h	45
1 file changed, 23 insertions(+), 22 deletions(-)
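In short: on MIPS CPUs without LL/SC support, the atomic fallback paths previously serialized every operation on a single global spinlock, atomic_lock. This patch drops that lock and instead masks interrupts on the local CPU around each read-modify-write sequence, pulling in <asm/interrupt.h> for local_irq_save()/local_irq_restore(). That is sufficient because a CPU without LL/SC cannot run an SMP kernel, so the only concurrency these paths must guard against is a local interrupt handler. The patch also adds an atomic_xchg() companion to the existing atomic_cmpxchg() and includes <asm-generic/atomic.h> at the end of the header.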
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 55c37c106ef0..654b97d3e13a 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -24,10 +24,9 @@
 #define _ASM_ATOMIC_H
 
 #include <asm/cpu-features.h>
+#include <asm/interrupt.h>
 #include <asm/war.h>
 
-extern spinlock_t atomic_lock;
-
 typedef struct { volatile int counter; } atomic_t;
 
 #define ATOMIC_INIT(i) { (i) }
@@ -85,9 +84,9 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter += i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -127,9 +126,9 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter -= i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -173,11 +172,11 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result += i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -220,11 +219,11 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -277,18 +276,19 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		if (result >= 0)
 			v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
 }
 
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
  * atomic_add_unless - add unless the number is a given value
@@ -432,9 +432,9 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter += i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -474,9 +474,9 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		v->counter -= i;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 }
 
@@ -520,11 +520,11 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result += i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -567,11 +567,11 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -624,12 +624,12 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	} else {
 		unsigned long flags;
 
-		spin_lock_irqsave(&atomic_lock, flags);
+		local_irq_save(flags);
 		result = v->counter;
 		result -= i;
 		if (result >= 0)
 			v->counter = result;
-		spin_unlock_irqrestore(&atomic_lock, flags);
+		local_irq_restore(flags);
 	}
 
 	return result;
@@ -713,4 +713,5 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
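
For reference, every hunk above applies the same uniprocessor fallback pattern: with interrupts masked on the local CPU, a plain load/modify/store sequence cannot be interrupted, so it behaves atomically. Below is a minimal sketch of that pattern, assuming a UP kernel without LL/SC; the my_* names are hypothetical stand-ins for illustration, and only the kernel's local_irq_save()/local_irq_restore() helpers are assumed:

#include <asm/interrupt.h>	/* local_irq_save()/local_irq_restore(), as added by this patch */

typedef struct { volatile int counter; } my_atomic_t;	/* mirrors atomic_t above */

static __inline__ int my_atomic_add_return(int i, my_atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);		/* mask interrupts on this CPU only */
	result = v->counter;		/* this read-modify-write cannot be */
	result += i;			/* interrupted, so it is atomic on UP */
	v->counter = result;
	local_irq_restore(flags);	/* restore the caller's IRQ state */

	return result;
}

The new atomic_xchg() rounds out atomic_cmpxchg(); a typical (hypothetical) use is claiming a flag without a lock:

	atomic_t claimed = ATOMIC_INIT(0);

	if (atomic_cmpxchg(&claimed, 0, 1) == 0) {
		/* this path won the race and owns the resource */
	}
	int old = atomic_xchg(&claimed, 0);	/* atomically read and release */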
