 arch/mips/include/asm/atomic.h | 138 ++++++++++++++++++++++++++++++++++++++---
 1 file changed, 129 insertions(+), 9 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 835b402e4574..431079f8e483 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -66,7 +66,7 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
66 " " #asm_op " %0, %2 \n" \ 66 " " #asm_op " %0, %2 \n" \
67 " sc %0, %1 \n" \ 67 " sc %0, %1 \n" \
68 " .set mips0 \n" \ 68 " .set mips0 \n" \
69 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \ 69 : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
70 : "Ir" (i)); \ 70 : "Ir" (i)); \
71 } while (unlikely(!temp)); \ 71 } while (unlikely(!temp)); \
72 } else { \ 72 } else { \
@@ -130,18 +130,78 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v) \
         return result; \
 }
 
+#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
+static __inline__ int atomic_fetch_##op(int i, atomic_t * v) \
+{ \
+        int result; \
+        \
+        smp_mb__before_llsc(); \
+        \
+        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
+                int temp; \
+                \
+                __asm__ __volatile__( \
+                " .set arch=r4000 \n" \
+                "1: ll %1, %2 # atomic_fetch_" #op " \n" \
+                " " #asm_op " %0, %1, %3 \n" \
+                " sc %0, %2 \n" \
+                " beqzl %0, 1b \n" \
+                " move %0, %1 \n" \
+                " .set mips0 \n" \
+                : "=&r" (result), "=&r" (temp), \
+                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
+                : "Ir" (i)); \
+        } else if (kernel_uses_llsc) { \
+                int temp; \
+                \
+                do { \
+                        __asm__ __volatile__( \
+                        " .set "MIPS_ISA_LEVEL" \n" \
+                        " ll %1, %2 # atomic_fetch_" #op " \n" \
+                        " " #asm_op " %0, %1, %3 \n" \
+                        " sc %0, %2 \n" \
+                        " .set mips0 \n" \
+                        : "=&r" (result), "=&r" (temp), \
+                          "+" GCC_OFF_SMALL_ASM() (v->counter) \
+                        : "Ir" (i)); \
+                } while (unlikely(!result)); \
+                \
+                result = temp; \
+        } else { \
+                unsigned long flags; \
+                \
+                raw_local_irq_save(flags); \
+                result = v->counter; \
+                v->counter c_op i; \
+                raw_local_irq_restore(flags); \
+        } \
+        \
+        smp_llsc_mb(); \
+        \
+        return result; \
+}
+
 #define ATOMIC_OPS(op, c_op, asm_op) \
         ATOMIC_OP(op, c_op, asm_op) \
-        ATOMIC_OP_RETURN(op, c_op, asm_op)
+        ATOMIC_OP_RETURN(op, c_op, asm_op) \
+        ATOMIC_FETCH_OP(op, c_op, asm_op)
 
 ATOMIC_OPS(add, +=, addu)
 ATOMIC_OPS(sub, -=, subu)
 
-ATOMIC_OP(and, &=, and)
-ATOMIC_OP(or, |=, or)
-ATOMIC_OP(xor, ^=, xor)
+#undef ATOMIC_OPS
+#define ATOMIC_OPS(op, c_op, asm_op) \
+        ATOMIC_OP(op, c_op, asm_op) \
+        ATOMIC_FETCH_OP(op, c_op, asm_op)
+
+#define atomic_fetch_or atomic_fetch_or
+
+ATOMIC_OPS(and, &=, and)
+ATOMIC_OPS(or, |=, or)
+ATOMIC_OPS(xor, ^=, xor)
 
 #undef ATOMIC_OPS
+#undef ATOMIC_FETCH_OP
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
@@ -414,17 +474,77 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
         return result; \
 }
 
+#define ATOMIC64_FETCH_OP(op, c_op, asm_op) \
+static __inline__ long atomic64_fetch_##op(long i, atomic64_t * v) \
+{ \
+        long result; \
+        \
+        smp_mb__before_llsc(); \
+        \
+        if (kernel_uses_llsc && R10000_LLSC_WAR) { \
+                long temp; \
+                \
+                __asm__ __volatile__( \
+                " .set arch=r4000 \n" \
+                "1: lld %1, %2 # atomic64_fetch_" #op "\n" \
+                " " #asm_op " %0, %1, %3 \n" \
+                " scd %0, %2 \n" \
+                " beqzl %0, 1b \n" \
+                " move %0, %1 \n" \
+                " .set mips0 \n" \
+                : "=&r" (result), "=&r" (temp), \
+                  "+" GCC_OFF_SMALL_ASM() (v->counter) \
+                : "Ir" (i)); \
+        } else if (kernel_uses_llsc) { \
+                long temp; \
+                \
+                do { \
+                        __asm__ __volatile__( \
+                        " .set "MIPS_ISA_LEVEL" \n" \
+                        " lld %1, %2 # atomic64_fetch_" #op "\n" \
+                        " " #asm_op " %0, %1, %3 \n" \
+                        " scd %0, %2 \n" \
+                        " .set mips0 \n" \
+                        : "=&r" (result), "=&r" (temp), \
+                          "=" GCC_OFF_SMALL_ASM() (v->counter) \
+                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
+                        : "memory"); \
+                } while (unlikely(!result)); \
+                \
+                result = temp; \
+        } else { \
+                unsigned long flags; \
+                \
+                raw_local_irq_save(flags); \
+                result = v->counter; \
+                v->counter c_op i; \
+                raw_local_irq_restore(flags); \
+        } \
+        \
+        smp_llsc_mb(); \
+        \
+        return result; \
+}
+
 #define ATOMIC64_OPS(op, c_op, asm_op) \
         ATOMIC64_OP(op, c_op, asm_op) \
-        ATOMIC64_OP_RETURN(op, c_op, asm_op)
+        ATOMIC64_OP_RETURN(op, c_op, asm_op) \
+        ATOMIC64_FETCH_OP(op, c_op, asm_op)
 
 ATOMIC64_OPS(add, +=, daddu)
 ATOMIC64_OPS(sub, -=, dsubu)
-ATOMIC64_OP(and, &=, and)
-ATOMIC64_OP(or, |=, or)
-ATOMIC64_OP(xor, ^=, xor)
 
 #undef ATOMIC64_OPS
+#define ATOMIC64_OPS(op, c_op, asm_op) \
+        ATOMIC64_OP(op, c_op, asm_op) \
+        ATOMIC64_FETCH_OP(op, c_op, asm_op)
+
+ATOMIC64_OPS(and, &=, and)
+ATOMIC64_OPS(or, |=, or)
+ATOMIC64_OPS(xor, ^=, xor)
+
+#undef ATOMIC64_OPS
+#undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
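
For context only, not part of the patch: the atomic_fetch_*() helpers added above return the value the counter held before the operation, whereas the existing atomic_*_return() helpers return the new value. A minimal usage sketch under that assumption follows; the flag bit, variable, and function names are hypothetical and used purely for illustration.

#include <linux/atomic.h>
#include <linux/types.h>

#define MY_FLAG_BUSY	0x1		/* hypothetical flag bit, for illustration */

static atomic_t my_flags = ATOMIC_INIT(0);

/*
 * Set MY_FLAG_BUSY and report whether this caller was the one to set it.
 * atomic_fetch_or() returns the old flags word, so the test below observes
 * the state from before our update.
 */
static bool my_try_claim(void)
{
	int old = atomic_fetch_or(MY_FLAG_BUSY, &my_flags);

	return !(old & MY_FLAG_BUSY);
}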