Diffstat (limited to 'arch/mips/include/asm/atomic.h')
-rw-r--r--  arch/mips/include/asm/atomic.h | 40 ++++++++++++++++++++--------------------
1 file changed, 20 insertions(+), 20 deletions(-)
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index eb7f01cfd1ac..dd75d673447e 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -49,7 +49,7 @@
  */
 static __inline__ void atomic_add(int i, atomic_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -61,7 +61,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -94,7 +94,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
  */
 static __inline__ void atomic_sub(int i, atomic_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -106,7 +106,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -139,7 +139,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -153,7 +153,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -191,7 +191,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -205,7 +205,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -251,7 +251,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -269,7 +269,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;
 
 		__asm__ __volatile__(
@@ -428,7 +428,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  */
 static __inline__ void atomic64_add(long i, atomic64_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -440,7 +440,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -473,7 +473,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
  */
 static __inline__ void atomic64_sub(long i, atomic64_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -485,7 +485,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 		"	.set	mips0				\n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -518,7 +518,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -532,7 +532,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -570,7 +570,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -584,7 +584,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -630,7 +630,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
 		__asm__ __volatile__(
@@ -648,7 +648,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;
 
 		__asm__ __volatile__(
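
All twenty hunks make the same one-word change: the predicate that selects the inline-asm path switches from cpu_has_llsc (the CPU implements ll/sc) to kernel_uses_llsc (this kernel build actually uses ll/sc). For reference, here is a minimal sketch of the ll/sc retry loop these functions are built around, modeled on the atomic_add() hunks; atomic_add_sketch is a hypothetical name, the asm layout is simplified, and the R10000_LLSC_WAR branch differs only in using the branch-likely beqzl in place of beqz.

/*
 * Sketch of the ll/sc retry loop guarded by kernel_uses_llsc,
 * with the irq-disable fallback taken when ll/sc is not used.
 */
static __inline__ void atomic_add_sketch(int i, atomic_t *v)
{
	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	ll	%0, %1	# temp = v->counter	\n"
		"	addu	%0, %2	# temp += i		\n"
		"	sc	%0, %1	# try store; temp = success	\n"
		"	beqz	%0, 1b	# lost the race: retry	\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);	/* no ll/sc: make the RMW atomic by masking irqs */
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

The *_return and *_sub_if_positive variants in the diff bracket the same loop with smp_llsc_mb() and add a "memory" clobber, so those operations also order surrounding memory accesses; the void atomic_add()/atomic_sub() style operations need no such barriers.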