author     David Daney <ddaney@caviumnetworks.com>   2009-07-13 14:15:19 -0400
committer  Ralf Baechle <ralf@linux-mips.org>        2009-09-17 14:07:50 -0400
commit     b791d1193af9772040e592d5aa161790f800b762 (patch)
tree       6adad3d9cdf278a3a1a3418ae75a2864d0cc7f39 /arch/mips/include
parent     f7ade3c168e4f437c11f57be012992bbb0e3075c (diff)
MIPS: Allow kernel use of LL/SC to be separate from the presence of LL/SC.
On some CPUs, it is more efficient to disable and enable interrupts in the
kernel rather than use ll/sc for atomic operations. But if we were to set
cpu_has_llsc to false, we would break the userspace futex interface (in
asm/futex.h).
We separate the two concepts with a new predicate, kernel_uses_llsc, which
lets us disable the kernel's use of ll/sc while still allowing the futex
code to use it.
There were also a couple of cases in bitops.h where we were using ll/sc
unconditionally, even when cpu_has_llsc was false.
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch/mips/include')
-rw-r--r--  arch/mips/include/asm/atomic.h        | 40
-rw-r--r--  arch/mips/include/asm/bitops.h        | 34
-rw-r--r--  arch/mips/include/asm/cmpxchg.h       |  4
-rw-r--r--  arch/mips/include/asm/cpu-features.h  |  3
-rw-r--r--  arch/mips/include/asm/local.h         |  8
-rw-r--r--  arch/mips/include/asm/system.h        |  8
6 files changed, 50 insertions(+), 47 deletions(-)
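Before the per-file hunks, here is a minimal, self-contained C sketch of the split this patch introduces. It is illustrative only: the macro names mirror the kernel's, but the function bodies are placeholders, not the kernel's real ll/sc or interrupt-disable implementations. Kernel-side code is gated on kernel_uses_llsc (which defaults to cpu_has_llsc), while asm/futex.h keeps testing cpu_has_llsc, so a platform can turn LL/SC off inside the kernel without breaking userspace futexes.

#include <stdio.h>

/* Placeholder feature tests; in the kernel these come from cpu-features.h. */
#define cpu_has_llsc		1	/* the CPU implements ll/sc */
#define kernel_uses_llsc	0	/* ...but the kernel opts not to use it */

/* Stand-in for a kernel atomic op: only the selection logic matters here. */
static void kernel_atomic_add(int i, int *counter)
{
	if (kernel_uses_llsc) {
		*counter += i;	/* would be the ll/sc loop from atomic.h */
	} else {
		*counter += i;	/* would be the interrupt-disable fallback */
	}
}

/* Stand-in for the futex path, which still keys off cpu_has_llsc. */
static int futex_path_uses_llsc(void)
{
	return cpu_has_llsc;
}

int main(void)
{
	int counter = 0;

	kernel_atomic_add(5, &counter);
	printf("counter=%d, futex path uses ll/sc: %d\n",
	       counter, futex_path_uses_llsc());
	return 0;
}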
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index eb7f01cfd1ac..dd75d673447e 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -49,7 +49,7 @@
  */
 static __inline__ void atomic_add(int i, atomic_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;

 		__asm__ __volatile__(
@@ -61,7 +61,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
 		" .set mips0 \n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;

 		__asm__ __volatile__(
@@ -94,7 +94,7 @@ static __inline__ void atomic_add(int i, atomic_t * v)
  */
 static __inline__ void atomic_sub(int i, atomic_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;

 		__asm__ __volatile__(
@@ -106,7 +106,7 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
 		" .set mips0 \n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;

 		__asm__ __volatile__(
@@ -139,7 +139,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;

 		__asm__ __volatile__(
@@ -153,7 +153,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;

 		__asm__ __volatile__(
@@ -191,7 +191,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;

 		__asm__ __volatile__(
@@ -205,7 +205,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;

 		__asm__ __volatile__(
@@ -251,7 +251,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		int temp;

 		__asm__ __volatile__(
@@ -269,7 +269,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		int temp;

 		__asm__ __volatile__(
@@ -428,7 +428,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  */
 static __inline__ void atomic64_add(long i, atomic64_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;

 		__asm__ __volatile__(
@@ -440,7 +440,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
 		" .set mips0 \n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;

 		__asm__ __volatile__(
@@ -473,7 +473,7 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
  */
 static __inline__ void atomic64_sub(long i, atomic64_t * v)
 {
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;

 		__asm__ __volatile__(
@@ -485,7 +485,7 @@ static __inline__ void atomic64_sub(long i, atomic64_t * v)
 		" .set mips0 \n"
 		: "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;

 		__asm__ __volatile__(
@@ -518,7 +518,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;

 		__asm__ __volatile__(
@@ -532,7 +532,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;

 		__asm__ __volatile__(
@@ -570,7 +570,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;

 		__asm__ __volatile__(
@@ -584,7 +584,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;

 		__asm__ __volatile__(
@@ -630,7 +630,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;

 		__asm__ __volatile__(
@@ -648,7 +648,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
 		: "Ir" (i), "m" (v->counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		long temp;

 		__asm__ __volatile__(
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index b1e9e97a9c78..84a383806b2c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -61,7 +61,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		" .set mips3 \n"
 		"1: " __LL "%0, %1 # set_bit \n"
@@ -72,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << bit), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-	} else if (__builtin_constant_p(bit)) {
+	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
 		__asm__ __volatile__(
 		"1: " __LL "%0, %1 # set_bit \n"
 		" " __INS "%0, %4, %2, 1 \n"
@@ -84,7 +84,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (bit), "m" (*m), "r" (~0));
 #endif /* CONFIG_CPU_MIPSR2 */
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		__asm__ __volatile__(
 		" .set mips3 \n"
 		"1: " __LL "%0, %1 # set_bit \n"
@@ -126,7 +126,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		" .set mips3 \n"
 		"1: " __LL "%0, %1 # clear_bit \n"
@@ -137,7 +137,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (~(1UL << bit)), "m" (*m));
 #ifdef CONFIG_CPU_MIPSR2
-	} else if (__builtin_constant_p(bit)) {
+	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
 		__asm__ __volatile__(
 		"1: " __LL "%0, %1 # clear_bit \n"
 		" " __INS "%0, $0, %2, 1 \n"
@@ -149,7 +149,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (bit), "m" (*m));
 #endif /* CONFIG_CPU_MIPSR2 */
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		__asm__ __volatile__(
 		" .set mips3 \n"
 		"1: " __LL "%0, %1 # clear_bit \n"
@@ -202,7 +202,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -215,7 +215,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 		" .set mips0 \n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << bit), "m" (*m));
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -260,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -275,7 +275,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -328,7 +328,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -343,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -397,7 +397,7 @@ static inline int test_and_clear_bit(unsigned long nr,

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -414,7 +414,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
 #ifdef CONFIG_CPU_MIPSR2
-	} else if (__builtin_constant_p(nr)) {
+	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -431,7 +431,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "ir" (bit), "m" (*m)
 		: "memory");
 #endif
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -487,7 +487,7 @@ static inline int test_and_change_bit(unsigned long nr,

 	smp_llsc_mb();

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

@@ -502,7 +502,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;

diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 4a812c3ceb90..815a438a268d 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -16,7 +16,7 @@
 ({ \
 	__typeof(*(m)) __ret; \
 	\
-	if (cpu_has_llsc && R10000_LLSC_WAR) { \
+	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
 		__asm__ __volatile__( \
 		" .set push \n" \
 		" .set noat \n" \
@@ -33,7 +33,7 @@
 		: "=&r" (__ret), "=R" (*m) \
 		: "R" (*m), "Jr" (old), "Jr" (new) \
 		: "memory"); \
-	} else if (cpu_has_llsc) { \
+	} else if (kernel_uses_llsc) { \
 		__asm__ __volatile__( \
 		" .set push \n" \
 		" .set noat \n" \
diff --git a/arch/mips/include/asm/cpu-features.h b/arch/mips/include/asm/cpu-features.h
index 8ab1d12ba7f4..1f4df647c384 100644
--- a/arch/mips/include/asm/cpu-features.h
+++ b/arch/mips/include/asm/cpu-features.h
@@ -80,6 +80,9 @@
 #ifndef cpu_has_llsc
 #define cpu_has_llsc (cpu_data[0].options & MIPS_CPU_LLSC)
 #endif
+#ifndef kernel_uses_llsc
+#define kernel_uses_llsc cpu_has_llsc
+#endif
 #ifndef cpu_has_mips16
 #define cpu_has_mips16 (cpu_data[0].ases & MIPS_ASE_MIPS16)
 #endif
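Because the new define is wrapped in #ifndef, a platform can pin the value before this default is seen. A hypothetical override header (not part of this patch; the file name and include guard are invented for illustration) might look like the following, keeping ll/sc available to userspace futexes while steering kernel atomics onto the fallback path:

/* Hypothetical platform cpu-feature-overrides.h - not part of this commit. */
#ifndef __ASM_MACH_EXAMPLE_CPU_FEATURE_OVERRIDES_H
#define __ASM_MACH_EXAMPLE_CPU_FEATURE_OVERRIDES_H

#define cpu_has_llsc		1	/* userspace futexes keep using ll/sc */
#define kernel_uses_llsc	0	/* kernel atomics take the non-ll/sc path */

#endif /* __ASM_MACH_EXAMPLE_CPU_FEATURE_OVERRIDES_H */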
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index f96fd59e0845..361f4f16c30c 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -29,7 +29,7 @@ static __inline__ long local_add_return(long i, local_t * l)
 {
 	unsigned long result;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;

 		__asm__ __volatile__(
@@ -43,7 +43,7 @@ static __inline__ long local_add_return(long i, local_t * l)
 		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
 		: "Ir" (i), "m" (l->a.counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long temp;

 		__asm__ __volatile__(
@@ -74,7 +74,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
 {
 	unsigned long result;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;

 		__asm__ __volatile__(
@@ -88,7 +88,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
 		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
 		: "Ir" (i), "m" (l->a.counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long temp;

 		__asm__ __volatile__(
diff --git a/arch/mips/include/asm/system.h b/arch/mips/include/asm/system.h
index cc7262ff0765..fcf5f98d90cc 100644
--- a/arch/mips/include/asm/system.h
+++ b/arch/mips/include/asm/system.h
@@ -94,7 +94,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 {
 	__u32 retval;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;

 		__asm__ __volatile__(
@@ -109,7 +109,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long dummy;

 		__asm__ __volatile__(
@@ -146,7 +146,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 {
 	__u64 retval;

-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long dummy;

 		__asm__ __volatile__(
@@ -159,7 +159,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
 		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
 		: "R" (*m), "Jr" (val)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long dummy;

 		__asm__ __volatile__(