path: root/arch/mips/include/asm/bitops.h
author	David Daney <ddaney@caviumnetworks.com>	2009-07-13 14:15:19 -0400
committer	Ralf Baechle <ralf@linux-mips.org>	2009-09-17 14:07:50 -0400
commit	b791d1193af9772040e592d5aa161790f800b762 (patch)
tree	6adad3d9cdf278a3a1a3418ae75a2864d0cc7f39 /arch/mips/include/asm/bitops.h
parent	f7ade3c168e4f437c11f57be012992bbb0e3075c (diff)
MIPS: Allow kernel use of LL/SC to be separate from the presence of LL/SC.
On some CPUs, it is more efficient to disable and enable interrupts in the
kernel rather than use ll/sc for atomic operations.  But if we were to set
cpu_has_llsc to false, we would break the userspace futex interface (in
asm/futex.h).

We separate the two concepts, with a new predicate kernel_uses_llsc, that
lets us disable the kernel's use of ll/sc while still allowing the futex
code to use it.

Also there were a couple of cases in bitops.h where we were using ll/sc
unconditionally even if cpu_has_llsc were false.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
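[Editor's note] The predicate itself is introduced in asm/cpu-features.h, outside this file's diff. A minimal sketch of how such a predicate can be expected to look, assuming it simply defaults to the existing cpu_has_llsc unless a platform header overrides it:

/*
 * Sketch only, not the verbatim companion change: assume
 * kernel_uses_llsc defaults to cpu_has_llsc, so behaviour is
 * unchanged everywhere until a platform's cpu-feature-overrides.h
 * redefines it (e.g. "#define kernel_uses_llsc 0") to select the
 * interrupt-disabling fallbacks, while cpu_has_llsc stays true
 * for the userspace futex path.
 */
#ifndef kernel_uses_llsc
#define kernel_uses_llsc	cpu_has_llsc
#endif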
Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	34
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index b1e9e97a9c7..84a383806b2 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -61,7 +61,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	" __LL "%0, %1			# set_bit	\n"
@@ -72,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
72 : "=&r" (temp), "=m" (*m) 72 : "=&r" (temp), "=m" (*m)
73 : "ir" (1UL << bit), "m" (*m)); 73 : "ir" (1UL << bit), "m" (*m));
74#ifdef CONFIG_CPU_MIPSR2 74#ifdef CONFIG_CPU_MIPSR2
75 } else if (__builtin_constant_p(bit)) { 75 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
76 __asm__ __volatile__( 76 __asm__ __volatile__(
77 "1: " __LL "%0, %1 # set_bit \n" 77 "1: " __LL "%0, %1 # set_bit \n"
78 " " __INS "%0, %4, %2, 1 \n" 78 " " __INS "%0, %4, %2, 1 \n"
@@ -84,7 +84,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
84 : "=&r" (temp), "=m" (*m) 84 : "=&r" (temp), "=m" (*m)
85 : "ir" (bit), "m" (*m), "r" (~0)); 85 : "ir" (bit), "m" (*m), "r" (~0));
86#endif /* CONFIG_CPU_MIPSR2 */ 86#endif /* CONFIG_CPU_MIPSR2 */
87 } else if (cpu_has_llsc) { 87 } else if (kernel_uses_llsc) {
88 __asm__ __volatile__( 88 __asm__ __volatile__(
89 " .set mips3 \n" 89 " .set mips3 \n"
90 "1: " __LL "%0, %1 # set_bit \n" 90 "1: " __LL "%0, %1 # set_bit \n"
@@ -126,7 +126,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long temp;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
 		"1:	" __LL "%0, %1			# clear_bit	\n"
@@ -137,7 +137,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
137 : "=&r" (temp), "=m" (*m) 137 : "=&r" (temp), "=m" (*m)
138 : "ir" (~(1UL << bit)), "m" (*m)); 138 : "ir" (~(1UL << bit)), "m" (*m));
139#ifdef CONFIG_CPU_MIPSR2 139#ifdef CONFIG_CPU_MIPSR2
140 } else if (__builtin_constant_p(bit)) { 140 } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
141 __asm__ __volatile__( 141 __asm__ __volatile__(
142 "1: " __LL "%0, %1 # clear_bit \n" 142 "1: " __LL "%0, %1 # clear_bit \n"
143 " " __INS "%0, $0, %2, 1 \n" 143 " " __INS "%0, $0, %2, 1 \n"
@@ -149,7 +149,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
149 : "=&r" (temp), "=m" (*m) 149 : "=&r" (temp), "=m" (*m)
150 : "ir" (bit), "m" (*m)); 150 : "ir" (bit), "m" (*m));
151#endif /* CONFIG_CPU_MIPSR2 */ 151#endif /* CONFIG_CPU_MIPSR2 */
152 } else if (cpu_has_llsc) { 152 } else if (kernel_uses_llsc) {
153 __asm__ __volatile__( 153 __asm__ __volatile__(
154 " .set mips3 \n" 154 " .set mips3 \n"
155 "1: " __LL "%0, %1 # clear_bit \n" 155 "1: " __LL "%0, %1 # clear_bit \n"
@@ -202,7 +202,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
@@ -215,7 +215,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
215 " .set mips0 \n" 215 " .set mips0 \n"
216 : "=&r" (temp), "=m" (*m) 216 : "=&r" (temp), "=m" (*m)
217 : "ir" (1UL << bit), "m" (*m)); 217 : "ir" (1UL << bit), "m" (*m));
218 } else if (cpu_has_llsc) { 218 } else if (kernel_uses_llsc) {
219 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 219 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
220 unsigned long temp; 220 unsigned long temp;
221 221
@@ -260,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
@@ -275,7 +275,7 @@ static inline int test_and_set_bit(unsigned long nr,
275 : "=&r" (temp), "=m" (*m), "=&r" (res) 275 : "=&r" (temp), "=m" (*m), "=&r" (res)
276 : "r" (1UL << bit), "m" (*m) 276 : "r" (1UL << bit), "m" (*m)
277 : "memory"); 277 : "memory");
278 } else if (cpu_has_llsc) { 278 } else if (kernel_uses_llsc) {
279 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 279 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
280 unsigned long temp; 280 unsigned long temp;
281 281
@@ -328,7 +328,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 	unsigned short bit = nr & SZLONG_MASK;
 	unsigned long res;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
@@ -343,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
343 : "=&r" (temp), "=m" (*m), "=&r" (res) 343 : "=&r" (temp), "=m" (*m), "=&r" (res)
344 : "r" (1UL << bit), "m" (*m) 344 : "r" (1UL << bit), "m" (*m)
345 : "memory"); 345 : "memory");
346 } else if (cpu_has_llsc) { 346 } else if (kernel_uses_llsc) {
347 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 347 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
348 unsigned long temp; 348 unsigned long temp;
349 349
@@ -397,7 +397,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
@@ -414,7 +414,7 @@ static inline int test_and_clear_bit(unsigned long nr,
414 : "r" (1UL << bit), "m" (*m) 414 : "r" (1UL << bit), "m" (*m)
415 : "memory"); 415 : "memory");
416#ifdef CONFIG_CPU_MIPSR2 416#ifdef CONFIG_CPU_MIPSR2
417 } else if (__builtin_constant_p(nr)) { 417 } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
418 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 418 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
419 unsigned long temp; 419 unsigned long temp;
420 420
@@ -431,7 +431,7 @@ static inline int test_and_clear_bit(unsigned long nr,
431 : "ir" (bit), "m" (*m) 431 : "ir" (bit), "m" (*m)
432 : "memory"); 432 : "memory");
433#endif 433#endif
434 } else if (cpu_has_llsc) { 434 } else if (kernel_uses_llsc) {
435 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 435 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
436 unsigned long temp; 436 unsigned long temp;
437 437
@@ -487,7 +487,7 @@ static inline int test_and_change_bit(unsigned long nr,
 
 	smp_llsc_mb();
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp;
 
@@ -502,7 +502,7 @@ static inline int test_and_change_bit(unsigned long nr,
502 : "=&r" (temp), "=m" (*m), "=&r" (res) 502 : "=&r" (temp), "=m" (*m), "=&r" (res)
503 : "r" (1UL << bit), "m" (*m) 503 : "r" (1UL << bit), "m" (*m)
504 : "memory"); 504 : "memory");
505 } else if (cpu_has_llsc) { 505 } else if (kernel_uses_llsc) {
506 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); 506 unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
507 unsigned long temp; 507 unsigned long temp;
508 508
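[Editor's note] Each of the if/else chains patched above ends in a final else branch (outside the diff context) that avoids LL/SC entirely; this is the path a false kernel_uses_llsc now selects. A rough sketch of that fallback for set_bit, under the assumption that it protects the read-modify-write by masking interrupts (function name set_bit_fallback is illustrative, not from the source):

/*
 * Sketch of the non-LL/SC fallback: with interrupts masked, the
 * |= is not preempted on this CPU, which is sufficient on the
 * uniprocessor systems where LL/SC is disabled or absent.
 */
static inline void set_bit_fallback(unsigned long nr,
				    volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;

	raw_local_irq_save(flags);	/* no LL/SC: mask interrupts instead */
	*a |= mask;
	raw_local_irq_restore(flags);
}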