author		Ralf Baechle <ralf@linux-mips.org>	2007-06-07 08:17:30 -0400
committer	Ralf Baechle <ralf@linux-mips.org>	2007-06-11 13:20:55 -0400
commit		ff72b7a6188088976bf7d77d3309a9b2f1716071 (patch)
tree		760ed02dd1f5fc3edb871d0f568c71f82b859155 /include/asm-mips/bitops.h
parent		e10e0cc8852ac846d5590188b935c98742e5cc43 (diff)
[MIPS] Fix smp barriers in test_and_{change,clear,set}_bit
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--	include/asm-mips/bitops.h	51
1 file changed, 19 insertions(+), 32 deletions(-)
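
The one-line summary undersells the bug: in each of test_and_set_bit(), test_and_clear_bit() and test_and_change_bit(), every branch of the if/else chain ended in its own return statement, so the trailing smp_mb() was unreachable and the operations were not the full memory barriers SMP callers are entitled to assume. The patch declares res once at function scope, drops the early returns, and funnels every path through a single exit that executes the barrier before returning. Below is a minimal user-space sketch of the fixed control flow; all names here are invented for illustration, and the __atomic builtins merely stand in for the kernel's LL/SC assembly and IRQ-masking branches:

/*
 * Hedged sketch, not kernel code: demo_test_and_set_bit() mirrors the
 * control flow after this patch.  The broken variant returned from
 * inside each branch, so a fence placed on the final line would never
 * have executed.
 */
#include <stdio.h>

#define DEMO_SZLONG_LOG  6	/* assuming 64-bit unsigned long */
#define DEMO_SZLONG_MASK 63

static inline int demo_test_and_set_bit(unsigned long nr,
					volatile unsigned long *addr)
{
	unsigned short bit = nr & DEMO_SZLONG_MASK;
	unsigned long res;	/* declared once, as in the patch */
	volatile unsigned long *a = addr + (nr >> DEMO_SZLONG_LOG);
	unsigned long mask = 1UL << bit;

	/* stand-in for the LL/SC and irq-save branches of the real code */
	res = __atomic_fetch_or(a, mask, __ATOMIC_RELAXED) & mask;

	/* single exit: the fence (playing smp_mb()'s role) runs on every path */
	__atomic_thread_fence(__ATOMIC_SEQ_CST);

	return res != 0;
}

int main(void)
{
	unsigned long word = 0;

	printf("first call:  %d\n", demo_test_and_set_bit(5, &word)); /* 0 */
	printf("second call: %d\n", demo_test_and_set_bit(5, &word)); /* 1 */
	return 0;
}
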
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index d995413e11fd..ffe245b4258f 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -238,10 +238,11 @@ static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -254,11 +255,9 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -277,25 +276,22 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a |= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
@@ -310,6 +306,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -327,12 +324,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 #ifdef CONFIG_CPU_MIPSR2
 	} else if (__builtin_constant_p(nr)) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
@@ -346,12 +341,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "ri" (bit), "m" (*m)
 		: "memory");
-
-		return res;
 #endif
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -371,25 +364,22 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a &= ~mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 /*
@@ -404,10 +394,11 @@ static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -420,11 +411,9 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -443,24 +432,22 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
-		unsigned long mask, retval;
+		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a ^= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
 	smp_mb();
+
+	return res != 0;
 }
 
 #include <asm-generic/bitops/non-atomic.h>
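
For context on why the unreachable smp_mb() mattered: lock-style callers rely on these operations being full barriers, so that accesses inside a critical section cannot be reordered past the bit operation. A hedged sketch of that expectation, reusing the demo helper from the earlier example (demo_trylock is likewise invented for illustration):

/*
 * Hedged sketch: bit 0 used as a lock bit.  Without the barrier, loads
 * and stores in the critical section could be reordered before the bit
 * is observed set, breaking mutual exclusion on SMP.
 */
static int demo_trylock(volatile unsigned long *lock_word)
{
	return demo_test_and_set_bit(0, lock_word) == 0; /* 1 on success */
}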