author     Ivan Kokshaysky <ink@jurassic.park.msu.ru>      2009-03-31 18:23:35 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>  2009-04-01 11:59:16 -0400
commit     a6209d6d71f2ab8c63cc1587ef65490d83022baf (patch)
tree       2490d7adf757c9ea00c30fb8f75171d6b8772d3a
parent     a94066992b3050a7bd9a82bf73bf19f6052d2f82 (diff)
alpha: xchg/cmpxchg cleanup and fixes
- "_local" versions of xchg/cmpxchg functions duplicate code
of non-local ones (quite a few pages of assembler), except
memory barriers. We can generate these two variants from a
single header file using simple macros;
- convert xchg macro back to inline function using always_inline
attribute;
- use proper argument types for cmpxchg_u8/u16 functions
to fix a problem with negative arguments.
Signed-off-by: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: Richard Henderson <rth@twiddle.net>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
-rw-r--r--  arch/alpha/include/asm/system.h  | 547
-rw-r--r--  arch/alpha/include/asm/xchg.h    | 258
2 files changed, 308 insertions, 497 deletions
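
The third bullet of the commit message is the actual bug fix. The old
__cmpxchg_u8() took "long old", so a negative byte value arrived
sign-extended to 64 bits, while the extbl instruction produces a
zero-extended byte; the 64-bit cmpeq could then never match and the
exchange silently failed. The user-space sketch below reproduces just
that type mismatch; it is illustrative demo code, not part of the patch.

#include <stdio.h>

/* Why __cmpxchg_u8(volatile char *m, long old, ...) broke for negative
 * values: the byte extracted by extbl is zero-extended, but a negative
 * 'old' passed as long is sign-extended, so the 64-bit compare can
 * never succeed. */
int main(void)
{
        char byte_in_mem = -1;

        /* What extbl yields: the memory byte, zero-extended. */
        unsigned long extracted = (unsigned char)byte_in_mem;

        /* What the old prototype did: sign-extend the argument. */
        long old_as_long = (char)-1;

        /* What the fixed prototype (unsigned char old) does. */
        unsigned long old_as_uchar = (unsigned char)-1;

        printf("extracted       = %#lx\n", extracted);            /* 0xff */
        printf("old (long)      = %#lx  -> compare fails\n",
               (unsigned long)old_as_long);                       /* 0xfff...f */
        printf("old (u8, fixed) = %#lx  -> compare succeeds\n",
               old_as_uchar);                                     /* 0xff */
        return 0;
}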
diff --git a/arch/alpha/include/asm/system.h b/arch/alpha/include/asm/system.h
index afe20fa58c99..5aa40cca4f23 100644
--- a/arch/alpha/include/asm/system.h
+++ b/arch/alpha/include/asm/system.h
@@ -309,518 +309,71 @@ extern int __min_ipl;
 #define tbia() __tbi(-2, /* no second argument */)
 
 /*
- * Atomic exchange.
- * Since it can be used to implement critical sections
- * it must clobber "memory" (also for interrupts in UP).
+ * Atomic exchange routines.
  */
 
-static inline unsigned long
-__xchg_u8(volatile char *m, unsigned long val)
-{
-        unsigned long ret, tmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %4,7,%3\n"
-        " insbl %1,%4,%1\n"
-        "1: ldq_l %2,0(%3)\n"
-        " extbl %2,%4,%0\n"
-        " mskbl %2,%4,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%3)\n"
-        " beq %2,2f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-        : "r" ((long)m), "1" (val) : "memory");
-
-        return ret;
-}
-
-static inline unsigned long
-__xchg_u16(volatile short *m, unsigned long val)
-{
-        unsigned long ret, tmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %4,7,%3\n"
-        " inswl %1,%4,%1\n"
-        "1: ldq_l %2,0(%3)\n"
-        " extwl %2,%4,%0\n"
-        " mskwl %2,%4,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%3)\n"
-        " beq %2,2f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-        : "r" ((long)m), "1" (val) : "memory");
-
-        return ret;
-}
-
-static inline unsigned long
-__xchg_u32(volatile int *m, unsigned long val)
-{
-        unsigned long dummy;
-
-        __asm__ __volatile__(
-        "1: ldl_l %0,%4\n"
-        " bis $31,%3,%1\n"
-        " stl_c %1,%2\n"
-        " beq %1,2f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (val), "=&r" (dummy), "=m" (*m)
-        : "rI" (val), "m" (*m) : "memory");
-
-        return val;
-}
-
-static inline unsigned long
-__xchg_u64(volatile long *m, unsigned long val)
-{
-        unsigned long dummy;
-
-        __asm__ __volatile__(
-        "1: ldq_l %0,%4\n"
-        " bis $31,%3,%1\n"
-        " stq_c %1,%2\n"
-        " beq %1,2f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (val), "=&r" (dummy), "=m" (*m)
-        : "rI" (val), "m" (*m) : "memory");
-
-        return val;
-}
-
-/* This function doesn't exist, so you'll get a linker error
-   if something tries to do an invalid xchg(). */
-extern void __xchg_called_with_bad_pointer(void);
-
-#define __xchg(ptr, x, size) \
-({ \
-        unsigned long __xchg__res; \
-        volatile void *__xchg__ptr = (ptr); \
-        switch (size) { \
-                case 1: __xchg__res = __xchg_u8(__xchg__ptr, x); break; \
-                case 2: __xchg__res = __xchg_u16(__xchg__ptr, x); break; \
-                case 4: __xchg__res = __xchg_u32(__xchg__ptr, x); break; \
-                case 8: __xchg__res = __xchg_u64(__xchg__ptr, x); break; \
-                default: __xchg_called_with_bad_pointer(); __xchg__res = x; \
-        } \
-        __xchg__res; \
-})
-
-#define xchg(ptr,x) \
-({ \
-        __typeof__(*(ptr)) _x_ = (x); \
-        (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
-})
-
-static inline unsigned long
-__xchg_u8_local(volatile char *m, unsigned long val)
-{
-        unsigned long ret, tmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %4,7,%3\n"
-        " insbl %1,%4,%1\n"
-        "1: ldq_l %2,0(%3)\n"
-        " extbl %2,%4,%0\n"
-        " mskbl %2,%4,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%3)\n"
-        " beq %2,2f\n"
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-        : "r" ((long)m), "1" (val) : "memory");
-
-        return ret;
-}
-
-static inline unsigned long
-__xchg_u16_local(volatile short *m, unsigned long val)
-{
-        unsigned long ret, tmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %4,7,%3\n"
-        " inswl %1,%4,%1\n"
-        "1: ldq_l %2,0(%3)\n"
-        " extwl %2,%4,%0\n"
-        " mskwl %2,%4,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%3)\n"
-        " beq %2,2f\n"
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
-        : "r" ((long)m), "1" (val) : "memory");
-
-        return ret;
-}
-
-static inline unsigned long
-__xchg_u32_local(volatile int *m, unsigned long val)
-{
-        unsigned long dummy;
-
-        __asm__ __volatile__(
-        "1: ldl_l %0,%4\n"
-        " bis $31,%3,%1\n"
-        " stl_c %1,%2\n"
-        " beq %1,2f\n"
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (val), "=&r" (dummy), "=m" (*m)
-        : "rI" (val), "m" (*m) : "memory");
-
-        return val;
-}
-
-static inline unsigned long
-__xchg_u64_local(volatile long *m, unsigned long val)
-{
-        unsigned long dummy;
-
-        __asm__ __volatile__(
-        "1: ldq_l %0,%4\n"
-        " bis $31,%3,%1\n"
-        " stq_c %1,%2\n"
-        " beq %1,2f\n"
-        ".subsection 2\n"
-        "2: br 1b\n"
-        ".previous"
-        : "=&r" (val), "=&r" (dummy), "=m" (*m)
-        : "rI" (val), "m" (*m) : "memory");
-
-        return val;
-}
-
-#define __xchg_local(ptr, x, size) \
-({ \
-        unsigned long __xchg__res; \
-        volatile void *__xchg__ptr = (ptr); \
-        switch (size) { \
-                case 1: __xchg__res = __xchg_u8_local(__xchg__ptr, x); break; \
-                case 2: __xchg__res = __xchg_u16_local(__xchg__ptr, x); break; \
-                case 4: __xchg__res = __xchg_u32_local(__xchg__ptr, x); break; \
-                case 8: __xchg__res = __xchg_u64_local(__xchg__ptr, x); break; \
-                default: __xchg_called_with_bad_pointer(); __xchg__res = x; \
-        } \
-        __xchg__res; \
-})
-
-#define xchg_local(ptr,x) \
-({ \
-        __typeof__(*(ptr)) _x_ = (x); \
-        (__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_, \
-                                          sizeof(*(ptr))); \
-})
-
-/*
- * Atomic compare and exchange. Compare OLD with MEM, if identical,
- * store NEW in MEM. Return the initial value in MEM. Success is
- * indicated by comparing RETURN with OLD.
- *
- * The memory barrier should be placed in SMP only when we actually
- * make the change. If we don't change anything (so if the returned
- * prev is equal to old) then we aren't acquiring anything new and
- * we don't need any memory barrier as far I can tell.
- */
-
-#define __HAVE_ARCH_CMPXCHG 1
-
-static inline unsigned long
-__cmpxchg_u8(volatile char *m, long old, long new)
-{
-        unsigned long prev, tmp, cmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %5,7,%4\n"
-        " insbl %1,%5,%1\n"
-        "1: ldq_l %2,0(%4)\n"
-        " extbl %2,%5,%0\n"
-        " cmpeq %0,%6,%3\n"
-        " beq %3,2f\n"
-        " mskbl %2,%5,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%4)\n"
-        " beq %2,3f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u16(volatile short *m, long old, long new)
-{
-        unsigned long prev, tmp, cmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %5,7,%4\n"
-        " inswl %1,%5,%1\n"
-        "1: ldq_l %2,0(%4)\n"
-        " extwl %2,%5,%0\n"
-        " cmpeq %0,%6,%3\n"
-        " beq %3,2f\n"
-        " mskwl %2,%5,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%4)\n"
-        " beq %2,3f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u32(volatile int *m, int old, int new)
-{
-        unsigned long prev, cmp;
-
-        __asm__ __volatile__(
-        "1: ldl_l %0,%5\n"
-        " cmpeq %0,%3,%1\n"
-        " beq %1,2f\n"
-        " mov %4,%1\n"
-        " stl_c %1,%2\n"
-        " beq %1,3f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
-        : "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
-{
-        unsigned long prev, cmp;
-
-        __asm__ __volatile__(
-        "1: ldq_l %0,%5\n"
-        " cmpeq %0,%3,%1\n"
-        " beq %1,2f\n"
-        " mov %4,%1\n"
-        " stq_c %1,%2\n"
-        " beq %1,3f\n"
-#ifdef CONFIG_SMP
-        " mb\n"
-#endif
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
-        : "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-        return prev;
-}
-
-/* This function doesn't exist, so you'll get a linker error
-   if something tries to do an invalid cmpxchg(). */
-extern void __cmpxchg_called_with_bad_pointer(void);
-
-static __always_inline unsigned long
-__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
-{
-        switch (size) {
-        case 1:
-                return __cmpxchg_u8(ptr, old, new);
-        case 2:
-                return __cmpxchg_u16(ptr, old, new);
-        case 4:
-                return __cmpxchg_u32(ptr, old, new);
-        case 8:
-                return __cmpxchg_u64(ptr, old, new);
-        }
-        __cmpxchg_called_with_bad_pointer();
-        return old;
-}
-
-#define cmpxchg(ptr, o, n) \
-({ \
-        __typeof__(*(ptr)) _o_ = (o); \
-        __typeof__(*(ptr)) _n_ = (n); \
-        (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
-                                       (unsigned long)_n_, sizeof(*(ptr))); \
-})
-#define cmpxchg64(ptr, o, n) \
-({ \
-        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
-        cmpxchg((ptr), (o), (n)); \
-})
-
-static inline unsigned long
-__cmpxchg_u8_local(volatile char *m, long old, long new)
-{
-        unsigned long prev, tmp, cmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %5,7,%4\n"
-        " insbl %1,%5,%1\n"
-        "1: ldq_l %2,0(%4)\n"
-        " extbl %2,%5,%0\n"
-        " cmpeq %0,%6,%3\n"
-        " beq %3,2f\n"
-        " mskbl %2,%5,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%4)\n"
-        " beq %2,3f\n"
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u16_local(volatile short *m, long old, long new)
-{
-        unsigned long prev, tmp, cmp, addr64;
-
-        __asm__ __volatile__(
-        " andnot %5,7,%4\n"
-        " inswl %1,%5,%1\n"
-        "1: ldq_l %2,0(%4)\n"
-        " extwl %2,%5,%0\n"
-        " cmpeq %0,%6,%3\n"
-        " beq %3,2f\n"
-        " mskwl %2,%5,%2\n"
-        " or %1,%2,%2\n"
-        " stq_c %2,0(%4)\n"
-        " beq %2,3f\n"
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
-        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u32_local(volatile int *m, int old, int new)
-{
-        unsigned long prev, cmp;
-
-        __asm__ __volatile__(
-        "1: ldl_l %0,%5\n"
-        " cmpeq %0,%3,%1\n"
-        " beq %1,2f\n"
-        " mov %4,%1\n"
-        " stl_c %1,%2\n"
-        " beq %1,3f\n"
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
-        : "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-        return prev;
-}
-
-static inline unsigned long
-__cmpxchg_u64_local(volatile long *m, unsigned long old, unsigned long new)
-{
-        unsigned long prev, cmp;
-
-        __asm__ __volatile__(
-        "1: ldq_l %0,%5\n"
-        " cmpeq %0,%3,%1\n"
-        " beq %1,2f\n"
-        " mov %4,%1\n"
-        " stq_c %1,%2\n"
-        " beq %1,3f\n"
-        "2:\n"
-        ".subsection 2\n"
-        "3: br 1b\n"
-        ".previous"
-        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
-        : "r"((long) old), "r"(new), "m"(*m) : "memory");
-
-        return prev;
-}
-
-static __always_inline unsigned long
-__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
-                int size)
-{
-        switch (size) {
-        case 1:
-                return __cmpxchg_u8_local(ptr, old, new);
-        case 2:
-                return __cmpxchg_u16_local(ptr, old, new);
-        case 4:
-                return __cmpxchg_u32_local(ptr, old, new);
-        case 8:
-                return __cmpxchg_u64_local(ptr, old, new);
-        }
-        __cmpxchg_called_with_bad_pointer();
-        return old;
-}
-
-#define cmpxchg_local(ptr, o, n) \
-({ \
-        __typeof__(*(ptr)) _o_ = (o); \
-        __typeof__(*(ptr)) _n_ = (n); \
-        (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
-                                             (unsigned long)_n_, sizeof(*(ptr))); \
-})
-#define cmpxchg64_local(ptr, o, n) \
-({ \
-        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
-        cmpxchg_local((ptr), (o), (n)); \
-})
+#define __ASM__MB
+#define ____xchg(type, args...) __xchg ## type ## _local(args)
+#define ____cmpxchg(type, args...) __cmpxchg ## type ## _local(args)
+#include <asm/xchg.h>
+
+#define xchg_local(ptr,x) \
+({ \
+        __typeof__(*(ptr)) _x_ = (x); \
+        (__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_, \
+                                          sizeof(*(ptr))); \
+})
+
+#define cmpxchg_local(ptr, o, n) \
+({ \
+        __typeof__(*(ptr)) _o_ = (o); \
+        __typeof__(*(ptr)) _n_ = (n); \
+        (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_, \
+                                             (unsigned long)_n_, \
+                                             sizeof(*(ptr))); \
+})
+
+#define cmpxchg64_local(ptr, o, n) \
+({ \
+        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
+        cmpxchg_local((ptr), (o), (n)); \
+})
+
+#ifdef CONFIG_SMP
+#undef __ASM__MB
+#define __ASM__MB "\tmb\n"
+#endif
+#undef ____xchg
+#undef ____cmpxchg
+#define ____xchg(type, args...) __xchg ##type(args)
+#define ____cmpxchg(type, args...) __cmpxchg ##type(args)
+#include <asm/xchg.h>
+
+#define xchg(ptr,x) \
+({ \
+        __typeof__(*(ptr)) _x_ = (x); \
+        (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, \
+                                    sizeof(*(ptr))); \
+})
+
+#define cmpxchg(ptr, o, n) \
+({ \
+        __typeof__(*(ptr)) _o_ = (o); \
+        __typeof__(*(ptr)) _n_ = (n); \
+        (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
+                                       (unsigned long)_n_, sizeof(*(ptr)));\
+})
+
+#define cmpxchg64(ptr, o, n) \
+({ \
+        BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
+        cmpxchg((ptr), (o), (n)); \
+})
 
+#undef __ASM__MB
+#undef ____cmpxchg
+
+#define __HAVE_ARCH_CMPXCHG 1
 
 #endif /* __ASSEMBLY__ */
 
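Both the old and the new dispatchers depend on the same link-time trick,
noted in the comments above: __xchg_called_with_bad_pointer() is declared
but never defined, so the kernel links only if every call site's
"switch (size)" folds away at compile time. That is why the dispatcher
must be __always_inline: sizeof(*(ptr)) is a compile-time constant, so
the dead branches and the bad-pointer call are eliminated. Below is a
stand-alone sketch of the pattern; the names are made up and a GCC
builtin stands in for the Alpha assembler.

#include <stdio.h>

/* Never defined anywhere: a reference to it survives to link time only
 * if the size switch fails to constant-fold, turning a type error into
 * a link error. */
extern void bad_size_chosen(void);

static inline __attribute__((always_inline)) unsigned long
dispatch(volatile void *ptr, unsigned long x, int size)
{
        switch (size) {
        case 4:
                return __sync_lock_test_and_set((volatile unsigned int *)ptr, x);
        case 8:
                return __sync_lock_test_and_set((volatile unsigned long *)ptr, x);
        }
        bad_size_chosen();      /* unreachable when size is 4 or 8 */
        return x;
}

int main(void)
{
        unsigned long v = 1;
        /* sizeof(v) is a constant, so the switch folds, the dead call
         * to bad_size_chosen() is eliminated, and the program links. */
        printf("%lu\n", dispatch(&v, 2, sizeof(v)));
        return 0;
}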
diff --git a/arch/alpha/include/asm/xchg.h b/arch/alpha/include/asm/xchg.h
new file mode 100644
index 000000000000..beba1b803e0d
--- /dev/null
+++ b/arch/alpha/include/asm/xchg.h
@@ -0,0 +1,258 @@
+#ifndef __ALPHA_SYSTEM_H
+#error Do not include xchg.h directly!
+#else
+/*
+ * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
+ * except that the local versions do not have the expensive memory barrier.
+ * So this file is included twice from asm/system.h.
+ */
+
+/*
+ * Atomic exchange.
+ * Since it can be used to implement critical sections
+ * it must clobber "memory" (also for interrupts in UP).
+ */
+
+static inline unsigned long
+____xchg(_u8, volatile char *m, unsigned long val)
+{
+        unsigned long ret, tmp, addr64;
+
+        __asm__ __volatile__(
+        " andnot %4,7,%3\n"
+        " insbl %1,%4,%1\n"
+        "1: ldq_l %2,0(%3)\n"
+        " extbl %2,%4,%0\n"
+        " mskbl %2,%4,%2\n"
+        " or %1,%2,%2\n"
+        " stq_c %2,0(%3)\n"
+        " beq %2,2f\n"
+        __ASM__MB
+        ".subsection 2\n"
+        "2: br 1b\n"
+        ".previous"
+        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+        : "r" ((long)m), "1" (val) : "memory");
+
+        return ret;
+}
+
+static inline unsigned long
+____xchg(_u16, volatile short *m, unsigned long val)
+{
+        unsigned long ret, tmp, addr64;
+
+        __asm__ __volatile__(
+        " andnot %4,7,%3\n"
+        " inswl %1,%4,%1\n"
+        "1: ldq_l %2,0(%3)\n"
+        " extwl %2,%4,%0\n"
+        " mskwl %2,%4,%2\n"
+        " or %1,%2,%2\n"
+        " stq_c %2,0(%3)\n"
+        " beq %2,2f\n"
+        __ASM__MB
+        ".subsection 2\n"
+        "2: br 1b\n"
+        ".previous"
+        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
+        : "r" ((long)m), "1" (val) : "memory");
+
+        return ret;
+}
+
+static inline unsigned long
+____xchg(_u32, volatile int *m, unsigned long val)
+{
+        unsigned long dummy;
+
+        __asm__ __volatile__(
+        "1: ldl_l %0,%4\n"
+        " bis $31,%3,%1\n"
+        " stl_c %1,%2\n"
+        " beq %1,2f\n"
+        __ASM__MB
+        ".subsection 2\n"
+        "2: br 1b\n"
+        ".previous"
+        : "=&r" (val), "=&r" (dummy), "=m" (*m)
+        : "rI" (val), "m" (*m) : "memory");
+
+        return val;
+}
+
+static inline unsigned long
+____xchg(_u64, volatile long *m, unsigned long val)
+{
+        unsigned long dummy;
+
+        __asm__ __volatile__(
+        "1: ldq_l %0,%4\n"
+        " bis $31,%3,%1\n"
+        " stq_c %1,%2\n"
+        " beq %1,2f\n"
+        __ASM__MB
+        ".subsection 2\n"
+        "2: br 1b\n"
+        ".previous"
+        : "=&r" (val), "=&r" (dummy), "=m" (*m)
+        : "rI" (val), "m" (*m) : "memory");
+
+        return val;
+}
+
+/* This function doesn't exist, so you'll get a linker error
+   if something tries to do an invalid xchg(). */
+extern void __xchg_called_with_bad_pointer(void);
+
+static __always_inline unsigned long
+____xchg(, volatile void *ptr, unsigned long x, int size)
+{
+        switch (size) {
+        case 1:
+                return ____xchg(_u8, ptr, x);
+        case 2:
+                return ____xchg(_u16, ptr, x);
+        case 4:
+                return ____xchg(_u32, ptr, x);
+        case 8:
+                return ____xchg(_u64, ptr, x);
+        }
+        __xchg_called_with_bad_pointer();
+        return x;
+}
+
+/*
+ * Atomic compare and exchange. Compare OLD with MEM, if identical,
+ * store NEW in MEM. Return the initial value in MEM. Success is
+ * indicated by comparing RETURN with OLD.
+ *
+ * The memory barrier should be placed in SMP only when we actually
+ * make the change. If we don't change anything (so if the returned
+ * prev is equal to old) then we aren't acquiring anything new and
+ * we don't need any memory barrier as far as I can tell.
+ */
+
+static inline unsigned long
+____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
+{
+        unsigned long prev, tmp, cmp, addr64;
+
+        __asm__ __volatile__(
+        " andnot %5,7,%4\n"
+        " insbl %1,%5,%1\n"
+        "1: ldq_l %2,0(%4)\n"
+        " extbl %2,%5,%0\n"
+        " cmpeq %0,%6,%3\n"
+        " beq %3,2f\n"
+        " mskbl %2,%5,%2\n"
+        " or %1,%2,%2\n"
+        " stq_c %2,0(%4)\n"
+        " beq %2,3f\n"
+        __ASM__MB
+        "2:\n"
+        ".subsection 2\n"
+        "3: br 1b\n"
+        ".previous"
+        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+        return prev;
+}
+
+static inline unsigned long
+____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
+{
+        unsigned long prev, tmp, cmp, addr64;
+
+        __asm__ __volatile__(
+        " andnot %5,7,%4\n"
+        " inswl %1,%5,%1\n"
+        "1: ldq_l %2,0(%4)\n"
+        " extwl %2,%5,%0\n"
+        " cmpeq %0,%6,%3\n"
+        " beq %3,2f\n"
+        " mskwl %2,%5,%2\n"
+        " or %1,%2,%2\n"
+        " stq_c %2,0(%4)\n"
+        " beq %2,3f\n"
+        __ASM__MB
+        "2:\n"
+        ".subsection 2\n"
+        "3: br 1b\n"
+        ".previous"
+        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
+        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");
+
+        return prev;
+}
+
+static inline unsigned long
+____cmpxchg(_u32, volatile int *m, int old, int new)
+{
+        unsigned long prev, cmp;
+
+        __asm__ __volatile__(
+        "1: ldl_l %0,%5\n"
+        " cmpeq %0,%3,%1\n"
+        " beq %1,2f\n"
+        " mov %4,%1\n"
+        " stl_c %1,%2\n"
+        " beq %1,3f\n"
+        __ASM__MB
+        "2:\n"
+        ".subsection 2\n"
+        "3: br 1b\n"
+        ".previous"
+        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
+        : "r"((long) old), "r"(new), "m"(*m) : "memory");
+
+        return prev;
+}
+
+static inline unsigned long
+____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
+{
+        unsigned long prev, cmp;
+
+        __asm__ __volatile__(
+        "1: ldq_l %0,%5\n"
+        " cmpeq %0,%3,%1\n"
+        " beq %1,2f\n"
+        " mov %4,%1\n"
+        " stq_c %1,%2\n"
+        " beq %1,3f\n"
+        __ASM__MB
+        "2:\n"
+        ".subsection 2\n"
+        "3: br 1b\n"
+        ".previous"
+        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
+        : "r"((long) old), "r"(new), "m"(*m) : "memory");
+
+        return prev;
+}
+
+/* This function doesn't exist, so you'll get a linker error
+   if something tries to do an invalid cmpxchg(). */
+extern void __cmpxchg_called_with_bad_pointer(void);
+
+static __always_inline unsigned long
+____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
+            int size)
+{
+        switch (size) {
+        case 1:
+                return ____cmpxchg(_u8, ptr, old, new);
+        case 2:
+                return ____cmpxchg(_u16, ptr, old, new);
+        case 4:
+                return ____cmpxchg(_u32, ptr, old, new);
+        case 8:
+                return ____cmpxchg(_u64, ptr, old, new);
+        }
+        __cmpxchg_called_with_bad_pointer();
+        return old;
+}
+
+#endif
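
To follow the ____xchg(type, args...) indirection above: system.h first
defines the name-pasting macro and an empty __ASM__MB, includes this
file to get the *_local variants, then redefines both macros (with a
real mb on SMP) and includes it again, so each function body is written
once but compiled twice. Below is a self-contained analogue of that
one-body-two-variants idea, using a function-defining macro invoked
twice instead of a literal double #include; the names and the
user-space barrier are illustrative only.

#include <stdio.h>

/* One body, two variants: the suffix and the barrier are parameters,
 * mirroring how asm/xchg.h is included twice with ____xchg and
 * __ASM__MB redefined between the two passes. */
#define DEFINE_XCHG(suffix, barrier)                                    \
static unsigned long my_xchg##suffix(unsigned long *m, unsigned long v) \
{                                                                       \
        unsigned long old = *m;                                         \
        *m = v;                                                         \
        barrier; /* empty in the _local pass, a real barrier otherwise */ \
        return old;                                                     \
}

DEFINE_XCHG(_local, (void)0)            /* pass 1: no memory barrier */
DEFINE_XCHG(, __sync_synchronize())     /* pass 2: full barrier */

int main(void)
{
        unsigned long x = 1;
        printf("%lu\n", my_xchg_local(&x, 2)); /* prints 1 */
        printf("%lu\n", my_xchg(&x, 3));       /* prints 2 */
        return 0;
}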