Diffstat (limited to 'arch/arm/kernel/entry-armv.S')

 arch/arm/kernel/entry-armv.S | 49 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+), 0 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index d9fb819bf7cc..2a8d27e18fa7 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -614,6 +614,47 @@ __kuser_helper_start:
 /*
  * Reference prototype:
  *
+ *	void __kernel_memory_barrier(void)
+ *
+ * Input:
+ *
+ *	lr = return address
+ *
+ * Output:
+ *
+ *	none
+ *
+ * Clobbered:
+ *
+ *	the Z flag might be lost
+ *
+ * Definition and user space usage example:
+ *
+ *	typedef void (__kernel_dmb_t)(void);
+ *	#define __kernel_dmb (*(__kernel_dmb_t *)0xffff0fa0)
+ *
+ * Apply any needed memory barrier to preserve consistency with data modified
+ * manually and __kuser_cmpxchg usage.
+ *
+ * This could be used as follows:
+ *
+ * #define __kernel_dmb() \
+ *	asm volatile ( "mov r0, #0xffff0fff; mov lr, pc; sub pc, r0, #95" \
+ *		: : : "r0", "lr", "cc" )
+ */
+
+__kuser_memory_barrier:			@ 0xffff0fa0
+
+#if __LINUX_ARM_ARCH__ >= 6 && defined(CONFIG_SMP)
+	mcr	p15, 0, r0, c7, c10, 5	@ dmb
+#endif
+	mov	pc, lr
+
+	.align	5
+
+/*
+ * Reference prototype:
+ *
  * int __kernel_cmpxchg(int oldval, int newval, int *ptr)
  *
  * Input:
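The memory barrier helper added above is reachable from user space either via
the asm macro in the comment or directly through the documented typedef and
fixed address. A minimal sketch of a caller (the publish()/flag pattern is
illustrative; only the typedef and the 0xffff0fa0 address come from the
comment above):

    typedef void (__kernel_dmb_t)(void);
    #define __kernel_dmb (*(__kernel_dmb_t *)0xffff0fa0)

    /* Store a payload, then a ready flag; the barrier keeps the two
     * stores ordered as observed by a consumer on another CPU. */
    static void publish(int *data, volatile int *flag)
    {
            *data = 42;         /* payload store */
            __kernel_dmb();     /* order the payload before the flag */
            *flag = 1;          /* ready flag store */
    }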
@@ -642,6 +683,8 @@ __kuser_helper_start:
  * The C flag is also set if *ptr was changed to allow for assembly
  * optimization in the calling code.
  *
+ * Note: this routine already includes memory barriers as needed.
+ *
  * For example, a user space atomic_add implementation could look like this:
  *
  * #define atomic_add(ptr, val) \
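The atomic_add example in the comment continues beyond this hunk; in C it
amounts to a retry loop around the cmpxchg helper at 0xffff0fc0. A sketch,
assuming only the documented prototype and the return convention (zero when
*ptr was updated):

    typedef int (__kernel_cmpxchg_t)(int oldval, int newval, int *ptr);
    #define __kernel_cmpxchg (*(__kernel_cmpxchg_t *)0xffff0fc0)

    /* Atomically add val to *ptr, retrying until the compare-and-swap
     * succeeds; the helper returns zero when *ptr was changed. */
    static void atomic_add(int *ptr, int val)
    {
            int old;

            do {
                    old = *ptr;
            } while (__kernel_cmpxchg(old, old + val, ptr) != 0);
    }

With the note added above, callers need no explicit __kernel_dmb around such
a loop: the helper itself now issues the barriers.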
@@ -698,10 +741,16 @@ __kuser_cmpxchg:	@ 0xffff0fc0
 
 #else
 
+#ifdef CONFIG_SMP
+	mcr	p15, 0, r0, c7, c10, 5	@ dmb
+#endif
 	ldrex	r3, [r2]
 	subs	r3, r3, r0
 	strexeq	r3, r1, [r2]
 	rsbs	r0, r3, #0
+#ifdef CONFIG_SMP
+	mcr	p15, 0, r0, c7, c10, 5	@ dmb
+#endif
 	mov	pc, lr
 
 #endif
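Placing a dmb on each side of the ldrex/strex sequence makes the SMP cmpxchg
a full barrier: accesses issued before the call cannot be reordered past the
exclusive load, and the exclusive store is globally visible before anything
that follows the return. In C11 terms this is roughly a sequentially
consistent compare-exchange; a sketch of that correspondence (illustrative
only, and the cmpxchg_like name is made up here):

    #include <stdatomic.h>

    /* Approximates the helper's contract with C11 atomics: a CAS with
     * full ordering on both sides, returning 0 iff *ptr was changed. */
    static int cmpxchg_like(atomic_int *ptr, int oldval, int newval)
    {
            return !atomic_compare_exchange_strong(ptr, &oldval, newval);
    }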