author	Will Deacon <will.deacon@arm.com>	2014-05-02 11:24:11 -0400
committer	Catalin Marinas <catalin.marinas@arm.com>	2014-05-09 12:03:41 -0400
commit	493e68747e07b69da3d746352525a1ebd6b61d82 (patch)
tree	c4bce9ecd189dc90337b733b35168edba4b5f17b /arch/arm64/include
parent	98f7685ee69f871ba991089cb9685f0da07517ea (diff)
arm64: barriers: wire up new barrier options
Now that all callers of the barrier macros are updated to pass the mandatory options, update the macros so the option is actually used.

Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
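As an illustrative aside (not part of the patch), the updated macros rely on the C preprocessor's stringizing operator: the caller's option token is pasted into the instruction string, so dmb(ish) emits a "dmb ish" instruction. A minimal standalone sketch, assuming a GCC-style compiler on arm64; the helper name barrier_example is hypothetical:

#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")

static inline void barrier_example(void)
{
	/* Preprocesses to: asm volatile("dmb ish" : : : "memory"); */
	dmb(ish);
}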
Diffstat (limited to 'arch/arm64/include')
-rw-r--r--  arch/arm64/include/asm/barrier.h | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index 5d69eddbe39e..71a42d6599fb 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
@@ -25,12 +25,12 @@
 #define wfi()		asm volatile("wfi" : : : "memory")
 
 #define isb()		asm volatile("isb" : : : "memory")
-#define dmb(opt)	asm volatile("dmb sy" : : : "memory")
-#define dsb(opt)	asm volatile("dsb sy" : : : "memory")
+#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
 
 #define mb()		dsb(sy)
-#define rmb()		asm volatile("dsb ld" : : : "memory")
-#define wmb()		asm volatile("dsb st" : : : "memory")
+#define rmb()		dsb(ld)
+#define wmb()		dsb(st)
 
 #ifndef CONFIG_SMP
 #define smp_mb()	barrier()
@@ -54,9 +54,9 @@ do { \
 
 #else
 
-#define smp_mb()	asm volatile("dmb ish" : : : "memory")
-#define smp_rmb()	asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb()	asm volatile("dmb ishst" : : : "memory")
+#define smp_mb()	dmb(ish)
+#define smp_rmb()	dmb(ishld)
+#define smp_wmb()	dmb(ishst)
 
 #define smp_store_release(p, v)					\
 do {									\