author     Will Deacon <will.deacon@arm.com>            2014-05-02 11:24:15 -0400
committer  Catalin Marinas <catalin.marinas@arm.com>    2014-05-09 12:21:24 -0400
commit     dc60b777fcdddbadab111028e266fd69d4702b34
tree       d839ce1021b5f64b2925d8f20230cb9564f3e11e      /arch/arm64/mm/cache.S
parent     ee9e101c11478680d579bd20bb38a4d3e2514fe3
arm64: mm: use inner-shareable barriers for inner-shareable maintenance
In order to ensure ordering and completion of inner-shareable maintenance
instructions (cache and TLB) on AArch64, we can use the -ish suffix to
the dmb and dsb instructions respectively.

This patch updates our low-level cache and tlb maintenance routines to
use the inner-shareable barrier variants where appropriate.

Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
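For context, broadcast TLB maintenance on AArch64 relies on the same inner-shareable barrier variants that this patch adopts. The sequence below is an illustrative sketch only, not part of the patch; the use of x0 to hold the encoded VA/ASID for the invalidate is an assumed convention.

	// illustrative sketch (not from this patch): publish a page-table
	// update, then broadcast the TLB invalidate to the inner-shareable domain
	dsb	ishst			// make the PTE write visible to all table walkers
	tlbi	vae1is, x0		// invalidate the entry for the VA/ASID encoded in x0
	dsb	ish			// wait for the invalidation to complete on all CPUs
	isb				// resynchronise the local instruction stream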
Diffstat (limited to 'arch/arm64/mm/cache.S')
-rw-r--r--  arch/arm64/mm/cache.S | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/arch/arm64/mm/cache.S b/arch/arm64/mm/cache.S
index fda756875fa6..23663837acff 100644
--- a/arch/arm64/mm/cache.S
+++ b/arch/arm64/mm/cache.S
@@ -31,7 +31,7 @@
  *	Corrupted registers:	x0-x7, x9-x11
  */
 __flush_dcache_all:
-	dsb	sy				// ensure ordering with previous memory accesses
+	dmb	sy				// ensure ordering with previous memory accesses
 	mrs	x0, clidr_el1			// read clidr
 	and	x3, x0, #0x7000000		// extract loc from clidr
 	lsr	x3, x3, #23			// left align loc bit field
@@ -128,7 +128,7 @@ USER(9f, dc cvau, x4 ) // clean D line to PoU
 	add	x4, x4, x2
 	cmp	x4, x1
 	b.lo	1b
-	dsb	sy
+	dsb	ish

 	icache_line_size x2, x3
 	sub	x3, x2, #1
@@ -139,7 +139,7 @@ USER(9f, ic ivau, x4 ) // invalidate I line PoU
 	cmp	x4, x1
 	b.lo	1b
 9:						// ignore any faulting cache operation
-	dsb	sy
+	dsb	ish
 	isb
 	ret
 ENDPROC(flush_icache_range)