author	Will Deacon <will.deacon@arm.com>	2013-05-13 07:01:12 -0400
committer	Will Deacon <will.deacon@arm.com>	2013-08-12 07:25:45 -0400
commit	6abdd491698a27f7df04a32ca12cc453810e4396 (patch)
tree	34f4aa721441439634e721fe0eeba614b076a889
parent	62cbbc42e0019aff6310259f275ae812463f8836 (diff)
ARM: mm: use inner-shareable barriers for TLB and user cache operations
System-wide barriers aren't required for situations where we only need to make visibility and ordering guarantees in the inner-shareable domain (i.e. we are not dealing with devices or potentially incoherent CPUs).

This patch changes the v7 TLB operations, coherent_user_range and dcache_clean_area functions to use inner-shareable barriers. For cache maintenance, only the store access type is required to ensure completion.

Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
-rw-r--r--	arch/arm/mm/cache-v7.S	4
-rw-r--r--	arch/arm/mm/proc-v7.S	2
-rw-r--r--	arch/arm/mm/tlb-v7.S	8
3 files changed, 7 insertions, 7 deletions
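For context on the DSB options appearing in the hunks below: a bare "dsb" is equivalent to "dsb sy" and waits for completion across the full system, while the ish/ishst options restrict the barrier to the inner-shareable domain (and, for ishst, to store accesses only), which is the relaxation the commit message describes. A minimal sketch, assuming an ARMv7 target and GCC inline assembly, of how the three flavours could be wrapped from C; the helper names are illustrative only, not kernel API:

/* Minimal sketch, not part of this patch: ARMv7 DSB variants via GCC
 * inline assembly. Helper names are illustrative, not kernel API.
 */
static inline void dsb_full(void)	/* "dsb" == "dsb sy": full system, all accesses */
{
	__asm__ __volatile__("dsb" : : : "memory");
}

static inline void dsb_ish(void)	/* inner-shareable domain, loads and stores */
{
	__asm__ __volatile__("dsb ish" : : : "memory");
}

static inline void dsb_ishst(void)	/* inner-shareable domain, stores only */
{
	__asm__ __volatile__("dsb ishst" : : : "memory");
}

In the patch itself, the cache-maintenance paths (cache-v7.S, proc-v7.S) switch to the store-only ishst form, while the TLB paths (tlb-v7.S) use ish.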
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index 515b00064da8..b5c467a65c27 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -282,7 +282,7 @@ ENTRY(v7_coherent_user_range)
 	add	r12, r12, r2
 	cmp	r12, r1
 	blo	1b
-	dsb
+	dsb	ishst
 	icache_line_size r2, r3
 	sub	r3, r2, #1
 	bic	r12, r0, r3
@@ -294,7 +294,7 @@ ENTRY(v7_coherent_user_range)
 	mov	r0, #0
 	ALT_SMP(mcr	p15, 0, r0, c7, c1, 6)	@ invalidate BTB Inner Shareable
 	ALT_UP(mcr	p15, 0, r0, c7, c5, 6)	@ invalidate BTB
-	dsb
+	dsb	ishst
 	isb
 	mov	pc, lr
 
diff --git a/arch/arm/mm/proc-v7.S b/arch/arm/mm/proc-v7.S
index 73398bcf9bd8..0b5462a941a6 100644
--- a/arch/arm/mm/proc-v7.S
+++ b/arch/arm/mm/proc-v7.S
@@ -83,7 +83,7 @@ ENTRY(cpu_v7_dcache_clean_area)
 	add	r0, r0, r2
 	subs	r1, r1, r2
 	bhi	2b
-	dsb
+	dsb	ishst
 	mov	pc, lr
 ENDPROC(cpu_v7_dcache_clean_area)
 
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index ea94765acf9a..355308767bae 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -35,7 +35,7 @@
 ENTRY(v7wbi_flush_user_tlb_range)
 	vma_vm_mm r3, r2			@ get vma->vm_mm
 	mmid	r3, r3				@ get vm_mm->context.id
-	dsb
+	dsb	ish
 	mov	r0, r0, lsr #PAGE_SHIFT		@ align address
 	mov	r1, r1, lsr #PAGE_SHIFT
 	asid	r3, r3				@ mask ASID
@@ -56,7 +56,7 @@ ENTRY(v7wbi_flush_user_tlb_range)
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
-	dsb
+	dsb	ish
 	mov	pc, lr
 ENDPROC(v7wbi_flush_user_tlb_range)
 
@@ -69,7 +69,7 @@ ENDPROC(v7wbi_flush_user_tlb_range)
  * - end   - end address (exclusive, may not be aligned)
  */
 ENTRY(v7wbi_flush_kern_tlb_range)
-	dsb
+	dsb	ish
 	mov	r0, r0, lsr #PAGE_SHIFT		@ align address
 	mov	r1, r1, lsr #PAGE_SHIFT
 	mov	r0, r0, lsl #PAGE_SHIFT
@@ -84,7 +84,7 @@ ENTRY(v7wbi_flush_kern_tlb_range)
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
-	dsb
+	dsb	ish
 	isb
 	mov	pc, lr
 ENDPROC(v7wbi_flush_kern_tlb_range)