path: root/arch/arm/mm/tlb-v7.S
author	Will Deacon <will.deacon@arm.com>	2013-05-13 07:01:12 -0400
committer	Will Deacon <will.deacon@arm.com>	2013-08-12 07:25:45 -0400
commit	6abdd491698a27f7df04a32ca12cc453810e4396 (patch)
tree	34f4aa721441439634e721fe0eeba614b076a889 /arch/arm/mm/tlb-v7.S
parent	62cbbc42e0019aff6310259f275ae812463f8836 (diff)
ARM: mm: use inner-shareable barriers for TLB and user cache operations
System-wide barriers aren't required for situations where we only need to make visibility and ordering guarantees in the inner-shareable domain (i.e. we are not dealing with devices or potentially incoherent CPUs).

This patch changes the v7 TLB operations, coherent_user_range and dcache_clean_area functions to use inner-shareable barriers. For cache maintenance, only the store access type is required to ensure completion.

Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
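For context, here is a minimal sketch (not part of this patch) of how the barrier variants discussed above could be issued from C via ARMv7 inline assembly. The DSB qualifiers (sy, ish, ishst) are architectural options; the helper names below are hypothetical and chosen only for illustration:

/*
 * Minimal sketch, assuming GCC/Clang inline-assembly syntax on ARMv7.
 * Helper names are hypothetical and not taken from the kernel sources.
 */
static inline void barrier_full_system(void)
{
	asm volatile("dsb sy" : : : "memory");    /* orders all accesses system-wide, including devices */
}

static inline void barrier_inner_shareable(void)
{
	asm volatile("dsb ish" : : : "memory");   /* sufficient when only coherent CPUs are involved */
}

static inline void barrier_inner_shareable_stores(void)
{
	asm volatile("dsb ishst" : : : "memory"); /* waits for store completion only, e.g. for cache cleans */
}

In the patch itself the same effect is obtained directly in the assembly sources by replacing plain dsb with dsb ish, as the diff below shows.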
Diffstat (limited to 'arch/arm/mm/tlb-v7.S')
-rw-r--r--	arch/arm/mm/tlb-v7.S	8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/arm/mm/tlb-v7.S b/arch/arm/mm/tlb-v7.S
index ea94765acf9a..355308767bae 100644
--- a/arch/arm/mm/tlb-v7.S
+++ b/arch/arm/mm/tlb-v7.S
@@ -35,7 +35,7 @@
 ENTRY(v7wbi_flush_user_tlb_range)
 	vma_vm_mm r3, r2		@ get vma->vm_mm
 	mmid	r3, r3			@ get vm_mm->context.id
-	dsb
+	dsb	ish
 	mov	r0, r0, lsr #PAGE_SHIFT	@ align address
 	mov	r1, r1, lsr #PAGE_SHIFT
 	asid	r3, r3			@ mask ASID
@@ -56,7 +56,7 @@ ENTRY(v7wbi_flush_user_tlb_range)
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
-	dsb
+	dsb	ish
 	mov	pc, lr
 ENDPROC(v7wbi_flush_user_tlb_range)
 
@@ -69,7 +69,7 @@ ENDPROC(v7wbi_flush_user_tlb_range)
  * - end   - end address (exclusive, may not be aligned)
  */
 ENTRY(v7wbi_flush_kern_tlb_range)
-	dsb
+	dsb	ish
 	mov	r0, r0, lsr #PAGE_SHIFT	@ align address
 	mov	r1, r1, lsr #PAGE_SHIFT
 	mov	r0, r0, lsl #PAGE_SHIFT
@@ -84,7 +84,7 @@ ENTRY(v7wbi_flush_kern_tlb_range)
 	add	r0, r0, #PAGE_SZ
 	cmp	r0, r1
 	blo	1b
-	dsb
+	dsb	ish
 	isb
 	mov	pc, lr
 ENDPROC(v7wbi_flush_kern_tlb_range)