Diffstat (limited to 'arch/arm/mm/cache-v7.S')

-rw-r--r--  arch/arm/mm/cache-v7.S | 96 +++++++++++++++++++++++---------
1 file changed, 69 insertions(+), 27 deletions(-)
diff --git a/arch/arm/mm/cache-v7.S b/arch/arm/mm/cache-v7.S
index d32f02b6186..ea33896449b 100644
--- a/arch/arm/mm/cache-v7.S
+++ b/arch/arm/mm/cache-v7.S
@@ -33,30 +33,37 @@ ENTRY(v7_flush_icache_all)
 ENDPROC(v7_flush_icache_all)
 
 /*
- *	v7_flush_dcache_all()
+ *	v7_op_dcache_all op
  *
- *	Flush the whole D-cache.
+ *	op=c14, Flush the whole D-cache.
+ *	op=c10, Clean the whole D-cache.
  *
  *	Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
  *
  *	- mm    - mm_struct describing address space
  */
-ENTRY(v7_flush_dcache_all)
+	.macro	v7_op_dcache_all op	@ op=c10 clean, op=c14 flush
 	dmb					@ ensure ordering with previous memory accesses
 	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
 	ands	r3, r0, #0x7000000		@ extract loc from clidr
 	mov	r3, r3, lsr #23			@ left align loc bit field
-	beq	finished			@ if loc is 0, then no need to clean
+	beq	1005f				@ if loc is 0, then no need to clean
 	mov	r10, #0				@ start clean at cache level 0
-loop1:
+1001:
 	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
 	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
 	and	r1, r1, #7			@ mask of the bits for current cache only
 	cmp	r1, #2				@ see what cache we have at this level
-	blt	skip				@ skip if no cache, or just i-cache
+	blt	1004f				@ skip if no cache, or just i-cache
+#ifdef CONFIG_PREEMPT
+	save_and_disable_irqs_notrace r9	@ make cssr&csidr read atomic
+#endif
 	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
 	isb					@ isb to sych the new cssr&csidr
 	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
+#ifdef CONFIG_PREEMPT
+	restore_irqs_notrace r9
+#endif
 	and	r2, r1, #7			@ extract the length of the cache lines
 	add	r2, r2, #4			@ add 4 (line length offset)
 	ldr	r4, =0x3ff
@@ -64,32 +71,40 @@ loop1:
 	clz	r5, r4				@ find bit position of way size increment
 	ldr	r7, =0x7fff
 	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
-loop2:
+1002:
 	mov	r9, r4				@ create working copy of max way size
-loop3:
+1003:
 ARM(	orr	r11, r10, r9, lsl r5	)	@ factor way and cache number into r11
 THUMB(	lsl	r6, r9, r5		)
 THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
 ARM(	orr	r11, r11, r7, lsl r2	)	@ factor index number into r11
 THUMB(	lsl	r6, r7, r2		)
 THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
-	mcr	p15, 0, r11, c7, c14, 2		@ clean & invalidate by set/way
+	mcr	p15, 0, r11, c7, \op, 2		@ op=c10/c14, clean/flush by set/way
 	subs	r9, r9, #1			@ decrement the way
-	bge	loop3
+	bge	1003b
 	subs	r7, r7, #1			@ decrement the index
-	bge	loop2
-skip:
+	bge	1002b
+1004:
 	add	r10, r10, #2			@ increment cache number
 	cmp	r3, r10
-	bgt	loop1
-finished:
+	bgt	1001b
+1005:
 	mov	r10, #0				@ swith back to cache level 0
 	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
 	dsb
 	isb
 	mov	pc, lr
+	.endm
+
+ENTRY(v7_flush_dcache_all)
+	v7_op_dcache_all c14
 ENDPROC(v7_flush_dcache_all)
 
+ENTRY(v7_clean_dcache_all)
+	v7_op_dcache_all c10
+ENDPROC(v7_clean_dcache_all)
+
 /*
  * v7_flush_cache_all()
  *
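Aside, not part of the patch: the macro body above is the standard ARMv7 clean/invalidate-by-set/way walk. CLIDR gives the level of coherency, CCSIDR (read per level through the cssr/csidr pair that the new CONFIG_PREEMPT guard keeps atomic) gives the line length and the way/set counts, and the loop nest visits every set and way of every data or unified level. A minimal C sketch of how one operand for the "mcr p15, 0, r11, c7, \op, 2" instruction is assembled, using the GCC/Clang __builtin_clz as a stand-in for the clz instruction; the function and variable names are illustrative, not from the kernel:

	#include <stdint.h>

	/*
	 * Illustrative only: compute the set/way operand fed to
	 * "mcr p15, 0, r11, c7, <op>, 2" for one (level, way, set) tuple.
	 * ccsidr is the Cache Size ID register value for the selected level.
	 */
	static uint32_t v7_set_way_operand(uint32_t ccsidr, uint32_t level,
					   uint32_t way, uint32_t set)
	{
		uint32_t line_shift = (ccsidr & 0x7) + 4;	/* log2(line bytes): the "add 4" */
		uint32_t max_way = (ccsidr >> 3) & 0x3ff;	/* ways - 1: the 0x3ff mask */
		/* "clz r5, r4": the way index lives in the top bits of the operand */
		uint32_t way_shift = max_way ? (uint32_t)__builtin_clz(max_way) : 0;

		/* r10 in the assembly already holds level << 1 */
		return (way << way_shift) | (set << line_shift) | (level << 1);
	}

Counting set and way down to zero within each level, for every level below the LoC, reproduces the 1001-1005 label structure. The numeric local labels replace loop1/skip/finished because the body is now a macro instantiated twice, and named labels would collide.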
@@ -114,6 +129,24 @@ ENTRY(v7_flush_kern_cache_all)
 ENDPROC(v7_flush_kern_cache_all)
 
 /*
+ *	v7_clean_kern_cache_all()
+ */
+ENTRY(v7_clean_kern_cache_all)
+ ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
+	bl	v7_clean_dcache_all
+	mov	r0, #0
+#ifdef CONFIG_SMP
+	mcr	p15, 0, r0, c7, c1, 0		@ invalidate I-cache inner shareable
+#else
+	mcr	p15, 0, r0, c7, c5, 0		@ I+BTB cache invalidate
+#endif
+ ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
+	mov	pc, lr
+ENDPROC(v7_clean_kern_cache_all)
+
+/*
  * v7_flush_cache_all()
  *
  * Flush all TLB entries in a particular address space
@@ -174,6 +207,10 @@ ENTRY(v7_coherent_user_range)
 	dcache_line_size r2, r3
 	sub	r3, r2, #1
 	bic	r12, r0, r3
+#ifdef CONFIG_ARM_ERRATA_764369
+	ALT_SMP(W(dsb))
+	ALT_UP(W(nop))
+#endif
 1:
 USER(	mcr	p15, 0, r12, c7, c11, 1	)	@ clean D line to the point of unification
 	add	r12, r12, r2
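Aside, not part of the patch: the same four-line guard recurs before each maintenance-by-MVA loop in the hunks below. CONFIG_ARM_ERRATA_764369 covers a Cortex-A9 MPCore erratum in which a data or unified cache line maintenance operation by MVA targeting Inner Shareable memory may not succeed; the documented workaround is to issue a DSB before the operation. ALT_SMP()/ALT_UP() emit the dsb on SMP kernels and let the SMP-on-UP fixup patch it to a nop where the erratum cannot apply, and the W() wrapper forces the 32-bit Thumb-2 encoding so both alternatives occupy the same space.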
@@ -223,6 +260,10 @@ ENTRY(v7_flush_kern_dcache_area)
 	add	r1, r0, r1
 	sub	r3, r2, #1
 	bic	r0, r0, r3
+#ifdef CONFIG_ARM_ERRATA_764369
+	ALT_SMP(W(dsb))
+	ALT_UP(W(nop))
+#endif
 1:
 	mcr	p15, 0, r0, c7, c14, 1		@ clean & invalidate D line / unified line
 	add	r0, r0, r2
@@ -247,6 +288,10 @@ v7_dma_inv_range:
 	sub	r3, r2, #1
 	tst	r0, r3
 	bic	r0, r0, r3
+#ifdef CONFIG_ARM_ERRATA_764369
+	ALT_SMP(W(dsb))
+	ALT_UP(W(nop))
+#endif
 	mcrne	p15, 0, r0, c7, c14, 1		@ clean & invalidate D / U line
 
 	tst	r1, r3
@@ -270,6 +315,10 @@ v7_dma_clean_range:
 	dcache_line_size r2, r3
 	sub	r3, r2, #1
 	bic	r0, r0, r3
+#ifdef CONFIG_ARM_ERRATA_764369
+	ALT_SMP(W(dsb))
+	ALT_UP(W(nop))
+#endif
 1:
 	mcr	p15, 0, r0, c7, c10, 1		@ clean D / U line
 	add	r0, r0, r2
@@ -288,6 +337,10 @@ ENTRY(v7_dma_flush_range)
 	dcache_line_size r2, r3
 	sub	r3, r2, #1
 	bic	r0, r0, r3
+#ifdef CONFIG_ARM_ERRATA_764369
+	ALT_SMP(W(dsb))
+	ALT_UP(W(nop))
+#endif
 1:
 	mcr	p15, 0, r0, c7, c14, 1		@ clean & invalidate D / U line
 	add	r0, r0, r2
@@ -325,16 +378,5 @@ ENDPROC(v7_dma_unmap_area)
 
 	__INITDATA
 
-	.type	v7_cache_fns, #object
-ENTRY(v7_cache_fns)
-	.long	v7_flush_icache_all
-	.long	v7_flush_kern_cache_all
-	.long	v7_flush_user_cache_all
-	.long	v7_flush_user_cache_range
-	.long	v7_coherent_kern_range
-	.long	v7_coherent_user_range
-	.long	v7_flush_kern_dcache_area
-	.long	v7_dma_map_area
-	.long	v7_dma_unmap_area
-	.long	v7_dma_flush_range
-	.size	v7_cache_fns, . - v7_cache_fns
+	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
+	define_cache_functions v7
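Aside, not part of the patch: nothing is lost in this last hunk. define_cache_functions (from arch/arm/mm/proc-macros.S) emits the same table of entry points the deleted .long list spelled out, in the order required by struct cpu_cache_fns. As a sketch of that structure, with prototypes reconstructed from the deleted ordering and my reading of <asm/cacheflush.h>, so treat the details as indicative:

	#include <stddef.h>

	/* Field order must match the .long table the macro generates. */
	struct cpu_cache_fns {
		void (*flush_icache_all)(void);
		void (*flush_kern_all)(void);
		void (*flush_user_all)(void);
		void (*flush_user_range)(unsigned long start, unsigned long end,
					 unsigned int flags);
		void (*coherent_kern_range)(unsigned long start, unsigned long end);
		void (*coherent_user_range)(unsigned long start, unsigned long end);
		void (*flush_kern_dcache_area)(void *addr, size_t size);
		void (*dma_map_area)(const void *addr, size_t size, int dir);
		void (*dma_unmap_area)(const void *addr, size_t size, int dir);
		void (*dma_flush_range)(const void *start, const void *end);
	};

Generating the table from one shared macro keeps every per-CPU cache-ops file in sync with this layout by construction, which is presumably why the open-coded table was removed.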