author	Russell King <rmk+kernel@arm.linux.org.uk>	2014-06-30 11:29:12 -0400
committer	Russell King <rmk+kernel@arm.linux.org.uk>	2014-07-18 07:29:04 -0400
commit	6ebbf2ce437b33022d30badd49dc94d33ecfa498 (patch)
tree	bc015e35b456a28bb0e501803a454dc0c0d3291a /arch/arm/mm/proc-arm1022.S
parent	af040ffc9ba1e079ee4c0748aff64fa3d4716fa5 (diff)
ARM: convert all "mov.* pc, reg" to "bx reg" for ARMv6+
ARMv6 and greater introduced a new instruction ("bx") which can be used
to return from function calls. Recent CPUs perform better when the
"bx lr" instruction is used rather than the "mov pc, lr" instruction,
and this sequence is strongly recommended to be used by the ARM
architecture manual (section A.4.1.1).

We provide a new macro "ret" with all its variants for the condition
code which will resolve to the appropriate instruction.

Rather than doing this piecemeal, and miss some instances, change all
the "mov pc" instances to use the new macro, with the exception of the
"movs" instruction and the kprobes code. This allows us to detect the
"mov pc, lr" case and fix it up - and also gives us the possibility of
deploying this for other registers depending on the CPU selection.

Reported-by: Will Deacon <will.deacon@arm.com>
Tested-by: Stephen Warren <swarren@nvidia.com> # Tegra Jetson TK1
Tested-by: Robert Jarzmik <robert.jarzmik@free.fr> # mioa701_bootresume.S
Tested-by: Andrew Lunn <andrew@lunn.ch> # Kirkwood
Tested-by: Shawn Guo <shawn.guo@freescale.com>
Tested-by: Tony Lindgren <tony@atomide.com> # OMAPs
Tested-by: Gregory CLEMENT <gregory.clement@free-electrons.com> # Armada XP, 375, 385
Acked-by: Sekhar Nori <nsekhar@ti.com> # DaVinci
Acked-by: Christoffer Dall <christoffer.dall@linaro.org> # kvm/hyp
Acked-by: Haojian Zhuang <haojian.zhuang@gmail.com> # PXA3xx
Acked-by: Stefano Stabellini <stefano.stabellini@eu.citrix.com> # Xen
Tested-by: Uwe Kleine-König <u.kleine-koenig@pengutronix.de> # ARMv7M
Tested-by: Simon Horman <horms+renesas@verge.net.au> # Shmobile
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
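The "ret" macro described above is defined in the shared ARM assembler header (arch/arm/include/asm/assembler.h) and therefore does not appear in the hunks below. The fragment that follows is a minimal sketch of the idea, not the patch itself: it assumes the usual __LINUX_ARM_ARCH__ build-time define and omits the condition-code variants (reteq, retne, ...) that the real macro also generates.

	@ Sketch only: pick the return instruction at assembly time.
	.macro	ret, reg
#if __LINUX_ARM_ARCH__ < 6
	mov	pc, \reg		@ pre-ARMv6 cores keep the classic return
#else
	.ifeqs	"\reg", "lr"
	bx	\reg			@ ARMv6+: preferred return sequence (A.4.1.1)
	.else
	mov	pc, \reg		@ returns through other registers stay as mov
	.endif
#endif
	.endm

With such a macro in place, call sites can be converted mechanically from "mov pc, lr" to "ret lr", which is exactly what the hunks below do for proc-arm1022.S.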
Diffstat (limited to 'arch/arm/mm/proc-arm1022.S')
-rw-r--r--	arch/arm/mm/proc-arm1022.S	34
1 file changed, 17 insertions(+), 17 deletions(-)
diff --git a/arch/arm/mm/proc-arm1022.S b/arch/arm/mm/proc-arm1022.S
index 6f01a0ae3b30..a126b7a59928 100644
--- a/arch/arm/mm/proc-arm1022.S
+++ b/arch/arm/mm/proc-arm1022.S
@@ -62,7 +62,7 @@
  * cpu_arm1022_proc_init()
  */
 ENTRY(cpu_arm1022_proc_init)
-	mov	pc, lr
+	ret	lr
 
 /*
  * cpu_arm1022_proc_fin()
@@ -72,7 +72,7 @@ ENTRY(cpu_arm1022_proc_fin)
 	bic	r0, r0, #0x1000			@ ...i............
 	bic	r0, r0, #0x000e			@ ............wca.
 	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
-	mov	pc, lr
+	ret	lr
 
 /*
  * cpu_arm1022_reset(loc)
@@ -96,7 +96,7 @@ ENTRY(cpu_arm1022_reset)
 	bic	ip, ip, #0x000f			@ ............wcam
 	bic	ip, ip, #0x1100			@ ...i...s........
 	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
-	mov	pc, r0
+	ret	r0
 ENDPROC(cpu_arm1022_reset)
 	.popsection
 
@@ -106,7 +106,7 @@ ENDPROC(cpu_arm1022_reset)
 	.align	5
 ENTRY(cpu_arm1022_do_idle)
 	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
-	mov	pc, lr
+	ret	lr
 
 /* ================================= CACHE ================================ */
 
@@ -122,7 +122,7 @@ ENTRY(arm1022_flush_icache_all)
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
 #endif
-	mov	pc, lr
+	ret	lr
 ENDPROC(arm1022_flush_icache_all)
 
 /*
@@ -156,7 +156,7 @@ __flush_whole_cache:
 	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
 #endif
 	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * flush_user_cache_range(start, end, flags)
@@ -185,7 +185,7 @@ ENTRY(arm1022_flush_user_cache_range)
 	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
 #endif
 	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * coherent_kern_range(start, end)
@@ -225,7 +225,7 @@ ENTRY(arm1022_coherent_user_range)
 	blo	1b
 	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
 	mov	r0, #0
-	mov	pc, lr
+	ret	lr
 
 /*
  * flush_kern_dcache_area(void *addr, size_t size)
@@ -246,7 +246,7 @@ ENTRY(arm1022_flush_kern_dcache_area)
 	blo	1b
 #endif
 	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * dma_inv_range(start, end)
@@ -275,7 +275,7 @@ arm1022_dma_inv_range:
 	blo	1b
 #endif
 	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * dma_clean_range(start, end)
@@ -297,7 +297,7 @@ arm1022_dma_clean_range:
 	blo	1b
 #endif
 	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * dma_flush_range(start, end)
@@ -317,7 +317,7 @@ ENTRY(arm1022_dma_flush_range)
 	blo	1b
 #endif
 	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
-	mov	pc, lr
+	ret	lr
 
 /*
  * dma_map_area(start, size, dir)
@@ -340,7 +340,7 @@ ENDPROC(arm1022_dma_map_area)
  * - dir	- DMA direction
  */
 ENTRY(arm1022_dma_unmap_area)
-	mov	pc, lr
+	ret	lr
 ENDPROC(arm1022_dma_unmap_area)
 
 	.globl	arm1022_flush_kern_cache_louis
@@ -358,7 +358,7 @@ ENTRY(cpu_arm1022_dcache_clean_area)
 	subs	r1, r1, #CACHE_DLINESIZE
 	bhi	1b
 #endif
-	mov	pc, lr
+	ret	lr
 
 /* =============================== PageTable ============================== */
 
@@ -389,7 +389,7 @@ ENTRY(cpu_arm1022_switch_mm)
 	mcr	p15, 0, r0, c2, c0, 0		@ load page table pointer
 	mcr	p15, 0, r1, c8, c7, 0		@ invalidate I & D TLBs
 #endif
-	mov	pc, lr
+	ret	lr
 
 /*
  * cpu_arm1022_set_pte_ext(ptep, pte, ext)
@@ -405,7 +405,7 @@ ENTRY(cpu_arm1022_set_pte_ext)
 	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
 #endif
 #endif	/* CONFIG_MMU */
-	mov	pc, lr
+	ret	lr
 
 	.type	__arm1022_setup, #function
 __arm1022_setup:
@@ -423,7 +423,7 @@ __arm1022_setup:
 #ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
 	orr	r0, r0, #0x4000			@ .R..............
 #endif
-	mov	pc, lr
+	ret	lr
 	.size	__arm1022_setup, . - __arm1022_setup
 
 	/*