about summary refs log tree commit diff stats
path: root/arch/arm/boot/compressed/head.S
diff options
context:
space:
mode:
Diffstat (limited to 'arch/arm/boot/compressed/head.S')
-rw-r--r-- arch/arm/boot/compressed/head.S | 79
1 file changed, 58 insertions(+), 21 deletions(-)
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index adf583cd0c35..f9da41921c52 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -179,16 +179,29 @@ not_angel:
179 bl cache_on 179 bl cache_on
180 180
181restart: adr r0, LC0 181restart: adr r0, LC0
182 ldmia r0, {r1, r2, r3, r5, r6, r9, r11, r12} 182 ldmia r0, {r1, r2, r3, r6, r10, r11, r12}
183 ldr sp, [r0, #32] 183 ldr sp, [r0, #28]
184 184
185 /* 185 /*
186 * We might be running at a different address. We need 186 * We might be running at a different address. We need
187 * to fix up various pointers. 187 * to fix up various pointers.
188 */ 188 */
189 sub r0, r0, r1 @ calculate the delta offset 189 sub r0, r0, r1 @ calculate the delta offset
190 add r5, r5, r0 @ _start
191 add r6, r6, r0 @ _edata 190 add r6, r6, r0 @ _edata
191 add r10, r10, r0 @ inflated kernel size location
192
193 /*
194 * The kernel build system appends the size of the
195 * decompressed kernel at the end of the compressed data
196 * in little-endian form.
197 */
198 ldrb r9, [r10, #0]
199 ldrb lr, [r10, #1]
200 orr r9, r9, lr, lsl #8
201 ldrb lr, [r10, #2]
202 ldrb r10, [r10, #3]
203 orr r9, r9, lr, lsl #16
204 orr r9, r9, r10, lsl #24
192 205
193#ifndef CONFIG_ZBOOT_ROM 206#ifndef CONFIG_ZBOOT_ROM
194 /* malloc space is above the relocated stack (64k max) */ 207 /* malloc space is above the relocated stack (64k max) */
@@ -206,31 +219,40 @@ restart: adr r0, LC0
206/* 219/*
207 * Check to see if we will overwrite ourselves. 220 * Check to see if we will overwrite ourselves.
208 * r4 = final kernel address 221 * r4 = final kernel address
209 * r5 = start of this image
210 * r9 = size of decompressed image 222 * r9 = size of decompressed image
211 * r10 = end of this image, including bss/stack/malloc space if non XIP 223 * r10 = end of this image, including bss/stack/malloc space if non XIP
212 * We basically want: 224 * We basically want:
213 * r4 >= r10 -> OK 225 * r4 - 16k page directory >= r10 -> OK
214 * r4 + image length <= r5 -> OK 226 * r4 + image length <= current position (pc) -> OK
215 */ 227 */
228 add r10, r10, #16384
216 cmp r4, r10 229 cmp r4, r10
217 bhs wont_overwrite 230 bhs wont_overwrite
218 add r10, r4, r9 231 add r10, r4, r9
219 cmp r10, r5 232 ARM( cmp r10, pc )
233 THUMB( mov lr, pc )
234 THUMB( cmp r10, lr )
220 bls wont_overwrite 235 bls wont_overwrite
221 236
222/* 237/*
223 * Relocate ourselves past the end of the decompressed kernel. 238 * Relocate ourselves past the end of the decompressed kernel.
224 * r5 = start of this image
225 * r6 = _edata 239 * r6 = _edata
226 * r10 = end of the decompressed kernel 240 * r10 = end of the decompressed kernel
227 * Because we always copy ahead, we need to do it from the end and go 241 * Because we always copy ahead, we need to do it from the end and go
228 * backward in case the source and destination overlap. 242 * backward in case the source and destination overlap.
229 */ 243 */
230 /* Round up to next 256-byte boundary. */ 244 /*
231 add r10, r10, #256 245 * Bump to the next 256-byte boundary with the size of
246 * the relocation code added. This avoids overwriting
247 * ourself when the offset is small.
248 */
249 add r10, r10, #((reloc_code_end - restart + 256) & ~255)
232 bic r10, r10, #255 250 bic r10, r10, #255
233 251
252 /* Get start of code we want to copy and align it down. */
253 adr r5, restart
254 bic r5, r5, #31
255
234 sub r9, r6, r5 @ size to copy 256 sub r9, r6, r5 @ size to copy
235 add r9, r9, #31 @ rounded up to a multiple 257 add r9, r9, #31 @ rounded up to a multiple
236 bic r9, r9, #31 @ ... of 32 bytes 258 bic r9, r9, #31 @ ... of 32 bytes
@@ -245,6 +267,11 @@ restart: adr r0, LC0
245 /* Preserve offset to relocated code. */ 267 /* Preserve offset to relocated code. */
246 sub r6, r9, r6 268 sub r6, r9, r6
247 269
270#ifndef CONFIG_ZBOOT_ROM
271 /* cache_clean_flush may use the stack, so relocate it */
272 add sp, sp, r6
273#endif
274
248 bl cache_clean_flush 275 bl cache_clean_flush
249 276
250 adr r0, BSYM(restart) 277 adr r0, BSYM(restart)
@@ -333,12 +360,11 @@ not_relocated: mov r0, #0
333LC0: .word LC0 @ r1 360LC0: .word LC0 @ r1
334 .word __bss_start @ r2 361 .word __bss_start @ r2
335 .word _end @ r3 362 .word _end @ r3
336 .word _start @ r5
337 .word _edata @ r6 363 .word _edata @ r6
338 .word _image_size @ r9 364 .word input_data_end - 4 @ r10 (inflated size location)
339 .word _got_start @ r11 365 .word _got_start @ r11
340 .word _got_end @ ip 366 .word _got_end @ ip
341 .word user_stack_end @ sp 367 .word .L_user_stack_end @ sp
342 .size LC0, . - LC0 368 .size LC0, . - LC0
343 369
344#ifdef CONFIG_ARCH_RPC 370#ifdef CONFIG_ARCH_RPC
@@ -447,7 +473,11 @@ __setup_mmu: sub r3, r4, #16384 @ Page directory size
447 orr r1, r1, #3 << 10 473 orr r1, r1, #3 << 10
448 add r2, r3, #16384 474 add r2, r3, #16384
4491: cmp r1, r9 @ if virt > start of RAM 4751: cmp r1, r9 @ if virt > start of RAM
476#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
477 orrhs r1, r1, #0x08 @ set cacheable
478#else
450 orrhs r1, r1, #0x0c @ set cacheable, bufferable 479 orrhs r1, r1, #0x0c @ set cacheable, bufferable
480#endif
451 cmp r1, r10 @ if virt > end of RAM 481 cmp r1, r10 @ if virt > end of RAM
452 bichs r1, r1, #0x0c @ clear cacheable, bufferable 482 bichs r1, r1, #0x0c @ clear cacheable, bufferable
453 str r1, [r0], #4 @ 1:1 mapping 483 str r1, [r0], #4 @ 1:1 mapping
@@ -472,6 +502,12 @@ __setup_mmu: sub r3, r4, #16384 @ Page directory size
472 mov pc, lr 502 mov pc, lr
473ENDPROC(__setup_mmu) 503ENDPROC(__setup_mmu)
474 504
505__arm926ejs_mmu_cache_on:
506#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
507 mov r0, #4 @ put dcache in WT mode
508 mcr p15, 7, r0, c15, c0, 0
509#endif
510
475__armv4_mmu_cache_on: 511__armv4_mmu_cache_on:
476 mov r12, lr 512 mov r12, lr
477#ifdef CONFIG_MMU 513#ifdef CONFIG_MMU
@@ -653,6 +689,12 @@ proc_types:
653 W(b) __armv4_mpu_cache_off 689 W(b) __armv4_mpu_cache_off
654 W(b) __armv4_mpu_cache_flush 690 W(b) __armv4_mpu_cache_flush
655 691
692 .word 0x41069260 @ ARM926EJ-S (v5TEJ)
693 .word 0xff0ffff0
694 b __arm926ejs_mmu_cache_on
695 b __armv4_mmu_cache_off
696 b __armv5tej_mmu_cache_flush
697
656 .word 0x00007000 @ ARM7 IDs 698 .word 0x00007000 @ ARM7 IDs
657 .word 0x0000f000 699 .word 0x0000f000
658 mov pc, lr 700 mov pc, lr
@@ -735,12 +777,6 @@ proc_types:
735 W(b) __armv4_mmu_cache_off 777 W(b) __armv4_mmu_cache_off
736 W(b) __armv6_mmu_cache_flush 778 W(b) __armv6_mmu_cache_flush
737 779
738 .word 0x560f5810 @ Marvell PJ4 ARMv6
739 .word 0xff0ffff0
740 W(b) __armv4_mmu_cache_on
741 W(b) __armv4_mmu_cache_off
742 W(b) __armv6_mmu_cache_flush
743
744 .word 0x000f0000 @ new CPU Id 780 .word 0x000f0000 @ new CPU Id
745 .word 0x000f0000 781 .word 0x000f0000
746 W(b) __armv7_mmu_cache_on 782 W(b) __armv7_mmu_cache_on
@@ -1062,8 +1098,9 @@ memdump: mov r12, r0
1062#endif 1098#endif
1063 1099
1064 .ltorg 1100 .ltorg
1101reloc_code_end:
1065 1102
1066 .align 1103 .align
1067 .section ".stack", "aw", %nobits 1104 .section ".stack", "aw", %nobits
1068user_stack: .space 4096 1105.L_user_stack: .space 4096
1069user_stack_end: 1106.L_user_stack_end: