Diffstat (limited to 'arch/arm/boot/compressed/head.S')
-rw-r--r-- | arch/arm/boot/compressed/head.S | 184
1 file changed, 116 insertions(+), 68 deletions(-)
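
The hunks below convert the decompressor to the unified ARM/Thumb-2 assembler syntax. The ARM(), THUMB() and W() wrappers used throughout come from arch/arm/include/asm/unified.h; the sketch below shows roughly how they behave (paraphrased from memory rather than quoted from this tree, so treat the exact spelling as an assumption):

    #ifdef CONFIG_THUMB2_KERNEL
    #define ARM(x...)			/* dropped when building a Thumb-2 kernel */
    #define THUMB(x...)	x		/* kept only for Thumb-2 */
    #define W(instr)	instr.w		/* force the 32-bit (wide) encoding */
    #else
    #define ARM(x...)	x
    #define THUMB(x...)			/* dropped when building an ARM kernel */
    #define W(instr)	instr
    #endif

In the proc_types table the W(b) and THUMB( nop ) rewrites keep every entry four bytes wide in both instruction sets, since the table is walked with a fixed stride (add r12, r12, #4*5) and a plain Thumb-2 b or mov pc, lr could otherwise assemble to a 16-bit encoding.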
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index 4515728c5345..fa6fbf45cf3b 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -140,7 +140,8 @@ start:
 		tst	r2, #3			@ not user?
 		bne	not_angel
 		mov	r0, #0x17		@ angel_SWIreason_EnterSVC
-		swi	0x123456		@ angel_SWI_ARM
+ ARM(		swi	0x123456	)	@ angel_SWI_ARM
+ THUMB(		svc	0xab		)	@ angel_SWI_THUMB
 not_angel:
 		mrs	r2, cpsr		@ turn off interrupts to
 		orr	r2, r2, #0xc0		@ prevent angel from running
@@ -161,7 +162,9 @@ not_angel:
 
 		.text
 		adr	r0, LC0
-		ldmia	r0, {r1, r2, r3, r4, r5, r6, ip, sp}
+ ARM(		ldmia	r0, {r1, r2, r3, r4, r5, r6, ip, sp}	)
+ THUMB(		ldmia	r0, {r1, r2, r3, r4, r5, r6, ip}	)
+ THUMB(		ldr	sp, [r0, #28]				)
 		subs	r0, r0, r1		@ calculate the delta offset
 
 						@ if delta is zero, we are
@@ -263,22 +266,25 @@ not_relocated:	mov	r0, #0
  * r6 = processor ID
  * r7 = architecture ID
  * r8 = atags pointer
- * r9-r14 = corrupted
+ * r9-r12,r14 = corrupted
  */
 		add	r1, r5, r0		@ end of decompressed kernel
 		adr	r2, reloc_start
 		ldr	r3, LC1
 		add	r3, r2, r3
-1:		ldmia	r2!, {r9 - r14}		@ copy relocation code
-		stmia	r1!, {r9 - r14}
-		ldmia	r2!, {r9 - r14}
-		stmia	r1!, {r9 - r14}
+1:		ldmia	r2!, {r9 - r12, r14}	@ copy relocation code
+		stmia	r1!, {r9 - r12, r14}
+		ldmia	r2!, {r9 - r12, r14}
+		stmia	r1!, {r9 - r12, r14}
 		cmp	r2, r3
 		blo	1b
-		add	sp, r1, #128		@ relocate the stack
+		mov	sp, r1
+		add	sp, sp, #128		@ relocate the stack
 
 		bl	cache_clean_flush
-		add	pc, r5, r0		@ call relocation code
+ ARM(		add	pc, r5, r0	)	@ call relocation code
+ THUMB(		add	r12, r5, r0	)
+ THUMB(		mov	pc, r12		)	@ call relocation code
 
 /*
  * We're not in danger of overwriting ourselves. Do this the simple way.
@@ -291,6 +297,7 @@ wont_overwrite:	mov	r0, r4
 		bl	decompress_kernel
 		b	call_kernel
 
+		.align	2
 		.type	LC0, #object
 LC0:		.word	LC0			@ r1
 		.word	__bss_start		@ r2
@@ -431,6 +438,7 @@ ENDPROC(__setup_mmu)
 
 __armv4_mmu_cache_on:
 		mov	r12, lr
+#ifdef CONFIG_MMU
 		bl	__setup_mmu
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
@@ -444,10 +452,12 @@ __armv4_mmu_cache_on:
 		bl	__common_mmu_cache_on
 		mov	r0, #0
 		mcr	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
+#endif
 		mov	pc, r12
 
__armv7_mmu_cache_on:
 		mov	r12, lr
+#ifdef CONFIG_MMU
 		mrc	p15, 0, r11, c0, c1, 4	@ read ID_MMFR0
 		tst	r11, #0xf		@ VMSA
 		blne	__setup_mmu
@@ -455,9 +465,11 @@ __armv7_mmu_cache_on:
 		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
 		tst	r11, #0xf		@ VMSA
 		mcrne	p15, 0, r0, c8, c7, 0	@ flush I,D TLBs
+#endif
 		mrc	p15, 0, r0, c1, c0, 0	@ read control reg
 		orr	r0, r0, #0x5000		@ I-cache enable, RR cache replacement
 		orr	r0, r0, #0x003c		@ write buffer
+#ifdef CONFIG_MMU
 #ifdef CONFIG_CPU_ENDIAN_BE8
 		orr	r0, r0, #1 << 25	@ big-endian page tables
 #endif
@@ -465,6 +477,7 @@ __armv7_mmu_cache_on:
 		movne	r1, #-1
 		mcrne	p15, 0, r3, c2, c0, 0	@ load page table pointer
 		mcrne	p15, 0, r1, c3, c0, 0	@ load domain access control
+#endif
 		mcr	p15, 0, r0, c1, c0, 0	@ load control register
 		mrc	p15, 0, r0, c1, c0, 0	@ and read it back
 		mov	r0, #0
@@ -498,6 +511,7 @@ __arm6_mmu_cache_on:
 		mov	pc, r12
 
 __common_mmu_cache_on:
+#ifndef CONFIG_THUMB2_KERNEL
 #ifndef DEBUG
 		orr	r0, r0, #0x000d		@ Write buffer, mmu
 #endif
@@ -509,6 +523,7 @@ __common_mmu_cache_on:
 1:		mcr	p15, 0, r0, c1, c0, 0	@ load control register
 		mrc	p15, 0, r0, c1, c0, 0	@ and read it back to
 		sub	pc, lr, r0, lsr #32	@ properly flush pipeline
+#endif
 
 /*
  * All code following this line is relocatable. It is relocated by
@@ -522,7 +537,7 @@ __common_mmu_cache_on:
  * r6 = processor ID
  * r7 = architecture ID
  * r8 = atags pointer
- * r9-r14 = corrupted
+ * r9-r12,r14 = corrupted
  */
 		.align	5
 reloc_start:	add	r9, r5, r0
@@ -531,13 +546,14 @@ reloc_start:	add	r9, r5, r0
 		mov	r1, r4
 1:
 		.rept	4
-		ldmia	r5!, {r0, r2, r3, r10 - r14}	@ relocate kernel
-		stmia	r1!, {r0, r2, r3, r10 - r14}
+		ldmia	r5!, {r0, r2, r3, r10 - r12, r14}	@ relocate kernel
+		stmia	r1!, {r0, r2, r3, r10 - r12, r14}
 		.endr
 
 		cmp	r5, r9
 		blo	1b
-		add	sp, r1, #128		@ relocate the stack
+		mov	sp, r1
+		add	sp, sp, #128		@ relocate the stack
 		debug_reloc_end
 
 call_kernel:	bl	cache_clean_flush
@@ -571,7 +587,9 @@ call_cache_fn:	adr	r12, proc_types
 		ldr	r2, [r12, #4]		@ get mask
 		eor	r1, r1, r6		@ (real ^ match)
 		tst	r1, r2			@ & mask
-		addeq	pc, r12, r3		@ call cache function
+ ARM(		addeq	pc, r12, r3	)	@ call cache function
+ THUMB(		addeq	r12, r3		)
+ THUMB(		moveq	pc, r12		)	@ call cache function
 		add	r12, r12, #4*5
 		b	1b
 
@@ -589,13 +607,15 @@ call_cache_fn:	adr	r12, proc_types
 * methods. Writeback caches _must_ have the flush method
 * defined.
 */
+		.align	2
 		.type	proc_types,#object
proc_types:
 		.word	0x41560600		@ ARM6/610
 		.word	0xffffffe0
-		b	__arm6_mmu_cache_off	@ works, but slow
-		b	__arm6_mmu_cache_off
+		W(b)	__arm6_mmu_cache_off	@ works, but slow
+		W(b)	__arm6_mmu_cache_off
 		mov	pc, lr
+ THUMB(		nop				)
 @		b	__arm6_mmu_cache_on	@ untested
 @		b	__arm6_mmu_cache_off
 @		b	__armv3_mmu_cache_flush
@@ -603,76 +623,84 @@ proc_types:
 		.word	0x00000000		@ old ARM ID
 		.word	0x0000f000
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 
 		.word	0x41007000		@ ARM7/710
 		.word	0xfff8fe00
-		b	__arm7_mmu_cache_off
-		b	__arm7_mmu_cache_off
+		W(b)	__arm7_mmu_cache_off
+		W(b)	__arm7_mmu_cache_off
 		mov	pc, lr
+ THUMB(		nop				)
 
 		.word	0x41807200		@ ARM720T (writethrough)
 		.word	0xffffff00
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
 		mov	pc, lr
+ THUMB(		nop				)
 
 		.word	0x41007400		@ ARM74x
 		.word	0xff00ff00
-		b	__armv3_mpu_cache_on
-		b	__armv3_mpu_cache_off
-		b	__armv3_mpu_cache_flush
+		W(b)	__armv3_mpu_cache_on
+		W(b)	__armv3_mpu_cache_off
+		W(b)	__armv3_mpu_cache_flush
 
 		.word	0x41009400		@ ARM94x
 		.word	0xff00ff00
-		b	__armv4_mpu_cache_on
-		b	__armv4_mpu_cache_off
-		b	__armv4_mpu_cache_flush
+		W(b)	__armv4_mpu_cache_on
+		W(b)	__armv4_mpu_cache_off
+		W(b)	__armv4_mpu_cache_flush
 
 		.word	0x00007000		@ ARM7 IDs
 		.word	0x0000f000
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 
 		@ Everything from here on will be the new ID system.
 
 		.word	0x4401a100		@ sa110 / sa1100
 		.word	0xffffffe0
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x6901b110		@ sa1110
 		.word	0xfffffff0
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x56056930
 		.word	0xff0ffff0		@ PXA935
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x56158000		@ PXA168
 		.word	0xfffff000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv5tej_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv5tej_mmu_cache_flush
 
 		.word	0x56056930
 		.word	0xff0ffff0		@ PXA935
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x56050000		@ Feroceon
 		.word	0xff0f0000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv5tej_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv5tej_mmu_cache_flush
 
 #ifdef CONFIG_CPU_FEROCEON_OLD_ID
 		/* this conflicts with the standard ARMv5TE entry */
@@ -685,47 +713,50 @@ proc_types:
 
 		.word	0x66015261		@ FA526
 		.word	0xff01fff1
-		b	__fa526_cache_on
-		b	__armv4_mmu_cache_off
-		b	__fa526_cache_flush
+		W(b)	__fa526_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__fa526_cache_flush
 
 		@ These match on the architecture ID
 
 		.word	0x00020000		@ ARMv4T
 		.word	0x000f0000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x00050000		@ ARMv5TE
 		.word	0x000f0000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv4_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x00060000		@ ARMv5TEJ
 		.word	0x000f0000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv5tej_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv4_mmu_cache_flush
 
 		.word	0x0007b000		@ ARMv6
 		.word	0x000ff000
-		b	__armv4_mmu_cache_on
-		b	__armv4_mmu_cache_off
-		b	__armv6_mmu_cache_flush
+		W(b)	__armv4_mmu_cache_on
+		W(b)	__armv4_mmu_cache_off
+		W(b)	__armv6_mmu_cache_flush
 
 		.word	0x000f0000		@ new CPU Id
 		.word	0x000f0000
-		b	__armv7_mmu_cache_on
-		b	__armv7_mmu_cache_off
-		b	__armv7_mmu_cache_flush
+		W(b)	__armv7_mmu_cache_on
+		W(b)	__armv7_mmu_cache_off
+		W(b)	__armv7_mmu_cache_flush
 
 		.word	0			@ unrecognised type
 		.word	0
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
+ THUMB(		nop				)
 
 		.size	proc_types, . - proc_types
 
@@ -760,22 +791,30 @@ __armv3_mpu_cache_off:
 		mov	pc, lr
 
 __armv4_mmu_cache_off:
+#ifdef CONFIG_MMU
 		mrc	p15, 0, r0, c1, c0
 		bic	r0, r0, #0x000d
 		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c7	@ invalidate whole cache v4
 		mcr	p15, 0, r0, c8, c7	@ invalidate whole TLB v4
+#endif
 		mov	pc, lr
 
 __armv7_mmu_cache_off:
 		mrc	p15, 0, r0, c1, c0
+#ifdef CONFIG_MMU
 		bic	r0, r0, #0x000d
+#else
+		bic	r0, r0, #0x000c
+#endif
 		mcr	p15, 0, r0, c1, c0	@ turn MMU and cache off
 		mov	r12, lr
 		bl	__armv7_mmu_cache_flush
 		mov	r0, #0
+#ifdef CONFIG_MMU
 		mcr	p15, 0, r0, c8, c7, 0	@ invalidate whole TLB
+#endif
 		mcr	p15, 0, r0, c7, c5, 6	@ invalidate BTC
 		mcr	p15, 0, r0, c7, c10, 4	@ DSB
 		mcr	p15, 0, r0, c7, c5, 4	@ ISB
@@ -852,7 +891,7 @@ __armv7_mmu_cache_flush:
 		b	iflush
hierarchical:
 		mcr	p15, 0, r10, c7, c10, 5	@ DMB
-		stmfd	sp!, {r0-r5, r7, r9, r11}
+		stmfd	sp!, {r0-r7, r9-r11}
 		mrc	p15, 1, r0, c0, c0, 1	@ read clidr
 		ands	r3, r0, #0x7000000	@ extract loc from clidr
 		mov	r3, r3, lsr #23		@ left align loc bit field
@@ -877,8 +916,12 @@ loop1:
loop2:
 		mov	r9, r4			@ create working copy of max way size
loop3:
-		orr	r11, r10, r9, lsl r5	@ factor way and cache number into r11
-		orr	r11, r11, r7, lsl r2	@ factor index number into r11
+ ARM(		orr	r11, r10, r9, lsl r5	)	@ factor way and cache number into r11
+ ARM(		orr	r11, r11, r7, lsl r2	)	@ factor index number into r11
+ THUMB(		lsl	r6, r9, r5		)
+ THUMB(		orr	r11, r10, r6		)	@ factor way and cache number into r11
+ THUMB(		lsl	r6, r7, r2		)
+ THUMB(		orr	r11, r11, r6		)	@ factor index number into r11
 		mcr	p15, 0, r11, c7, c14, 2	@ clean & invalidate by set/way
 		subs	r9, r9, #1		@ decrement the way
 		bge	loop3
@@ -889,7 +932,7 @@ skip:
 		cmp	r3, r10
 		bgt	loop1
finished:
-		ldmfd	sp!, {r0-r5, r7, r9, r11}
+		ldmfd	sp!, {r0-r7, r9-r11}
 		mov	r10, #0			@ swith back to cache level 0
 		mcr	p15, 2, r10, c0, c0, 0	@ select current cache level in cssr
iflush:
@@ -923,9 +966,13 @@ __armv4_mmu_cache_flush:
 		mov	r11, #8
 		mov	r11, r11, lsl r3	@ cache line size in bytes
no_cache_id:
-		bic	r1, pc, #63		@ align to longest cache line
+		mov	r1, pc
+		bic	r1, r1, #63		@ align to longest cache line
 		add	r2, r1, r2
-1:		ldr	r3, [r1], r11		@ s/w flush D cache
+1:
+ ARM(		ldr	r3, [r1], r11	)	@ s/w flush D cache
+ THUMB(		ldr	r3, [r1]	)	@ s/w flush D cache
+ THUMB(		add	r1, r1, r11	)
 		teq	r1, r2
 		bne	1b
 
@@ -945,6 +992,7 @@ __armv3_mpu_cache_flush:
 * memory, which again must be relocatable.
 */
 #ifdef DEBUG
+		.align	2
 		.type	phexbuf,#object
phexbuf:	.space	12
 		.size	phexbuf, . - phexbuf
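
A note on the recurring register-list and stack-pointer rewrites above (the encoding constraints are summarised from the ARMv7 architecture manual, from memory): Thumb-2 ldm/stm may not include sp (r13) in the transfer list, and an immediate add can only write sp when sp is also the source register, which is why the r9-r14 blocks become r9-r12,r14 and the stack relocation is split into two steps. Side by side:

    @ ARM-only forms replaced by the patch:
    		ldmia	r2!, {r9 - r14}		@ r13 (sp) in an ldm/stm list: not allowed in Thumb-2
    		add	sp, r1, #128		@ add to sp from another register: no Thumb-2 encoding

    @ unified forms used instead:
    		ldmia	r2!, {r9 - r12, r14}
    		mov	sp, r1
    		add	sp, sp, #128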