path: root/arch/powerpc/kernel/entry_64.S
author	Paul Mackerras <paulus@samba.org>	2008-08-29 21:41:12 -0400
committer	Paul Mackerras <paulus@samba.org>	2008-09-15 14:08:35 -0400
commit	e31aa453bbc4886a7bd33e5c2afa526d6f55bd7a (patch)
tree	fefa13c13d7b1803fdaeb92143f83b1971f0ec8d /arch/powerpc/kernel/entry_64.S
parent	1f6a93e4c35e75d547b51f56ba8139ab1a91628c (diff)
powerpc: Use LOAD_REG_IMMEDIATE only for constants on 64-bit
Using LOAD_REG_IMMEDIATE to get the address of kernel symbols generates 5 instructions where LOAD_REG_ADDR can do it in one, and will generate R_PPC64_ADDR16_* relocations in the output when we get to making the kernel as a position-independent executable, which we'd rather not have to handle.

This changes various bits of assembly code to use LOAD_REG_ADDR when we need to get the address of a symbol, or to use suitable position-independent code for cases where we can't access the TOC for various reasons, or if we're not running at the address we were linked at.

It also cleans up a few minor things; there's no reason to save and restore SRR0/1 around RTAS calls, __mmu_off can get the return address from LR more conveniently than the caller can supply it in R4 (and we already assume elsewhere that EA == RA if the MMU is on in early boot), and enable_64b_mode was using 5 instructions where 2 would do.

Signed-off-by: Paul Mackerras <paulus@samba.org>
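To make the instruction counts above concrete, here is a rough sketch of how the two macros expand for 64-bit, based on the asm/ppc_asm.h definitions of roughly this era (the symbol name is a placeholder; this expansion is not part of the patch itself):

	/* LOAD_REG_IMMEDIATE(r3, sym): build the 64-bit address piecewise.
	 * Five instructions; the four @-modified immediates each carry an
	 * R_PPC64_ADDR16_* relocation in the instruction stream. */
	lis	r3,sym@highest
	ori	r3,r3,sym@higher
	rldicr	r3,r3,32,31
	oris	r3,r3,sym@h
	ori	r3,r3,sym@l

	/* LOAD_REG_ADDR(r3, sym): a single load through the TOC pointer (r2);
	 * the symbol's address is fixed up in the GOT/TOC entry rather than
	 * in the instruction stream. */
	ld	r3,sym@got(r2)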
Diffstat (limited to 'arch/powerpc/kernel/entry_64.S')
-rw-r--r--	arch/powerpc/kernel/entry_64.S	16
1 file changed, 7 insertions(+), 9 deletions(-)
diff --git a/arch/powerpc/kernel/entry_64.S b/arch/powerpc/kernel/entry_64.S
index 55445f1dba8a..fd8b4bae9b04 100644
--- a/arch/powerpc/kernel/entry_64.S
+++ b/arch/powerpc/kernel/entry_64.S
@@ -690,10 +690,6 @@ _GLOBAL(enter_rtas)
 	std	r7,_DAR(r1)
 	mfdsisr	r8
 	std	r8,_DSISR(r1)
-	mfsrr0	r9
-	std	r9,_SRR0(r1)
-	mfsrr1	r10
-	std	r10,_SRR1(r1)
 
 	/* Temporary workaround to clear CR until RTAS can be modified to
 	 * ignore all bits.
@@ -754,6 +750,10 @@ _STATIC(rtas_return_loc)
 	mfspr	r4,SPRN_SPRG3	/* Get PACA */
 	clrldi	r4,r4,2		/* convert to realmode address */
 
+	bcl	20,31,$+4
+0:	mflr	r3
+	ld	r3,(1f-0b)(r3)		/* get &.rtas_restore_regs */
+
 	mfmsr	r6
 	li	r0,MSR_RI
 	andc	r6,r6,r0
@@ -761,7 +761,6 @@ _STATIC(rtas_return_loc)
 	mtmsrd	r6
 
 	ld	r1,PACAR1(r4)	/* Restore our SP */
-	LOAD_REG_IMMEDIATE(r3,.rtas_restore_regs)
 	ld	r4,PACASAVEDMSR(r4)	/* Restore our MSR */
 
 	mtspr	SPRN_SRR0,r3
@@ -769,6 +768,9 @@ _STATIC(rtas_return_loc)
 	rfid
 	b	.	/* prevent speculative execution */
 
+	.align	3
+1:	.llong	.rtas_restore_regs
+
 _STATIC(rtas_restore_regs)
 	/* relocation is on at this point */
 	REST_GPR(2, r1)			/* Restore the TOC */
@@ -788,10 +790,6 @@ _STATIC(rtas_restore_regs)
 	mtdar	r7
 	ld	r8,_DSISR(r1)
 	mtdsisr	r8
-	ld	r9,_SRR0(r1)
-	mtsrr0	r9
-	ld	r10,_SRR1(r1)
-	mtsrr1	r10
 
 	addi	r1,r1,RTAS_FRAME_SIZE	/* Unstack our frame */
 	ld	r0,16(r1)	/* get return address */
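The lines added to rtas_return_loc use the common PowerPC get-current-address idiom so the code works wherever it happens to be running (here, in real mode on return from RTAS, where neither the TOC nor the linked address can be relied on). A minimal sketch of the idiom follows; .some_target is a placeholder, and the patch itself feeds the loaded address into SRR0 for rfid rather than branching through CTR:

	bcl	20,31,$+4	/* branch-always-and-link to the next instruction;
				 * this particular form is used only to read the
				 * current address, not as a subroutine call */
0:	mflr	r3		/* r3 = runtime address of label 0 */
	ld	r3,(1f-0b)(r3)	/* PC-relative load of the 64-bit constant at 1: */
	mtctr	r3
	bctr			/* jump to the absolute address just loaded */

	.align	3
1:	.llong	.some_target	/* fixed up by a single 64-bit data relocation,
				 * not R_PPC64_ADDR16_* relocations on instructions */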