diff options
author | Paul Mackerras <paulus@samba.org> | 2008-08-29 21:41:12 -0400 |
---|---|---|
committer | Paul Mackerras <paulus@samba.org> | 2008-09-15 14:08:35 -0400 |
commit | e31aa453bbc4886a7bd33e5c2afa526d6f55bd7a (patch) | |
tree | fefa13c13d7b1803fdaeb92143f83b1971f0ec8d /arch/powerpc/kernel/misc.S | |
parent | 1f6a93e4c35e75d547b51f56ba8139ab1a91628c (diff) |
powerpc: Use LOAD_REG_IMMEDIATE only for constants on 64-bit
Using LOAD_REG_IMMEDIATE to get the address of kernel symbols
generates 5 instructions where LOAD_REG_ADDR can do it in one,
and will generate R_PPC64_ADDR16_* relocations in the output when
we get to making the kernel as a position-independent executable,
which we'd rather not have to handle. This changes various bits
of assembly code to use LOAD_REG_ADDR when we need to get the
address of a symbol, or to use suitable position-independent code
for cases where we can't access the TOC for various reasons, or
if we're not running at the address we were linked at.
It also cleans up a few minor things; there's no reason to save and
restore SRR0/1 around RTAS calls, __mmu_off can get the return
address from LR more conveniently than the caller can supply it in
R4 (and we already assume elsewhere that EA == RA if the MMU is on
in early boot), and enable_64b_mode was using 5 instructions where
2 would do.
Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/kernel/misc.S')
-rw-r--r-- | arch/powerpc/kernel/misc.S | 10 |
1 file changed, 8 insertions, 2 deletions
diff --git a/arch/powerpc/kernel/misc.S b/arch/powerpc/kernel/misc.S index 85cb6f340846..2d29752cbe16 100644 --- a/arch/powerpc/kernel/misc.S +++ b/arch/powerpc/kernel/misc.S | |||
@@ -31,11 +31,14 @@ _GLOBAL(reloc_offset) | |||
31 | mflr r0 | 31 | mflr r0 |
32 | bl 1f | 32 | bl 1f |
33 | 1: mflr r3 | 33 | 1: mflr r3 |
34 | LOAD_REG_IMMEDIATE(r4,1b) | 34 | PPC_LL r4,(2f-1b)(r3) |
35 | subf r3,r4,r3 | 35 | subf r3,r4,r3 |
36 | mtlr r0 | 36 | mtlr r0 |
37 | blr | 37 | blr |
38 | 38 | ||
39 | .align 3 | ||
40 | 2: PPC_LONG 1b | ||
41 | |||
39 | /* | 42 | /* |
40 | * add_reloc_offset(x) returns x + reloc_offset(). | 43 | * add_reloc_offset(x) returns x + reloc_offset(). |
41 | */ | 44 | */ |
@@ -43,12 +46,15 @@ _GLOBAL(add_reloc_offset) | |||
43 | mflr r0 | 46 | mflr r0 |
44 | bl 1f | 47 | bl 1f |
45 | 1: mflr r5 | 48 | 1: mflr r5 |
46 | LOAD_REG_IMMEDIATE(r4,1b) | 49 | PPC_LL r4,(2f-1b)(r5) |
47 | subf r5,r4,r5 | 50 | subf r5,r4,r5 |
48 | add r3,r3,r5 | 51 | add r3,r3,r5 |
49 | mtlr r0 | 52 | mtlr r0 |
50 | blr | 53 | blr |
51 | 54 | ||
55 | .align 3 | ||
56 | 2: PPC_LONG 1b | ||
57 | |||
52 | _GLOBAL(kernel_execve) | 58 | _GLOBAL(kernel_execve) |
53 | li r0,__NR_execve | 59 | li r0,__NR_execve |
54 | sc | 60 | sc |