author		David Gibson <david@gibson.dropbear.id.au>	2006-01-12 22:56:25 -0500
committer	Paul Mackerras <paulus@samba.org>	2006-01-13 05:16:23 -0500
commit		e58c3495e6007af59382540bb21ee941e470d88d (patch)
tree		24b559cb768bfa5cf4bdef69f2943b081a1f5afa /arch/powerpc/kernel/fpu.S
parent		7e78e5e502d4f220d24c6f738f2fdb078ad33607 (diff)
[PATCH] powerpc: Cleanup LOADADDR etc. asm macros
This patch consolidates the variety of macros used for loading 32 or
64-bit constants in assembler (LOADADDR, LOADBASE, SET_REG_TO_*).  The
idea is to make the set of macros consistent across 32 and 64 bit and
to make it more obvious which is the appropriate one to use in a given
situation.  The new macros and their semantics are described in the
comments in ppc_asm.h.

In the process, we change several places that were unnecessarily using
immediate loads on ppc64 to use the GOT/TOC.  Likewise we clean up a
couple of places where we were clumsily subtracting PAGE_OFFSET with
asm instructions, using assemble-time arithmetic or the toreal() macro
instead.

Signed-off-by: David Gibson <dwg@au1.ibm.com>
Signed-off-by: Paul Mackerras <paulus@samba.org>
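For context, the pair used in this file is LOAD_REG_ADDRBASE()/ADDROFF(),
which splits an address into a base load plus a load/store displacement.
A minimal sketch of how that pair plausibly expands, assuming the GOT/TOC
scheme the message describes (the authoritative definitions and their
comments live in ppc_asm.h, not here):

	/* Sketch only -- not the verbatim kernel definitions. */
	#ifdef __powerpc64__
	/* 64-bit: the full address comes from the TOC entry, so the
	 * base load does all the work and the offset is zero. */
	#define LOAD_REG_ADDRBASE(reg, name)	ld	reg,name@got(r2)
	#define ADDROFF(name)			0
	#else
	/* 32-bit: build the address from high-adjusted and low halves
	 * of the symbol's value. */
	#define LOAD_REG_ADDRBASE(reg, name)	lis	reg,name@ha
	#define ADDROFF(name)			name@l
	#endif

Under this split, LOAD_REG_ADDRBASE(r3, sym) followed by
PPC_LL r4,ADDROFF(sym)(r3) is a lis/lwz pair on 32-bit and a TOC load
plus a zero-offset load on 64-bit, which is why the hunks below always
touch the base and offset macros together.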
Diffstat (limited to 'arch/powerpc/kernel/fpu.S')
-rw-r--r--	arch/powerpc/kernel/fpu.S	10
1 file changed, 5 insertions, 5 deletions
diff --git a/arch/powerpc/kernel/fpu.S b/arch/powerpc/kernel/fpu.S
index b780b42c95fc..e4362dfa37fb 100644
--- a/arch/powerpc/kernel/fpu.S
+++ b/arch/powerpc/kernel/fpu.S
@@ -39,9 +39,9 @@ _GLOBAL(load_up_fpu)
  * to another.  Instead we call giveup_fpu in switch_to.
  */
 #ifndef CONFIG_SMP
-	LOADBASE(r3, last_task_used_math)
+	LOAD_REG_ADDRBASE(r3, last_task_used_math)
 	toreal(r3)
-	PPC_LL	r4,OFF(last_task_used_math)(r3)
+	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
 	PPC_LCMPI	0,r4,0
 	beq	1f
 	toreal(r4)
@@ -77,7 +77,7 @@ _GLOBAL(load_up_fpu)
 #ifndef CONFIG_SMP
 	subi	r4,r5,THREAD
 	fromreal(r4)
-	PPC_STL	r4,OFF(last_task_used_math)(r3)
+	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)
 #endif /* CONFIG_SMP */
 	/* restore registers and return */
 	/* we haven't used ctr or xer or lr */
@@ -113,8 +113,8 @@ _GLOBAL(giveup_fpu)
 1:
 #ifndef CONFIG_SMP
 	li	r5,0
-	LOADBASE(r4,last_task_used_math)
-	PPC_STL	r5,OFF(last_task_used_math)(r4)
+	LOAD_REG_ADDRBASE(r4,last_task_used_math)
+	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)
 #endif /* CONFIG_SMP */
 	blr
 