about summary refs log tree commit diff stats
path: root/arch/powerpc/kernel/fpu.S
diff options
context:
space:
mode:
author: Paul Mackerras <paulus@samba.org> 2005-10-05 20:59:19 -0400
committer: Paul Mackerras <paulus@samba.org> 2005-10-05 20:59:19 -0400
commitb85a046af3a260e079505e8023ccd10e01cf4f2b (patch)
tree2f5f1af4db85fca6fc88902840463b107721cb14 /arch/powerpc/kernel/fpu.S
parent6ce52e6438fd2921891648ceab700d9b867e5ed2 (diff)
powerpc: Define 32/64 bit asm macros and use them in fpu.S
These macros help in writing assembly code that works for both ppc32 and ppc64. With this we now have a common fpu.S. This takes out load_up_fpu from head_64.S. Signed-off-by: Paul Mackerras <paulus@samba.org>
Diffstat (limited to 'arch/powerpc/kernel/fpu.S')
-rw-r--r-- arch/powerpc/kernel/fpu.S | 72
1 file changed, 29 insertions(+), 43 deletions(-)
diff --git a/arch/powerpc/kernel/fpu.S b/arch/powerpc/kernel/fpu.S
index 665d7d34304c..04e95e810b21 100644
--- a/arch/powerpc/kernel/fpu.S
+++ b/arch/powerpc/kernel/fpu.S
@@ -27,13 +27,9 @@
27 * Load up this task's FP registers from its thread_struct, 27 * Load up this task's FP registers from its thread_struct,
28 * enable the FPU for the current task and return to the task. 28 * enable the FPU for the current task and return to the task.
29 */ 29 */
30 .globl load_up_fpu 30_GLOBAL(load_up_fpu)
31load_up_fpu:
32 mfmsr r5 31 mfmsr r5
33 ori r5,r5,MSR_FP 32 ori r5,r5,MSR_FP
34#ifdef CONFIG_PPC64BRIDGE
35 clrldi r5,r5,1 /* turn off 64-bit mode */
36#endif /* CONFIG_PPC64BRIDGE */
37 SYNC 33 SYNC
38 MTMSRD(r5) /* enable use of fpu now */ 34 MTMSRD(r5) /* enable use of fpu now */
39 isync 35 isync
@@ -43,67 +39,57 @@ load_up_fpu:
43 * to another. Instead we call giveup_fpu in switch_to. 39 * to another. Instead we call giveup_fpu in switch_to.
44 */ 40 */
45#ifndef CONFIG_SMP 41#ifndef CONFIG_SMP
46 tophys(r6,0) /* get __pa constant */ 42 LOADBASE(r3, last_task_used_math)
47 addis r3,r6,last_task_used_math@ha 43 tophys(r3,r3)
48 lwz r4,last_task_used_math@l(r3) 44 LDL r4,OFF(last_task_used_math)(r3)
49 cmpwi 0,r4,0 45 CMPI 0,r4,0
50 beq 1f 46 beq 1f
51 add r4,r4,r6 47 tophys(r4,r4)
52 addi r4,r4,THREAD /* want last_task_used_math->thread */ 48 addi r4,r4,THREAD /* want last_task_used_math->thread */
53 SAVE_32FPRS(0, r4) 49 SAVE_32FPRS(0, r4)
54 mffs fr0 50 mffs fr0
55 stfd fr0,THREAD_FPSCR-4(r4) 51 stfd fr0,THREAD_FPSCR-4(r4)
56 lwz r5,PT_REGS(r4) 52 LDL r5,PT_REGS(r4)
57 add r5,r5,r6 53 tophys(r5,r5)
58 lwz r4,_MSR-STACK_FRAME_OVERHEAD(r5) 54 LDL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
59 li r10,MSR_FP|MSR_FE0|MSR_FE1 55 li r10,MSR_FP|MSR_FE0|MSR_FE1
60 andc r4,r4,r10 /* disable FP for previous task */ 56 andc r4,r4,r10 /* disable FP for previous task */
61 stw r4,_MSR-STACK_FRAME_OVERHEAD(r5) 57 STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
621: 581:
63#endif /* CONFIG_SMP */ 59#endif /* CONFIG_SMP */
64 /* enable use of FP after return */ 60 /* enable use of FP after return */
61#ifdef CONFIG_PPC32
65 mfspr r5,SPRN_SPRG3 /* current task's THREAD (phys) */ 62 mfspr r5,SPRN_SPRG3 /* current task's THREAD (phys) */
66 lwz r4,THREAD_FPEXC_MODE(r5) 63 lwz r4,THREAD_FPEXC_MODE(r5)
67 ori r9,r9,MSR_FP /* enable FP for current */ 64 ori r9,r9,MSR_FP /* enable FP for current */
68 or r9,r9,r4 65 or r9,r9,r4
66#else
67 ld r4,PACACURRENT(r13)
68 addi r5,r4,THREAD /* Get THREAD */
69 ld r4,THREAD_FPEXC_MODE(r5)
70 ori r12,r12,MSR_FP
71 or r12,r12,r4
72 std r12,_MSR(r1)
73#endif
69 lfd fr0,THREAD_FPSCR-4(r5) 74 lfd fr0,THREAD_FPSCR-4(r5)
70 mtfsf 0xff,fr0 75 mtfsf 0xff,fr0
71 REST_32FPRS(0, r5) 76 REST_32FPRS(0, r5)
72#ifndef CONFIG_SMP 77#ifndef CONFIG_SMP
73 subi r4,r5,THREAD 78 subi r4,r5,THREAD
74 sub r4,r4,r6 79 tovirt(r4,r4)
75 stw r4,last_task_used_math@l(r3) 80 STL r4,OFF(last_task_used_math)(r3)
76#endif /* CONFIG_SMP */ 81#endif /* CONFIG_SMP */
77 /* restore registers and return */ 82 /* restore registers and return */
78 /* we haven't used ctr or xer or lr */ 83 /* we haven't used ctr or xer or lr */
79 b fast_exception_return 84 b fast_exception_return
80 85
81/* 86/*
82 * FP unavailable trap from kernel - print a message, but let
83 * the task use FP in the kernel until it returns to user mode.
84 */
85 .globl KernelFP
86KernelFP:
87 lwz r3,_MSR(r1)
88 ori r3,r3,MSR_FP
89 stw r3,_MSR(r1) /* enable use of FP after return */
90 lis r3,86f@h
91 ori r3,r3,86f@l
92 mr r4,r2 /* current */
93 lwz r5,_NIP(r1)
94 bl printk
95 b ret_from_except
9686: .string "floating point used in kernel (task=%p, pc=%x)\n"
97 .align 4,0
98
99/*
100 * giveup_fpu(tsk) 87 * giveup_fpu(tsk)
101 * Disable FP for the task given as the argument, 88 * Disable FP for the task given as the argument,
102 * and save the floating-point registers in its thread_struct. 89 * and save the floating-point registers in its thread_struct.
103 * Enables the FPU for use in the kernel on return. 90 * Enables the FPU for use in the kernel on return.
104 */ 91 */
105 .globl giveup_fpu 92_GLOBAL(giveup_fpu)
106giveup_fpu:
107 mfmsr r5 93 mfmsr r5
108 ori r5,r5,MSR_FP 94 ori r5,r5,MSR_FP
109 SYNC_601 95 SYNC_601
@@ -111,23 +97,23 @@ giveup_fpu:
111 MTMSRD(r5) /* enable use of fpu now */ 97 MTMSRD(r5) /* enable use of fpu now */
112 SYNC_601 98 SYNC_601
113 isync 99 isync
114 cmpwi 0,r3,0 100 CMPI 0,r3,0
115 beqlr- /* if no previous owner, done */ 101 beqlr- /* if no previous owner, done */
116 addi r3,r3,THREAD /* want THREAD of task */ 102 addi r3,r3,THREAD /* want THREAD of task */
117 lwz r5,PT_REGS(r3) 103 LDL r5,PT_REGS(r3)
118 cmpwi 0,r5,0 104 CMPI 0,r5,0
119 SAVE_32FPRS(0, r3) 105 SAVE_32FPRS(0, r3)
120 mffs fr0 106 mffs fr0
121 stfd fr0,THREAD_FPSCR-4(r3) 107 stfd fr0,THREAD_FPSCR-4(r3)
122 beq 1f 108 beq 1f
123 lwz r4,_MSR-STACK_FRAME_OVERHEAD(r5) 109 LDL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
124 li r3,MSR_FP|MSR_FE0|MSR_FE1 110 li r3,MSR_FP|MSR_FE0|MSR_FE1
125 andc r4,r4,r3 /* disable FP for previous task */ 111 andc r4,r4,r3 /* disable FP for previous task */
126 stw r4,_MSR-STACK_FRAME_OVERHEAD(r5) 112 STL r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1271: 1131:
128#ifndef CONFIG_SMP 114#ifndef CONFIG_SMP
129 li r5,0 115 li r5,0
130 lis r4,last_task_used_math@ha 116 LOADBASE(r4,last_task_used_math)
131 stw r5,last_task_used_math@l(r4) 117 STL r5,OFF(last_task_used_math)(r4)
132#endif /* CONFIG_SMP */ 118#endif /* CONFIG_SMP */
133 blr 119 blr