 arch/powerpc/kernel/fpu.S    | 14 ++++++++------
 arch/powerpc/kernel/vector.S | 15 +++++++++------
 2 files changed, 17 insertions(+), 12 deletions(-)
diff --git a/arch/powerpc/kernel/fpu.S b/arch/powerpc/kernel/fpu.S
index 4dca05e91e95..f7f5b8bed68f 100644
--- a/arch/powerpc/kernel/fpu.S
+++ b/arch/powerpc/kernel/fpu.S
@@ -106,6 +106,8 @@ _GLOBAL(store_fp_state)
  * and save its floating-point registers in its thread_struct.
  * Load up this task's FP registers from its thread_struct,
  * enable the FPU for the current task and return to the task.
+ * Note that on 32-bit this can only use registers that will be
+ * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
  */
 _GLOBAL(load_up_fpu)
 	mfmsr	r5
@@ -131,10 +133,10 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 	beq	1f
 	toreal(r4)
 	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
-	addi	r8,r4,THREAD_FPSTATE
-	SAVE_32FPVSRS(0, R5, R8)
+	addi	r10,r4,THREAD_FPSTATE
+	SAVE_32FPVSRS(0, R5, R10)
 	mffs	fr0
-	stfd	fr0,FPSTATE_FPSCR(r8)
+	stfd	fr0,FPSTATE_FPSCR(r10)
 	PPC_LL	r5,PT_REGS(r4)
 	toreal(r5)
 	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
@@ -157,10 +159,10 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 	or	r12,r12,r4
 	std	r12,_MSR(r1)
 #endif
-	addi	r7,r5,THREAD_FPSTATE
-	lfd	fr0,FPSTATE_FPSCR(r7)
+	addi	r10,r5,THREAD_FPSTATE
+	lfd	fr0,FPSTATE_FPSCR(r10)
 	MTFSF_L(fr0)
-	REST_32FPVSRS(0, R4, R7)
+	REST_32FPVSRS(0, R4, R10)
 #ifndef CONFIG_SMP
 	subi	r4,r5,THREAD
 	fromreal(r4)
diff --git a/arch/powerpc/kernel/vector.S b/arch/powerpc/kernel/vector.S
index eacda4eea2d7..0458a9aaba9d 100644
--- a/arch/powerpc/kernel/vector.S
+++ b/arch/powerpc/kernel/vector.S
@@ -64,6 +64,9 @@ _GLOBAL(store_vr_state)
  * Enables the VMX for use in the kernel on return.
  * On SMP we know the VMX is free, since we give it up every
  * switch (ie, no lazy save of the vector registers).
+ *
+ * Note that on 32-bit this can only use registers that will be
+ * restored by fast_exception_return, i.e. r3 - r6, r10 and r11.
  */
 _GLOBAL(load_up_altivec)
 	mfmsr	r5			/* grab the current MSR */
@@ -89,11 +92,11 @@ _GLOBAL(load_up_altivec)
 	/* Save VMX state to last_task_used_altivec's THREAD struct */
 	toreal(r4)
 	addi	r4,r4,THREAD
-	addi	r7,r4,THREAD_VRSTATE
-	SAVE_32VRS(0,r5,r7)
+	addi	r6,r4,THREAD_VRSTATE
+	SAVE_32VRS(0,r5,r6)
 	mfvscr	vr0
 	li	r10,VRSTATE_VSCR
-	stvx	vr0,r10,r7
+	stvx	vr0,r10,r6
 	/* Disable VMX for last_task_used_altivec */
 	PPC_LL	r5,PT_REGS(r4)
 	toreal(r5)
@@ -125,13 +128,13 @@ _GLOBAL(load_up_altivec)
 	oris	r12,r12,MSR_VEC@h
 	std	r12,_MSR(r1)
 #endif
-	addi	r7,r5,THREAD_VRSTATE
+	addi	r6,r5,THREAD_VRSTATE
 	li	r4,1
 	li	r10,VRSTATE_VSCR
 	stw	r4,THREAD_USED_VR(r5)
-	lvx	vr0,r10,r7
+	lvx	vr0,r10,r6
 	mtvscr	vr0
-	REST_32VRS(0,r4,r7)
+	REST_32VRS(0,r4,r6)
 #ifndef CONFIG_SMP
 	/* Update last_task_used_altivec to 'current' */
 	subi	r4,r5,THREAD		/* Back to 'current' */
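
A hedged aside, not part of the commit itself: on 32-bit, load_up_fpu and load_up_altivec run as exception handlers and exit through fast_exception_return, which (per the comments added above) reloads only r3 - r6, r10 and r11 from the exception frame before returning to the interrupted context. Any other register the handler scratches stays clobbered when userspace resumes, which is why the temporaries move from r7/r8 into the restored set. A minimal sketch of the hazard, assuming only the register convention stated in the new comments:

	/* Sketch only, not kernel source: code running in a handler
	 * that exits via fast_exception_return on 32-bit.
	 */
	addi	r8,r4,THREAD_FPSTATE	/* unsafe: r8 is not restored, so
					 * the interrupted task resumes
					 * with a corrupted r8 */
	addi	r10,r4,THREAD_FPSTATE	/* safe: r10 is reloaded from the
					 * exception frame before return */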