 arch/powerpc/kernel/misc_64.S   |  8 ++++----
 arch/powerpc/kernel/process.c   | 10 +++++++++-
 arch/powerpc/kernel/signal_32.c |  2 +-
 arch/powerpc/kernel/signal_64.c |  2 +-
 include/asm-powerpc/system.h    |  1 +
 5 files changed, 16 insertions(+), 7 deletions(-)
diff --git a/arch/powerpc/kernel/misc_64.S b/arch/powerpc/kernel/misc_64.S
index 31b9026cf1e3..4dd70cf7bb4e 100644
--- a/arch/powerpc/kernel/misc_64.S
+++ b/arch/powerpc/kernel/misc_64.S
@@ -508,12 +508,12 @@ _GLOBAL(giveup_altivec)
 
 #ifdef CONFIG_VSX
 /*
- * giveup_vsx(tsk)
- * Disable VSX for the task given as the argument,
- * and save the vector registers in its thread_struct.
+ * __giveup_vsx(tsk)
+ * Disable VSX for the task given as the argument.
+ * Does NOT save vsx registers.
  * Enables the VSX for use in the kernel on return.
  */
-_GLOBAL(giveup_vsx)
+_GLOBAL(__giveup_vsx)
 	mfmsr	r5
 	oris	r5,r5,MSR_VSX@h
 	mtmsrd	r5			/* enable use of VSX now */
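
Aside: the rename above changes this routine's contract. It now only manipulates MSR bits and no longer saves any register state. That works because the VSX registers physically overlap the FP registers (VSR0-31) and the VMX registers (VSR32-63), so saving FP and VMX state covers the whole VSX file. A rough C sketch of the new behavior, illustrative only: __giveup_vsx_sketch is a hypothetical name, and the mfmsr()/mtmsrd() macros stand in for the msr moves in the asm above.

/* Hedged C sketch (not the kernel's code) of what the asm __giveup_vsx
 * now does: enable VSX for the kernel, then clear MSR_VSX in the task's
 * saved MSR so the task faults VSX back in on its next use.  No register
 * state is written; giveup_fpu()/giveup_altivec() are expected to have
 * saved it already, since the VSX registers alias the FP and VMX files.
 */
static void __giveup_vsx_sketch(struct task_struct *tsk)
{
	mtmsrd(mfmsr() | MSR_VSX);	/* enable use of VSX now */
	if (tsk->thread.regs)
		tsk->thread.regs->msr &= ~MSR_VSX;
}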
diff --git a/arch/powerpc/kernel/process.c b/arch/powerpc/kernel/process.c
index 0a4eb0811590..219f3634115e 100644
--- a/arch/powerpc/kernel/process.c
+++ b/arch/powerpc/kernel/process.c
@@ -159,6 +159,13 @@ void enable_kernel_vsx(void)
 EXPORT_SYMBOL(enable_kernel_vsx);
 #endif
 
+void giveup_vsx(struct task_struct *tsk)
+{
+	giveup_fpu(tsk);
+	giveup_altivec(tsk);
+	__giveup_vsx(tsk);
+}
+
 void flush_vsx_to_thread(struct task_struct *tsk)
 {
 	if (tsk->thread.regs) {
@@ -290,7 +297,8 @@ struct task_struct *__switch_to(struct task_struct *prev,
 #endif /* CONFIG_ALTIVEC */
 #ifdef CONFIG_VSX
 	if (prev->thread.regs && (prev->thread.regs->msr & MSR_VSX))
-		giveup_vsx(prev);
+		/* VMX and FPU registers are already saved here */
+		__giveup_vsx(prev);
 #endif /* CONFIG_VSX */
 #ifdef CONFIG_SPE
 	/*
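
Aside: in __switch_to() the FPU and CONFIG_ALTIVEC give-up blocks sit just above this hunk, so by the time the VSX check runs, prev's FP and VMX registers have been saved and the bare __giveup_vsx() is enough, as the new comment says. Callers without that guarantee should use the new giveup_vsx() wrapper instead. A hedged sketch of that caller pattern, modeled on flush_vsx_to_thread(), whose opening lines appear in the first hunk; the preempt_disable() pairing is the usual idiom in these flush helpers and is assumed here, not shown in this diff.

void flush_vsx_to_thread(struct task_struct *tsk)
{
	if (tsk->thread.regs) {
		preempt_disable();	/* keep tsk on this CPU while we test MSR */
		if (tsk->thread.regs->msr & MSR_VSX)
			giveup_vsx(tsk);	/* full save: FP, VMX, then MSR_VSX */
		preempt_enable();
	}
}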
diff --git a/arch/powerpc/kernel/signal_32.c b/arch/powerpc/kernel/signal_32.c
index 6f6810db0a74..3e80aa32b8b0 100644
--- a/arch/powerpc/kernel/signal_32.c
+++ b/arch/powerpc/kernel/signal_32.c
@@ -452,7 +452,7 @@ static int save_user_regs(struct pt_regs *regs, struct mcontext __user *frame,
 	 * contains valid data
 	 */
 	if (current->thread.used_vsr) {
-		flush_vsx_to_thread(current);
+		__giveup_vsx(current);
 		if (copy_vsx_to_user(&frame->mc_vsregs, current))
 			return 1;
 		msr |= MSR_VSX;
diff --git a/arch/powerpc/kernel/signal_64.c b/arch/powerpc/kernel/signal_64.c
index 5f9d2ef2e24b..65ad925c3a8f 100644
--- a/arch/powerpc/kernel/signal_64.c
+++ b/arch/powerpc/kernel/signal_64.c
@@ -122,7 +122,7 @@ static long setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
 	 * VMX data.
 	 */
 	if (current->thread.used_vsr) {
-		flush_vsx_to_thread(current);
+		__giveup_vsx(current);
 		v_regs += ELF_NVRREG;
 		err |= copy_vsx_to_user(v_regs, current);
 		/* set MSR_VSX in the MSR value in the frame to
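
Aside: both signal paths (32-bit save_user_regs() above and 64-bit setup_sigcontext() here) make the same substitution. By this point in frame setup, FP and VMX state has already been flushed out to the thread_struct, which is what lets copy_vsx_to_user() read it from there, so the full flush_vsx_to_thread() would redo work and the bare __giveup_vsx() suffices. A condensed, hedged skeleton of that ordering; the flush call sites are assumed from the surrounding functions and are not part of this diff.

/* Sketch of the save ordering in the signal-frame code (not verbatim). */
flush_fp_to_thread(current);		/* FP regs -> thread_struct (earlier)  */
flush_altivec_to_thread(current);	/* VMX regs -> thread_struct (earlier) */
/* ... FP and VMX register images copied out to the user frame ... */
if (current->thread.used_vsr) {
	__giveup_vsx(current);		/* state already saved above; this     */
					/* just clears MSR_VSX for the task    */
	if (copy_vsx_to_user(&frame->mc_vsregs, current))
		return 1;
	msr |= MSR_VSX;			/* mark the frame as containing VSX    */
}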
diff --git a/include/asm-powerpc/system.h b/include/asm-powerpc/system.h
index 0c12c66733f6..e6e25e2364eb 100644
--- a/include/asm-powerpc/system.h
+++ b/include/asm-powerpc/system.h
@@ -139,6 +139,7 @@ extern void enable_kernel_altivec(void);
 extern void giveup_altivec(struct task_struct *);
 extern void load_up_altivec(struct task_struct *);
 extern int emulate_altivec(struct pt_regs *);
+extern void __giveup_vsx(struct task_struct *);
 extern void giveup_vsx(struct task_struct *);
 extern void enable_kernel_spe(void);
 extern void giveup_spe(struct task_struct *);
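
Aside: the header now exposes both entry points. A hedged usage sketch of the resulting split; stop_task_vsx() and fp_and_vmx_already_saved() are hypothetical names used only for illustration, not kernel code.

/* giveup_vsx(tsk)   - safe default: saves FP and VMX state (which
 *                     together hold the whole VSX register file),
 *                     then drops MSR_VSX.
 * __giveup_vsx(tsk) - fast path: drops MSR_VSX only; the caller must
 *                     know FP and VMX have already been given up.
 */
void stop_task_vsx(struct task_struct *tsk)		/* hypothetical */
{
	if (fp_and_vmx_already_saved(tsk))		/* hypothetical */
		__giveup_vsx(tsk);
	else
		giveup_vsx(tsk);
}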