Diffstat (limited to 'arch/powerpc/kernel/process.c')
-rw-r--r--  arch/powerpc/kernel/process.c | 48
1 file changed, 35 insertions(+), 13 deletions(-)
diff --git a/arch/powerpc/kernel/process.c b/arch/powerpc/kernel/process.c
index 0b93893424f5..a8cca88e972f 100644
--- a/arch/powerpc/kernel/process.c
+++ b/arch/powerpc/kernel/process.c
@@ -139,12 +139,16 @@ EXPORT_SYMBOL(__msr_check_and_clear);
 #ifdef CONFIG_PPC_FPU
 void __giveup_fpu(struct task_struct *tsk)
 {
+	unsigned long msr;
+
 	save_fpu(tsk);
-	tsk->thread.regs->msr &= ~MSR_FP;
+	msr = tsk->thread.regs->msr;
+	msr &= ~MSR_FP;
 #ifdef CONFIG_VSX
 	if (cpu_has_feature(CPU_FTR_VSX))
-		tsk->thread.regs->msr &= ~MSR_VSX;
+		msr &= ~MSR_VSX;
 #endif
+	tsk->thread.regs->msr = msr;
 }
 
 void giveup_fpu(struct task_struct *tsk)
@@ -219,12 +223,16 @@ static int restore_fp(struct task_struct *tsk) { return 0; }
 
 static void __giveup_altivec(struct task_struct *tsk)
 {
+	unsigned long msr;
+
 	save_altivec(tsk);
-	tsk->thread.regs->msr &= ~MSR_VEC;
+	msr = tsk->thread.regs->msr;
+	msr &= ~MSR_VEC;
 #ifdef CONFIG_VSX
 	if (cpu_has_feature(CPU_FTR_VSX))
-		tsk->thread.regs->msr &= ~MSR_VSX;
+		msr &= ~MSR_VSX;
 #endif
+	tsk->thread.regs->msr = msr;
 }
 
 void giveup_altivec(struct task_struct *tsk)
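Both __giveup_fpu() and __giveup_altivec() are changed to the same shape: load thread.regs->msr into a local once, clear the relevant facility bits there, and write the result back with a single store, rather than performing two separate read-modify-write updates of the field, presumably to avoid redundant loads and stores of the live MSR image on this path. Below is a minimal user-space sketch of that pattern; the MSR_* values and the struct are placeholders for illustration, not the real definitions from asm/reg.h.

#include <stdio.h>

/* Placeholder bit values for illustration only; the real MSR_FP/MSR_VSX
 * definitions live in arch/powerpc/include/asm/reg.h. */
#define MSR_FP	(1UL << 13)
#define MSR_VSX	(1UL << 23)

struct pt_regs_sketch { unsigned long msr; };

/* Old shape: two read-modify-write updates of the same field. */
static void clear_bits_twice(struct pt_regs_sketch *r)
{
	r->msr &= ~MSR_FP;
	r->msr &= ~MSR_VSX;
}

/* New shape: accumulate the clears in a local, store the field once. */
static void clear_bits_once(struct pt_regs_sketch *r)
{
	unsigned long msr = r->msr;

	msr &= ~MSR_FP;
	msr &= ~MSR_VSX;
	r->msr = msr;
}

int main(void)
{
	struct pt_regs_sketch a = { .msr = ~0UL }, b = { .msr = ~0UL };

	clear_bits_twice(&a);
	clear_bits_once(&b);
	printf("%#lx %#lx\n", a.msr, b.msr);	/* identical results, one store */
	return 0;
}

The final value of the MSR image is the same either way; only the number of stores to the field changes.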
@@ -794,7 +802,7 @@ static void tm_reclaim_thread(struct thread_struct *thr,
 	 * this state.
 	 * We do this using the current MSR, rather tracking it in
 	 * some specific thread_struct bit, as it has the additional
-	 * benifit of checking for a potential TM bad thing exception.
+	 * benefit of checking for a potential TM bad thing exception.
 	 */
 	if (!MSR_TM_SUSPENDED(mfmsr()))
 		return;
@@ -1009,6 +1017,14 @@ static inline void save_sprs(struct thread_struct *t)
 		 */
 		t->tar = mfspr(SPRN_TAR);
 	}
+
+	if (cpu_has_feature(CPU_FTR_ARCH_300)) {
+		/* Conditionally save Load Monitor registers, if enabled */
+		if (t->fscr & FSCR_LM) {
+			t->lmrr = mfspr(SPRN_LMRR);
+			t->lmser = mfspr(SPRN_LMSER);
+		}
+	}
 #endif
 }
 
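On the save side, the new Load Monitor SPRs are only read when the thread has the facility enabled in its FSCR image, so threads that never use it add no work to the context-switch path. A rough user-space sketch of that gate follows; mfspr() is stubbed, and the FSCR_LM/SPRN_* values are stand-ins rather than the real asm/reg.h definitions.

#include <stdio.h>

/* Stand-in values for illustration; see arch/powerpc/include/asm/reg.h
 * for the real FSCR_LM bit and SPR numbers. */
#define FSCR_LM		(1UL << 11)
#define SPRN_LMRR	813
#define SPRN_LMSER	814

struct thread_sketch {
	unsigned long fscr;
	unsigned long lmrr;
	unsigned long lmser;
};

/* Stubbed SPR read so the sketch runs in user space. */
static unsigned long mfspr(int sprn)
{
	return 0x1000UL + sprn;
}

static void save_lm_sprs(struct thread_sketch *t)
{
	/* Skip the SPR reads entirely unless this thread enabled the
	 * Load Monitor facility. */
	if (t->fscr & FSCR_LM) {
		t->lmrr = mfspr(SPRN_LMRR);
		t->lmser = mfspr(SPRN_LMSER);
	}
}

int main(void)
{
	struct thread_sketch t = { .fscr = FSCR_LM };

	save_lm_sprs(&t);
	printf("lmrr=%#lx lmser=%#lx\n", t.lmrr, t.lmser);
	return 0;
}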
@@ -1023,18 +1039,11 @@ static inline void restore_sprs(struct thread_struct *old_thread,
 #ifdef CONFIG_PPC_BOOK3S_64
 	if (cpu_has_feature(CPU_FTR_DSCR)) {
 		u64 dscr = get_paca()->dscr_default;
-		u64 fscr = old_thread->fscr & ~FSCR_DSCR;
-
-		if (new_thread->dscr_inherit) {
+		if (new_thread->dscr_inherit)
 			dscr = new_thread->dscr;
-			fscr |= FSCR_DSCR;
-		}
 
 		if (old_thread->dscr != dscr)
 			mtspr(SPRN_DSCR, dscr);
-
-		if (old_thread->fscr != fscr)
-			mtspr(SPRN_FSCR, fscr);
 	}
 
 	if (cpu_has_feature(CPU_FTR_ARCH_207S)) {
@@ -1045,9 +1054,22 @@ static inline void restore_sprs(struct thread_struct *old_thread,
 		if (old_thread->ebbrr != new_thread->ebbrr)
 			mtspr(SPRN_EBBRR, new_thread->ebbrr);
 
+		if (old_thread->fscr != new_thread->fscr)
+			mtspr(SPRN_FSCR, new_thread->fscr);
+
 		if (old_thread->tar != new_thread->tar)
 			mtspr(SPRN_TAR, new_thread->tar);
 	}
+
+	if (cpu_has_feature(CPU_FTR_ARCH_300)) {
+		/* Conditionally restore Load Monitor registers, if enabled */
+		if (new_thread->fscr & FSCR_LM) {
+			if (old_thread->lmrr != new_thread->lmrr)
+				mtspr(SPRN_LMRR, new_thread->lmrr);
+			if (old_thread->lmser != new_thread->lmser)
+				mtspr(SPRN_LMSER, new_thread->lmser);
+		}
+	}
 #endif
 }
 
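The restore side follows the usual context-switch write-avoidance pattern: FSCR is now compared and written alongside the other ARCH_207S SPRs, and the Load Monitor registers are only written when the incoming thread has FSCR_LM set and the saved value actually differs from the outgoing thread's. A self-contained sketch of that logic is below; mtspr() is stubbed, and the FSCR_LM/SPRN_* values are stand-ins rather than the real asm/reg.h definitions.

#include <stdio.h>

/* Stand-in values for illustration; the real definitions are in
 * arch/powerpc/include/asm/reg.h. */
#define FSCR_LM		(1UL << 11)
#define SPRN_FSCR	153
#define SPRN_LMRR	813
#define SPRN_LMSER	814

struct thread_sketch {
	unsigned long fscr;
	unsigned long lmrr;
	unsigned long lmser;
};

/* Stubbed SPR write so the sketch runs in user space. */
static void mtspr(int sprn, unsigned long val)
{
	printf("mtspr(%d, %#lx)\n", sprn, val);
}

static void restore_lm_sprs(struct thread_sketch *old_thread,
			    struct thread_sketch *new_thread)
{
	/* FSCR itself follows the plain "write only if it changed" rule. */
	if (old_thread->fscr != new_thread->fscr)
		mtspr(SPRN_FSCR, new_thread->fscr);

	/* The Load Monitor SPRs are additionally gated on the incoming
	 * thread actually having the facility enabled. */
	if (new_thread->fscr & FSCR_LM) {
		if (old_thread->lmrr != new_thread->lmrr)
			mtspr(SPRN_LMRR, new_thread->lmrr);
		if (old_thread->lmser != new_thread->lmser)
			mtspr(SPRN_LMSER, new_thread->lmser);
	}
}

int main(void)
{
	struct thread_sketch prev = { .fscr = 0 };
	struct thread_sketch next = { .fscr = FSCR_LM, .lmrr = 0x10, .lmser = 0x1 };

	restore_lm_sprs(&prev, &next);
	return 0;
}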