author     Simon Guo <wei.guo.simon@gmail.com>    2018-05-23 03:01:50 -0400
committer  Paul Mackerras <paulus@ozlabs.org>     2018-05-31 20:29:17 -0400
commit     caa3be92bebc5b87a221900ac408aa99b0badf3d
tree       913512c4b7e3dc942eb486d072a076b7af7a8bc3
parent     7f386af7bdb1a45bb04fb02d7b751809d63e5b09
KVM: PPC: Book3S PR: Add C function wrapper for _kvmppc_save/restore_tm()
Currently the __kvmppc_save_tm()/__kvmppc_restore_tm() APIs can only be
invoked from assembly code. This patch adds C function wrappers for them
so that they can be safely called from C code.
Signed-off-by: Simon Guo <wei.guo.simon@gmail.com>
Signed-off-by: Paul Mackerras <paulus@ozlabs.org>
 arch/powerpc/include/asm/asm-prototypes.h |  6
 arch/powerpc/kvm/book3s_hv_rmhandlers.S   |  6
 arch/powerpc/kvm/tm.S                     | 94
 3 files changed, 101 insertions(+), 5 deletions(-)
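
The two wrappers take the vcpu and the guest MSR as arguments, and they save and restore the caller's non-volatile GPRs, CR, DSCR and MSR around the underlying assembly routines, so C code in PR KVM can call them without any special setup. As a rough illustration only (no C call site is added by this patch, and the example function names below are hypothetical), a caller might look like this:

/*
 * Illustration only, not part of this patch: a hypothetical PR KVM
 * call site for the new wrappers. Only _kvmppc_save_tm_pr(),
 * _kvmppc_restore_tm_pr() and struct kvm_vcpu are provided by this
 * patch; the example functions are placeholders.
 */
#include <linux/kvm_host.h>
#include <asm/asm-prototypes.h>

static void example_guest_exit(struct kvm_vcpu *vcpu, u64 guest_msr)
{
	/* Save the guest's transactional state on the way out. Safe to
	 * call from C because the wrapper preserves NVGPRs, CR, DSCR
	 * and the caller's MSR (apart from the TS bits) itself. */
	_kvmppc_save_tm_pr(vcpu, guest_msr);
}

static void example_guest_entry(struct kvm_vcpu *vcpu, u64 guest_msr)
{
	/* Re-checkpoint the guest's transactional state before entry. */
	_kvmppc_restore_tm_pr(vcpu, guest_msr);
}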
diff --git a/arch/powerpc/include/asm/asm-prototypes.h b/arch/powerpc/include/asm/asm-prototypes.h
index dfdcb2374c28..5da683bebc7f 100644
--- a/arch/powerpc/include/asm/asm-prototypes.h
+++ b/arch/powerpc/include/asm/asm-prototypes.h
@@ -141,7 +141,13 @@ unsigned long prepare_ftrace_return(unsigned long parent, unsigned long ip);
 void pnv_power9_force_smt4_catch(void);
 void pnv_power9_force_smt4_release(void);
 
+/* Transaction memory related */
 void tm_enable(void);
 void tm_disable(void);
 void tm_abort(uint8_t cause);
+
+struct kvm_vcpu;
+void _kvmppc_restore_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);
+void _kvmppc_save_tm_pr(struct kvm_vcpu *vcpu, u64 guest_msr);
+
 #endif /* _ASM_POWERPC_ASM_PROTOTYPES_H */
diff --git a/arch/powerpc/kvm/book3s_hv_rmhandlers.S b/arch/powerpc/kvm/book3s_hv_rmhandlers.S
index 75e3bbf8c957..af631d8303f6 100644
--- a/arch/powerpc/kvm/book3s_hv_rmhandlers.S
+++ b/arch/powerpc/kvm/book3s_hv_rmhandlers.S
@@ -3138,12 +3138,12 @@ END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 kvmppc_save_tm_hv:
 	/* See if we need to handle fake suspend mode */
 BEGIN_FTR_SECTION
-	b	kvmppc_save_tm
+	b	__kvmppc_save_tm
 END_FTR_SECTION_IFCLR(CPU_FTR_P9_TM_HV_ASSIST)
 
 	lbz	r0, HSTATE_FAKE_SUSPEND(r13) /* Were we fake suspended? */
 	cmpwi	r0, 0
-	beq	kvmppc_save_tm
+	beq	__kvmppc_save_tm
 
 	/* The following code handles the fake_suspend = 1 case */
 	mflr	r0
@@ -3228,7 +3228,7 @@ kvmppc_restore_tm_hv:
 	 * fake-suspend mode, or emulate a TM rollback.
 	 */
 BEGIN_FTR_SECTION
-	b	kvmppc_restore_tm
+	b	__kvmppc_restore_tm
 END_FTR_SECTION_IFCLR(CPU_FTR_P9_TM_HV_ASSIST)
 	mflr	r0
 	std	r0, PPC_LR_STKOFF(r1)
diff --git a/arch/powerpc/kvm/tm.S b/arch/powerpc/kvm/tm.S
index 2760d7acd371..4a68dd4050a4 100644
--- a/arch/powerpc/kvm/tm.S
+++ b/arch/powerpc/kvm/tm.S
@@ -33,7 +33,7 @@
  * This can modify all checkpointed registers, but
  * restores r1, r2 before exit.
  */
-_GLOBAL(kvmppc_save_tm)
+_GLOBAL(__kvmppc_save_tm)
 	mflr	r0
 	std	r0, PPC_LR_STKOFF(r1)
 
@@ -157,6 +157,52 @@ END_FTR_SECTION_IFSET(CPU_FTR_P9_TM_HV_ASSIST)
 	blr
 
 /*
+ * _kvmppc_save_tm_pr() is a wrapper around __kvmppc_save_tm(), so that it can
+ * be invoked from C function by PR KVM only.
+ */
+_GLOBAL(_kvmppc_save_tm_pr)
+	mflr	r5
+	std	r5, PPC_LR_STKOFF(r1)
+	stdu	r1, -SWITCH_FRAME_SIZE(r1)
+	SAVE_NVGPRS(r1)
+
+	/* save MSR since TM/math bits might be impacted
+	 * by __kvmppc_save_tm().
+	 */
+	mfmsr	r5
+	SAVE_GPR(5, r1)
+
+	/* also save DSCR/CR so that it can be recovered later */
+	mfspr	r6, SPRN_DSCR
+	SAVE_GPR(6, r1)
+
+	mfcr	r7
+	stw	r7, _CCR(r1)
+
+	bl	__kvmppc_save_tm
+
+	ld	r7, _CCR(r1)
+	mtcr	r7
+
+	REST_GPR(6, r1)
+	mtspr	SPRN_DSCR, r6
+
+	/* need preserve current MSR's MSR_TS bits */
+	REST_GPR(5, r1)
+	mfmsr	r6
+	rldicl	r6, r6, 64 - MSR_TS_S_LG, 62
+	rldimi	r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
+	mtmsrd	r5
+
+	REST_NVGPRS(r1)
+	addi	r1, r1, SWITCH_FRAME_SIZE
+	ld	r5, PPC_LR_STKOFF(r1)
+	mtlr	r5
+	blr
+
+EXPORT_SYMBOL_GPL(_kvmppc_save_tm_pr);
+
+/*
  * Restore transactional state and TM-related registers.
  * Called with:
  *  - r3 pointing to the vcpu struct.
@@ -166,7 +212,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_P9_TM_HV_ASSIST)
  * This potentially modifies all checkpointed registers.
  * It restores r1, r2 from the PACA.
  */
-_GLOBAL(kvmppc_restore_tm)
+_GLOBAL(__kvmppc_restore_tm)
 	mflr	r0
 	std	r0, PPC_LR_STKOFF(r1)
 
@@ -279,4 +325,48 @@ _GLOBAL(kvmppc_restore_tm)
 	ld	r0, PPC_LR_STKOFF(r1)
 	mtlr	r0
 	blr
+
+/*
+ * _kvmppc_restore_tm_pr() is a wrapper around __kvmppc_restore_tm(), so that it
+ * can be invoked from C function by PR KVM only.
+ */
+_GLOBAL(_kvmppc_restore_tm_pr)
+	mflr	r5
+	std	r5, PPC_LR_STKOFF(r1)
+	stdu	r1, -SWITCH_FRAME_SIZE(r1)
+	SAVE_NVGPRS(r1)
+
+	/* save MSR to avoid TM/math bits change */
+	mfmsr	r5
+	SAVE_GPR(5, r1)
+
+	/* also save DSCR/CR so that it can be recovered later */
+	mfspr	r6, SPRN_DSCR
+	SAVE_GPR(6, r1)
+
+	mfcr	r7
+	stw	r7, _CCR(r1)
+
+	bl	__kvmppc_restore_tm
+
+	ld	r7, _CCR(r1)
+	mtcr	r7
+
+	REST_GPR(6, r1)
+	mtspr	SPRN_DSCR, r6
+
+	/* need preserve current MSR's MSR_TS bits */
+	REST_GPR(5, r1)
+	mfmsr	r6
+	rldicl	r6, r6, 64 - MSR_TS_S_LG, 62
+	rldimi	r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
+	mtmsrd	r5
+
+	REST_NVGPRS(r1)
+	addi	r1, r1, SWITCH_FRAME_SIZE
+	ld	r5, PPC_LR_STKOFF(r1)
+	mtlr	r5
+	blr
+
+EXPORT_SYMBOL_GPL(_kvmppc_restore_tm_pr);
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
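
Both wrappers end by splicing the transaction-state (TS) field of the current MSR, read after the bl, into the MSR value that was saved on entry before reinstating it with mtmsrd, so the caller's original MSR is restored without overwriting whatever TS state the underlying routine left behind. A minimal C model of what the rldicl/rldimi pair computes, assuming the kernel's MSR_TS_S_LG = 33 and MSR_TS_T_LG = 34 bit positions (the helper name is illustrative):

#include <stdint.h>

#define MSR_TS_S_LG	33			/* TS: transaction suspended */
#define MSR_TS_T_LG	34			/* TS: transaction active    */
#define MSR_TS_MASK	(3ULL << MSR_TS_S_LG)	/* the two-bit TS field      */

/*
 * Keep everything from the MSR saved at wrapper entry except the TS
 * field, which is taken from the MSR as it stands after the call.
 */
static inline uint64_t splice_ts_bits(uint64_t saved_msr, uint64_t cur_msr)
{
	return (saved_msr & ~MSR_TS_MASK) | (cur_msr & MSR_TS_MASK);
}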