about summary refs log tree commit diff stats
path: root/arch
diff options
context:
space:
mode:
author:    Mihai Caraman <mihai.caraman@freescale.com>  2012-04-16 00:08:54 -0400
committer: Alexander Graf <agraf@suse.de>  2012-05-06 10:19:09 -0400
commit:    518f040c826d569daf260153d4f75c21b6d9979b (patch)
tree:      a86b6efb30af49776f4361bf0cd72c52a943b0a1 /arch
parent:    af415087d2bbbef3cc25cdf371bfb0460cf66b3b (diff)
KVM: PPC: bookehv: Use lwz/stw instead of PPC_LL/PPC_STL for 32-bit fields
Interrupt code used the PPC_LL/PPC_STL macros to load/store some of the u32 fields, which led to memory overflow on 64-bit. Use lwz/stw instead.

Signed-off-by: Mihai Caraman <mihai.caraman@freescale.com>
Signed-off-by: Alexander Graf <agraf@suse.de>
Diffstat (limited to 'arch')
-rw-r--r--  arch/powerpc/kvm/bookehv_interrupts.S | 16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/arch/powerpc/kvm/bookehv_interrupts.S b/arch/powerpc/kvm/bookehv_interrupts.S
index b7608ac52b66..06750cc1050b 100644
--- a/arch/powerpc/kvm/bookehv_interrupts.S
+++ b/arch/powerpc/kvm/bookehv_interrupts.S
@@ -87,9 +87,9 @@
 	mfspr	r8, SPRN_TBRL
 	mfspr	r9, SPRN_TBRU
 	cmpw	r9, r7
-	PPC_STL	r8, VCPU_TIMING_EXIT_TBL(r4)
+	stw	r8, VCPU_TIMING_EXIT_TBL(r4)
 	bne-	1b
-	PPC_STL	r9, VCPU_TIMING_EXIT_TBU(r4)
+	stw	r9, VCPU_TIMING_EXIT_TBU(r4)
 #endif
 
 	oris	r8, r6, MSR_CE@h
@@ -216,7 +216,7 @@ _GLOBAL(kvmppc_handler_\intno\()_\srr1)
 	PPC_STL	r4, VCPU_GPR(r4)(r11)
 	PPC_LL	r4, THREAD_NORMSAVE(0)(r10)
 	PPC_STL	r5, VCPU_GPR(r5)(r11)
-	PPC_STL	r13, VCPU_CR(r11)
+	stw	r13, VCPU_CR(r11)
 	mfspr	r5, \srr0
 	PPC_STL	r3, VCPU_GPR(r10)(r11)
 	PPC_LL	r3, THREAD_NORMSAVE(2)(r10)
@@ -243,7 +243,7 @@ _GLOBAL(kvmppc_handler_\intno\()_\srr1)
 	PPC_STL	r4, VCPU_GPR(r4)(r11)
 	PPC_LL	r4, GPR9(r8)
 	PPC_STL	r5, VCPU_GPR(r5)(r11)
-	PPC_STL	r9, VCPU_CR(r11)
+	stw	r9, VCPU_CR(r11)
 	mfspr	r5, \srr0
 	PPC_STL	r3, VCPU_GPR(r8)(r11)
 	PPC_LL	r3, GPR10(r8)
@@ -315,7 +315,7 @@ _GLOBAL(kvmppc_resume_host)
 	mfspr	r6, SPRN_SPRG4
 	PPC_STL	r5, VCPU_LR(r4)
 	mfspr	r7, SPRN_SPRG5
-	PPC_STL	r3, VCPU_VRSAVE(r4)
+	stw	r3, VCPU_VRSAVE(r4)
 	PPC_STD(r6, VCPU_SHARED_SPRG4, r11)
 	mfspr	r8, SPRN_SPRG6
 	PPC_STD(r7, VCPU_SHARED_SPRG5, r11)
@@ -551,7 +551,7 @@ lightweight_exit:
 	PPC_LL	r3, VCPU_LR(r4)
 	PPC_LL	r5, VCPU_XER(r4)
 	PPC_LL	r6, VCPU_CTR(r4)
-	PPC_LL	r7, VCPU_CR(r4)
+	lwz	r7, VCPU_CR(r4)
 	PPC_LL	r8, VCPU_PC(r4)
 	PPC_LD(r9, VCPU_SHARED_MSR, r11)
 	PPC_LL	r0, VCPU_GPR(r0)(r4)
@@ -574,9 +574,9 @@ lightweight_exit:
 	mfspr	r9, SPRN_TBRL
 	mfspr	r8, SPRN_TBRU
 	cmpw	r8, r6
-	PPC_STL	r9, VCPU_TIMING_LAST_ENTER_TBL(r4)
+	stw	r9, VCPU_TIMING_LAST_ENTER_TBL(r4)
 	bne	1b
-	PPC_STL	r8, VCPU_TIMING_LAST_ENTER_TBU(r4)
+	stw	r8, VCPU_TIMING_LAST_ENTER_TBU(r4)
 #endif
 
 	/*