about · summary · refs · log · tree · commit · diff · stats
diff options
context:
space:
mode:
authorBharat Bhushan <r65777@freescale.com>2012-03-04 20:34:08 -0500
committerAvi Kivity <avi@redhat.com>2012-04-08 07:01:31 -0400
commitc0fe7b099931c6c05c98a05c277185ee25254f35 (patch)
treed842f7b3f6f8f94780f8c4bc4f6d5dc20e4f7aea
parent0456ec4ff2b832ab9ff476ed687fea704500f1cd (diff)
Restore guest CR after exit timing calculation
No instruction which can change the Condition Register (CR) should be executed after the guest CR is loaded. Because the exit-timing code in lightweight_exit executes cmpw, which clobbers CR, the guest CR is now restored only after the exit-timing calculation completes.

Signed-off-by: Bharat Bhushan <bharat.bhushan@freescale.com>
Signed-off-by: Alexander Graf <agraf@suse.de>
Signed-off-by: Avi Kivity <avi@redhat.com>
-rw-r--r--  arch/powerpc/kvm/bookehv_interrupts.S  11
1 file changed, 8 insertions(+), 3 deletions(-)
diff --git a/arch/powerpc/kvm/bookehv_interrupts.S b/arch/powerpc/kvm/bookehv_interrupts.S
index 57e2fa41444..909e96e0650 100644
--- a/arch/powerpc/kvm/bookehv_interrupts.S
+++ b/arch/powerpc/kvm/bookehv_interrupts.S
@@ -580,7 +580,6 @@ lightweight_exit:
 	mtlr	r3
 	mtxer	r5
 	mtctr	r6
-	mtcr	r7
 	mtsrr0	r8
 	mtsrr1	r9
 
@@ -588,14 +587,20 @@ lightweight_exit:
 	/* save enter time */
 1:
 	mfspr	r6, SPRN_TBRU
-	mfspr	r7, SPRN_TBRL
+	mfspr	r9, SPRN_TBRL
 	mfspr	r8, SPRN_TBRU
 	cmpw	r8, r6
-	PPC_STL	r7, VCPU_TIMING_LAST_ENTER_TBL(r4)
+	PPC_STL	r9, VCPU_TIMING_LAST_ENTER_TBL(r4)
 	bne	1b
 	PPC_STL	r8, VCPU_TIMING_LAST_ENTER_TBU(r4)
 #endif
 
+	/*
+	 * Don't execute any instruction which can change CR after
+	 * below instruction.
+	 */
+	mtcr	r7
+
 	/* Finish loading guest volatiles and jump to guest. */
 	PPC_LL	r5, VCPU_GPR(r5)(r4)
 	PPC_LL	r6, VCPU_GPR(r6)(r4)