author	Avi Kivity <avi@redhat.com>	2011-04-20 08:21:35 -0400
committer	Avi Kivity <avi@redhat.com>	2011-05-22 08:39:10 -0400
commitc2ad2bb3ef870067ecfc9ccdcf465feb51f2b6a5 (patch)
treec448aa3e6005667d0ffd94abb3c6ee91183f818c /arch
parent1ac9d0cfb07e8ac3b5007d8279c5bd56e124250c (diff)
KVM: x86 emulator: drop use of is_long_mode()
is_long_mode() requires ctxt->vcpu, which is to be abolished.  Replace it
with open calls to get_msr().

Signed-off-by: Avi Kivity <avi@redhat.com>
Diffstat (limited to 'arch')
-rw-r--r--	arch/x86/kvm/emulate.c	19
1 file changed, 12 insertions(+), 7 deletions(-)
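
In short, every place that used to ask the vcpu whether the guest is in long mode now reads MSR_EFER through the emulator's ops table and tests EFER_LMA directly. A minimal sketch of that pattern, assuming only the names visible in the diff below, follows; the wrapper emul_in_long_mode() is hypothetical and exists only to show the before/after shape.

static bool emul_in_long_mode(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops)
{
	u64 efer = 0;

	/*
	 * Before: return is_long_mode(ctxt->vcpu);
	 * That helper needs ctxt->vcpu, which this series removes from the
	 * emulator.  Instead, fetch EFER via the ops table and test the
	 * long-mode-active bit locally.
	 */
	ops->get_msr(ctxt, MSR_EFER, &efer);
	return efer & EFER_LMA;
}
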
diff --git a/arch/x86/kvm/emulate.c b/arch/x86/kvm/emulate.c
index 57e0e291b38d..be1532f4b8b8 100644
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -1844,12 +1844,14 @@ emulate_syscall(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	struct desc_struct cs, ss;
 	u64 msr_data;
 	u16 cs_sel, ss_sel;
+	u64 efer = 0;
 
 	/* syscall is not available in real mode */
 	if (ctxt->mode == X86EMUL_MODE_REAL ||
 	    ctxt->mode == X86EMUL_MODE_VM86)
 		return emulate_ud(ctxt);
 
+	ops->get_msr(ctxt, MSR_EFER, &efer);
 	setup_syscalls_segments(ctxt, ops, &cs, &ss);
 
 	ops->get_msr(ctxt, MSR_STAR, &msr_data);
@@ -1857,7 +1859,7 @@ emulate_syscall(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	cs_sel = (u16)(msr_data & 0xfffc);
 	ss_sel = (u16)(msr_data + 8);
 
-	if (is_long_mode(ctxt->vcpu)) {
+	if (efer & EFER_LMA) {
 		cs.d = 0;
 		cs.l = 1;
 	}
@@ -1867,7 +1869,7 @@ emulate_syscall(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	ops->set_segment_selector(ctxt, ss_sel, VCPU_SREG_SS);
 
 	c->regs[VCPU_REGS_RCX] = c->eip;
-	if (is_long_mode(ctxt->vcpu)) {
+	if (efer & EFER_LMA) {
 #ifdef CONFIG_X86_64
 		c->regs[VCPU_REGS_R11] = ctxt->eflags & ~EFLG_RF;
 
@@ -1897,7 +1899,9 @@ emulate_sysenter(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	struct desc_struct cs, ss;
 	u64 msr_data;
 	u16 cs_sel, ss_sel;
+	u64 efer = 0;
 
+	ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
 	/* inject #GP if in real mode */
 	if (ctxt->mode == X86EMUL_MODE_REAL)
 		return emulate_gp(ctxt, 0);
@@ -1927,8 +1931,7 @@ emulate_sysenter(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	cs_sel &= ~SELECTOR_RPL_MASK;
 	ss_sel = cs_sel + 8;
 	ss_sel &= ~SELECTOR_RPL_MASK;
-	if (ctxt->mode == X86EMUL_MODE_PROT64
-		|| is_long_mode(ctxt->vcpu)) {
+	if (ctxt->mode == X86EMUL_MODE_PROT64 || (efer & EFER_LMA)) {
 		cs.d = 0;
 		cs.l = 1;
 	}
@@ -2603,6 +2606,7 @@ static int check_cr_write(struct x86_emulate_ctxt *ctxt)
 	struct decode_cache *c = &ctxt->decode;
 	u64 new_val = c->src.val64;
 	int cr = c->modrm_reg;
+	u64 efer = 0;
 
 	static u64 cr_reserved_bits[] = {
 		0xffffffff00000000ULL,
@@ -2620,7 +2624,7 @@ static int check_cr_write(struct x86_emulate_ctxt *ctxt)
 
 	switch (cr) {
 	case 0: {
-		u64 cr4, efer;
+		u64 cr4;
 		if (((new_val & X86_CR0_PG) && !(new_val & X86_CR0_PE)) ||
 		    ((new_val & X86_CR0_NW) && !(new_val & X86_CR0_CD)))
 			return emulate_gp(ctxt, 0);
@@ -2637,7 +2641,8 @@ static int check_cr_write(struct x86_emulate_ctxt *ctxt)
 	case 3: {
 		u64 rsvd = 0;
 
-		if (is_long_mode(ctxt->vcpu))
+		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
+		if (efer & EFER_LMA)
 			rsvd = CR3_L_MODE_RESERVED_BITS;
 		else if (is_pae(ctxt->vcpu))
 			rsvd = CR3_PAE_RESERVED_BITS;
@@ -2650,7 +2655,7 @@ static int check_cr_write(struct x86_emulate_ctxt *ctxt)
 		break;
 	}
 	case 4: {
-		u64 cr4, efer;
+		u64 cr4;
 
 		cr4 = ctxt->ops->get_cr(ctxt, 4);
 		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
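
Pulling the check_cr_write() hunks together, the CR3 case now picks the reserved-bit mask roughly as sketched below. This is a simplified assembly of the hunks above, not the full function; note that is_pae() still goes through ctxt->vcpu at this point in the series.

	/*
	 * Sketch only: selecting the CR3 reserved-bit mask without
	 * is_long_mode().  Long mode (EFER.LMA set) and PAE paging use
	 * different masks; the chosen mask is then checked against the
	 * value being written to CR3.
	 */
	u64 rsvd = 0;
	u64 efer = 0;

	ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
	if (efer & EFER_LMA)
		rsvd = CR3_L_MODE_RESERVED_BITS;	/* 64-bit paging */
	else if (is_pae(ctxt->vcpu))
		rsvd = CR3_PAE_RESERVED_BITS;		/* 32-bit PAE paging */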