author     Avi Kivity <avi@redhat.com>   2011-06-01 08:34:24 -0400
committer  Avi Kivity <avi@redhat.com>   2011-07-12 06:16:09 -0400
commit     36dd9bb5ce32bc39e25a5fcc61415f13e3ed5d17 (patch)
tree       6c007056ff4a4945d414026401d0c9bf9e66a16d /arch/x86/kvm/emulate.c
parent     2e4ce7f574369f374ad537a180b4870e2098cf0e (diff)
KVM: x86 emulator: rename decode_cache::eip to _eip
The name eip conflicts with a field of the same name in x86_emulate_ctxt,
which we plan to fold decode_cache into.  The name _eip is unfortunate, but
what's really needed is a refactoring here, not a better name.

Signed-off-by: Avi Kivity <avi@redhat.com>
Signed-off-by: Marcelo Tosatti <mtosatti@redhat.com>
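A minimal sketch of the conflict being avoided, with field lists abbreviated and purely illustrative (not the complete kernel definitions):

    /* Illustrative only: decode_cache keeps the decoder's working copy of
     * the instruction pointer, while x86_emulate_ctxt holds the committed
     * guest eip.  Folding decode_cache into x86_emulate_ctxt would make two
     * fields named "eip" collide, hence the rename to _eip.
     */
    struct decode_cache {
            unsigned long _eip;             /* was "eip" before this patch */
            /* ... fetch window, decoded operands, register copies ... */
    };

    struct x86_emulate_ctxt {
            unsigned long eip;              /* architectural instruction pointer */
            struct decode_cache decode;     /* planned to be folded in */
            /* ... */
    };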
Diffstat (limited to 'arch/x86/kvm/emulate.c')
-rw-r--r--  arch/x86/kvm/emulate.c  120
1 file changed, 60 insertions(+), 60 deletions(-)
diff --git a/arch/x86/kvm/emulate.c b/arch/x86/kvm/emulate.c
index 663bdb3637a..a1b9705e3cc 100644
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -459,7 +459,7 @@ register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
 
 static inline void jmp_rel(struct decode_cache *c, int rel)
 {
-	register_address_increment(c, &c->eip, rel);
+	register_address_increment(c, &c->_eip, rel);
 }
 
 static u32 desc_limit_scaled(struct desc_struct *desc)
@@ -898,7 +898,7 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
 		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */
 	}
 
-	c->modrm = insn_fetch(u8, 1, c->eip);
+	c->modrm = insn_fetch(u8, 1, c->_eip);
 	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
 	c->modrm_reg |= (c->modrm & 0x38) >> 3;
 	c->modrm_rm |= (c->modrm & 0x07);
@@ -932,13 +932,13 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
 		switch (c->modrm_mod) {
 		case 0:
 			if (c->modrm_rm == 6)
-				modrm_ea += insn_fetch(u16, 2, c->eip);
+				modrm_ea += insn_fetch(u16, 2, c->_eip);
 			break;
 		case 1:
-			modrm_ea += insn_fetch(s8, 1, c->eip);
+			modrm_ea += insn_fetch(s8, 1, c->_eip);
 			break;
 		case 2:
-			modrm_ea += insn_fetch(u16, 2, c->eip);
+			modrm_ea += insn_fetch(u16, 2, c->_eip);
 			break;
 		}
 		switch (c->modrm_rm) {
@@ -975,13 +975,13 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
 	} else {
 		/* 32/64-bit ModR/M decode. */
 		if ((c->modrm_rm & 7) == 4) {
-			sib = insn_fetch(u8, 1, c->eip);
+			sib = insn_fetch(u8, 1, c->_eip);
 			index_reg |= (sib >> 3) & 7;
 			base_reg |= sib & 7;
 			scale = sib >> 6;
 
 			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
-				modrm_ea += insn_fetch(s32, 4, c->eip);
+				modrm_ea += insn_fetch(s32, 4, c->_eip);
 			else
 				modrm_ea += c->regs[base_reg];
 			if (index_reg != 4)
@@ -994,13 +994,13 @@ static int decode_modrm(struct x86_emulate_ctxt *ctxt,
 		switch (c->modrm_mod) {
 		case 0:
 			if (c->modrm_rm == 5)
-				modrm_ea += insn_fetch(s32, 4, c->eip);
+				modrm_ea += insn_fetch(s32, 4, c->_eip);
 			break;
 		case 1:
-			modrm_ea += insn_fetch(s8, 1, c->eip);
+			modrm_ea += insn_fetch(s8, 1, c->_eip);
 			break;
 		case 2:
-			modrm_ea += insn_fetch(s32, 4, c->eip);
+			modrm_ea += insn_fetch(s32, 4, c->_eip);
 			break;
 		}
 	}
@@ -1018,13 +1018,13 @@ static int decode_abs(struct x86_emulate_ctxt *ctxt,
 	op->type = OP_MEM;
 	switch (c->ad_bytes) {
 	case 2:
-		op->addr.mem.ea = insn_fetch(u16, 2, c->eip);
+		op->addr.mem.ea = insn_fetch(u16, 2, c->_eip);
 		break;
 	case 4:
-		op->addr.mem.ea = insn_fetch(u32, 4, c->eip);
+		op->addr.mem.ea = insn_fetch(u32, 4, c->_eip);
 		break;
 	case 8:
-		op->addr.mem.ea = insn_fetch(u64, 8, c->eip);
+		op->addr.mem.ea = insn_fetch(u64, 8, c->_eip);
 		break;
 	}
 done:
@@ -1561,7 +1561,7 @@ int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
 
-	c->src.val = c->eip;
+	c->src.val = c->_eip;
 	rc = em_push(ctxt);
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
@@ -1583,7 +1583,7 @@ int emulate_int_real(struct x86_emulate_ctxt *ctxt, int irq)
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
 
-	c->eip = eip;
+	c->_eip = eip;
 
 	return rc;
 }
@@ -1640,7 +1640,7 @@ static int emulate_iret_real(struct x86_emulate_ctxt *ctxt)
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
 
-	c->eip = temp_eip;
+	c->_eip = temp_eip;
 
 
 	if (c->op_bytes == 4)
@@ -1683,8 +1683,8 @@ static int em_jmp_far(struct x86_emulate_ctxt *ctxt)
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
 
-	c->eip = 0;
-	memcpy(&c->eip, c->src.valptr, c->op_bytes);
+	c->_eip = 0;
+	memcpy(&c->_eip, c->src.valptr, c->op_bytes);
 	return X86EMUL_CONTINUE;
 }
 
@@ -1778,14 +1778,14 @@ static int em_grp45(struct x86_emulate_ctxt *ctxt)
 		break;
 	case 2: /* call near abs */ {
 		long int old_eip;
-		old_eip = c->eip;
-		c->eip = c->src.val;
+		old_eip = c->_eip;
+		c->_eip = c->src.val;
 		c->src.val = old_eip;
 		rc = em_push(ctxt);
 		break;
 	}
 	case 4: /* jmp abs */
-		c->eip = c->src.val;
+		c->_eip = c->src.val;
 		break;
 	case 5: /* jmp far */
 		rc = em_jmp_far(ctxt);
@@ -1821,7 +1821,7 @@ static int em_ret(struct x86_emulate_ctxt *ctxt)
 	struct decode_cache *c = &ctxt->decode;
 
 	c->dst.type = OP_REG;
-	c->dst.addr.reg = &c->eip;
+	c->dst.addr.reg = &c->_eip;
 	c->dst.bytes = c->op_bytes;
 	return em_pop(ctxt);
 }
@@ -1832,11 +1832,11 @@ static int em_ret_far(struct x86_emulate_ctxt *ctxt)
 	int rc;
 	unsigned long cs;
 
-	rc = emulate_pop(ctxt, &c->eip, c->op_bytes);
+	rc = emulate_pop(ctxt, &c->_eip, c->op_bytes);
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
 	if (c->op_bytes == 4)
-		c->eip = (u32)c->eip;
+		c->_eip = (u32)c->_eip;
 	rc = emulate_pop(ctxt, &cs, c->op_bytes);
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
@@ -1919,7 +1919,7 @@ static int em_syscall(struct x86_emulate_ctxt *ctxt)
 	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
 	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
-	c->regs[VCPU_REGS_RCX] = c->eip;
+	c->regs[VCPU_REGS_RCX] = c->_eip;
 	if (efer & EFER_LMA) {
 #ifdef CONFIG_X86_64
 		c->regs[VCPU_REGS_R11] = ctxt->eflags & ~EFLG_RF;
@@ -1927,7 +1927,7 @@ static int em_syscall(struct x86_emulate_ctxt *ctxt)
 		ops->get_msr(ctxt,
 			     ctxt->mode == X86EMUL_MODE_PROT64 ?
 			     MSR_LSTAR : MSR_CSTAR, &msr_data);
-		c->eip = msr_data;
+		c->_eip = msr_data;
 
 		ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
 		ctxt->eflags &= ~(msr_data | EFLG_RF);
@@ -1935,7 +1935,7 @@ static int em_syscall(struct x86_emulate_ctxt *ctxt)
 	} else {
 		/* legacy mode */
 		ops->get_msr(ctxt, MSR_STAR, &msr_data);
-		c->eip = (u32)msr_data;
+		c->_eip = (u32)msr_data;
 
 		ctxt->eflags &= ~(EFLG_VM | EFLG_IF | EFLG_RF);
 	}
@@ -1991,7 +1991,7 @@ static int em_sysenter(struct x86_emulate_ctxt *ctxt)
 	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
-	c->eip = msr_data;
+	c->_eip = msr_data;
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
 	c->regs[VCPU_REGS_RSP] = msr_data;
@@ -2045,7 +2045,7 @@ static int em_sysexit(struct x86_emulate_ctxt *ctxt)
 	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
 	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
-	c->eip = c->regs[VCPU_REGS_RDX];
+	c->_eip = c->regs[VCPU_REGS_RDX];
 	c->regs[VCPU_REGS_RSP] = c->regs[VCPU_REGS_RCX];
 
 	return X86EMUL_CONTINUE;
@@ -2115,7 +2115,7 @@ static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
 {
 	struct decode_cache *c = &ctxt->decode;
 
-	tss->ip = c->eip;
+	tss->ip = c->_eip;
 	tss->flag = ctxt->eflags;
 	tss->ax = c->regs[VCPU_REGS_RAX];
 	tss->cx = c->regs[VCPU_REGS_RCX];
@@ -2139,7 +2139,7 @@ static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
 	struct decode_cache *c = &ctxt->decode;
 	int ret;
 
-	c->eip = tss->ip;
+	c->_eip = tss->ip;
 	ctxt->eflags = tss->flag | 2;
 	c->regs[VCPU_REGS_RAX] = tss->ax;
 	c->regs[VCPU_REGS_RCX] = tss->cx;
@@ -2233,7 +2233,7 @@ static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
 	struct decode_cache *c = &ctxt->decode;
 
 	tss->cr3 = ctxt->ops->get_cr(ctxt, 3);
-	tss->eip = c->eip;
+	tss->eip = c->_eip;
 	tss->eflags = ctxt->eflags;
 	tss->eax = c->regs[VCPU_REGS_RAX];
 	tss->ecx = c->regs[VCPU_REGS_RCX];
@@ -2261,7 +2261,7 @@ static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
 
 	if (ctxt->ops->set_cr(ctxt, 3, tss->cr3))
 		return emulate_gp(ctxt, 0);
-	c->eip = tss->eip;
+	c->_eip = tss->eip;
 	ctxt->eflags = tss->eflags | 2;
 	c->regs[VCPU_REGS_RAX] = tss->eax;
 	c->regs[VCPU_REGS_RCX] = tss->ecx;
@@ -2446,14 +2446,14 @@ int emulator_task_switch(struct x86_emulate_ctxt *ctxt,
 	struct decode_cache *c = &ctxt->decode;
 	int rc;
 
-	c->eip = ctxt->eip;
+	c->_eip = ctxt->eip;
 	c->dst.type = OP_NONE;
 
 	rc = emulator_do_task_switch(ctxt, tss_selector, reason,
 				     has_error_code, error_code);
 
 	if (rc == X86EMUL_CONTINUE)
-		ctxt->eip = c->eip;
+		ctxt->eip = c->_eip;
 
 	return (rc == X86EMUL_UNHANDLEABLE) ? EMULATION_FAILED : EMULATION_OK;
 }
@@ -2516,14 +2516,14 @@ static int em_call_far(struct x86_emulate_ctxt *ctxt)
 	int rc;
 
 	old_cs = get_segment_selector(ctxt, VCPU_SREG_CS);
-	old_eip = c->eip;
+	old_eip = c->_eip;
 
 	memcpy(&sel, c->src.valptr + c->op_bytes, 2);
 	if (load_segment_descriptor(ctxt, sel, VCPU_SREG_CS))
 		return X86EMUL_CONTINUE;
 
-	c->eip = 0;
-	memcpy(&c->eip, c->src.valptr, c->op_bytes);
+	c->_eip = 0;
+	memcpy(&c->_eip, c->src.valptr, c->op_bytes);
 
 	c->src.val = old_cs;
 	rc = em_push(ctxt);
@@ -2540,7 +2540,7 @@ static int em_ret_near_imm(struct x86_emulate_ctxt *ctxt)
 	int rc;
 
 	c->dst.type = OP_REG;
-	c->dst.addr.reg = &c->eip;
+	c->dst.addr.reg = &c->_eip;
 	c->dst.bytes = c->op_bytes;
 	rc = emulate_pop(ctxt, &c->dst.val, c->op_bytes);
 	if (rc != X86EMUL_CONTINUE)
@@ -2754,7 +2754,7 @@ static int em_vmcall(struct x86_emulate_ctxt *ctxt)
 		return rc;
 
 	/* Let the processor re-execute the fixed hypercall */
-	c->eip = ctxt->eip;
+	c->_eip = ctxt->eip;
 	/* Disable writeback. */
 	c->dst.type = OP_NONE;
 	return X86EMUL_CONTINUE;
@@ -3408,17 +3408,17 @@ static int decode_imm(struct x86_emulate_ctxt *ctxt, struct operand *op,
 
 	op->type = OP_IMM;
 	op->bytes = size;
-	op->addr.mem.ea = c->eip;
+	op->addr.mem.ea = c->_eip;
 	/* NB. Immediates are sign-extended as necessary. */
 	switch (op->bytes) {
 	case 1:
-		op->val = insn_fetch(s8, 1, c->eip);
+		op->val = insn_fetch(s8, 1, c->_eip);
 		break;
 	case 2:
-		op->val = insn_fetch(s16, 2, c->eip);
+		op->val = insn_fetch(s16, 2, c->_eip);
 		break;
 	case 4:
-		op->val = insn_fetch(s32, 4, c->eip);
+		op->val = insn_fetch(s32, 4, c->_eip);
 		break;
 	}
 	if (!sign_extension) {
@@ -3448,8 +3448,8 @@ int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
 	struct opcode opcode;
 	struct operand memop = { .type = OP_NONE }, *memopp = NULL;
 
-	c->eip = ctxt->eip;
-	c->fetch.start = c->eip;
+	c->_eip = ctxt->eip;
+	c->fetch.start = c->_eip;
 	c->fetch.end = c->fetch.start + insn_len;
 	if (insn_len > 0)
 		memcpy(c->fetch.data, insn, insn_len);
@@ -3478,7 +3478,7 @@ int x86_decode_insn(struct x86_emulate_ctxt *ctxt, void *insn, int insn_len)
 
 	/* Legacy prefixes. */
 	for (;;) {
-		switch (c->b = insn_fetch(u8, 1, c->eip)) {
+		switch (c->b = insn_fetch(u8, 1, c->_eip)) {
 		case 0x66: /* operand-size override */
 			op_prefix = true;
 			/* switch between 2/4 bytes */
@@ -3534,7 +3534,7 @@ done_prefixes:
 	/* Two-byte opcode? */
 	if (c->b == 0x0f) {
 		c->twobyte = 1;
-		c->b = insn_fetch(u8, 1, c->eip);
+		c->b = insn_fetch(u8, 1, c->_eip);
 		opcode = twobyte_table[c->b];
 	}
 	c->d = opcode.flags;
@@ -3542,14 +3542,14 @@ done_prefixes:
 	while (c->d & GroupMask) {
 		switch (c->d & GroupMask) {
 		case Group:
-			c->modrm = insn_fetch(u8, 1, c->eip);
-			--c->eip;
+			c->modrm = insn_fetch(u8, 1, c->_eip);
+			--c->_eip;
 			goffset = (c->modrm >> 3) & 7;
 			opcode = opcode.u.group[goffset];
 			break;
 		case GroupDual:
-			c->modrm = insn_fetch(u8, 1, c->eip);
-			--c->eip;
+			c->modrm = insn_fetch(u8, 1, c->_eip);
+			--c->_eip;
 			goffset = (c->modrm >> 3) & 7;
 			if ((c->modrm >> 6) == 3)
 				opcode = opcode.u.gdual->mod3[goffset];
@@ -3679,9 +3679,9 @@ done_prefixes:
 		break;
 	case SrcImmFAddr:
 		c->src.type = OP_IMM;
-		c->src.addr.mem.ea = c->eip;
+		c->src.addr.mem.ea = c->_eip;
 		c->src.bytes = c->op_bytes + 2;
-		insn_fetch_arr(c->src.valptr, c->src.bytes, c->eip);
+		insn_fetch_arr(c->src.valptr, c->src.bytes, c->_eip);
 		break;
 	case SrcMemFAddr:
 		memop.bytes = c->op_bytes + 2;
@@ -3732,9 +3732,9 @@ done_prefixes:
 		break;
 	case DstImmUByte:
 		c->dst.type = OP_IMM;
-		c->dst.addr.mem.ea = c->eip;
+		c->dst.addr.mem.ea = c->_eip;
 		c->dst.bytes = 1;
-		c->dst.val = insn_fetch(u8, 1, c->eip);
+		c->dst.val = insn_fetch(u8, 1, c->_eip);
 		break;
 	case DstMem:
 	case DstMem64:
@@ -3778,7 +3778,7 @@ done_prefixes:
 
 done:
 	if (memopp && memopp->type == OP_MEM && c->rip_relative)
-		memopp->addr.mem.ea += c->eip;
+		memopp->addr.mem.ea += c->_eip;
 
 	return (rc == X86EMUL_UNHANDLEABLE) ? EMULATION_FAILED : EMULATION_OK;
 }
@@ -3879,7 +3879,7 @@ int x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
 	if (c->rep_prefix && (c->d & String)) {
 		/* All REP prefixes have the same first termination condition */
 		if (address_mask(c, c->regs[VCPU_REGS_RCX]) == 0) {
-			ctxt->eip = c->eip;
+			ctxt->eip = c->_eip;
 			goto done;
 		}
 	}
@@ -4029,7 +4029,7 @@ special_insn:
 		goto do_io_out;
 	case 0xe8: /* call (near) */ {
 		long int rel = c->src.val;
-		c->src.val = (unsigned long) c->eip;
+		c->src.val = (unsigned long) c->_eip;
 		jmp_rel(c, rel);
 		rc = em_push(ctxt);
 		break;
@@ -4130,7 +4130,7 @@ writeback:
 		}
 	}
 
-	ctxt->eip = c->eip;
+	ctxt->eip = c->_eip;
 
 done:
 	if (rc == X86EMUL_PROPAGATE_FAULT)