author     Avi Kivity <avi@redhat.com>   2011-04-27 06:20:30 -0400
committer  Avi Kivity <avi@redhat.com>   2011-05-22 08:47:39 -0400
commit     1aa366163b8b69f660cf94fd5062fa44859e4318 (patch)
tree       29d6843cbe1c66a04c11dad24169ec5e1318e53d
parent     0a434bb2bf094f463ca3ca71ac42cea9e423048f (diff)
KVM: x86 emulator: consolidate segment accessors
Instead of separate accessors for the segment selector and cached descriptor,
use one accessor for both. This simplifies the code somewhat.
Signed-off-by: Avi Kivity <avi@redhat.com>
-rw-r--r--  arch/x86/include/asm/kvm_emulate.h |  13
-rw-r--r--  arch/x86/kvm/emulate.c             | 122
-rw-r--r--  arch/x86/kvm/x86.c                 |  41
3 files changed, 83 insertions, 93 deletions
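
As a reading aid, here is a minimal, hypothetical sketch (not part of the patch) of what a caller looks like after the consolidation: the selector and the cached descriptor now come back from a single ->get_segment() call, which is what the get_segment_selector()/set_segment_selector() wrappers added to emulate.c below rely on. The helper name is illustrative only.

/* Hypothetical caller, for illustration only; mirrors the new interface. */
static u16 read_cs_selector(struct x86_emulate_ctxt *ctxt)
{
	struct desc_struct desc;	/* cached descriptor comes back too */
	u16 selector;

	/* one callback replaces get_segment_selector() + get_cached_descriptor() */
	ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, VCPU_SREG_CS);
	return selector;
}
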
diff --git a/arch/x86/include/asm/kvm_emulate.h b/arch/x86/include/asm/kvm_emulate.h
index 28114f581fa3..0049211959c0 100644
--- a/arch/x86/include/asm/kvm_emulate.h
+++ b/arch/x86/include/asm/kvm_emulate.h
@@ -164,15 +164,10 @@ struct x86_emulate_ops {
 			int size, unsigned short port, const void *val,
 			unsigned int count);
 
-	bool (*get_cached_descriptor)(struct x86_emulate_ctxt *ctxt,
-				      struct desc_struct *desc, u32 *base3,
-				      int seg);
-	void (*set_cached_descriptor)(struct x86_emulate_ctxt *ctxt,
-				      struct desc_struct *desc, u32 base3,
-				      int seg);
-	u16 (*get_segment_selector)(struct x86_emulate_ctxt *ctxt, int seg);
-	void (*set_segment_selector)(struct x86_emulate_ctxt *ctxt,
-				     u16 sel, int seg);
+	bool (*get_segment)(struct x86_emulate_ctxt *ctxt, u16 *selector,
+			    struct desc_struct *desc, u32 *base3, int seg);
+	void (*set_segment)(struct x86_emulate_ctxt *ctxt, u16 selector,
+			    struct desc_struct *desc, u32 base3, int seg);
 	unsigned long (*get_cached_segment_base)(struct x86_emulate_ctxt *ctxt,
 						 int seg);
 	void (*get_gdt)(struct x86_emulate_ctxt *ctxt, struct desc_ptr *dt);
diff --git a/arch/x86/kvm/emulate.c b/arch/x86/kvm/emulate.c
index 3624f202b440..59992484f5f3 100644
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -553,6 +553,26 @@ static int emulate_nm(struct x86_emulate_ctxt *ctxt)
 	return emulate_exception(ctxt, NM_VECTOR, 0, false);
 }
 
+static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
+{
+	u16 selector;
+	struct desc_struct desc;
+
+	ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
+	return selector;
+}
+
+static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
+				 unsigned seg)
+{
+	u16 dummy;
+	u32 base3;
+	struct desc_struct desc;
+
+	ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
+	ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
+}
+
 static int __linearize(struct x86_emulate_ctxt *ctxt,
 		       struct segmented_address addr,
 		       unsigned size, bool write, bool fetch,
@@ -563,6 +583,7 @@ static int __linearize(struct x86_emulate_ctxt *ctxt,
 	bool usable;
 	ulong la;
 	u32 lim;
+	u16 sel;
 	unsigned cpl, rpl;
 
 	la = seg_base(ctxt, ctxt->ops, addr.seg) + addr.ea;
@@ -574,8 +595,8 @@ static int __linearize(struct x86_emulate_ctxt *ctxt,
 			return emulate_gp(ctxt, 0);
 		break;
 	default:
-		usable = ctxt->ops->get_cached_descriptor(ctxt, &desc, NULL,
-							  addr.seg);
+		usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
+						addr.seg);
 		if (!usable)
 			goto bad;
 		/* code segment or read-only data segment */
@@ -598,7 +619,7 @@ static int __linearize(struct x86_emulate_ctxt *ctxt,
 			goto bad;
 	}
 	cpl = ctxt->ops->cpl(ctxt);
-	rpl = ctxt->ops->get_segment_selector(ctxt, addr.seg) & 3;
+	rpl = sel & 3;
 	cpl = max(cpl, rpl);
 	if (!(desc.type & 8)) {
 		/* data segment */
@@ -1142,9 +1163,10 @@ static void get_descriptor_table_ptr(struct x86_emulate_ctxt *ctxt,
 {
 	if (selector & 1 << 2) {
 		struct desc_struct desc;
+		u16 sel;
+
 		memset (dt, 0, sizeof *dt);
-		if (!ops->get_cached_descriptor(ctxt, &desc, NULL,
-						VCPU_SREG_LDTR))
+		if (!ops->get_segment(ctxt, &sel, &desc, NULL, VCPU_SREG_LDTR))
 			return;
 
 		dt->size = desc_limit_scaled(&desc); /* what if limit > 65535? */
@@ -1305,8 +1327,7 @@ static int load_segment_descriptor(struct x86_emulate_ctxt *ctxt,
 		return ret;
 	}
 load:
-	ops->set_segment_selector(ctxt, selector, seg);
-	ops->set_cached_descriptor(ctxt, &seg_desc, 0, seg);
+	ops->set_segment(ctxt, selector, &seg_desc, 0, seg);
 	return X86EMUL_CONTINUE;
 exception:
 	emulate_exception(ctxt, err_vec, err_code, true);
@@ -1464,7 +1485,7 @@ static int emulate_push_sreg(struct x86_emulate_ctxt *ctxt,
 {
 	struct decode_cache *c = &ctxt->decode;
 
-	c->src.val = ops->get_segment_selector(ctxt, seg);
+	c->src.val = get_segment_selector(ctxt, seg);
 
 	return em_push(ctxt);
 }
@@ -1552,7 +1573,7 @@ int emulate_int_real(struct x86_emulate_ctxt *ctxt,
 
 	ctxt->eflags &= ~(EFLG_IF | EFLG_TF | EFLG_AC);
 
-	c->src.val = ops->get_segment_selector(ctxt, VCPU_SREG_CS);
+	c->src.val = get_segment_selector(ctxt, VCPU_SREG_CS);
 	rc = em_push(ctxt);
 	if (rc != X86EMUL_CONTINUE)
 		return rc;
@@ -1838,8 +1859,10 @@ setup_syscalls_segments(struct x86_emulate_ctxt *ctxt,
 			struct x86_emulate_ops *ops, struct desc_struct *cs,
 			struct desc_struct *ss)
 {
+	u16 selector;
+
 	memset(cs, 0, sizeof(struct desc_struct));
-	ops->get_cached_descriptor(ctxt, cs, NULL, VCPU_SREG_CS);
+	ops->get_segment(ctxt, &selector, cs, NULL, VCPU_SREG_CS);
 	memset(ss, 0, sizeof(struct desc_struct));
 
 	cs->l = 0;		/* will be adjusted later */
@@ -1888,10 +1911,8 @@ emulate_syscall(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 		cs.d = 0;
 		cs.l = 1;
 	}
-	ops->set_cached_descriptor(ctxt, &cs, 0, VCPU_SREG_CS);
-	ops->set_segment_selector(ctxt, cs_sel, VCPU_SREG_CS);
-	ops->set_cached_descriptor(ctxt, &ss, 0, VCPU_SREG_SS);
-	ops->set_segment_selector(ctxt, ss_sel, VCPU_SREG_SS);
+	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
+	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
 	c->regs[VCPU_REGS_RCX] = c->eip;
 	if (efer & EFER_LMA) {
@@ -1961,10 +1982,8 @@ emulate_sysenter(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 		cs.l = 1;
 	}
 
-	ops->set_cached_descriptor(ctxt, &cs, 0, VCPU_SREG_CS);
-	ops->set_segment_selector(ctxt, cs_sel, VCPU_SREG_CS);
-	ops->set_cached_descriptor(ctxt, &ss, 0, VCPU_SREG_SS);
-	ops->set_segment_selector(ctxt, ss_sel, VCPU_SREG_SS);
+	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
+	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
 	c->eip = msr_data;
@@ -2018,10 +2037,8 @@ emulate_sysexit(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 	cs_sel |= SELECTOR_RPL_MASK;
 	ss_sel |= SELECTOR_RPL_MASK;
 
-	ops->set_cached_descriptor(ctxt, &cs, 0, VCPU_SREG_CS);
-	ops->set_segment_selector(ctxt, cs_sel, VCPU_SREG_CS);
-	ops->set_cached_descriptor(ctxt, &ss, 0, VCPU_SREG_SS);
-	ops->set_segment_selector(ctxt, ss_sel, VCPU_SREG_SS);
+	ops->set_segment(ctxt, cs_sel, &cs, 0, VCPU_SREG_CS);
+	ops->set_segment(ctxt, ss_sel, &ss, 0, VCPU_SREG_SS);
 
 	c->eip = c->regs[VCPU_REGS_RDX];
 	c->regs[VCPU_REGS_RSP] = c->regs[VCPU_REGS_RCX];
@@ -2048,11 +2065,11 @@ static bool emulator_io_port_access_allowed(struct x86_emulate_ctxt *ctxt,
 	struct desc_struct tr_seg;
 	u32 base3;
 	int r;
-	u16 io_bitmap_ptr, perm, bit_idx = port & 0x7;
+	u16 tr, io_bitmap_ptr, perm, bit_idx = port & 0x7;
 	unsigned mask = (1 << len) - 1;
 	unsigned long base;
 
-	ops->get_cached_descriptor(ctxt, &tr_seg, &base3, VCPU_SREG_TR);
+	ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR);
 	if (!tr_seg.p)
 		return false;
 	if (desc_limit_scaled(&tr_seg) < 103)
@@ -2107,11 +2124,11 @@ static void save_state_to_tss16(struct x86_emulate_ctxt *ctxt,
 	tss->si = c->regs[VCPU_REGS_RSI];
 	tss->di = c->regs[VCPU_REGS_RDI];
 
-	tss->es = ops->get_segment_selector(ctxt, VCPU_SREG_ES);
-	tss->cs = ops->get_segment_selector(ctxt, VCPU_SREG_CS);
-	tss->ss = ops->get_segment_selector(ctxt, VCPU_SREG_SS);
-	tss->ds = ops->get_segment_selector(ctxt, VCPU_SREG_DS);
-	tss->ldt = ops->get_segment_selector(ctxt, VCPU_SREG_LDTR);
+	tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
+	tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
+	tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
+	tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
+	tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
 }
 
 static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
@@ -2136,11 +2153,11 @@ static int load_state_from_tss16(struct x86_emulate_ctxt *ctxt,
 	 * SDM says that segment selectors are loaded before segment
 	 * descriptors
 	 */
-	ops->set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
-	ops->set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
-	ops->set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
-	ops->set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
-	ops->set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
+	set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
+	set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
+	set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
+	set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
+	set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
 
 	/*
 	 * Now load segment descriptors. If fault happenes at this stage
@@ -2227,13 +2244,13 @@ static void save_state_to_tss32(struct x86_emulate_ctxt *ctxt,
 	tss->esi = c->regs[VCPU_REGS_RSI];
 	tss->edi = c->regs[VCPU_REGS_RDI];
 
-	tss->es = ops->get_segment_selector(ctxt, VCPU_SREG_ES);
-	tss->cs = ops->get_segment_selector(ctxt, VCPU_SREG_CS);
-	tss->ss = ops->get_segment_selector(ctxt, VCPU_SREG_SS);
-	tss->ds = ops->get_segment_selector(ctxt, VCPU_SREG_DS);
-	tss->fs = ops->get_segment_selector(ctxt, VCPU_SREG_FS);
-	tss->gs = ops->get_segment_selector(ctxt, VCPU_SREG_GS);
-	tss->ldt_selector = ops->get_segment_selector(ctxt, VCPU_SREG_LDTR);
+	tss->es = get_segment_selector(ctxt, VCPU_SREG_ES);
+	tss->cs = get_segment_selector(ctxt, VCPU_SREG_CS);
+	tss->ss = get_segment_selector(ctxt, VCPU_SREG_SS);
+	tss->ds = get_segment_selector(ctxt, VCPU_SREG_DS);
+	tss->fs = get_segment_selector(ctxt, VCPU_SREG_FS);
+	tss->gs = get_segment_selector(ctxt, VCPU_SREG_GS);
+	tss->ldt_selector = get_segment_selector(ctxt, VCPU_SREG_LDTR);
 }
 
 static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
@@ -2260,13 +2277,13 @@ static int load_state_from_tss32(struct x86_emulate_ctxt *ctxt,
 	 * SDM says that segment selectors are loaded before segment
 	 * descriptors
 	 */
-	ops->set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
-	ops->set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
-	ops->set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
-	ops->set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
-	ops->set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
-	ops->set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
-	ops->set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);
+	set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
+	set_segment_selector(ctxt, tss->es, VCPU_SREG_ES);
+	set_segment_selector(ctxt, tss->cs, VCPU_SREG_CS);
+	set_segment_selector(ctxt, tss->ss, VCPU_SREG_SS);
+	set_segment_selector(ctxt, tss->ds, VCPU_SREG_DS);
+	set_segment_selector(ctxt, tss->fs, VCPU_SREG_FS);
+	set_segment_selector(ctxt, tss->gs, VCPU_SREG_GS);
 
 	/*
 	 * Now load segment descriptors. If fault happenes at this stage
@@ -2348,7 +2365,7 @@ static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
 {
 	struct desc_struct curr_tss_desc, next_tss_desc;
 	int ret;
-	u16 old_tss_sel = ops->get_segment_selector(ctxt, VCPU_SREG_TR);
+	u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR);
 	ulong old_tss_base =
 		ops->get_cached_segment_base(ctxt, VCPU_SREG_TR);
 	u32 desc_limit;
@@ -2411,8 +2428,7 @@ static int emulator_do_task_switch(struct x86_emulate_ctxt *ctxt,
 	}
 
 	ops->set_cr(ctxt, 0, ops->get_cr(ctxt, 0) | X86_CR0_TS);
-	ops->set_cached_descriptor(ctxt, &next_tss_desc, 0, VCPU_SREG_TR);
-	ops->set_segment_selector(ctxt, tss_selector, VCPU_SREG_TR);
+	ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR);
 
 	if (has_error_code) {
 		struct decode_cache *c = &ctxt->decode;
@@ -2503,7 +2519,7 @@ static int em_call_far(struct x86_emulate_ctxt *ctxt)
 	ulong old_eip;
 	int rc;
 
-	old_cs = ctxt->ops->get_segment_selector(ctxt, VCPU_SREG_CS);
+	old_cs = get_segment_selector(ctxt, VCPU_SREG_CS);
 	old_eip = c->eip;
 
 	memcpy(&sel, c->src.valptr + c->op_bytes, 2);
@@ -3881,7 +3897,7 @@ special_insn:
 			rc = emulate_ud(ctxt);
 			goto done;
 		}
-		c->dst.val = ops->get_segment_selector(ctxt, c->modrm_reg);
+		c->dst.val = get_segment_selector(ctxt, c->modrm_reg);
 		break;
 	case 0x8d: /* lea r16/r32, m */
 		c->dst.val = c->src.addr.mem.ea;
diff --git a/arch/x86/kvm/x86.c b/arch/x86/kvm/x86.c
index 22bc69ccf3ef..77c9d8673dc4 100644
--- a/arch/x86/kvm/x86.c
+++ b/arch/x86/kvm/x86.c
@@ -4304,13 +4304,14 @@ static unsigned long emulator_get_cached_segment_base(
 	return get_segment_base(emul_to_vcpu(ctxt), seg);
 }
 
-static bool emulator_get_cached_descriptor(struct x86_emulate_ctxt *ctxt,
+static bool emulator_get_segment(struct x86_emulate_ctxt *ctxt, u16 *selector,
 				 struct desc_struct *desc, u32 *base3,
 				 int seg)
 {
 	struct kvm_segment var;
 
 	kvm_get_segment(emul_to_vcpu(ctxt), &var, seg);
+	*selector = var.selector;
 
 	if (var.unusable)
 		return false;
@@ -4335,16 +4336,14 @@ static bool emulator_get_cached_descriptor(struct x86_emulate_ctxt *ctxt,
 	return true;
 }
 
-static void emulator_set_cached_descriptor(struct x86_emulate_ctxt *ctxt,
+static void emulator_set_segment(struct x86_emulate_ctxt *ctxt, u16 selector,
 				 struct desc_struct *desc, u32 base3,
 				 int seg)
 {
 	struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt);
 	struct kvm_segment var;
 
-	/* needed to preserve selector */
-	kvm_get_segment(vcpu, &var, seg);
-
+	var.selector = selector;
 	var.base = get_desc_base(desc);
 #ifdef CONFIG_X86_64
 	var.base |= ((u64)base3) << 32;
@@ -4368,24 +4367,6 @@ static void emulator_set_cached_descriptor(struct x86_emulate_ctxt *ctxt,
 	return;
 }
 
-static u16 emulator_get_segment_selector(struct x86_emulate_ctxt *ctxt, int seg)
-{
-	struct kvm_segment kvm_seg;
-
-	kvm_get_segment(emul_to_vcpu(ctxt), &kvm_seg, seg);
-	return kvm_seg.selector;
-}
-
-static void emulator_set_segment_selector(struct x86_emulate_ctxt *ctxt,
-					  u16 sel, int seg)
-{
-	struct kvm_segment kvm_seg;
-
-	kvm_get_segment(emul_to_vcpu(ctxt), &kvm_seg, seg);
-	kvm_seg.selector = sel;
-	kvm_set_segment(emul_to_vcpu(ctxt), &kvm_seg, seg);
-}
-
 static int emulator_get_msr(struct x86_emulate_ctxt *ctxt,
 			    u32 msr_index, u64 *pdata)
 {
@@ -4436,10 +4417,8 @@ static struct x86_emulate_ops emulate_ops = {
 	.invlpg              = emulator_invlpg,
 	.pio_in_emulated     = emulator_pio_in_emulated,
 	.pio_out_emulated    = emulator_pio_out_emulated,
-	.get_cached_descriptor = emulator_get_cached_descriptor,
-	.set_cached_descriptor = emulator_set_cached_descriptor,
-	.get_segment_selector = emulator_get_segment_selector,
-	.set_segment_selector = emulator_set_segment_selector,
+	.get_segment         = emulator_get_segment,
+	.set_segment         = emulator_set_segment,
 	.get_cached_segment_base = emulator_get_cached_segment_base,
 	.get_gdt             = emulator_get_gdt,
 	.get_idt             = emulator_get_idt,