author    Avi Kivity <avi@redhat.com>    2010-07-29 08:11:52 -0400
committer Avi Kivity <avi@redhat.com>    2010-10-24 04:50:24 -0400
commit    dde7e6d12a9ef9f727d05ce824f4fe75ca2a5b3a (patch)
tree      8c80116b0bc967ba7beba168fb0e16ef30f93ed7 /arch
parent    ef65c88912cafe56de2737c440aefc764fd8f202 (diff)

KVM: x86 emulator: move x86_decode_insn() downwards

No code changes.

Signed-off-by: Avi Kivity <avi@redhat.com>
Signed-off-by: Marcelo Tosatti <mtosatti@redhat.com>
Diffstat (limited to 'arch')
 arch/x86/kvm/emulate.c | 744
 1 file changed, 372 insertions(+), 372 deletions(-)
diff --git a/arch/x86/kvm/emulate.c b/arch/x86/kvm/emulate.c
index 799e895fb08e..c6f435917538 100644
--- a/arch/x86/kvm/emulate.c
+++ b/arch/x86/kvm/emulate.c
@@ -945,378 +945,6 @@ done:
         return rc;
 }
 
-int
-x86_decode_insn(struct x86_emulate_ctxt *ctxt)
-{
-        struct x86_emulate_ops *ops = ctxt->ops;
-        struct decode_cache *c = &ctxt->decode;
-        int rc = X86EMUL_CONTINUE;
-        int mode = ctxt->mode;
-        int def_op_bytes, def_ad_bytes, dual, goffset;
-        struct opcode opcode, *g_mod012, *g_mod3;
-
-        /* we cannot decode insn before we complete previous rep insn */
-        WARN_ON(ctxt->restart);
-
-        c->eip = ctxt->eip;
-        c->fetch.start = c->fetch.end = c->eip;
-        ctxt->cs_base = seg_base(ctxt, ops, VCPU_SREG_CS);
-
-        switch (mode) {
-        case X86EMUL_MODE_REAL:
-        case X86EMUL_MODE_VM86:
-        case X86EMUL_MODE_PROT16:
-                def_op_bytes = def_ad_bytes = 2;
-                break;
-        case X86EMUL_MODE_PROT32:
-                def_op_bytes = def_ad_bytes = 4;
-                break;
-#ifdef CONFIG_X86_64
-        case X86EMUL_MODE_PROT64:
-                def_op_bytes = 4;
-                def_ad_bytes = 8;
-                break;
-#endif
-        default:
-                return -1;
-        }
-
-        c->op_bytes = def_op_bytes;
-        c->ad_bytes = def_ad_bytes;
-
-        /* Legacy prefixes. */
-        for (;;) {
-                switch (c->b = insn_fetch(u8, 1, c->eip)) {
-                case 0x66: /* operand-size override */
-                        /* switch between 2/4 bytes */
-                        c->op_bytes = def_op_bytes ^ 6;
-                        break;
-                case 0x67: /* address-size override */
-                        if (mode == X86EMUL_MODE_PROT64)
-                                /* switch between 4/8 bytes */
-                                c->ad_bytes = def_ad_bytes ^ 12;
-                        else
-                                /* switch between 2/4 bytes */
-                                c->ad_bytes = def_ad_bytes ^ 6;
-                        break;
-                case 0x26: /* ES override */
-                case 0x2e: /* CS override */
-                case 0x36: /* SS override */
-                case 0x3e: /* DS override */
-                        set_seg_override(c, (c->b >> 3) & 3);
-                        break;
-                case 0x64: /* FS override */
-                case 0x65: /* GS override */
-                        set_seg_override(c, c->b & 7);
-                        break;
-                case 0x40 ... 0x4f: /* REX */
-                        if (mode != X86EMUL_MODE_PROT64)
-                                goto done_prefixes;
-                        c->rex_prefix = c->b;
-                        continue;
-                case 0xf0: /* LOCK */
-                        c->lock_prefix = 1;
-                        break;
-                case 0xf2: /* REPNE/REPNZ */
-                        c->rep_prefix = REPNE_PREFIX;
-                        break;
-                case 0xf3: /* REP/REPE/REPZ */
-                        c->rep_prefix = REPE_PREFIX;
-                        break;
-                default:
-                        goto done_prefixes;
-                }
-
-                /* Any legacy prefix after a REX prefix nullifies its effect. */
-
-                c->rex_prefix = 0;
-        }
-
-done_prefixes:
-
-        /* REX prefix. */
-        if (c->rex_prefix)
-                if (c->rex_prefix & 8)
-                        c->op_bytes = 8; /* REX.W */
-
-        /* Opcode byte(s). */
-        opcode = opcode_table[c->b];
-        if (opcode.flags == 0) {
-                /* Two-byte opcode? */
-                if (c->b == 0x0f) {
-                        c->twobyte = 1;
-                        c->b = insn_fetch(u8, 1, c->eip);
-                        opcode = twobyte_table[c->b];
-                }
-        }
-        c->d = opcode.flags;
-
-        if (c->d & Group) {
-                dual = c->d & GroupDual;
-                c->modrm = insn_fetch(u8, 1, c->eip);
-                --c->eip;
-
-                if (c->d & GroupDual) {
-                        g_mod012 = opcode.u.gdual->mod012;
-                        g_mod3 = opcode.u.gdual->mod3;
-                } else
-                        g_mod012 = g_mod3 = opcode.u.group;
-
-                c->d &= ~(Group | GroupDual);
-
-                goffset = (c->modrm >> 3) & 7;
-
-                if ((c->modrm >> 6) == 3)
-                        opcode = g_mod3[goffset];
-                else
-                        opcode = g_mod012[goffset];
-                c->d |= opcode.flags;
-        }
-
-        c->execute = opcode.u.execute;
-
-        /* Unrecognised? */
-        if (c->d == 0 || (c->d & Undefined)) {
-                DPRINTF("Cannot emulate %02x\n", c->b);
-                return -1;
-        }
-
-        if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
-                c->op_bytes = 8;
-
-        /* ModRM and SIB bytes. */
-        if (c->d & ModRM)
-                rc = decode_modrm(ctxt, ops);
-        else if (c->d & MemAbs)
-                rc = decode_abs(ctxt, ops);
-        if (rc != X86EMUL_CONTINUE)
-                goto done;
-
-        if (!c->has_seg_override)
-                set_seg_override(c, VCPU_SREG_DS);
-
-        if (!(!c->twobyte && c->b == 0x8d))
-                c->modrm_ea += seg_override_base(ctxt, ops, c);
-
-        if (c->ad_bytes != 8)
-                c->modrm_ea = (u32)c->modrm_ea;
-
-        if (c->rip_relative)
-                c->modrm_ea += c->eip;
-
-        /*
-         * Decode and fetch the source operand: register, memory
-         * or immediate.
-         */
-        switch (c->d & SrcMask) {
-        case SrcNone:
-                break;
-        case SrcReg:
-                decode_register_operand(&c->src, c, 0);
-                break;
-        case SrcMem16:
-                c->src.bytes = 2;
-                goto srcmem_common;
-        case SrcMem32:
-                c->src.bytes = 4;
-                goto srcmem_common;
-        case SrcMem:
-                c->src.bytes = (c->d & ByteOp) ? 1 :
-                                                c->op_bytes;
-                /* Don't fetch the address for invlpg: it could be unmapped. */
-                if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
-                        break;
-        srcmem_common:
-                /*
-                 * For instructions with a ModR/M byte, switch to register
-                 * access if Mod = 3.
-                 */
-                if ((c->d & ModRM) && c->modrm_mod == 3) {
-                        c->src.type = OP_REG;
-                        c->src.val = c->modrm_val;
-                        c->src.ptr = c->modrm_ptr;
-                        break;
-                }
-                c->src.type = OP_MEM;
-                c->src.ptr = (unsigned long *)c->modrm_ea;
-                c->src.val = 0;
-                break;
-        case SrcImm:
-        case SrcImmU:
-                c->src.type = OP_IMM;
-                c->src.ptr = (unsigned long *)c->eip;
-                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                if (c->src.bytes == 8)
-                        c->src.bytes = 4;
-                /* NB. Immediates are sign-extended as necessary. */
-                switch (c->src.bytes) {
-                case 1:
-                        c->src.val = insn_fetch(s8, 1, c->eip);
-                        break;
-                case 2:
-                        c->src.val = insn_fetch(s16, 2, c->eip);
-                        break;
-                case 4:
-                        c->src.val = insn_fetch(s32, 4, c->eip);
-                        break;
-                }
-                if ((c->d & SrcMask) == SrcImmU) {
-                        switch (c->src.bytes) {
-                        case 1:
-                                c->src.val &= 0xff;
-                                break;
-                        case 2:
-                                c->src.val &= 0xffff;
-                                break;
-                        case 4:
-                                c->src.val &= 0xffffffff;
-                                break;
-                        }
-                }
-                break;
-        case SrcImmByte:
-        case SrcImmUByte:
-                c->src.type = OP_IMM;
-                c->src.ptr = (unsigned long *)c->eip;
-                c->src.bytes = 1;
-                if ((c->d & SrcMask) == SrcImmByte)
-                        c->src.val = insn_fetch(s8, 1, c->eip);
-                else
-                        c->src.val = insn_fetch(u8, 1, c->eip);
-                break;
-        case SrcAcc:
-                c->src.type = OP_REG;
-                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                c->src.ptr = &c->regs[VCPU_REGS_RAX];
-                switch (c->src.bytes) {
-                case 1:
-                        c->src.val = *(u8 *)c->src.ptr;
-                        break;
-                case 2:
-                        c->src.val = *(u16 *)c->src.ptr;
-                        break;
-                case 4:
-                        c->src.val = *(u32 *)c->src.ptr;
-                        break;
-                case 8:
-                        c->src.val = *(u64 *)c->src.ptr;
-                        break;
-                }
-                break;
-        case SrcOne:
-                c->src.bytes = 1;
-                c->src.val = 1;
-                break;
-        case SrcSI:
-                c->src.type = OP_MEM;
-                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                c->src.ptr = (unsigned long *)
-                        register_address(c, seg_override_base(ctxt, ops, c),
-                                         c->regs[VCPU_REGS_RSI]);
-                c->src.val = 0;
-                break;
-        case SrcImmFAddr:
-                c->src.type = OP_IMM;
-                c->src.ptr = (unsigned long *)c->eip;
-                c->src.bytes = c->op_bytes + 2;
-                insn_fetch_arr(c->src.valptr, c->src.bytes, c->eip);
-                break;
-        case SrcMemFAddr:
-                c->src.type = OP_MEM;
-                c->src.ptr = (unsigned long *)c->modrm_ea;
-                c->src.bytes = c->op_bytes + 2;
-                break;
-        }
-
-        /*
-         * Decode and fetch the second source operand: register, memory
-         * or immediate.
-         */
-        switch (c->d & Src2Mask) {
-        case Src2None:
-                break;
-        case Src2CL:
-                c->src2.bytes = 1;
-                c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
-                break;
-        case Src2ImmByte:
-                c->src2.type = OP_IMM;
-                c->src2.ptr = (unsigned long *)c->eip;
-                c->src2.bytes = 1;
-                c->src2.val = insn_fetch(u8, 1, c->eip);
-                break;
-        case Src2One:
-                c->src2.bytes = 1;
-                c->src2.val = 1;
-                break;
-        }
-
-        /* Decode and fetch the destination operand: register or memory. */
-        switch (c->d & DstMask) {
-        case ImplicitOps:
-                /* Special instructions do their own operand decoding. */
-                return 0;
-        case DstReg:
-                decode_register_operand(&c->dst, c,
-                         c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
-                break;
-        case DstMem:
-        case DstMem64:
-                if ((c->d & ModRM) && c->modrm_mod == 3) {
-                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                        c->dst.type = OP_REG;
-                        c->dst.val = c->dst.orig_val = c->modrm_val;
-                        c->dst.ptr = c->modrm_ptr;
-                        break;
-                }
-                c->dst.type = OP_MEM;
-                c->dst.ptr = (unsigned long *)c->modrm_ea;
-                if ((c->d & DstMask) == DstMem64)
-                        c->dst.bytes = 8;
-                else
-                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                c->dst.val = 0;
-                if (c->d & BitOp) {
-                        unsigned long mask = ~(c->dst.bytes * 8 - 1);
-
-                        c->dst.ptr = (void *)c->dst.ptr +
-                                                (c->src.val & mask) / 8;
-                }
-                break;
-        case DstAcc:
-                c->dst.type = OP_REG;
-                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                c->dst.ptr = &c->regs[VCPU_REGS_RAX];
-                switch (c->dst.bytes) {
-                case 1:
-                        c->dst.val = *(u8 *)c->dst.ptr;
-                        break;
-                case 2:
-                        c->dst.val = *(u16 *)c->dst.ptr;
-                        break;
-                case 4:
-                        c->dst.val = *(u32 *)c->dst.ptr;
-                        break;
-                case 8:
-                        c->dst.val = *(u64 *)c->dst.ptr;
-                        break;
-                }
-                c->dst.orig_val = c->dst.val;
-                break;
-        case DstDI:
-                c->dst.type = OP_MEM;
-                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
-                c->dst.ptr = (unsigned long *)
-                        register_address(c, es_base(ctxt, ops),
-                                         c->regs[VCPU_REGS_RDI]);
-                c->dst.val = 0;
-                break;
-        }
-
-done:
-        return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
-}
-
 static int read_emulated(struct x86_emulate_ctxt *ctxt,
                          struct x86_emulate_ops *ops,
                          unsigned long addr, void *dest, unsigned size)
@@ -2625,6 +2253,378 @@ static void string_addr_inc(struct x86_emulate_ctxt *ctxt, unsigned long base,
 }
 
 int
+x86_decode_insn(struct x86_emulate_ctxt *ctxt)
+{
+        struct x86_emulate_ops *ops = ctxt->ops;
+        struct decode_cache *c = &ctxt->decode;
+        int rc = X86EMUL_CONTINUE;
+        int mode = ctxt->mode;
+        int def_op_bytes, def_ad_bytes, dual, goffset;
+        struct opcode opcode, *g_mod012, *g_mod3;
+
+        /* we cannot decode insn before we complete previous rep insn */
+        WARN_ON(ctxt->restart);
+
+        c->eip = ctxt->eip;
+        c->fetch.start = c->fetch.end = c->eip;
+        ctxt->cs_base = seg_base(ctxt, ops, VCPU_SREG_CS);
+
+        switch (mode) {
+        case X86EMUL_MODE_REAL:
+        case X86EMUL_MODE_VM86:
+        case X86EMUL_MODE_PROT16:
+                def_op_bytes = def_ad_bytes = 2;
+                break;
+        case X86EMUL_MODE_PROT32:
+                def_op_bytes = def_ad_bytes = 4;
+                break;
+#ifdef CONFIG_X86_64
+        case X86EMUL_MODE_PROT64:
+                def_op_bytes = 4;
+                def_ad_bytes = 8;
+                break;
+#endif
+        default:
+                return -1;
+        }
+
+        c->op_bytes = def_op_bytes;
+        c->ad_bytes = def_ad_bytes;
+
+        /* Legacy prefixes. */
+        for (;;) {
+                switch (c->b = insn_fetch(u8, 1, c->eip)) {
+                case 0x66: /* operand-size override */
+                        /* switch between 2/4 bytes */
+                        c->op_bytes = def_op_bytes ^ 6;
+                        break;
+                case 0x67: /* address-size override */
+                        if (mode == X86EMUL_MODE_PROT64)
+                                /* switch between 4/8 bytes */
+                                c->ad_bytes = def_ad_bytes ^ 12;
+                        else
+                                /* switch between 2/4 bytes */
+                                c->ad_bytes = def_ad_bytes ^ 6;
+                        break;
+                case 0x26: /* ES override */
+                case 0x2e: /* CS override */
+                case 0x36: /* SS override */
+                case 0x3e: /* DS override */
+                        set_seg_override(c, (c->b >> 3) & 3);
+                        break;
+                case 0x64: /* FS override */
+                case 0x65: /* GS override */
+                        set_seg_override(c, c->b & 7);
+                        break;
+                case 0x40 ... 0x4f: /* REX */
+                        if (mode != X86EMUL_MODE_PROT64)
+                                goto done_prefixes;
+                        c->rex_prefix = c->b;
+                        continue;
+                case 0xf0: /* LOCK */
+                        c->lock_prefix = 1;
+                        break;
+                case 0xf2: /* REPNE/REPNZ */
+                        c->rep_prefix = REPNE_PREFIX;
+                        break;
+                case 0xf3: /* REP/REPE/REPZ */
+                        c->rep_prefix = REPE_PREFIX;
+                        break;
+                default:
+                        goto done_prefixes;
+                }
+
+                /* Any legacy prefix after a REX prefix nullifies its effect. */
+
+                c->rex_prefix = 0;
+        }
+
+done_prefixes:
+
+        /* REX prefix. */
+        if (c->rex_prefix)
+                if (c->rex_prefix & 8)
+                        c->op_bytes = 8; /* REX.W */
+
+        /* Opcode byte(s). */
+        opcode = opcode_table[c->b];
+        if (opcode.flags == 0) {
+                /* Two-byte opcode? */
+                if (c->b == 0x0f) {
+                        c->twobyte = 1;
+                        c->b = insn_fetch(u8, 1, c->eip);
+                        opcode = twobyte_table[c->b];
+                }
+        }
+        c->d = opcode.flags;
+
+        if (c->d & Group) {
+                dual = c->d & GroupDual;
+                c->modrm = insn_fetch(u8, 1, c->eip);
+                --c->eip;
+
+                if (c->d & GroupDual) {
+                        g_mod012 = opcode.u.gdual->mod012;
+                        g_mod3 = opcode.u.gdual->mod3;
+                } else
+                        g_mod012 = g_mod3 = opcode.u.group;
+
+                c->d &= ~(Group | GroupDual);
+
+                goffset = (c->modrm >> 3) & 7;
+
+                if ((c->modrm >> 6) == 3)
+                        opcode = g_mod3[goffset];
+                else
+                        opcode = g_mod012[goffset];
+                c->d |= opcode.flags;
+        }
+
+        c->execute = opcode.u.execute;
+
+        /* Unrecognised? */
+        if (c->d == 0 || (c->d & Undefined)) {
+                DPRINTF("Cannot emulate %02x\n", c->b);
+                return -1;
+        }
+
+        if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
+                c->op_bytes = 8;
+
+        /* ModRM and SIB bytes. */
+        if (c->d & ModRM)
+                rc = decode_modrm(ctxt, ops);
+        else if (c->d & MemAbs)
+                rc = decode_abs(ctxt, ops);
+        if (rc != X86EMUL_CONTINUE)
+                goto done;
+
+        if (!c->has_seg_override)
+                set_seg_override(c, VCPU_SREG_DS);
+
+        if (!(!c->twobyte && c->b == 0x8d))
+                c->modrm_ea += seg_override_base(ctxt, ops, c);
+
+        if (c->ad_bytes != 8)
+                c->modrm_ea = (u32)c->modrm_ea;
+
+        if (c->rip_relative)
+                c->modrm_ea += c->eip;
+
+        /*
+         * Decode and fetch the source operand: register, memory
+         * or immediate.
+         */
+        switch (c->d & SrcMask) {
+        case SrcNone:
+                break;
+        case SrcReg:
+                decode_register_operand(&c->src, c, 0);
+                break;
+        case SrcMem16:
+                c->src.bytes = 2;
+                goto srcmem_common;
+        case SrcMem32:
+                c->src.bytes = 4;
+                goto srcmem_common;
+        case SrcMem:
+                c->src.bytes = (c->d & ByteOp) ? 1 :
+                                                c->op_bytes;
+                /* Don't fetch the address for invlpg: it could be unmapped. */
+                if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
+                        break;
+        srcmem_common:
+                /*
+                 * For instructions with a ModR/M byte, switch to register
+                 * access if Mod = 3.
+                 */
+                if ((c->d & ModRM) && c->modrm_mod == 3) {
+                        c->src.type = OP_REG;
+                        c->src.val = c->modrm_val;
+                        c->src.ptr = c->modrm_ptr;
+                        break;
+                }
+                c->src.type = OP_MEM;
+                c->src.ptr = (unsigned long *)c->modrm_ea;
+                c->src.val = 0;
+                break;
+        case SrcImm:
+        case SrcImmU:
+                c->src.type = OP_IMM;
+                c->src.ptr = (unsigned long *)c->eip;
+                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                if (c->src.bytes == 8)
+                        c->src.bytes = 4;
+                /* NB. Immediates are sign-extended as necessary. */
+                switch (c->src.bytes) {
+                case 1:
+                        c->src.val = insn_fetch(s8, 1, c->eip);
+                        break;
+                case 2:
+                        c->src.val = insn_fetch(s16, 2, c->eip);
+                        break;
+                case 4:
+                        c->src.val = insn_fetch(s32, 4, c->eip);
+                        break;
+                }
+                if ((c->d & SrcMask) == SrcImmU) {
+                        switch (c->src.bytes) {
+                        case 1:
+                                c->src.val &= 0xff;
+                                break;
+                        case 2:
+                                c->src.val &= 0xffff;
+                                break;
+                        case 4:
+                                c->src.val &= 0xffffffff;
+                                break;
+                        }
+                }
+                break;
+        case SrcImmByte:
+        case SrcImmUByte:
+                c->src.type = OP_IMM;
+                c->src.ptr = (unsigned long *)c->eip;
+                c->src.bytes = 1;
+                if ((c->d & SrcMask) == SrcImmByte)
+                        c->src.val = insn_fetch(s8, 1, c->eip);
+                else
+                        c->src.val = insn_fetch(u8, 1, c->eip);
+                break;
+        case SrcAcc:
+                c->src.type = OP_REG;
+                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                c->src.ptr = &c->regs[VCPU_REGS_RAX];
+                switch (c->src.bytes) {
+                case 1:
+                        c->src.val = *(u8 *)c->src.ptr;
+                        break;
+                case 2:
+                        c->src.val = *(u16 *)c->src.ptr;
+                        break;
+                case 4:
+                        c->src.val = *(u32 *)c->src.ptr;
+                        break;
+                case 8:
+                        c->src.val = *(u64 *)c->src.ptr;
+                        break;
+                }
+                break;
+        case SrcOne:
+                c->src.bytes = 1;
+                c->src.val = 1;
+                break;
+        case SrcSI:
+                c->src.type = OP_MEM;
+                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                c->src.ptr = (unsigned long *)
+                        register_address(c, seg_override_base(ctxt, ops, c),
+                                         c->regs[VCPU_REGS_RSI]);
+                c->src.val = 0;
+                break;
+        case SrcImmFAddr:
+                c->src.type = OP_IMM;
+                c->src.ptr = (unsigned long *)c->eip;
+                c->src.bytes = c->op_bytes + 2;
+                insn_fetch_arr(c->src.valptr, c->src.bytes, c->eip);
+                break;
+        case SrcMemFAddr:
+                c->src.type = OP_MEM;
+                c->src.ptr = (unsigned long *)c->modrm_ea;
+                c->src.bytes = c->op_bytes + 2;
+                break;
+        }
+
+        /*
+         * Decode and fetch the second source operand: register, memory
+         * or immediate.
+         */
+        switch (c->d & Src2Mask) {
+        case Src2None:
+                break;
+        case Src2CL:
+                c->src2.bytes = 1;
+                c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
+                break;
+        case Src2ImmByte:
+                c->src2.type = OP_IMM;
+                c->src2.ptr = (unsigned long *)c->eip;
+                c->src2.bytes = 1;
+                c->src2.val = insn_fetch(u8, 1, c->eip);
+                break;
+        case Src2One:
+                c->src2.bytes = 1;
+                c->src2.val = 1;
+                break;
+        }
+
+        /* Decode and fetch the destination operand: register or memory. */
+        switch (c->d & DstMask) {
+        case ImplicitOps:
+                /* Special instructions do their own operand decoding. */
+                return 0;
+        case DstReg:
+                decode_register_operand(&c->dst, c,
+                         c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
+                break;
+        case DstMem:
+        case DstMem64:
+                if ((c->d & ModRM) && c->modrm_mod == 3) {
+                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                        c->dst.type = OP_REG;
+                        c->dst.val = c->dst.orig_val = c->modrm_val;
+                        c->dst.ptr = c->modrm_ptr;
+                        break;
+                }
+                c->dst.type = OP_MEM;
+                c->dst.ptr = (unsigned long *)c->modrm_ea;
+                if ((c->d & DstMask) == DstMem64)
+                        c->dst.bytes = 8;
+                else
+                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                c->dst.val = 0;
+                if (c->d & BitOp) {
+                        unsigned long mask = ~(c->dst.bytes * 8 - 1);
+
+                        c->dst.ptr = (void *)c->dst.ptr +
+                                                (c->src.val & mask) / 8;
+                }
+                break;
+        case DstAcc:
+                c->dst.type = OP_REG;
+                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                c->dst.ptr = &c->regs[VCPU_REGS_RAX];
+                switch (c->dst.bytes) {
+                case 1:
+                        c->dst.val = *(u8 *)c->dst.ptr;
+                        break;
+                case 2:
+                        c->dst.val = *(u16 *)c->dst.ptr;
+                        break;
+                case 4:
+                        c->dst.val = *(u32 *)c->dst.ptr;
+                        break;
+                case 8:
+                        c->dst.val = *(u64 *)c->dst.ptr;
+                        break;
+                }
+                c->dst.orig_val = c->dst.val;
+                break;
+        case DstDI:
+                c->dst.type = OP_MEM;
+                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
+                c->dst.ptr = (unsigned long *)
+                        register_address(c, es_base(ctxt, ops),
+                                         c->regs[VCPU_REGS_RDI]);
+                c->dst.val = 0;
+                break;
+        }
+
+done:
+        return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
+}
+
+int
 x86_emulate_insn(struct x86_emulate_ctxt *ctxt)
 {
         struct x86_emulate_ops *ops = ctxt->ops;