aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/kvm/x86_emulate.c
diff options
context:
space:
mode:
Diffstat (limited to 'drivers/kvm/x86_emulate.c')
-rw-r--r--drivers/kvm/x86_emulate.c1913
1 files changed, 0 insertions, 1913 deletions
diff --git a/drivers/kvm/x86_emulate.c b/drivers/kvm/x86_emulate.c
deleted file mode 100644
index 50b133f68743..000000000000
--- a/drivers/kvm/x86_emulate.c
+++ /dev/null
@@ -1,1913 +0,0 @@
1/******************************************************************************
2 * x86_emulate.c
3 *
4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
5 *
6 * Copyright (c) 2005 Keir Fraser
7 *
8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
9 * privileged instructions:
10 *
11 * Copyright (C) 2006 Qumranet
12 *
13 * Avi Kivity <avi@qumranet.com>
14 * Yaniv Kamay <yaniv@qumranet.com>
15 *
16 * This work is licensed under the terms of the GNU GPL, version 2. See
17 * the COPYING file in the top-level directory.
18 *
19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
20 */
21
22#ifndef __KERNEL__
23#include <stdio.h>
24#include <stdint.h>
25#include <public/xen.h>
26#define DPRINTF(_f, _a ...) printf(_f , ## _a)
27#else
28#include "kvm.h"
29#include "x86.h"
30#define DPRINTF(x...) do {} while (0)
31#endif
32#include "x86_emulate.h"
33#include <linux/module.h>
34
35/*
36 * Opcode effective-address decode tables.
37 * Note that we only emulate instructions that have at least one memory
38 * operand (excluding implicit stack references). We assume that stack
39 * references and instruction fetches will never occur in special memory
40 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
41 * not be handled.
42 */
43
44/* Operand sizes: 8-bit operands or specified/overridden size. */
45#define ByteOp (1<<0) /* 8-bit operands. */
46/* Destination operand type. */
47#define ImplicitOps (1<<1) /* Implicit in opcode. No generic decode. */
48#define DstReg (2<<1) /* Register operand. */
49#define DstMem (3<<1) /* Memory operand. */
50#define DstMask (3<<1)
51/* Source operand type. */
52#define SrcNone (0<<3) /* No source operand. */
53#define SrcImplicit (0<<3) /* Source operand is implicit in the opcode. */
54#define SrcReg (1<<3) /* Register operand. */
55#define SrcMem (2<<3) /* Memory operand. */
56#define SrcMem16 (3<<3) /* Memory operand (16-bit). */
57#define SrcMem32 (4<<3) /* Memory operand (32-bit). */
58#define SrcImm (5<<3) /* Immediate operand. */
59#define SrcImmByte (6<<3) /* 8-bit sign-extended immediate operand. */
60#define SrcMask (7<<3)
61/* Generic ModRM decode. */
62#define ModRM (1<<6)
63/* Destination is only written; never read. */
64#define Mov (1<<7)
65#define BitOp (1<<8)
66#define MemAbs (1<<9) /* Memory operand is absolute displacement */
67#define String (1<<10) /* String instruction (rep capable) */
68#define Stack (1<<11) /* Stack instruction (push/pop) */
69
/*
 * Decode flags for each one-byte opcode, indexed by the opcode value.
 * A zero entry means "not handled by the generic decoder" (either
 * unemulated, or the first byte of a two-byte opcode -- see 0x0f and
 * twobyte_table below).
 */
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov | Stack,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
174
/*
 * Decode flags for each two-byte (0x0f-prefixed) opcode, indexed by the
 * second opcode byte. A zero entry means "cannot emulate".
 */
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
232
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k" /* force 32-bit operand */
#define _STK "%%rsp" /* stack pointer */
#elif defined(__i386__)
#define _LO32 "" /* force 32-bit operand */
#define _STK "%%esp" /* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/*
 * Before executing instruction: restore necessary bits in EFLAGS.
 * _sav/_msk/_tmp are asm operand-number strings (e.g. "0") naming the
 * saved-flags lvalue, the EFLAGS_MASK immediate and a scratch register.
 */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
287
/*
 * Raw emulation: instruction has two explicit operands.
 * Dispatches on (_dst).bytes; the _?x/_?y pairs give the asm operand
 * size suffix and constraint letter for each width (w/l/q).
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"w %"_wx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: _wy ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"l %"_lx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: _ly ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		case 8: \
			/* 64-bit case is a no-op on i386 builds. */ \
			__emulate_2op_8byte(_op, _src, _dst, \
					    _eflags, _qx, _qy); \
			break; \
		} \
	} while (0)

/* As above, but also handles 1-byte destinations (suffix "b"). */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do { \
		unsigned long _tmp; \
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "4", "2") \
				_op"b %"_bx"3,%1; " \
				_POST_EFLAGS("0", "4", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: _by ((_src).val), "i" (EFLAGS_MASK)); \
			break; \
		default: \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break; \
		} \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags) \
	__emulate_2op(_op, _src, _dst, _eflags, \
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags) \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags, \
			     "w", "r", _LO32, "r", "", "r")
353
/*
 * Instruction has only one explicit operand (no source operand).
 * Executes _op directly on (_dst).val, routing EFLAGS through
 * _PRE_EFLAGS/_POST_EFLAGS; width chosen from (_dst).bytes.
 */
#define emulate_1op(_op, _dst, _eflags) \
	do { \
		unsigned long _tmp; \
		\
		switch ((_dst).bytes) { \
		case 1: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"b %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 2: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"w %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 4: \
			__asm__ __volatile__ ( \
				_PRE_EFLAGS("0", "3", "2") \
				_op"l %1; " \
				_POST_EFLAGS("0", "3", "2") \
				: "=m" (_eflags), "=m" ((_dst).val), \
				"=&r" (_tmp) \
				: "i" (EFLAGS_MASK)); \
			break; \
		case 8: \
			/* 64-bit case is a no-op on i386 builds. */ \
			__emulate_1op_8byte(_op, _dst, _eflags); \
			break; \
		} \
	} while (0)
392
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
/* Two-operand quadword form; relies on caller's local _tmp. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "4", "2") \
			_op"q %"_qx"3,%1; " \
			_POST_EFLAGS("0", "4", "2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK)); \
	} while (0)

/* One-operand quadword form; relies on caller's local _tmp. */
#define __emulate_1op_8byte(_op, _dst, _eflags) \
	do { \
		__asm__ __volatile__ ( \
			_PRE_EFLAGS("0", "3", "2") \
			_op"q %1; " \
			_POST_EFLAGS("0", "3", "2") \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK)); \
	} while (0)

#elif defined(__i386__)
/* No 8-byte operands on 32-bit hosts: both forms expand to nothing. */
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
419
/*
 * Fetch next part of the instruction being emulated.
 * NOTE: expands to a statement expression; on fetch failure it assigns
 * the caller's local 'rc' and jumps to the caller's 'done:' label, so
 * it may only be used inside functions providing both.
 */
#define insn_fetch(_type, _size, _eip) \
({	unsigned long _x; \
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})

/* Access/update address held in a register, based on addressing mode. */
/* Truncate 'reg' to the current address size (c->ad_bytes). */
#define address_mask(reg) \
	((c->ad_bytes == sizeof(unsigned long)) ? \
	(reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
#define register_address(base, reg) \
	((base) + address_mask(reg))
/* Add 'inc' to 'reg', wrapping within the current address size only. */
#define register_address_increment(reg, inc) \
	do { \
		/* signed type ensures sign extension to long */ \
		int _inc = (inc); \
		if (c->ad_bytes == sizeof(unsigned long)) \
			(reg) += _inc; \
		else \
			(reg) = ((reg) & \
				 ~((1UL << (c->ad_bytes << 3)) - 1)) | \
				(((reg) + _inc) & \
				 ((1UL << (c->ad_bytes << 3)) - 1)); \
	} while (0)

/* Relative jump: adjust c->eip by 'rel' within the address size. */
#define JMP_REL(rel) \
	do { \
		register_address_increment(c->eip, rel); \
	} while (0)
453
454static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
455 struct x86_emulate_ops *ops,
456 unsigned long linear, u8 *dest)
457{
458 struct fetch_cache *fc = &ctxt->decode.fetch;
459 int rc;
460 int size;
461
462 if (linear < fc->start || linear >= fc->end) {
463 size = min(15UL, PAGE_SIZE - offset_in_page(linear));
464 rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
465 if (rc)
466 return rc;
467 fc->start = linear;
468 fc->end = linear + size;
469 }
470 *dest = fc->data[linear - fc->start];
471 return 0;
472}
473
474static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
475 struct x86_emulate_ops *ops,
476 unsigned long eip, void *dest, unsigned size)
477{
478 int rc = 0;
479
480 eip += ctxt->cs_base;
481 while (size--) {
482 rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
483 if (rc)
484 return rc;
485 }
486 return 0;
487}
488
489/*
490 * Given the 'reg' portion of a ModRM byte, and a register block, return a
491 * pointer into the block that addresses the relevant register.
492 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
493 */
494static void *decode_register(u8 modrm_reg, unsigned long *regs,
495 int highbyte_regs)
496{
497 void *p;
498
499 p = &regs[modrm_reg];
500 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
501 p = (unsigned char *)&regs[modrm_reg & 3] + 1;
502 return p;
503}
504
505static int read_descriptor(struct x86_emulate_ctxt *ctxt,
506 struct x86_emulate_ops *ops,
507 void *ptr,
508 u16 *size, unsigned long *address, int op_bytes)
509{
510 int rc;
511
512 if (op_bytes == 2)
513 op_bytes = 3;
514 *address = 0;
515 rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
516 ctxt->vcpu);
517 if (rc)
518 return rc;
519 rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
520 ctxt->vcpu);
521 return rc;
522}
523
524static int test_cc(unsigned int condition, unsigned int flags)
525{
526 int rc = 0;
527
528 switch ((condition & 15) >> 1) {
529 case 0: /* o */
530 rc |= (flags & EFLG_OF);
531 break;
532 case 1: /* b/c/nae */
533 rc |= (flags & EFLG_CF);
534 break;
535 case 2: /* z/e */
536 rc |= (flags & EFLG_ZF);
537 break;
538 case 3: /* be/na */
539 rc |= (flags & (EFLG_CF|EFLG_ZF));
540 break;
541 case 4: /* s */
542 rc |= (flags & EFLG_SF);
543 break;
544 case 5: /* p/pe */
545 rc |= (flags & EFLG_PF);
546 break;
547 case 7: /* le/ng */
548 rc |= (flags & EFLG_ZF);
549 /* fall through */
550 case 6: /* l/nge */
551 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
552 break;
553 }
554
555 /* Odd condition identifiers (lsb == 1) have inverted sense. */
556 return (!!rc ^ (condition & 1));
557}
558
559static void decode_register_operand(struct operand *op,
560 struct decode_cache *c,
561 int inhibit_bytereg)
562{
563 unsigned reg = c->modrm_reg;
564 int highbyte_regs = c->rex_prefix == 0;
565
566 if (!(c->d & ModRM))
567 reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
568 op->type = OP_REG;
569 if ((c->d & ByteOp) && !inhibit_bytereg) {
570 op->ptr = decode_register(reg, c->regs, highbyte_regs);
571 op->val = *(u8 *)op->ptr;
572 op->bytes = 1;
573 } else {
574 op->ptr = decode_register(reg, c->regs, 0);
575 op->bytes = c->op_bytes;
576 switch (op->bytes) {
577 case 2:
578 op->val = *(u16 *)op->ptr;
579 break;
580 case 4:
581 op->val = *(u32 *)op->ptr;
582 break;
583 case 8:
584 op->val = *(u64 *) op->ptr;
585 break;
586 }
587 }
588 op->orig_val = op->val;
589}
590
/*
 * Decode the ModRM byte and, where present, the SIB byte and
 * displacement. Merges any REX prefix bits into the reg/rm/index/base
 * fields. For mod == 3 the operand is a register and c->modrm_val is
 * loaded directly; otherwise the effective address is accumulated in
 * c->modrm_ea. Returns 0 on success; fetch failures exit via
 * insn_fetch()'s 'goto done' with rc set.
 */
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1; /* REX.R */
		index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	/* Split the ModRM byte into mod (2 bits), reg (3), rm (3). */
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	/* mod == 3: operand is a register, no effective address. */
	if (c->modrm_mod == 3) {
		c->modrm_val = *(unsigned long *)
			decode_register(c->modrm_rm, c->regs, c->d & ByteOp);
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			/* mod=0, rm=6 is a bare 16-bit displacement. */
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		/* Base/index register combination selected by rm. */
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		/* BP-based forms default to the stack segment. */
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->override_base)
				c->override_base = &ctxt->ss_base;
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		switch (c->modrm_rm) {
		case 4:
		case 12:
			/* rm = 4 (mod 8): SIB byte follows. */
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			switch (base_reg) {
			case 5:
				/* base=5 with mod=0: disp32 instead. */
				if (c->modrm_mod != 0)
					c->modrm_ea += c->regs[base_reg];
				else
					c->modrm_ea +=
						insn_fetch(s32, 4, c->eip);
				break;
			default:
				c->modrm_ea += c->regs[base_reg];
			}
			switch (index_reg) {
			case 4:
				/* index=4 means "no index". */
				break;
			default:
				c->modrm_ea += c->regs[index_reg] << scale;
			}
			break;
		case 5:
			/* rm=5 with mod=0: disp32, RIP-relative in 64-bit. */
			if (c->modrm_mod != 0)
				c->modrm_ea += c->regs[c->modrm_rm];
			else if (ctxt->mode == X86EMUL_MODE_PROT64)
				rip_relative = 1;
			break;
		default:
			c->modrm_ea += c->regs[c->modrm_rm];
			break;
		}
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
	if (rip_relative) {
		/*
		 * RIP-relative addressing is relative to the *next*
		 * instruction, so account for any immediate bytes that
		 * still follow the displacement.
		 */
		c->modrm_ea += c->eip;
		switch (c->d & SrcMask) {
		case SrcImmByte:
			c->modrm_ea += 1;
			break;
		case SrcImm:
			if (c->d & ByteOp)
				c->modrm_ea += 1;
			else
				if (c->op_bytes == 8)
					c->modrm_ea += 4;
				else
					c->modrm_ea += c->op_bytes;
		}
	}
done:
	return rc;
}
739
740static int decode_abs(struct x86_emulate_ctxt *ctxt,
741 struct x86_emulate_ops *ops)
742{
743 struct decode_cache *c = &ctxt->decode;
744 int rc = 0;
745
746 switch (c->ad_bytes) {
747 case 2:
748 c->modrm_ea = insn_fetch(u16, 2, c->eip);
749 break;
750 case 4:
751 c->modrm_ea = insn_fetch(u32, 4, c->eip);
752 break;
753 case 8:
754 c->modrm_ea = insn_fetch(u64, 8, c->eip);
755 break;
756 }
757done:
758 return rc;
759}
760
/*
 * Decode the instruction at the vcpu's rip into ctxt->decode: legacy
 * and REX prefixes, opcode flags (from opcode_table/twobyte_table),
 * ModRM/SIB or absolute displacement, segment override, and the source
 * and destination operands. Registers are shadow-copied into the
 * decode cache and committed only on successful emulation.
 * Returns 0 on success, -1 for an unsupported mode or opcode.
 */
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	/* Default operand/address sizes follow the CPU mode. */
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes (2^6 == 4^6 trick) */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x2e:	/* CS override */
			c->override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			c->override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			c->override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			c->override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			c->override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			c->override_base = &ctxt->ss_base;
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}

		/* Unrecognised? */
		if (c->d == 0) {
			DPRINTF("Cannot emulate %02x\n", c->b);
			return -1;
		}
	}

	/* In long mode, stack operations use a 64-bit operand. */
	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	/*
	 * Apply the segment base: default DS, unless overridden above;
	 * in 64-bit mode only FS/GS overrides carry a non-zero base.
	 */
	if (!c->override_base)
		c->override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    c->override_base != &ctxt->fs_base &&
	    c->override_base != &ctxt->gs_base)
		c->override_base = NULL;

	if (c->override_base)
		c->modrm_ea += *c->override_base;

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
		                                 c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* 64-bit operands take at most a 32-bit immediate. */
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		/* 0f b6/b7 (movzx) need full-width register access. */
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.type = OP_REG;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
980
981static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
982{
983 struct decode_cache *c = &ctxt->decode;
984
985 c->dst.type = OP_MEM;
986 c->dst.bytes = c->op_bytes;
987 c->dst.val = c->src.val;
988 register_address_increment(c->regs[VCPU_REGS_RSP], -c->op_bytes);
989 c->dst.ptr = (void *) register_address(ctxt->ss_base,
990 c->regs[VCPU_REGS_RSP]);
991}
992
993static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
994 struct x86_emulate_ops *ops)
995{
996 struct decode_cache *c = &ctxt->decode;
997 int rc;
998
999 rc = ops->read_std(register_address(ctxt->ss_base,
1000 c->regs[VCPU_REGS_RSP]),
1001 &c->dst.val, c->dst.bytes, ctxt->vcpu);
1002 if (rc != 0)
1003 return rc;
1004
1005 register_address_increment(c->regs[VCPU_REGS_RSP], c->dst.bytes);
1006
1007 return 0;
1008}
1009
/*
 * Shift/rotate group: the ModRM reg field selects the operation,
 * applied to c->dst with the count in c->src (byte-sized, possibly
 * implicitly %cl -- see emulate_2op_SrcB).
 */
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
1038
/*
 * Group 3 (f6/f7): the ModRM reg field selects test/not/neg.
 * Returns 0 on success, X86EMUL_UNHANDLEABLE for the mul/div encodings
 * not implemented here; fetch failures exit via insn_fetch()'s
 * 'goto done' with rc set.
 */
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		/*
		 * Special case in Grp3: test has an immediate
		 * source operand.
		 */
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* 64-bit test takes at most a 32-bit immediate. */
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
done:
	return rc;
}
1083
/*
 * Emulate Group 4/5 instructions (opcodes 0xfe/0xff): INC, DEC,
 * JMP abs (near indirect), and PUSH.  Far jmp/call (/3, /5) and near
 * call (/2) are not handled and return X86EMUL_UNHANDLEABLE.
 */
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		/* Only the 0xff form has /4 = jmp; reject otherwise. */
		if (c->b == 0xff)
			c->eip = c->dst.val;
		else {
			DPRINTF("Cannot emulate %02x\n", c->b);
			return X86EMUL_UNHANDLEABLE;
		}
		break;
	case 6:	/* push */

		/* 64-bit mode: PUSH always pushes a 64-bit operand. */

		if (ctxt->mode == X86EMUL_MODE_PROT64) {
			/* Re-read the operand at full 64-bit width. */
			c->dst.bytes = 8;
			rc = ops->read_std((unsigned long)c->dst.ptr,
					   &c->dst.val, 8, ctxt->vcpu);
			if (rc != 0)
				return rc;
		}
		/* Decrement RSP first, then store the value at SS:RSP. */
		register_address_increment(c->regs[VCPU_REGS_RSP],
					   -c->dst.bytes);
		rc = ops->write_emulated(register_address(ctxt->ss_base,
				    c->regs[VCPU_REGS_RSP]), &c->dst.val,
				    c->dst.bytes, ctxt->vcpu);
		if (rc != 0)
			return rc;
		/* The push itself was the writeback; suppress the generic one. */
		c->dst.type = OP_NONE;
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		return X86EMUL_UNHANDLEABLE;
	}
	return 0;
}
1131
1132static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
1133 struct x86_emulate_ops *ops,
1134 unsigned long memop)
1135{
1136 struct decode_cache *c = &ctxt->decode;
1137 u64 old, new;
1138 int rc;
1139
1140 rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
1141 if (rc != 0)
1142 return rc;
1143
1144 if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
1145 ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
1146
1147 c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1148 c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1149 ctxt->eflags &= ~EFLG_ZF;
1150
1151 } else {
1152 new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
1153 (u32) c->regs[VCPU_REGS_RBX];
1154
1155 rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
1156 if (rc != 0)
1157 return rc;
1158 ctxt->eflags |= EFLG_ZF;
1159 }
1160 return 0;
1161}
1162
1163static inline int writeback(struct x86_emulate_ctxt *ctxt,
1164 struct x86_emulate_ops *ops)
1165{
1166 int rc;
1167 struct decode_cache *c = &ctxt->decode;
1168
1169 switch (c->dst.type) {
1170 case OP_REG:
1171 /* The 4-byte case *is* correct:
1172 * in 64-bit mode we zero-extend.
1173 */
1174 switch (c->dst.bytes) {
1175 case 1:
1176 *(u8 *)c->dst.ptr = (u8)c->dst.val;
1177 break;
1178 case 2:
1179 *(u16 *)c->dst.ptr = (u16)c->dst.val;
1180 break;
1181 case 4:
1182 *c->dst.ptr = (u32)c->dst.val;
1183 break; /* 64b: zero-ext */
1184 case 8:
1185 *c->dst.ptr = c->dst.val;
1186 break;
1187 }
1188 break;
1189 case OP_MEM:
1190 if (c->lock_prefix)
1191 rc = ops->cmpxchg_emulated(
1192 (unsigned long)c->dst.ptr,
1193 &c->dst.orig_val,
1194 &c->dst.val,
1195 c->dst.bytes,
1196 ctxt->vcpu);
1197 else
1198 rc = ops->write_emulated(
1199 (unsigned long)c->dst.ptr,
1200 &c->dst.val,
1201 c->dst.bytes,
1202 ctxt->vcpu);
1203 if (rc != 0)
1204 return rc;
1205 break;
1206 case OP_NONE:
1207 /* no writeback */
1208 break;
1209 default:
1210 break;
1211 }
1212 return 0;
1213}
1214
/*
 * Execute the instruction previously decoded into ctxt->decode, using
 * the callbacks in @ops for all guest memory and PIO access.
 *
 * Register state is manipulated in a shadow copy (c->regs) and only
 * committed back to the vcpu after successful emulation.  Returns 0 on
 * success (including cases where a fault was injected into the guest)
 * and -1 when the instruction cannot be emulated, with c->eip restored
 * to its pre-execution value.
 *
 * Control-flow note: several one-byte opcode cases share bodies via
 * local labels (add/or/adc/sbb/and/sub/xor/cmp for Grp1, mov,
 * pop_instruction, and bt/bts/btr/btc in the two-byte map), and the
 * insn_fetch() macro jumps to the "done" label on fetch failure.
 */
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	/* Effective address of the memory operand, if the decoder found one. */
	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 * 	- if REPE/REPZ and ZF = 0 then done
		 * 	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
				(c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
				((ctxt->eflags & EFLG_ZF) == 0)) {
					ctxt->vcpu->arch.rip = c->eip;
					goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
				((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		/* One iteration consumed; rewind eip so the insn repeats. */
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
	}

	/* Fetch a memory source operand into the decode cache. */
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;


	/* Prepare (and usually pre-read) a memory destination operand. */
	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			/* Bit ops address the containing word of the bit offset. */
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
				   /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					   &c->dst.val,
					  c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	/* Saved for LOCK-prefixed writeback via cmpxchg. */
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	/* One-byte opcode dispatch. */
	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x24:              /* and al imm8 */
		/* Implicit AL destination; build the operand by hand. */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		c->dst.val = *(u8 *)c->dst.ptr;
		c->dst.bytes = 1;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x25:              /* and ax imm16, or eax imm32 */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		if (c->op_bytes == 2)
			c->dst.val = *(u16 *)c->dst.ptr;
		else
			c->dst.val = *(u32 *)c->dst.ptr;
		c->dst.orig_val = c->dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		/* Redirect the destination to SS:RSP; writeback does the store. */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			ctxt->ss_base, c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		/* Shared by ret (0xc3) and popf (0x9d) via goto. */
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0x63:		/* movsxd */
		/* movsxd only exists in 64-bit mode. */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x6a: /* push imm8 */
		c->src.val = 0L;
		c->src.val = insn_fetch(s8, 1, c->eip);
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		/* String PIO is delegated wholesale to the PIO emulator. */
		 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(ctxt->es_base,
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c->override_base ?
							*c->override_base :
							ctxt->ds_base,
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			JMP_REL(rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1 */
		/* Immediate-form ALU ops share the register-form bodies above. */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		/* Read from DS(or override):RSI, write to ES:RDI, step both. */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
		      c->override_base ? *c->override_base :
					ctxt->ds_base,
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		/* Compare [RSI] with [RDI]; neither operand is written back. */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(
				c->override_base ? *c->override_base :
						   ctxt->ds_base,
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(
						   ctxt->es_base,
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(
				c->override_base ? *c->override_base :
						   ctxt->ds_base,
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		/* Pop directly into eip via the shared pop path. */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		/* Shift/rotate by an implicit count of 1. */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		/* Shift/rotate by CL. */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		/* Push the return address (eip already past the insn). */
		c->src.val = (unsigned long) c->eip;
		JMP_REL(rel);
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
	case 0xeb: /* jmp rel short */
		JMP_REL(c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xf4:              /* hlt */
		/* Flag the halt to the caller; no register state changes. */
		ctxt->vcpu->arch.halt_request = 1;
		goto done;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		/* Restore eip so the caller can retry or report accurately. */
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	/* Two-byte (0x0f-prefixed) opcode dispatch. */
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			/* Only the register form (mod=3, rm=1) is VMCALL. */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			kvm_emulate_hypercall(ctxt->vcpu);
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			break;
		case 4: /* smsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&c->regs[c->modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val,
						  &ctxt->eflags);
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			break;
		default:
			goto cannot_emulate;
		}
		/* Disable writeback. */
		c->dst.type = OP_NONE;
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		/* Treated as no-ops under emulation. */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			/* Bad MSR access: inject #GP and keep eip in place. */
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			/* Bad MSR access: inject #GP and keep eip in place. */
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			JMP_REL(rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		/* reg 4..7 select bt/bts/btr/btc; the &3 maps them to 0..3. */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		/* Emulated as a plain store; no non-temporal hint needed. */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}