/******************************************************************************
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#ifndef __KERNEL__
#include <stdio.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * never be emulated.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
#define MemAbs      (1<<9)	/* Memory operand is absolute displacement */
#define String      (1<<10)	/* String instruction (rep capable) */
#define Stack       (1<<11)	/* Stack instruction (push/pop) */
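
/*
 * Illustrative sketch (not part of the original source): each entry in the
 * decode tables below is a bitwise OR of the flags above, and the decoder
 * pulls the fields back out with the masks.  Opcode 0x88 (mov r/m8, r8),
 * for instance, decodes as:
 *
 *	u16 d = ByteOp | DstMem | SrcReg | ModRM | Mov;
 *
 *	(d & SrcMask) == SrcReg;	// source is a register
 *	(d & DstMask) == DstMem;	// destination is memory
 *	(d & ByteOp) != 0;		// 8-bit operands
 */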
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov | Stack,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps, 0, SrcImmByte | ImplicitOps,
	0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)                                   \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "                                  \
	"push %"_tmp"; "                                                \
	"push %"_tmp"; "                                                \
	"movl %"_msk",%"_LO32 _tmp"; "                                  \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"pushf; "                                                       \
	"notl %"_LO32 _tmp"; "                                          \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "   \
	"pop  %"_tmp"; "                                                \
	"orl  %"_LO32 _tmp",("_STK"); "                                 \
	"popf; "                                                        \
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_LO32 _tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								    \
		unsigned long _tmp;					    \
									    \
		switch ((_dst).bytes) {					    \
		case 2:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"w %"_wx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _wy ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 4:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"l %"_lx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _ly ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 8:							    \
			__emulate_2op_8byte(_op, _src, _dst,		    \
					    _eflags, _qx, _qy);		    \
			break;						    \
		}							    \
	} while (0)

#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&r" (_tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)                                 \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		  \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)                                  \
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})

/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg)						\
	((c->ad_bytes == sizeof(unsigned long)) ?			\
		(reg) : ((reg) & ((1UL << (c->ad_bytes << 3)) - 1)))
#define register_address(base, reg)					\
	((base) + address_mask(reg))
#define register_address_increment(reg, inc)				\
	do {								\
		/* signed type ensures sign extension to long */	\
		int _inc = (inc);					\
		if (c->ad_bytes == sizeof(unsigned long))		\
			(reg) += _inc;					\
		else							\
			(reg) = ((reg) &				\
				 ~((1UL << (c->ad_bytes << 3)) - 1)) |	\
				(((reg) + _inc) &			\
				 ((1UL << (c->ad_bytes << 3)) - 1));	\
	} while (0)

#define JMP_REL(rel)							\
	do {								\
		register_address_increment(c->eip, rel);		\
	} while (0)
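
/*
 * Worked example (illustrative, not part of the original source; assumes
 * the local decode cache 'c' that these macros hard-code): with
 * c->ad_bytes == 2, address arithmetic wraps at 64K exactly as in 16-bit
 * mode, and the bits above the address size are preserved:
 *
 *	unsigned long reg = 0x1234ffff;	// hypothetical stale upper bits
 *	register_address_increment(reg, 1);
 *	// reg == 0x12340000: only the low 16 bits were incremented
 */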
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
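
/*
 * Usage sketch (illustrative, not part of the original source): the
 * insn_fetch() macro is the consumer of these helpers.  A two-byte
 * displacement is pulled from the instruction stream like this, advancing
 * c->eip and jumping to the local 'done' label on a failed guest read:
 *
 *	c->modrm_ea += insn_fetch(u16, 2, c->eip);
 *
 * The fetch cache means the bytes of one instruction (at most 15) usually
 * cost a single ops->read_std() call rather than one call per byte.
 */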
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REG.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;

	if (c->modrm_mod == 3) {
		c->modrm_val = *(unsigned long *)
			decode_register(c->modrm_rm, c->regs, c->d & ByteOp);
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->override_base)
				c->override_base = &ctxt->ss_base;
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		switch (c->modrm_rm) {
		case 4:
		case 12:
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			switch (base_reg) {
			case 5:
				if (c->modrm_mod != 0)
					c->modrm_ea += c->regs[base_reg];
				else
					c->modrm_ea +=
						insn_fetch(s32, 4, c->eip);
				break;
			default:
				c->modrm_ea += c->regs[base_reg];
			}
			switch (index_reg) {
			case 4:
				break;
			default:
				c->modrm_ea += c->regs[index_reg] << scale;
			}
			break;
		case 5:
			if (c->modrm_mod != 0)
				c->modrm_ea += c->regs[c->modrm_rm];
			else if (ctxt->mode == X86EMUL_MODE_PROT64)
				rip_relative = 1;
			break;
		default:
			c->modrm_ea += c->regs[c->modrm_rm];
			break;
		}
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
	if (rip_relative) {
		c->modrm_ea += c->eip;
		switch (c->d & SrcMask) {
		case SrcImmByte:
			c->modrm_ea += 1;
			break;
		case SrcImm:
			if (c->d & ByteOp)
				c->modrm_ea += 1;
			else
				if (c->op_bytes == 8)
					c->modrm_ea += 4;
				else
					c->modrm_ea += c->op_bytes;
		}
	}
done:
	return rc;
}
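
/*
 * Worked example (illustrative, not part of the original source): the
 * ModRM byte 0x45 splits as mod=01, reg=000, rm=101.  In 32-bit mode that
 * is [EBP+disp8], so the decoder above takes the modrm_mod == 1 arm and
 * adds a sign-extended 8-bit displacement to c->regs[VCPU_REGS_RBP]:
 *
 *	c->modrm_mod = (0x45 & 0xc0) >> 6;	// 1
 *	c->modrm_reg = (0x45 & 0x38) >> 3;	// 0
 *	c->modrm_rm  = (0x45 & 0x07);		// 5
 */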
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = ctxt->vcpu->arch.rip;
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x2e:	/* CS override */
			c->override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			c->override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			c->override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			c->override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			c->override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			c->override_base = &ctxt->ss_base;
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}

		/* Unrecognised? */
		if (c->d == 0) {
			DPRINTF("Cannot emulate %02x\n", c->b);
			return -1;
		}
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->override_base)
		c->override_base = &ctxt->ds_base;
	if (mode == X86EMUL_MODE_PROT64 &&
	    c->override_base != &ctxt->fs_base &&
	    c->override_base != &ctxt->gs_base)
		c->override_base = NULL;

	if (c->override_base)
		c->modrm_ea += *c->override_base;

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
						 c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if modrm_mod == 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.type = OP_REG;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	}

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
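
/*
 * Usage sketch (illustrative; the real caller lives elsewhere in kvm):
 * a memory-fault handler decodes first and only then emulates, so a
 * decode failure costs nothing:
 *
 *	r = x86_decode_insn(ctxt, ops);
 *	if (r == 0)
 *		r = x86_emulate_insn(ctxt, ops);
 *	// r < 0: unhandleable instruction, fall back to the caller
 */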
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(ctxt->ss_base,
					       c->regs[VCPU_REGS_RSP]);
}
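
/*
 * Note (explanatory, not in the original source): emulate_push() only
 * stages the store.  It decrements RSP and points c->dst at ss:RSP; the
 * common writeback() path later performs the actual ops->write_emulated()
 * into guest stack memory.
 */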
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(ctxt->ss_base,
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;
	register_address_increment(c->regs[VCPU_REGS_RSP], c->dst.bytes);
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		/*
		 * Special case in Grp3: test has an immediate
		 * source operand.
		 */
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
done:
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 4: /* jmp abs */
		if (c->b == 0xff)
			c->eip = c->dst.val;
		else {
			DPRINTF("Cannot emulate %02x\n", c->b);
			return X86EMUL_UNHANDLEABLE;
		}
		break;
	case 6:	/* push */

		/* 64-bit mode: PUSH always pushes a 64-bit operand. */

		if (ctxt->mode == X86EMUL_MODE_PROT64) {
			c->dst.bytes = 8;
			rc = ops->read_std((unsigned long)c->dst.ptr,
					   &c->dst.val, 8, ctxt->vcpu);
			if (rc != 0)
				return rc;
		}
		register_address_increment(c->regs[VCPU_REGS_RSP],
					   -c->dst.bytes);
		rc = ops->write_emulated(register_address(ctxt->ss_base,
				    c->regs[VCPU_REGS_RSP]), &c->dst.val,
				    c->dst.bytes, ctxt->vcpu);
		if (rc != 0)
			return rc;
		c->dst.type = OP_NONE;
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		return X86EMUL_UNHANDLEABLE;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		      (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
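
/*
 * Semantics implemented above (explanatory note, not in the original
 * source): cmpxchg8b m64 compares EDX:EAX with the 64-bit memory operand.
 * If they match, ZF is set and ECX:EBX is stored to memory; otherwise ZF
 * is cleared and the memory value is loaded into EDX:EAX.
 */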
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
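
/*
 * Example (illustrative, not part of the original source): the 4-byte
 * OP_REG case stores through the full unsigned long pointer on purpose.
 * In 64-bit mode a 32-bit destination zero-extends into the whole
 * register, so emulating "mov $1, %eax" must clear the top half of RAX:
 *
 *	*c->dst.ptr = (u32)c->dst.val;	// whole-register store, high 32 = 0
 */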
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->arch.rip = c->eip;
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				ctxt->vcpu->arch.rip = c->eip;
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = ctxt->vcpu->arch.rip;
	}
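
	/*
	 * Illustration (not in the original source): for "rep movsb" with
	 * RCX == 3 the emulator is re-entered once per element; each pass
	 * decrements RCX and rewinds c->eip so the same instruction is
	 * decoded and executed again, until RCX reaches zero or, for
	 * cmps/scas, the ZF test above terminates the loop.
	 */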
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
1307 emulate_2op_SrcV("add", c
->src
, c
->dst
, ctxt
->eflags
);
1311 emulate_2op_SrcV("or", c
->src
, c
->dst
, ctxt
->eflags
);
1315 emulate_2op_SrcV("adc", c
->src
, c
->dst
, ctxt
->eflags
);
1319 emulate_2op_SrcV("sbb", c
->src
, c
->dst
, ctxt
->eflags
);
1323 emulate_2op_SrcV("and", c
->src
, c
->dst
, ctxt
->eflags
);
1325 case 0x24: /* and al imm8 */
1326 c
->dst
.type
= OP_REG
;
1327 c
->dst
.ptr
= &c
->regs
[VCPU_REGS_RAX
];
1328 c
->dst
.val
= *(u8
*)c
->dst
.ptr
;
1330 c
->dst
.orig_val
= c
->dst
.val
;
1332 case 0x25: /* and ax imm16, or eax imm32 */
1333 c
->dst
.type
= OP_REG
;
1334 c
->dst
.bytes
= c
->op_bytes
;
1335 c
->dst
.ptr
= &c
->regs
[VCPU_REGS_RAX
];
1336 if (c
->op_bytes
== 2)
1337 c
->dst
.val
= *(u16
*)c
->dst
.ptr
;
1339 c
->dst
.val
= *(u32
*)c
->dst
.ptr
;
1340 c
->dst
.orig_val
= c
->dst
.val
;
1344 emulate_2op_SrcV("sub", c
->src
, c
->dst
, ctxt
->eflags
);
1348 emulate_2op_SrcV("xor", c
->src
, c
->dst
, ctxt
->eflags
);
1352 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1354 case 0x40 ... 0x47: /* inc r16/r32 */
1355 emulate_1op("inc", c
->dst
, ctxt
->eflags
);
1357 case 0x48 ... 0x4f: /* dec r16/r32 */
1358 emulate_1op("dec", c
->dst
, ctxt
->eflags
);
1360 case 0x50 ... 0x57: /* push reg */
1361 c
->dst
.type
= OP_MEM
;
1362 c
->dst
.bytes
= c
->op_bytes
;
1363 c
->dst
.val
= c
->src
.val
;
1364 register_address_increment(c
->regs
[VCPU_REGS_RSP
],
1366 c
->dst
.ptr
= (void *) register_address(
1367 ctxt
->ss_base
, c
->regs
[VCPU_REGS_RSP
]);
1369 case 0x58 ... 0x5f: /* pop reg */
1371 if ((rc
= ops
->read_std(register_address(ctxt
->ss_base
,
1372 c
->regs
[VCPU_REGS_RSP
]), c
->dst
.ptr
,
1373 c
->op_bytes
, ctxt
->vcpu
)) != 0)
1376 register_address_increment(c
->regs
[VCPU_REGS_RSP
],
1378 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1380 case 0x63: /* movsxd */
1381 if (ctxt
->mode
!= X86EMUL_MODE_PROT64
)
1382 goto cannot_emulate
;
1383 c
->dst
.val
= (s32
) c
->src
.val
;
1385 case 0x6a: /* push imm8 */
1387 c
->src
.val
= insn_fetch(s8
, 1, c
->eip
);
1390 case 0x6c: /* insb */
1391 case 0x6d: /* insw/insd */
1392 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1394 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1396 address_mask(c
->regs
[VCPU_REGS_RCX
]) : 1,
1397 (ctxt
->eflags
& EFLG_DF
),
1398 register_address(ctxt
->es_base
,
1399 c
->regs
[VCPU_REGS_RDI
]),
1401 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1406 case 0x6e: /* outsb */
1407 case 0x6f: /* outsw/outsd */
1408 if (kvm_emulate_pio_string(ctxt
->vcpu
, NULL
,
1410 (c
->d
& ByteOp
) ? 1 : c
->op_bytes
,
1412 address_mask(c
->regs
[VCPU_REGS_RCX
]) : 1,
1413 (ctxt
->eflags
& EFLG_DF
),
1414 register_address(c
->override_base
?
1417 c
->regs
[VCPU_REGS_RSI
]),
1419 c
->regs
[VCPU_REGS_RDX
]) == 0) {
1424 case 0x70 ... 0x7f: /* jcc (short) */ {
1425 int rel
= insn_fetch(s8
, 1, c
->eip
);
1427 if (test_cc(c
->b
, ctxt
->eflags
))
1431 case 0x80 ... 0x83: /* Grp1 */
1432 switch (c
->modrm_reg
) {
1452 emulate_2op_SrcV("test", c
->src
, c
->dst
, ctxt
->eflags
);
1454 case 0x86 ... 0x87: /* xchg */
1455 /* Write back the register source. */
1456 switch (c
->dst
.bytes
) {
1458 *(u8
*) c
->src
.ptr
= (u8
) c
->dst
.val
;
1461 *(u16
*) c
->src
.ptr
= (u16
) c
->dst
.val
;
1464 *c
->src
.ptr
= (u32
) c
->dst
.val
;
1465 break; /* 64b reg: zero-extend */
1467 *c
->src
.ptr
= c
->dst
.val
;
1471 * Write back the memory destination with implicit LOCK
1474 c
->dst
.val
= c
->src
.val
;
1477 case 0x88 ... 0x8b: /* mov */
1479 case 0x8d: /* lea r16/r32, m */
1480 c
->dst
.val
= c
->modrm_val
;
1482 case 0x8f: /* pop (sole member of Grp1a) */
1483 rc
= emulate_grp1a(ctxt
, ops
);
1487 case 0x9c: /* pushf */
1488 c
->src
.val
= (unsigned long) ctxt
->eflags
;
1491 case 0x9d: /* popf */
1492 c
->dst
.ptr
= (unsigned long *) &ctxt
->eflags
;
1493 goto pop_instruction
;
1494 case 0xa0 ... 0xa1: /* mov */
1495 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1496 c
->dst
.val
= c
->src
.val
;
1498 case 0xa2 ... 0xa3: /* mov */
1499 c
->dst
.val
= (unsigned long)c
->regs
[VCPU_REGS_RAX
];
1501 case 0xa4 ... 0xa5: /* movs */
1502 c
->dst
.type
= OP_MEM
;
1503 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1504 c
->dst
.ptr
= (unsigned long *)register_address(
1506 c
->regs
[VCPU_REGS_RDI
]);
1507 if ((rc
= ops
->read_emulated(register_address(
1508 c
->override_base
? *c
->override_base
:
1510 c
->regs
[VCPU_REGS_RSI
]),
1512 c
->dst
.bytes
, ctxt
->vcpu
)) != 0)
1514 register_address_increment(c
->regs
[VCPU_REGS_RSI
],
1515 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1517 register_address_increment(c
->regs
[VCPU_REGS_RDI
],
1518 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1521 case 0xa6 ... 0xa7: /* cmps */
1522 c
->src
.type
= OP_NONE
; /* Disable writeback. */
1523 c
->src
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1524 c
->src
.ptr
= (unsigned long *)register_address(
1525 c
->override_base
? *c
->override_base
:
1527 c
->regs
[VCPU_REGS_RSI
]);
1528 if ((rc
= ops
->read_emulated((unsigned long)c
->src
.ptr
,
1534 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1535 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1536 c
->dst
.ptr
= (unsigned long *)register_address(
1538 c
->regs
[VCPU_REGS_RDI
]);
1539 if ((rc
= ops
->read_emulated((unsigned long)c
->dst
.ptr
,
1545 DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c
->src
.ptr
, c
->dst
.ptr
);
1547 emulate_2op_SrcV("cmp", c
->src
, c
->dst
, ctxt
->eflags
);
1549 register_address_increment(c
->regs
[VCPU_REGS_RSI
],
1550 (ctxt
->eflags
& EFLG_DF
) ? -c
->src
.bytes
1552 register_address_increment(c
->regs
[VCPU_REGS_RDI
],
1553 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1557 case 0xaa ... 0xab: /* stos */
1558 c
->dst
.type
= OP_MEM
;
1559 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1560 c
->dst
.ptr
= (unsigned long *)register_address(
1562 c
->regs
[VCPU_REGS_RDI
]);
1563 c
->dst
.val
= c
->regs
[VCPU_REGS_RAX
];
1564 register_address_increment(c
->regs
[VCPU_REGS_RDI
],
1565 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1568 case 0xac ... 0xad: /* lods */
1569 c
->dst
.type
= OP_REG
;
1570 c
->dst
.bytes
= (c
->d
& ByteOp
) ? 1 : c
->op_bytes
;
1571 c
->dst
.ptr
= (unsigned long *)&c
->regs
[VCPU_REGS_RAX
];
1572 if ((rc
= ops
->read_emulated(register_address(
1573 c
->override_base
? *c
->override_base
:
1575 c
->regs
[VCPU_REGS_RSI
]),
1580 register_address_increment(c
->regs
[VCPU_REGS_RSI
],
1581 (ctxt
->eflags
& EFLG_DF
) ? -c
->dst
.bytes
1584 case 0xae ... 0xaf: /* scas */
1585 DPRINTF("Urk! I don't handle SCAS.\n");
1586 goto cannot_emulate
;
1590 case 0xc3: /* ret */
1591 c
->dst
.ptr
= &c
->eip
;
1592 goto pop_instruction
;
1593 case 0xc6 ... 0xc7: /* mov (sole member of Grp11) */
1595 c
->dst
.val
= c
->src
.val
;
1597 case 0xd0 ... 0xd1: /* Grp2 */
1601 case 0xd2 ... 0xd3: /* Grp2 */
1602 c
->src
.val
= c
->regs
[VCPU_REGS_RCX
];
1605 case 0xe8: /* call (near) */ {
1607 switch (c
->op_bytes
) {
1609 rel
= insn_fetch(s16
, 2, c
->eip
);
1612 rel
= insn_fetch(s32
, 4, c
->eip
);
1615 DPRINTF("Call: Invalid op_bytes\n");
1616 goto cannot_emulate
;
1618 c
->src
.val
= (unsigned long) c
->eip
;
1620 c
->op_bytes
= c
->ad_bytes
;
1624 case 0xe9: /* jmp rel */
1625 case 0xeb: /* jmp rel short */
1626 JMP_REL(c
->src
.val
);
1627 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1629 case 0xf4: /* hlt */
1630 ctxt
->vcpu
->arch
.halt_request
= 1;
1632 case 0xf5: /* cmc */
1633 /* complement carry flag from eflags reg */
1634 ctxt
->eflags
^= EFLG_CF
;
1635 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1637 case 0xf6 ... 0xf7: /* Grp3 */
1638 rc
= emulate_grp3(ctxt
, ops
);
1642 case 0xf8: /* clc */
1643 ctxt
->eflags
&= ~EFLG_CF
;
1644 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1646 case 0xfa: /* cli */
1647 ctxt
->eflags
&= ~X86_EFLAGS_IF
;
1648 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1650 case 0xfb: /* sti */
1651 ctxt
->eflags
|= X86_EFLAGS_IF
;
1652 c
->dst
.type
= OP_NONE
; /* Disable writeback. */
1654 case 0xfe ... 0xff: /* Grp4/Grp5 */
1655 rc
= emulate_grp45(ctxt
, ops
);
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	ctxt->vcpu->arch.rip = c->eip;

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			kvm_emulate_hypercall(ctxt->vcpu);
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			break;
		case 4: /* smsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&c->regs[c->modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (c->modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)c->modrm_val,
				      &ctxt->eflags);
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			break;
		default:
			goto cannot_emulate;
		}
		/* Disable writeback. */
		c->dst.type = OP_NONE;
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08: /* invd */
	case 0x09: /* wbinvd */
	case 0x0d: /* GrpP (prefetch) */
	case 0x18: /* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
			realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = ctxt->vcpu->arch.rip;
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f: /* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			JMP_REL(rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb0 ... 0xb1: /* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7: /* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
					     : (u16) c->src.val;
		break;
	case 0xba: /* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf: /* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
					       (s16) c->src.val;
		break;
	case 0xc3: /* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						  (u64) c->src.val;
		break;
	case 0xc7: /* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}