/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include "kvm.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
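
/*
 * Illustrative example (not part of the decode logic itself): opcode
 * 0x88 (mov r/m8, r8) is described in the table below as
 *
 *	ByteOp | DstMem | SrcReg | ModRM | Mov
 *
 * i.e. an 8-bit operation whose destination comes from the ModRM r/m
 * field, whose source is the ModRM reg field, and whose destination is
 * write-only (Mov), so the emulated read of it can be skipped.
 */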
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x57 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x58 - 0x5F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps|Mov, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps, ImplicitOps, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
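
/*
 * Illustrative sketch (an assumption for exposition, not code from the
 * decoder): for 'add %ebx,(%rax)' the operand-decode logic below would
 * fill in roughly
 *
 *	src.type = OP_REG; src.bytes = 4;
 *	src.ptr  = &_regs[VCPU_REGS_RBX];
 *	src.val  = src.orig_val = *(u32 *)src.ptr;
 *	dst.type = OP_MEM; dst.bytes = 4;
 *	dst.ptr  = (unsigned long *)cr2;	- the faulting address
 *
 * orig_val is retained so a LOCKed write-back can be performed as a
 * compare-and-exchange against the value originally read.
 */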
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */	\
	"push %"_sav"; "					\
	"movl %"_msk",%"_LO32 _tmp"; "				\
	"andl %"_LO32 _tmp",("_STK"); "				\
	"pushf; "						\
	"notl %"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "				\
	"pop  %"_tmp"; "					\
	"orl  %"_LO32 _tmp",("_STK"); "				\
	"popf; "						\
	/* _sav &= ~msk; */					\
	"movl %"_msk",%"_LO32 _tmp"; "				\
	"notl %"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",%"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								    \
		unsigned long _tmp;					    \
									    \
		switch ((_dst).bytes) {					    \
		case 2:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0","4","2")		    \
				_op"w %"_wx"3,%1; "			    \
				_POST_EFLAGS("0","4","2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _wy ((_src).val), "i" (EFLAGS_MASK) );    \
			break;						    \
		case 4:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0","4","2")		    \
				_op"l %"_lx"3,%1; "			    \
				_POST_EFLAGS("0","4","2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _ly ((_src).val), "i" (EFLAGS_MASK) );    \
			break;						    \
		case 8:							    \
			__emulate_2op_8byte(_op, _src, _dst,		    \
					    _eflags, _qx, _qy);		    \
			break;						    \
		}							    \
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
		switch ( (_dst).bytes )					     \
		{							     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0","4","2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0","4","2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&r" (_tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK) );     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ( (_dst).bytes )					\
		{							\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	   \
	do {								   \
		unsigned long _tmp;					   \
		__asm__ __volatile__ (					   \
			_PRE_EFLAGS("0","4","2")			   \
			_op"q %"_qx"3,%1; "				   \
			_POST_EFLAGS("0","4","2")			   \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)  \
			: _qy ((_src).val), "i" (EFLAGS_MASK) );	   \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)			   \
	do {								   \
		unsigned long _tmp;					   \
		__asm__ __volatile__ (					   \
			_PRE_EFLAGS("0","3","2")			   \
			_op"q %1; "					   \
			_POST_EFLAGS("0","3","2")			   \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp)  \
			: "i" (EFLAGS_MASK) );				   \
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x,	\
			   (_size), ctxt->vcpu);			\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
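
/*
 * Example (illustrative): fetching an opcode byte and then a 32-bit
 * immediate advances _eip as a side effect:
 *
 *	b       = insn_fetch(u8, 1, _eip);
 *	src.val = insn_fetch(s32, 4, _eip);
 *
 * A failed read_std() exits through the 'done' label with rc set.
 */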
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg)						\
	((ad_bytes == sizeof(unsigned long)) ?				\
		(reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
#define register_address(base, reg)					\
	((base) + address_mask(reg))
#define register_address_increment(reg, inc)				\
	do {								\
		/* signed type ensures sign extension to long */	\
		int _inc = (inc);					\
		if ( ad_bytes == sizeof(unsigned long) )		\
			(reg) += _inc;					\
		else							\
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
			   (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)

#define JMP_REL(rel)							\
	do {								\
		_eip += (int)(rel);					\
		_eip = ((op_bytes == 2) ? (uint16_t)_eip : (uint32_t)_eip); \
	} while (0)
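
/*
 * Worked example (illustrative): with ad_bytes == 2 and SI == 0xffff,
 * register_address_increment(si, 1) wraps within the low 16 bits and
 * yields 0x0000, leaving the upper register bits untouched; when
 * ad_bytes == sizeof(unsigned long) it degenerates to a plain '+= 1'.
 */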
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0:	/* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1:	/* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2:	/* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3:	/* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4:	/* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5:	/* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7:	/* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6:	/* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned d;
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int no_wb = 0;
	u64 msr_data;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			break;
		default:
			goto done_prefixes;
		}
	}
done_prefixes:

	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REG.B */
		b = insn_fetch(u8, 1, _eip);
	}
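
	/*
	 * Example (illustrative): REX byte 0x48 (REX.W) forces 8-byte
	 * operands, while 0x44 (REX.R) only extends the ModRM reg field,
	 * so 'add %r8d,(%rax)' arrives as 44 01 00 and decodes with
	 * modrm_reg == 8.
	 */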
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}

	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}
		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;

		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else
					if (op_bytes == 8)
						modrm_ea += 4;
					else
						modrm_ea += op_bytes;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
		cr2 = modrm_ea;
	modrm_done:
		;
	}
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (twobyte && b == 0x01 && modrm_reg == 7)
			break;
	      srcmem_common:
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (d & BitOp) {
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
		}
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes,
					      ctxt->vcpu)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	if (twobyte)
		goto twobyte_insn;

	switch (b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x24:		/* and al imm8 */
		dst.type = OP_REG;
		dst.ptr = &_regs[VCPU_REGS_RAX];
		dst.val = *(u8 *)dst.ptr;
		dst.bytes = 1;
		dst.orig_val = dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		dst.type = OP_REG;
		dst.bytes = op_bytes;
		dst.ptr = &_regs[VCPU_REGS_RAX];
		if (op_bytes == 2)
			dst.val = *(u16 *)dst.ptr;
		else
			dst.val = *(u32 *)dst.ptr;
		dst.orig_val = dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:		/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (s32) src.val;
		break;
	case 0x6a: /* push imm8 */
		src.val = 0L;
		src.val = insn_fetch(s8, 1, _eip);
	      push:
		dst.type  = OP_MEM;
		dst.bytes = op_bytes;
		dst.val = src.val;
		register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
		dst.ptr = (void *) register_address(ctxt->ss_base,
						    _regs[VCPU_REGS_RSP]);
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
	      test:		/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(u8 *) src.ptr = (u8) dst.val;
			break;
		case 2:
			*(u16 *) src.ptr = (u16) dst.val;
			break;
		case 4:
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8d: /* lea r16/r32, m */
		dst.val = modrm_val;
		break;
	case 0x8f:		/* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0xc0 ... 0xc1:
	      grp2:		/* Grp2 */
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	      mov:
		dst.val = src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, _eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, _eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		src.val = (unsigned long) _eip;
		JMP_REL(rel);
		goto push;
	}
	case 0xe9: /* jmp rel */
	case 0xeb: /* jmp rel short */
		JMP_REL(src.val);
		no_wb = 1; /* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 4: /* jmp abs */
			if (b == 0xff)
				_eip = dst.val;
			else
				goto cannot_emulate;
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt->vcpu)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     &dst.val, dst.bytes, ctxt->vcpu)) != 0)
				goto done;
			no_wb = 1;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if (!no_wb) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   &dst.orig_val,
							   &dst.val, dst.bytes,
							   ctxt->vcpu);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 &dst.val, dst.bytes,
							 ctxt->vcpu);
			if (rc != 0)
				goto done;
		default:
			break;
		}
	}

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	switch (b) {
	case 0x50 ... 0x57: /* push reg */
		if (op_bytes == 2)
			src.val = (u16) _regs[b & 0x7];
		else
			src.val = (u32) _regs[b & 0x7];
		dst.type  = OP_MEM;
		dst.bytes = op_bytes;
		dst.val = src.val;
		register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
		dst.ptr = (void *) register_address(
			ctxt->ss_base, _regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];
	      pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			_regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
		    != 0)
			goto done;

		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1; /* Disable writeback. */
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,					/* in */
				(d & ByteOp) ? 1 : op_bytes,		/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),			/* down */
				register_address(ctxt->es_base,
						 _regs[VCPU_REGS_RDI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]			/* port */
				) == 0)
			return -1;
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,					/* in */
				(d & ByteOp) ? 1 : op_bytes,		/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),			/* down */
				register_address(override_base ?
						 *override_base : ctxt->ds_base,
						 _regs[VCPU_REGS_RSI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]			/* port */
				) == 0)
			return -1;
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, _eip);

		if (test_cc(b, _eflags))
			JMP_REL(rel);
		break;
	}
	case 0x9c: /* pushf */
		src.val = (unsigned long) _eflags;
		goto push;
	case 0x9d: /* popf */
		dst.ptr = (unsigned long *) &_eflags;
		goto pop_instruction;
	case 0xc3: /* ret */
		dst.ptr = &_eip;
		goto pop_instruction;
	case 0xf4:		/* hlt */
		ctxt->vcpu->halt_request = 1;
		goto done;
	}
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							_regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
		      override_base ? *override_base : ctxt->ds_base,
		      _regs[VCPU_REGS_RSI]), &dst.val, dst.bytes,
		      ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			     (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
			     (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
			     (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			     (_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	}
	goto writeback;
twobyte_insn:
	switch (b) {
	case 0x01: /* lgdt, lidt, lmsw */
		/* Disable writeback. */
		no_wb = 1;
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4: /* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21: /* mov from dr to reg */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23: /* mov from reg to dr */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		no_wb = 1;
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			no_wb = (_eflags & EFLG_OF) ? 0 : 1;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			no_wb = (_eflags & EFLG_CF) ? 0 : 1;
			break;
		case 2:	/* cmovz/cmove */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			break;
		case 3:	/* cmovbe/cmovna */
			no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
			break;
		case 4:	/* cmovs */
			no_wb = (_eflags & EFLG_SF) ? 0 : 1;
			break;
		case 5:	/* cmovp/cmovpe */
			no_wb = (_eflags & EFLG_PF) ? 0 : 1;
			break;
		case 7:	/* cmovle/cmovng */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			no_wb &= (!(_eflags & EFLG_SF) !=
				  !(_eflags & EFLG_OF)) ? 0 : 1;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		no_wb ^= b & 1;
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btr", src, dst, _eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	no_wb = 1;
	switch (b) {
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x09:		/* wbinvd */
		break;
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x20: /* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22: /* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		} else {
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		break;
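
	/*
	 * Note (illustrative): wrmsr/rdmsr move the 64-bit MSR value
	 * through the EDX:EAX pair, e.g. msr_data 0x123456789a splits
	 * into RDX = 0x12 and RAX = 0x3456789a above.
	 */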
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, _eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, _eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(b, _eflags))
			JMP_REL(rel);
		break;
	}
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		{
			u64 old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8,
						     ctxt->vcpu)) != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = ((u64)_regs[VCPU_REGS_RCX] << 32)
					| (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, &old,
							&new, 8, ctxt->vcpu)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#ifdef __XEN__

#include <asm/mm.h>
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	*val = 0;

	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0);	/* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}

#endif