/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2. See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf( _f , ## _a )
#else
#define DPRINTF(x...) do {} while (0)
#endif
#include "x86_emulate.h"
#include <linux/module.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstMask     (3<<1)
/* Source operand type. */
#define SrcNone     (0<<3)	/* No source operand. */
#define SrcImplicit (0<<3)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<3)	/* Register operand. */
#define SrcMem      (2<<3)	/* Memory operand. */
#define SrcMem16    (3<<3)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<3)	/* Memory operand (32-bit). */
#define SrcImm      (5<<3)	/* Immediate operand. */
#define SrcImmByte  (6<<3)	/* 8-bit sign-extended immediate operand. */
#define SrcMask     (7<<3)
/* Generic ModRM decode. */
#define ModRM       (1<<6)
/* Destination is only written; never read. */
#define Mov         (1<<7)
#define BitOp       (1<<8)
static u8 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	SrcImmByte, SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x40 - 0x4F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x50 - 0x57 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x58 - 0x5F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	0, 0, ImplicitOps|Mov, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	0, ModRM | DstReg, 0, DstMem | SrcNone | ModRM | Mov,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps, ImplicitOps, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov, DstReg | SrcMem | Mov,
	ByteOp | DstMem | SrcReg | Mov, DstMem | SrcReg | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps | Mov, ImplicitOps | Mov,
	ByteOp | ImplicitOps, ImplicitOps,
	/* 0xB0 - 0xBF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE8 - 0xEF */
	ImplicitOps, SrcImm|ImplicitOps, 0, SrcImmByte|ImplicitOps, 0, 0, 0, 0,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, 0,
	ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	/* 0xF8 - 0xFF */
	0, 0, 0, 0,
	0, 0, ByteOp | DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM
};
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, SrcMem | ModRM | DstReg, 0, 0, 0, 0, ImplicitOps, 0,
	0, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
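/*
 * Illustrative sketch (not part of the original file): how the decoder
 * consumes these attribute tables. After fetching an opcode byte b (and
 * noticing a 0x0f escape for the two-byte map), the emulator looks up the
 * attribute word and tests individual bits, e.g. ModRM to decide whether
 * a ModR/M byte follows. The helper name below is hypothetical.
 */
static inline int __demo_opcode_has_modrm(u8 b, int twobyte)
{
	u16 d = twobyte ? twobyte_table[b] : opcode_table[b];

	return (d & ModRM) != 0;	/* instruction carries a ModR/M byte */
}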
/* Type, address-of, and value of an instruction's operand. */
struct operand {
	enum { OP_REG, OP_MEM, OP_IMM } type;
	unsigned int bytes;
	unsigned long val, orig_val, *ptr;
};
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
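/*
 * Illustrative sketch (not part of the original file): the trick used by
 * the macros below, stripped of the macro layers. Execute the real host
 * instruction on the operand values, then capture the host EFLAGS with
 * pushf/pop so the guest-visible arithmetic flags come out for free.
 * Assumes an x86 host, as the surrounding code does; the name is
 * hypothetical.
 */
static inline unsigned long __demo_emulate_add32(u32 *dst, u32 src)
{
	unsigned long flags;

	__asm__ __volatile__ (
		"addl %2,%0; pushf; pop %1"
		: "+r" (*dst), "=r" (flags)
		: "r" (src));
	return flags;	/* caller would mask this with EFLAGS_MASK */
}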
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); */ \
	"push %"_sav"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	/* _sav &= ~msk; */ \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",%"_sav"; "
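/*
 * Added note: _PRE_EFLAGS builds (_sav & _msk) | (EFLAGS & ~_msk) on the
 * host stack: it pushes the saved flags and masks them in place, pushes
 * the live EFLAGS and masks those with ~_msk, ORs the two words together,
 * and pops the result back into EFLAGS with popf.
 */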
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","4","2")		\
				_op"w %"_wx"3,%1; "			\
				_POST_EFLAGS("0","4","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _wy ((_src).val), "i" (EFLAGS_MASK) ); \
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","4","2")		\
				_op"l %"_lx"3,%1; "			\
				_POST_EFLAGS("0","4","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: _ly ((_src).val), "i" (EFLAGS_MASK) ); \
			break;						\
		case 8:							\
			__emulate_2op_8byte(_op, _src, _dst,		\
					    _eflags, _qx, _qy);		\
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
		switch ( (_dst).bytes )					\
		{							\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","4","2")		\
				_op"b %"_bx"3,%1; "			\
				_POST_EFLAGS("0","4","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&q" (_tmp)				\
				: _by ((_src).val), "i" (EFLAGS_MASK) ); \
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
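/*
 * Illustrative usage (not part of the original file): feeding two decoded
 * operands to a host "add" of the right width via the wrapper above and
 * returning the resulting arithmetic flags. The helper name is
 * hypothetical; the big switch below invokes the macro the same way.
 */
static inline unsigned long __demo_emulate_add(struct operand *dst,
					       struct operand *src)
{
	unsigned long eflags = 0;

	emulate_2op_SrcV("add", *src, *dst, eflags);
	return eflags;
}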
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		unsigned long _tmp;					\
									\
		switch ( (_dst).bytes )					\
		{							\
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&q" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0","3","2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0","3","2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK) );			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0","4","2")			\
			_op"q %"_qx"3,%1; "				\
			_POST_EFLAGS("0","4","2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK) );	\
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)				\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0","3","2")			\
			_op"q %1; "					\
			_POST_EFLAGS("0","3","2")			\
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK) );				\
	} while (0)

#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif				/* __i386__ */
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = ops->read_std((unsigned long)(_eip) + ctxt->cs_base, &_x,	\
			   (_size), ctxt->vcpu);			\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
/* Access/update address held in a register, based on addressing mode. */
#define address_mask(reg)						\
	((ad_bytes == sizeof(unsigned long)) ?				\
	 (reg) : ((reg) & ((1UL << (ad_bytes << 3)) - 1)))
#define register_address(base, reg)					\
	((base) + address_mask(reg))
#define register_address_increment(reg, inc)				\
	do {								\
		/* signed type ensures sign extension to long */	\
		int _inc = (inc);					\
		if ( ad_bytes == sizeof(unsigned long) )		\
			(reg) += _inc;					\
		else							\
			(reg) = ((reg) & ~((1UL << (ad_bytes << 3)) - 1)) | \
			   (((reg) + _inc) & ((1UL << (ad_bytes << 3)) - 1)); \
	} while (0)
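/*
 * Illustrative sketch (not part of the original file): the wrap-around
 * behaviour register_address_increment() implements. With ad_bytes == 2
 * only the low 16 bits take part in the addition, so SI == 0xFFFF plus 1
 * becomes 0x0000 while the upper register bits are preserved. Plain
 * function mirroring the macro for a single register value; the name is
 * hypothetical.
 */
static inline unsigned long __demo_register_increment(unsigned long reg,
						      int inc,
						      unsigned int ad_bytes)
{
	unsigned long mask;

	if (ad_bytes == sizeof(unsigned long))
		return reg + inc;
	mask = (1UL << (ad_bytes << 3)) - 1;
	return (reg & ~mask) | ((reg + inc) & mask);
}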
#define JMP_REL(rel)							\
	do {								\
		_eip += (int)(rel);					\
		_eip = ((op_bytes == 2) ? (uint16_t)_eip : (uint32_t)_eip); \
	} while (0)
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
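/*
 * Illustrative usage (not part of the original file): with highbyte_regs
 * set, reg values 4-7 select AH/CH/DH/BH, i.e. byte 1 of RAX/RCX/RDX/RBX
 * on a little-endian host. The helper name is hypothetical.
 */
static inline u8 __demo_read_ah(unsigned long *regs)
{
	return *(u8 *)decode_register(4, regs, 1);	/* reg 4 == AH */
}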
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
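/*
 * Illustrative check (not part of the original file): opcode 0x74 is JZ
 * and 0x75 is JNZ, so with ZF set test_cc() should accept the first and
 * reject the second. The helper name is hypothetical.
 */
static inline int __demo_test_cc_jz(unsigned int flags)
{
	return test_cc(0x74, flags) && !test_cc(0x75, flags);
}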
int
x86_emulate_memop(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned d;
	u8 b, sib, twobyte = 0, rex_prefix = 0;
	u8 modrm, modrm_mod = 0, modrm_reg = 0, modrm_rm = 0;
	unsigned long *override_base = NULL;
	unsigned int op_bytes, ad_bytes, lock_prefix = 0, rep_prefix = 0, i;
	int rc = 0;
	struct operand src, dst;
	unsigned long cr2 = ctxt->cr2;
	int mode = ctxt->mode;
	unsigned long modrm_ea;
	int use_modrm_ea, index_reg = 0, base_reg = 0, scale, rip_relative = 0;
	int no_wb = 0;
	u64 msr_data;

	/* Shadow copy of register state. Committed on successful emulation. */
	unsigned long _regs[NR_VCPU_REGS];
	unsigned long _eip = ctxt->vcpu->rip, _eflags = ctxt->eflags;
	unsigned long modrm_val = 0;

	memcpy(_regs, ctxt->vcpu->regs, sizeof _regs);
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		op_bytes = ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		op_bytes = ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		op_bytes = 4;
		ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}
	/* Legacy prefixes. */
	for (i = 0; i < 8; i++) {
		switch (b = insn_fetch(u8, 1, _eip)) {
		case 0x66:	/* operand-size override */
			op_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				ad_bytes ^= 12;	/* switch between 4/8 bytes */
			else
				ad_bytes ^= 6;	/* switch between 2/4 bytes */
			break;
		case 0x2e:	/* CS override */
			override_base = &ctxt->cs_base;
			break;
		case 0x3e:	/* DS override */
			override_base = &ctxt->ds_base;
			break;
		case 0x26:	/* ES override */
			override_base = &ctxt->es_base;
			break;
		case 0x64:	/* FS override */
			override_base = &ctxt->fs_base;
			break;
		case 0x65:	/* GS override */
			override_base = &ctxt->gs_base;
			break;
		case 0x36:	/* SS override */
			override_base = &ctxt->ss_base;
			break;
		case 0xf0:	/* LOCK */
			lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
		case 0xf3:	/* REP/REPE/REPZ */
			rep_prefix = 1;
			break;
		default:
			goto done_prefixes;
		}
	}

done_prefixes:
	/* REX prefix. */
	if ((mode == X86EMUL_MODE_PROT64) && ((b & 0xf0) == 0x40)) {
		rex_prefix = b;
		if (b & 8)
			op_bytes = 8;	/* REX.W */
		modrm_reg = (b & 4) << 1;	/* REX.R */
		index_reg = (b & 2) << 2;	/* REX.X */
		modrm_rm = base_reg = (b & 1) << 3;	/* REG.B */
		b = insn_fetch(u8, 1, _eip);
	}
	/* Opcode byte(s). */
	d = opcode_table[b];
	if (d == 0) {
		/* Two-byte opcode? */
		if (b == 0x0f) {
			twobyte = 1;
			b = insn_fetch(u8, 1, _eip);
			d = twobyte_table[b];
		}

		/* Unrecognised? */
		if (d == 0)
			goto cannot_emulate;
	}
	/* ModRM and SIB bytes. */
	if (d & ModRM) {
		modrm = insn_fetch(u8, 1, _eip);
		modrm_mod |= (modrm & 0xc0) >> 6;
		modrm_reg |= (modrm & 0x38) >> 3;
		modrm_rm |= (modrm & 0x07);
		modrm_ea = 0;
		use_modrm_ea = 1;

		if (modrm_mod == 3) {
			modrm_val = *(unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			goto modrm_done;
		}

		if (ad_bytes == 2) {
			unsigned bx = _regs[VCPU_REGS_RBX];
			unsigned bp = _regs[VCPU_REGS_RBP];
			unsigned si = _regs[VCPU_REGS_RSI];
			unsigned di = _regs[VCPU_REGS_RDI];

			/* 16-bit ModR/M decode. */
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 6)
					modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(u16, 2, _eip);
				break;
			}
			switch (modrm_rm) {
			case 0:
				modrm_ea += bx + si;
				break;
			case 1:
				modrm_ea += bx + di;
				break;
			case 2:
				modrm_ea += bp + si;
				break;
			case 3:
				modrm_ea += bp + di;
				break;
			case 4:
				modrm_ea += si;
				break;
			case 5:
				modrm_ea += di;
				break;
			case 6:
				if (modrm_mod != 0)
					modrm_ea += bp;
				break;
			case 7:
				modrm_ea += bx;
				break;
			}
			if (modrm_rm == 2 || modrm_rm == 3 ||
			    (modrm_rm == 6 && modrm_mod != 0))
				if (!override_base)
					override_base = &ctxt->ss_base;
			modrm_ea = (u16)modrm_ea;
		} else {
			/* 32/64-bit ModR/M decode. */
			switch (modrm_rm) {
			case 4:
			case 12:
				sib = insn_fetch(u8, 1, _eip);
				index_reg |= (sib >> 3) & 7;
				base_reg |= sib & 7;
				scale = sib >> 6;

				switch (base_reg) {
				case 5:
					if (modrm_mod != 0)
						modrm_ea += _regs[base_reg];
					else
						modrm_ea += insn_fetch(s32, 4, _eip);
					break;
				default:
					modrm_ea += _regs[base_reg];
				}
				switch (index_reg) {
				case 4:
					break;
				default:
					modrm_ea += _regs[index_reg] << scale;
				}
				break;
			case 5:
				if (modrm_mod != 0)
					modrm_ea += _regs[modrm_rm];
				else if (mode == X86EMUL_MODE_PROT64)
					rip_relative = 1;
				break;
			default:
				modrm_ea += _regs[modrm_rm];
				break;
			}
			switch (modrm_mod) {
			case 0:
				if (modrm_rm == 5)
					modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			case 1:
				modrm_ea += insn_fetch(s8, 1, _eip);
				break;
			case 2:
				modrm_ea += insn_fetch(s32, 4, _eip);
				break;
			}
		}
		if (!override_base)
			override_base = &ctxt->ds_base;
		if (mode == X86EMUL_MODE_PROT64 &&
		    override_base != &ctxt->fs_base &&
		    override_base != &ctxt->gs_base)
			override_base = NULL;

		if (override_base)
			modrm_ea += *override_base;

		if (rip_relative) {
			modrm_ea += _eip;
			switch (d & SrcMask) {
			case SrcImmByte:
				modrm_ea += 1;
				break;
			case SrcImm:
				if (d & ByteOp)
					modrm_ea += 1;
				else
					if (op_bytes == 8)
						modrm_ea += 4;
					else
						modrm_ea += op_bytes;
				break;
			}
		}
		if (ad_bytes != 8)
			modrm_ea = (u32)modrm_ea;
	modrm_done:
		;
	}
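	/*
	 * Added note: e.g. in 16-bit mode, ModRM 0x46 (mod=01, rm=6 -> BP)
	 * followed by a disp8 yields modrm_ea = bp + disp8, with SS as the
	 * implied segment base per the override logic above.
	 */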
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		src.type = OP_REG;
		if (d & ByteOp) {
			src.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			src.val = src.orig_val = *(u8 *) src.ptr;
			src.bytes = 1;
		} else {
			src.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((src.bytes = op_bytes)) {
			case 2:
				src.val = src.orig_val = *(u16 *) src.ptr;
				break;
			case 4:
				src.val = src.orig_val = *(u32 *) src.ptr;
				break;
			case 8:
				src.val = src.orig_val = *(u64 *) src.ptr;
				break;
			}
		}
		break;
	case SrcMem16:
		src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (twobyte && b == 0x01 && modrm_reg == 7)
			break;
	      srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if modrm_mod == 3.
		 */
		if ((d & ModRM) && modrm_mod == 3) {
			src.type = OP_REG;
			src.val = src.orig_val = modrm_val;
			break;
		}
		src.type = OP_MEM;
		src.ptr = (unsigned long *)cr2;
		if ((rc = ops->read_emulated((unsigned long)src.ptr,
					     &src.val, src.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		src.orig_val = src.val;
		break;
	case SrcImm:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = (d & ByteOp) ? 1 : op_bytes;
		if (src.bytes == 8)
			src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (src.bytes) {
		case 1:
			src.val = insn_fetch(s8, 1, _eip);
			break;
		case 2:
			src.val = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			src.val = insn_fetch(s32, 4, _eip);
			break;
		}
		break;
	case SrcImmByte:
		src.type = OP_IMM;
		src.ptr = (unsigned long *)_eip;
		src.bytes = 1;
		src.val = insn_fetch(s8, 1, _eip);
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		goto special_insn;
	case DstReg:
		dst.type = OP_REG;
		if ((d & ByteOp)
		    && !(twobyte && (b == 0xb6 || b == 0xb7))) {
			dst.ptr = decode_register(modrm_reg, _regs,
						  (rex_prefix == 0));
			dst.val = *(u8 *) dst.ptr;
			dst.bytes = 1;
		} else {
			dst.ptr = decode_register(modrm_reg, _regs, 0);
			switch ((dst.bytes = op_bytes)) {
			case 2:
				dst.val = *(u16 *)dst.ptr;
				break;
			case 4:
				dst.val = *(u32 *)dst.ptr;
				break;
			case 8:
				dst.val = *(u64 *)dst.ptr;
				break;
			}
		}
		break;
	case DstMem:
		dst.type = OP_MEM;
		dst.ptr = (unsigned long *)cr2;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.val = 0;
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if modrm_mod == 3.
		 */
		if ((d & ModRM) && modrm_mod == 3) {
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)
				decode_register(modrm_rm, _regs, d & ByteOp);
			dst.val = modrm_val;
			break;
		}
		if (d & BitOp) {
			unsigned long mask = ~(dst.bytes * 8 - 1);

			dst.ptr = (void *)dst.ptr + (src.val & mask) / 8;
		}
		if (!(d & Mov) && /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)dst.ptr,
					      &dst.val, dst.bytes,
					      ctxt->vcpu)) != 0))
			goto done;
		break;
	}
	dst.orig_val = dst.val;
	if (twobyte)
		goto twobyte_insn;

	switch (b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", src, dst, _eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", src, dst, _eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", src, dst, _eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", src, dst, _eflags);
		break;
	case 0x20 ... 0x23:
	      and:		/* and */
		emulate_2op_SrcV("and", src, dst, _eflags);
		break;
	case 0x24:		/* and al imm8 */
		dst.type = OP_REG;
		dst.ptr = &_regs[VCPU_REGS_RAX];
		dst.val = *(u8 *)dst.ptr;
		dst.bytes = 1;
		dst.orig_val = dst.val;
		goto and;
	case 0x25:		/* and ax imm16, or eax imm32 */
		dst.type = OP_REG;
		dst.bytes = op_bytes;
		dst.ptr = &_regs[VCPU_REGS_RAX];
		if (op_bytes == 2)
			dst.val = *(u16 *)dst.ptr;
		else
			dst.val = *(u32 *)dst.ptr;
		dst.orig_val = dst.val;
		goto and;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", src, dst, _eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", src, dst, _eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		break;
	case 0x63:		/* movsxd */
		if (mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		dst.val = (s32) src.val;
		break;
	case 0x6a:		/* push imm8 */
		src.val = insn_fetch(s8, 1, _eip);
	push:
		dst.type  = OP_MEM;
		dst.bytes = op_bytes;
		dst.val = src.val;
		register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
		dst.ptr = (void *) register_address(ctxt->ss_base,
						    _regs[VCPU_REGS_RSP]);
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
	      test:		/* test */
		emulate_2op_SrcV("test", src, dst, _eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
		/* Write back the register source. */
		switch (dst.bytes) {
		case 1:
			*(u8 *) src.ptr = (u8) dst.val;
			break;
		case 2:
			*(u16 *) src.ptr = (u16) dst.val;
			break;
		case 4:
			*src.ptr = (u32) dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*src.ptr = dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		dst.val = src.val;
		lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8d: /* lea r16/r32, m */
		dst.val = modrm_val;
		break;
	case 0x8f: /* pop (sole member of Grp1a) */
		/* 64-bit mode: POP always pops a 64-bit operand. */
		if (mode == X86EMUL_MODE_PROT64)
			dst.bytes = 8;
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
							 _regs[VCPU_REGS_RSP]),
					&dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSP], dst.bytes);
		break;
	case 0xa0 ... 0xa1:	/* mov */
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		dst.val = src.val;
		_eip += ad_bytes;	/* skip src displacement */
		break;
	case 0xa2 ... 0xa3:	/* mov */
		dst.val = (unsigned long)_regs[VCPU_REGS_RAX];
		_eip += ad_bytes;	/* skip dst displacement */
		break;
	case 0xc0 ... 0xc1:
	      grp2:		/* Grp2 */
		switch (modrm_reg) {
		case 0:	/* rol */
			emulate_2op_SrcB("rol", src, dst, _eflags);
			break;
		case 1:	/* ror */
			emulate_2op_SrcB("ror", src, dst, _eflags);
			break;
		case 2:	/* rcl */
			emulate_2op_SrcB("rcl", src, dst, _eflags);
			break;
		case 3:	/* rcr */
			emulate_2op_SrcB("rcr", src, dst, _eflags);
			break;
		case 4:	/* sal/shl */
		case 6:	/* sal/shl */
			emulate_2op_SrcB("sal", src, dst, _eflags);
			break;
		case 5:	/* shr */
			emulate_2op_SrcB("shr", src, dst, _eflags);
			break;
		case 7:	/* sar */
			emulate_2op_SrcB("sar", src, dst, _eflags);
			break;
		}
		break;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	      mov:
		dst.val = src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		src.val = 1;
		goto grp2;
	case 0xd2 ... 0xd3:	/* Grp2 */
		src.val = _regs[VCPU_REGS_RCX];
		goto grp2;
	case 0xf6 ... 0xf7:	/* Grp3 */
		switch (modrm_reg) {
		case 0 ... 1:	/* test */
			/*
			 * Special case in Grp3: test has an immediate
			 * source operand.
			 */
			src.type = OP_IMM;
			src.ptr = (unsigned long *)_eip;
			src.bytes = (d & ByteOp) ? 1 : op_bytes;
			if (src.bytes == 8)
				src.bytes = 4;
			switch (src.bytes) {
			case 1:
				src.val = insn_fetch(s8, 1, _eip);
				break;
			case 2:
				src.val = insn_fetch(s16, 2, _eip);
				break;
			case 4:
				src.val = insn_fetch(s32, 4, _eip);
				break;
			}
			goto test;
		case 2:	/* not */
			dst.val = ~dst.val;
			break;
		case 3:	/* neg */
			emulate_1op("neg", dst, _eflags);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		switch (modrm_reg) {
		case 0:	/* inc */
			emulate_1op("inc", dst, _eflags);
			break;
		case 1:	/* dec */
			emulate_1op("dec", dst, _eflags);
			break;
		case 4: /* jmp abs */
			if (b == 0xff)
				_eip = dst.val;
			else
				goto cannot_emulate;
			break;
		case 6:	/* push */
			/* 64-bit mode: PUSH always pushes a 64-bit operand. */
			if (mode == X86EMUL_MODE_PROT64) {
				dst.bytes = 8;
				if ((rc = ops->read_std((unsigned long)dst.ptr,
							&dst.val, 8,
							ctxt->vcpu)) != 0)
					goto done;
			}
			register_address_increment(_regs[VCPU_REGS_RSP],
						   -dst.bytes);
			if ((rc = ops->write_std(
				     register_address(ctxt->ss_base,
						      _regs[VCPU_REGS_RSP]),
				     &dst.val, dst.bytes, ctxt->vcpu)) != 0)
				goto done;
			no_wb = 1;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	}
writeback:
	if (!no_wb) {
		switch (dst.type) {
		case OP_REG:
			/* The 4-byte case *is* correct: in 64-bit mode we zero-extend. */
			switch (dst.bytes) {
			case 1:
				*(u8 *)dst.ptr = (u8)dst.val;
				break;
			case 2:
				*(u16 *)dst.ptr = (u16)dst.val;
				break;
			case 4:
				*dst.ptr = (u32)dst.val;
				break;	/* 64b: zero-ext */
			case 8:
				*dst.ptr = dst.val;
				break;
			}
			break;
		case OP_MEM:
			if (lock_prefix)
				rc = ops->cmpxchg_emulated((unsigned long)dst.ptr,
							   &dst.orig_val,
							   &dst.val, dst.bytes,
							   ctxt->vcpu);
			else
				rc = ops->write_emulated((unsigned long)dst.ptr,
							 &dst.val, dst.bytes,
							 ctxt->vcpu);
			if (rc != 0)
				goto done;
			break;
		default:
			break;
		}
	}

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->regs, _regs, sizeof _regs);
	ctxt->eflags = _eflags;
	ctxt->vcpu->rip = _eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
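	/*
	 * Added note: error paths jump straight to the "done" label above,
	 * bypassing the commit, so a fault mid-instruction leaves the
	 * vcpu's architectural registers untouched; only a fully emulated
	 * instruction copies the _regs shadow back.
	 */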
special_insn:
	if (twobyte)
		goto twobyte_special_insn;
	switch (b) {
	case 0x50 ... 0x57: /* push reg */
		if (op_bytes == 2)
			src.val = (u16) _regs[b & 0x7];
		else
			src.val = (u32) _regs[b & 0x7];
		dst.type  = OP_MEM;
		dst.bytes = op_bytes;
		dst.val = src.val;
		register_address_increment(_regs[VCPU_REGS_RSP], -op_bytes);
		dst.ptr = (void *) register_address(
			ctxt->ss_base, _regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
		dst.ptr = (unsigned long *)&_regs[b & 0x7];
	pop_instruction:
		if ((rc = ops->read_std(register_address(ctxt->ss_base,
			_regs[VCPU_REGS_RSP]), dst.ptr, op_bytes, ctxt->vcpu))
		    != 0)
			goto done;

		register_address_increment(_regs[VCPU_REGS_RSP], op_bytes);
		no_wb = 1; /* Disable writeback. */
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,					/* in */
				(d & ByteOp) ? 1 : op_bytes,		/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),			/* down */
				register_address(ctxt->es_base,
						 _regs[VCPU_REGS_RDI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]			/* port */
				) == 0)
			return -1;
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,					/* in */
				(d & ByteOp) ? 1 : op_bytes,		/* size */
				rep_prefix ?
				address_mask(_regs[VCPU_REGS_RCX]) : 1,	/* count */
				(_eflags & EFLG_DF),			/* down */
				register_address(override_base ?
						 *override_base : ctxt->ds_base,
						 _regs[VCPU_REGS_RSI]),	/* address */
				rep_prefix,
				_regs[VCPU_REGS_RDX]			/* port */
				) == 0)
			return -1;
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, _eip);

		if (test_cc(b, _eflags))
			JMP_REL(rel);
		break;
	}
	case 0x9c: /* pushf */
		src.val = (unsigned long) _eflags;
		goto push;
	case 0x9d: /* popf */
		dst.ptr = (unsigned long *) &_eflags;
		goto pop_instruction;
	case 0xc3: /* ret */
		dst.ptr = &_eip;
		goto pop_instruction;
	case 0xf4:		/* hlt */
		ctxt->vcpu->halt_request = 1;
		goto done;
	}
	if (rep_prefix) {
		if (_regs[VCPU_REGS_RCX] == 0) {
			ctxt->vcpu->rip = _eip;
			goto done;
		}
		_regs[VCPU_REGS_RCX]--;
		_eip = ctxt->vcpu->rip;
	}
	switch (b) {
	case 0xa4 ... 0xa5:	/* movs */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)register_address(ctxt->es_base,
							    _regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(
			override_base ? *override_base : ctxt->ds_base,
			_regs[VCPU_REGS_RSI]), &dst.val, dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		register_address_increment(_regs[VCPU_REGS_RDI],
			(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		DPRINTF("Urk! I don't handle CMPS.\n");
		goto cannot_emulate;
	case 0xaa ... 0xab:	/* stos */
		dst.type = OP_MEM;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)cr2;
		dst.val = _regs[VCPU_REGS_RAX];
		register_address_increment(_regs[VCPU_REGS_RDI],
			(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		dst.type = OP_REG;
		dst.bytes = (d & ByteOp) ? 1 : op_bytes;
		dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(cr2, &dst.val, dst.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(_regs[VCPU_REGS_RSI],
			(_eflags & EFLG_DF) ? -dst.bytes : dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, _eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, _eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		src.val = (unsigned long) _eip;
		JMP_REL(rel);
		goto push;
	}
	case 0xe9: /* jmp rel */
	case 0xeb: /* jmp rel short */
		JMP_REL(src.val);
		no_wb = 1; /* Disable writeback. */
		break;
	}
	goto writeback;

twobyte_insn:
	switch (b) {
	case 0x01: /* lgdt, lidt, lmsw */
		/* Disable writeback. */
		no_wb = 1;
		switch (modrm_reg) {
			u16 size;
			unsigned long address;

		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			break;
		case 3: /* lidt */
			rc = read_descriptor(ctxt, ops, src.ptr,
					     &size, &address, op_bytes);
			if (rc)
				goto done;
			realmode_lidt(ctxt->vcpu, size, address);
			break;
		case 4: /* smsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			*(u16 *)&_regs[modrm_rm]
				= realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			if (modrm_mod != 3)
				goto cannot_emulate;
			realmode_lmsw(ctxt->vcpu, (u16)modrm_val, &_eflags);
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, cr2);
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x21: /* mov from dr to reg */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, modrm_reg, &_regs[modrm_rm]);
		break;
	case 0x23: /* mov from reg to dr */
		no_wb = 1;
		if (modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, modrm_reg, _regs[modrm_rm]);
		break;
	case 0x40 ... 0x4f:	/* cmov */
		dst.val = dst.orig_val = src.val;
		no_wb = 1;
		/*
		 * First, assume we're decoding an even cmov opcode
		 * (lsb == 0).
		 */
		switch ((b & 15) >> 1) {
		case 0:	/* cmovo */
			no_wb = (_eflags & EFLG_OF) ? 0 : 1;
			break;
		case 1:	/* cmovb/cmovc/cmovnae */
			no_wb = (_eflags & EFLG_CF) ? 0 : 1;
			break;
		case 2:	/* cmovz/cmove */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			break;
		case 3:	/* cmovbe/cmovna */
			no_wb = (_eflags & (EFLG_CF | EFLG_ZF)) ? 0 : 1;
			break;
		case 4:	/* cmovs */
			no_wb = (_eflags & EFLG_SF) ? 0 : 1;
			break;
		case 5:	/* cmovp/cmovpe */
			no_wb = (_eflags & EFLG_PF) ? 0 : 1;
			break;
		case 7:	/* cmovle/cmovng */
			no_wb = (_eflags & EFLG_ZF) ? 0 : 1;
			/* fall through */
		case 6:	/* cmovl/cmovnge */
			no_wb &= (!(_eflags & EFLG_SF) !=
				  !(_eflags & EFLG_OF)) ? 0 : 1;
			break;
		}
		/* Odd cmov opcodes (lsb == 1) have inverted sense. */
		no_wb ^= b & 1;
		break;
	case 0xa3:
	      bt:		/* bt */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bt", src, dst, _eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("bts", src, dst, _eflags);
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		src.orig_val = src.val;
		src.val = _regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", src, dst, _eflags);
		if (_eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			dst.val = src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			dst.type = OP_REG;
			dst.ptr = (unsigned long *)&_regs[VCPU_REGS_RAX];
		}
		break;
.val
&= (dst
.bytes
<< 3) - 1; /* only subword offset */
1494 emulate_2op_SrcV_nobyte("btr", src
, dst
, _eflags
);
	case 0xb6 ... 0xb7:	/* movzx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (u8) src.val : (u16) src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		src.val &= (dst.bytes << 3) - 1; /* only subword offset */
		emulate_2op_SrcV_nobyte("btc", src, dst, _eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		dst.bytes = op_bytes;
		dst.val = (d & ByteOp) ? (s8) src.val : (s16) src.val;
		break;
	case 0xc3:		/* movnti */
		dst.bytes = op_bytes;
		dst.val = (op_bytes == 4) ? (u32) src.val : (u64) src.val;
		break;
	}
	goto writeback;
twobyte_special_insn:
	/* Disable writeback. */
	no_wb = 1;
	switch (b) {
	case 0x06:
		emulate_clts(ctxt->vcpu);
		break;
	case 0x09:		/* wbinvd */
		break;
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		break;
	case 0x20: /* mov cr, reg */
		if (modrm_mod != 3)
			goto cannot_emulate;
		_regs[modrm_rm] = realmode_get_cr(ctxt->vcpu, modrm_reg);
		break;
	case 0x22: /* mov reg, cr */
		if (modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu, modrm_reg, modrm_val, &_eflags);
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)_regs[VCPU_REGS_RAX]
			| ((u64)_regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, _regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_x86_ops->inject_gp(ctxt->vcpu, 0);
			_eip = ctxt->vcpu->rip;
		} else {
			_regs[VCPU_REGS_RAX] = (u32)msr_data;
			_regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, _eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, _eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, _eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(b, _eflags))
			JMP_REL(rel);
		break;
	}
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		{
			u64 old, new;
			if ((rc = ops->read_emulated(cr2, &old, 8, ctxt->vcpu))
			    != 0)
				goto done;
			if (((u32) (old >> 0) != (u32) _regs[VCPU_REGS_RAX]) ||
			    ((u32) (old >> 32) != (u32) _regs[VCPU_REGS_RDX])) {
				_regs[VCPU_REGS_RAX] = (u32) (old >> 0);
				_regs[VCPU_REGS_RDX] = (u32) (old >> 32);
				_eflags &= ~EFLG_ZF;
			} else {
				new = ((u64)_regs[VCPU_REGS_RCX] << 32)
					| (u32) _regs[VCPU_REGS_RBX];
				if ((rc = ops->cmpxchg_emulated(cr2, &old,
							&new, 8, ctxt->vcpu)) != 0)
					goto done;
				_eflags |= EFLG_ZF;
			}
			break;
		}
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", b);
	return -1;
}
#ifdef __XEN__

#include <asm/mm.h>
#include <asm/uaccess.h>

int
x86_emulate_read_std(unsigned long addr,
		     unsigned long *val,
		     unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	*val = 0;

	if ((rc = copy_from_user((void *)val, (void *)addr, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, 0); /* read fault */
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}

int
x86_emulate_write_std(unsigned long addr,
		      unsigned long val,
		      unsigned int bytes, struct x86_emulate_ctxt *ctxt)
{
	unsigned int rc;

	if ((rc = copy_to_user((void *)addr, (void *)&val, bytes)) != 0) {
		propagate_page_fault(addr + bytes - rc, PGERR_write_access);
		return X86EMUL_PROPAGATE_FAULT;
	}

	return X86EMUL_CONTINUE;
}

#endif