/******************************************************************************
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcImmUByte (8<<4)	/* 8-bit unsigned immediate operand. */
#define SrcMask     (0xf<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define BitOp       (1<<10)
#define MemAbs      (1<<11)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Imm16   (4<<29)
#define Src2Mask    (7<<29)
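
/*
 * Illustrative note (added, not in the original source): each table entry
 * below packs a whole decode recipe into one u32.  For example, with the
 * values above, the 'add r/m8, r8' entry
 *
 *	ByteOp | DstMem | SrcReg | ModRM = (1<<0)|(3<<1)|(1<<4)|(1<<8) = 0x117
 *
 * and the decoder recovers each field with a mask: (d & DstMask) == DstMem,
 * (d & SrcMask) == SrcReg, and so on.
 */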
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u32 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x78 - 0x7F */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, SrcImm | Src2Imm16, 0,
	ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, ImplicitOps | Stack,
	ImplicitOps, SrcImmByte, ImplicitOps, ImplicitOps,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	ByteOp | SrcImmUByte, SrcImmUByte,
	ByteOp | SrcImmUByte, SrcImmUByte,
	/* 0xE8 - 0xEF */
	SrcImm | Stack, SrcImm | ImplicitOps,
	SrcImm | Src2Imm16, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u32 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
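
/*
 * Illustrative example (added, not in the original source): a Group opcode
 * is re-decoded through these tables as group_table[group * 8 + modrm.reg],
 * e.g. opcode 0x83 /5 (sub r/m, imm8) lands on
 * group_table[Group1_83 * 8 + 5] == DstMem | SrcImmByte | ModRM.
 * When the entry also carries GroupDual and mod == 3, group2_table supplies
 * the register-form decoding instead.
 */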
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_LO32 _tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "
#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break;						\
		case 4:							\
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break;						\
		case 8:							\
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long _tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b");  \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type)	\
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (_cl).val;				\
		_type _srcv = (_src).val;				\
		_type _dstv = (_dst).val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)	\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(_cl).val  = (unsigned long) _clv;			\
		(_src).val = (unsigned long) _srcv;			\
		(_dst).val = (unsigned long) _dstv;			\
	} while (0)
#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)			\
	do {								\
		switch ((_dst).bytes) {					\
		case 2:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "w", unsigned short);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "l", unsigned int);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long));	\
			break;						\
		}							\
	} while (0)
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)(_x);							\
})
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
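
/*
 * Worked example (added, not in the original source):
 *	ad_bytes == 2 -> (1UL << 16) - 1 == 0xffff
 *	ad_bytes == 4 -> (1UL << 32) - 1 == 0xffffffff (on 64-bit hosts)
 * The ad_bytes == 8 case would shift by the full word width, which is why
 * the callers below special-case it instead of relying on this mask.
 */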
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}
static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}
static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
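
/*
 * Illustrative example (added, not in the original source): with
 * ad_bytes == 2 and *reg == 0x1ffff, an increment of 1 yields
 * (0x1ffff & ~0xffff) | ((0x1ffff + 1) & 0xffff) == 0x10000, i.e. the low
 * 16 bits wrap exactly as a real 16-bit SI/DI would, while the untouched
 * upper bits of the cached register value are preserved.
 */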
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	/* 64-bit mode forces a zero base for all segments but FS/GS. */
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}
static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}
static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
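
/*
 * Illustrative example (added, not in the original source): without a REX
 * prefix, modrm_reg values 4..7 name the legacy high-byte registers, so
 * modrm_reg == 7 (BH) resolves to byte 1 of regs[7 & 3], i.e. bits 8..15
 * of RBX, rather than to a register slot of its own.
 */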
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
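
/*
 * Worked example (added, not in the original source): for jnz (opcode 0x75)
 * the caller passes the opcode byte as the condition, so
 * (0x75 & 15) >> 1 == 2 selects the ZF test and the odd low bit inverts it:
 * test_cc(0x75, flags) is true exactly when ZF == 0.
 */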
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
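
/*
 * Worked example (added, not in the original source): in the 16-bit form,
 * ModRM byte 0x46 decodes as mod=01 reg=000 rm=110, giving an effective
 * address of bp + disp8 with an SS-segment default, truncated to 16 bits
 * by the (u16) cast at the end of the 16-bit branch above.
 */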
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix & 8)
		c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
	case SrcImmUByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		if ((c->d & SrcMask) == SrcImmByte)
			c->src.val = insn_fetch(s8, 1, c->eip);
		else
			c->src.val = insn_fetch(u8, 1, c->eip);
		break;
	case SrcOne:
		c->src.bytes = 1;
		c->src.val = 1;
		break;
	}

	/*
	 * Decode and fetch the second source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & Src2Mask) {
	case Src2None:
		break;
	case Src2CL:
		c->src2.bytes = 1;
		c->src2.val = c->regs[VCPU_REGS_RCX] & 0x8;
		break;
	case Src2ImmByte:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 1;
		c->src2.val = insn_fetch(u8, 1, c->eip);
		break;
	case Src2Imm16:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 2;
		c->src2.val = insn_fetch(u16, 2, c->eip);
		break;
	case Src2One:
		c->src2.bytes = 1;
		c->src2.val = 1;
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
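
/*
 * Illustrative note (added, not in the original source): a push is modelled
 * as an ordinary memory-destination write: RSP is decremented first, then
 * dst.ptr is aimed at SS:RSP so the common writeback path performs the
 * store.  E.g. with op_bytes == 4 and RSP == 0x1000, the value lands at
 * ss_base(ctxt) + 0xffc.
 */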
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops,
		       void *dest, int len)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				dest, len, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
	return rc;
}
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
	if (rc != 0)
		return rc;
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
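
/*
 * Illustrative note (added, not in the original source): for a LOCKed
 * read-modify-write, writeback() goes through cmpxchg_emulated() with
 * dst.orig_val (the value read at decode time) as the expected value, so a
 * concurrent guest store to the same location makes the compare fail rather
 * than being silently overwritten.
 */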
static void toggle_interruptibility(struct x86_emulate_ctxt *ctxt, u32 mask)
{
	u32 int_shadow = kvm_x86_ops->get_interrupt_shadow(ctxt->vcpu, mask);
	/*
	 * An sti; sti; sequence only disables interrupts for the first
	 * instruction. So, if the last instruction, be it emulated or
	 * not, left the system with the INT_STI flag enabled, it
	 * means that the last instruction is an sti. We should not
	 * leave the flag on in this case. The same goes for mov ss.
	 */
	if (!(int_shadow & mask))
		ctxt->interruptibility = mask;
}
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	ctxt->interruptibility = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;
	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;
	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}
	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;


	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		rc = emulate_pop(ctxt, ops, &c->dst.val, c->op_bytes);
		if (rc != 0)
			goto done;
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->dst.type = OP_NONE;  /* Disable writeback. */
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->dst.type = OP_NONE; /* Disable writeback. */
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg == VCPU_SREG_SS)
			toggle_interruptibility(ctxt, X86_SHADOW_INT_MOV_SS);

		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
						   seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]),
					     &c->dst.val,
					     c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						   seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
					     &c->src.val,
					     c->src.bytes,
					     ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					     &c->dst.val,
					     c->dst.bytes,
					     ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
					     &c->dst.val,
					     c->dst.bytes,
					     ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xcb:		/* ret far */
		rc = emulate_ret_far(ctxt, ops);
		if (rc)
			goto done;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = c->src.val;
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = c->src.val;
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel = c->src.val;
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */
		if (kvm_load_segment_descriptor(ctxt->vcpu, c->src2.val, 9,
						VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = c->src.val;
		break;
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		toggle_interruptibility(ctxt, X86_SHADOW_INT_STI);
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE;
		break;
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}