/******************************************************************************
 * x86_emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */
#ifndef __KERNEL__
#include <stdio.h>
#include <stdint.h>
#include <public/xen.h>
#define DPRINTF(_f, _a ...) printf(_f , ## _a)
#else
#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#define DPRINTF(x...) do {} while (0)
#endif
#include <linux/module.h>
#include <asm/kvm_x86_emulate.h>
/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */
/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg      (2<<1)	/* Register operand. */
#define DstMem      (3<<1)	/* Memory operand. */
#define DstAcc      (4<<1)	/* Destination Accumulator */
#define DstMask     (7<<1)
/* Source operand type. */
#define SrcNone     (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg      (1<<4)	/* Register operand. */
#define SrcMem      (2<<4)	/* Memory operand. */
#define SrcMem16    (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32    (4<<4)	/* Memory operand (32-bit). */
#define SrcImm      (5<<4)	/* Immediate operand. */
#define SrcImmByte  (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcOne      (7<<4)	/* Implied '1' */
#define SrcImmUByte (8<<4)	/* 8-bit unsigned immediate operand. */
#define SrcMask     (0xf<<4)
/* Generic ModRM decode. */
#define ModRM       (1<<8)
/* Destination is only written; never read. */
#define Mov         (1<<9)
#define BitOp       (1<<10)
#define MemAbs      (1<<11)	/* Memory operand is absolute displacement */
#define String      (1<<12)	/* String instruction (rep capable) */
#define Stack       (1<<13)	/* Stack instruction (push/pop) */
#define Group       (1<<14)	/* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (1<<15)	/* Alternate decoding of mod == 3 */
#define GroupMask   0xff	/* Group number stored in bits 0:7 */
/* Source 2 operand type */
#define Src2None    (0<<29)
#define Src2CL      (1<<29)
#define Src2ImmByte (2<<29)
#define Src2One     (3<<29)
#define Src2Imm16   (4<<29)
#define Src2Mask    (7<<29)
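
/*
 * Example (illustrative sketch, not part of the original source): a
 * decode-table entry such as "ByteOp | DstMem | SrcReg | ModRM" packs
 * the whole decode recipe into one u32, and the decoder pulls the
 * fields back out with the *Mask constants:
 *
 *	u32 d = ByteOp | DstMem | SrcReg | ModRM;
 *
 *	(d & DstMask) == DstMem		// destination is memory
 *	(d & SrcMask) == SrcReg		// source is a register
 *	(d & ModRM)   != 0		// a ModR/M byte follows the opcode
 *	(d & ByteOp)  != 0		// operands are 8 bits wide
 */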
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
static u32 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* insb, insw/insd */
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps, /* outsb, outsw/outsd */
	/* 0x70 - 0x77 */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x78 - 0x7F */
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	SrcImmByte, SrcImmByte, SrcImmByte, SrcImmByte,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, SrcImm | Src2Imm16, 0,
	ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, ImplicitOps | Stack,
	ImplicitOps, SrcImmByte, ImplicitOps, ImplicitOps,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	ByteOp | SrcImmUByte, SrcImmUByte,
	ByteOp | SrcImmUByte, SrcImmUByte,
	/* 0xE8 - 0xEF */
	SrcImm | Stack, SrcImm | ImplicitOps,
	SrcImm | Src2Imm16, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
static u32 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm, SrcImm,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp,
	DstMem | SrcReg | Src2ImmByte | ModRM,
	DstMem | SrcReg | Src2CL | ModRM,
	ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
static u32 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
static u32 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, SrcNone | ModRM,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
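
/*
 * Example (illustrative, not part of the original source): for opcode
 * 0xf7 with ModR/M reg field 3 ("neg r/m32"), the decoder computes
 *
 *	group = Group3;				// from opcode_table[0xf7]
 *	group = (group << 3) + ((modrm >> 3) & 7);
 *	d = group_table[group];			// DstMem | SrcNone | ModRM
 *
 * group2_table[] is consulted instead when GroupDual is set and
 * mod == 3 (the register form of the instruction).
 */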
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)
/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif
/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */	\
	"movl %"_sav",%"_LO32 _tmp"; "					\
	"push %"_tmp"; "						\
	"push %"_tmp"; "						\
	"movl %"_msk",%"_LO32 _tmp"; "					\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"pushf; "							\
	"notl %"_LO32 _tmp"; "						\
	"andl %"_LO32 _tmp",("_STK"); "					\
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "						\
	"orl  %"_LO32 _tmp",("_STK"); "					\
	"popf; "							\
	"pop  %"_sav"; "
/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp)					\
	/* _sav |= EFLAGS & _msk; */					\
	"pushf; "							\
	"pop %"_tmp"; "							\
	"andl %"_msk",%"_LO32 _tmp"; "					\
	"orl %"_LO32 _tmp",%"_sav"; "
#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif
#define ____emulate_2op(_op, _src, _dst, _eflags, _x, _y, _suffix)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" (_eflags), "=m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: _y ((_src).val), "i" (EFLAGS_MASK));		\
	} while (0)
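
/*
 * Example (illustrative sketch, not part of the original source): for
 * a 32-bit ADD, the macro above expands to roughly
 *
 *	asm volatile(
 *		... restore guest flags (_PRE_EFLAGS) ...
 *		"addl %3,%1; "
 *		... capture result flags (_POST_EFLAGS) ...
 *		: "=m" (eflags), "=m" (dst.val), "=&r" (tmp)
 *		: "r" (src.val), "i" (EFLAGS_MASK));
 *
 * so the host CPU performs the arithmetic while the guest's EFLAGS
 * are patched in beforehand and scraped out afterwards.
 */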
/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {					\
		case 2:							\
			____emulate_2op(_op,_src,_dst,_eflags,_wx,_wy,"w"); \
			break;						\
		case 4:							\
			____emulate_2op(_op,_src,_dst,_eflags,_lx,_ly,"l"); \
			break;						\
		case 8:							\
			ON64(____emulate_2op(_op,_src,_dst,_eflags,_qx,_qy,"q")); \
			break;						\
		}							\
	} while (0)
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								\
		unsigned long _tmp;					\
		switch ((_dst).bytes) {					\
		case 1:							\
			____emulate_2op(_op,_src,_dst,_eflags,_bx,_by,"b"); \
			break;						\
		default:						\
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	\
					     _wx, _wy, _lx, _ly, _qx, _qy); \
			break;						\
		}							\
	} while (0)
/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)			\
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)		\
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(_op, _cl, _src, _dst, _eflags, _suffix, _type) \
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (_cl).val;				\
		_type _srcv = (_src).val;				\
		_type _dstv = (_dst).val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" (_eflags), "+r" (_dstv), "=&r" (_tmp)	\
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(_cl).val  = (unsigned long) _clv;			\
		(_src).val = (unsigned long) _srcv;			\
		(_dst).val = (unsigned long) _dstv;			\
	} while (0)
#define emulate_2op_cl(_op, _cl, _src, _dst, _eflags)			\
	do {								\
		switch ((_dst).bytes) {					\
		case 2:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "w", unsigned short);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(_op, _cl, _src, _dst, _eflags,	\
					 "l", unsigned int);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(_op, _cl, _src, _dst, _eflags, \
					      "q", unsigned long));	\
			break;						\
		}							\
	} while (0)
#define __emulate_1op(_op, _dst, _eflags, _suffix)			\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" (_eflags), "+m" ((_dst).val),		\
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)					\
	do {								\
		switch ((_dst).bytes) {					\
		case 1:	__emulate_1op(_op, _dst, _eflags, "b"); break;	\
		case 2:	__emulate_1op(_op, _dst, _eflags, "w"); break;	\
		case 4:	__emulate_1op(_op, _dst, _eflags, "l"); break;	\
		case 8:	ON64(__emulate_1op(_op, _dst, _eflags, "q")); break; \
		}							\
	} while (0)
/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _size, _eip)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));		\
	if (rc != 0)							\
		goto done;						\
	(_eip) += (_size);						\
	(_type)_x;							\
})
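
/*
 * Example (illustrative, not part of the original source): insn_fetch()
 * is a GCC statement expression, so it both advances _eip and yields
 * the fetched value; on failure it bails out through the caller's
 * local "rc" and "done" label:
 *
 *	c->modrm = insn_fetch(u8, 1, c->eip);	// one opcode byte
 *	disp = insn_fetch(s32, 4, c->eip);	// "disp" is hypothetical
 */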
static inline unsigned long ad_mask(struct decode_cache *c)
{
	return (1UL << (c->ad_bytes << 3)) - 1;
}
/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct decode_cache *c, unsigned long reg)
{
	if (c->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(c);
}
static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}
static inline void
register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
{
	if (c->ad_bytes == sizeof(unsigned long))
		*reg += inc;
	else
		*reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
}
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
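
/*
 * Example (illustrative, not part of the original source): with 16-bit
 * addressing (ad_bytes == 2), ad_mask() is 0xffff, so only the low
 * word of the register is updated and it wraps as real hardware does:
 *
 *	*reg = 0x1ffff;
 *	register_address_increment(c, reg, 1);
 *	// 0xffff + 1 wraps to 0x0000 in the low word, high bits kept:
 *	// *reg == 0x10000
 */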
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}
static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
				       struct decode_cache *c)
{
	if (!c->has_seg_override)
		return 0;

	return seg_base(ctxt, c->seg_override);
}
static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
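
/*
 * Illustrative note (not part of the original source): the fetch cache
 * means a multi-byte instruction costs one read_std() call rather than
 * one per byte -- the first miss fills fc->data[] with up to 15 bytes
 * (clipped at the page boundary) and later bytes hit the cache.
 */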
static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 struct x86_emulate_ops *ops,
			 unsigned long eip, void *dest, unsigned size)
{
	int rc = 0;

	eip += ctxt->cs_base;
	while (size--) {
		rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
		if (rc)
			return rc;
	}
	return 0;
}
/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(u8 modrm_reg, unsigned long *regs,
			     int highbyte_regs)
{
	void *p;

	p = &regs[modrm_reg];
	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)&regs[modrm_reg & 3] + 1;
	return p;
}
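
/*
 * Example (illustrative, not part of the original source): without a
 * REX prefix, byte-register encodings 4-7 are the legacy high-byte
 * registers, so
 *
 *	decode_register(0, c->regs, 1);	// AL: byte 0 of regs[VCPU_REGS_RAX]
 *	decode_register(4, c->regs, 1);	// AH: byte 1 of regs[VCPU_REGS_RAX]
 *
 * i.e. encoding 4 with highbyte_regs set aliases bits 8-15 of RAX.
 */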
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}
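
/*
 * Example (illustrative, not part of the original source): for
 * "jz rel8" (opcode 0x74), (0x74 & 15) >> 1 == 2 selects the ZF case
 * and bit 0 is clear, so the sense is not inverted:
 *
 *	test_cc(0x74, eflags);	// taken iff EFLG_ZF is set
 *	test_cc(0x75, eflags);	// jnz: same test, inverted by the lsb
 */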
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;		/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
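
/*
 * Example (illustrative, not part of the original source): for the
 * 16-bit form "mov ax,[bp+si+4]" (mod=1, rm=2), the decode above
 * yields
 *
 *	modrm_ea = (u16)(bp + si + 4);
 *
 * and, because rm=2 is a BP-based form with no explicit override, the
 * effective segment defaults to SS rather than DS.
 */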
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
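		/*
		 * Illustrative note (not part of the original source):
		 * the XOR trick flips between the two legal widths
		 * without a branch -- 2 ^ 6 == 4 and 4 ^ 6 == 2 for
		 * operand size; 8 ^ 12 == 4 and 4 ^ 12 == 8 for the
		 * 64-bit address-size case.
		 */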
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;
	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		/* Don't fetch the address for invlpg: it could be unmapped. */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if Mod = 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
	case SrcImmUByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		if ((c->d & SrcMask) == SrcImmByte)
			c->src.val = insn_fetch(s8, 1, c->eip);
		else
			c->src.val = insn_fetch(u8, 1, c->eip);
		break;
	case SrcOne:
		c->src.bytes = 1;
		c->src.val = 1;
		break;
	}
	/*
	 * Decode and fetch the second source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & Src2Mask) {
	case Src2None:
		break;
	case Src2CL:
		c->src2.bytes = 1;
		/* CL is the low byte of RCX; the original "& 0x8" kept
		 * only bit 3 and was later fixed upstream to "& 0xff". */
		c->src2.val = c->regs[VCPU_REGS_RCX] & 0xff;
		break;
	case Src2ImmByte:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 1;
		c->src2.val = insn_fetch(u8, 1, c->eip);
		break;
	case Src2Imm16:
		c->src2.type = OP_IMM;
		c->src2.ptr = (unsigned long *)c->eip;
		c->src2.bytes = 2;
		c->src2.val = insn_fetch(u16, 2, c->eip);
		break;
	case Src2One:
		c->src2.bytes = 1;
		c->src2.val = 1;
		break;
	}
	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
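
/*
 * Example (illustrative, not part of the original source): decoding
 * the bytes "01 03" (add %eax,(%rbx)) in 64-bit mode walks the path
 * above as follows: no prefixes; opcode_table[0x01] gives
 * DstMem | SrcReg | ModRM; decode_modrm() sees mod=0, reg=0 (RAX),
 * rm=3 (RBX) and sets modrm_ea = regs[VCPU_REGS_RBX]; the source
 * becomes OP_REG on RAX and the destination OP_MEM at modrm_ea.
 */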
static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;

	c->dst.type  = OP_MEM;
	c->dst.bytes = c->op_bytes;
	c->dst.val = c->src.val;
	register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
	c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
					       c->regs[VCPU_REGS_RSP]);
}
static int emulate_pop(struct x86_emulate_ctxt *ctxt,
		       struct x86_emulate_ops *ops,
		       void *dest, int len)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_emulated(register_address(c, ss_base(ctxt),
						 c->regs[VCPU_REGS_RSP]),
				dest, len, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], len);
	return rc;
}
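
/*
 * Example (illustrative, not part of the original source):
 * emulate_push() only stages the write -- it decrements RSP and points
 * c->dst at the new top of stack; the actual store happens later in
 * writeback(). emulate_pop() does the read inline and then bumps RSP:
 *
 *	c->src.val = value;			// "value" is hypothetical
 *	emulate_push(ctxt);			// store deferred to writeback()
 *	rc = emulate_pop(ctxt, ops, &value, c->op_bytes);
 */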
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = emulate_pop(ctxt, ops, &c->dst.val, c->dst.bytes);
	if (rc != 0)
		return rc;
	return 0;
}
static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
{
	struct decode_cache *c = &ctxt->decode;
	switch (c->modrm_reg) {
	case 0:	/* rol */
		emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
		break;
	case 1:	/* ror */
		emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* rcl */
		emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
		break;
	case 3:	/* rcr */
		emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
		break;
	case 4:	/* sal/shl */
	case 6:	/* sal/shl */
		emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
		break;
	case 5:	/* shr */
		emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
		break;
	case 7:	/* sar */
		emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
		break;
	}
}
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs */ {
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4: /* jmp abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
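
/*
 * Illustrative note (not part of the original source): grp9 implements
 * cmpxchg8b semantics -- if the 64-bit value at [memop] equals EDX:EAX
 * it is replaced by ECX:EBX and ZF is set; otherwise the old value is
 * loaded into EDX:EAX and ZF is cleared. The swap itself goes through
 * ops->cmpxchg_emulated() rather than a plain write.
 */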
static int emulate_ret_far(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;
	unsigned long cs;

	rc = emulate_pop(ctxt, ops, &c->eip, c->op_bytes);
	if (rc)
		return rc;
	if (c->op_bytes == 4)
		c->eip = (u32)c->eip;
	rc = emulate_pop(ctxt, ops, &cs, c->op_bytes);
	if (rc)
		return rc;
	rc = kvm_load_segment_descriptor(ctxt->vcpu, (u16)cs, 1, VCPU_SREG_CS);
	return rc;
}
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/* The 4-byte case *is* correct:
		 * in 64-bit mode we zero-extend.
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;	/* 64b: zero-ext */
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* no writeback */
		break;
	default:
		break;
	}
	return 0;
}
static void toggle_interruptibility(struct x86_emulate_ctxt *ctxt, u32 mask)
{
	u32 int_shadow = kvm_x86_ops->get_interrupt_shadow(ctxt->vcpu, mask);
	/*
	 * An sti; sti; sequence only disables interrupts for the first
	 * instruction. So, if the last instruction, be it emulated or
	 * not, left the system with the INT_STI flag enabled, it
	 * means that the last instruction is an sti. We should not
	 * leave the flag on in this case. The same goes for mov ss.
	 */
	if (!(int_shadow & mask))
		ctxt->interruptibility = mask;
}
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	ctxt->interruptibility = 0;

	/* Shadow copy of register state. Committed on successful emulation.
	 * NOTE: we can copy them from vcpu as x86_decode_insn() doesn't
	 * modify them.
	 */

	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	saved_eip = c->eip;

	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;
	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes have the same first termination condition */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/* The second termination condition only applies for REPE
		 * and REPNE. Test if the repeat string operation prefix is
		 * REPE/REPZ or REPNE/REPNZ and if it's the case it tests the
		 * corresponding termination condition according to:
		 *	- if REPE/REPZ and ZF = 0 then done
		 *	- if REPNE/REPNZ and ZF = 1 then done
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						(c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					      &c->dst.val,
					      c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;
	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		emulate_push(ctxt);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		rc = emulate_pop(ctxt, ops, &c->dst.val, c->op_bytes);
		if (rc != 0)
			goto done;
		break;
	case 0x63:		/* movsxd */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,	/* in: device -> memory */
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,	/* out: memory -> device */
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1;
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		break;
	case 0x80 ... 0x83:	/* Grp1 */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85:
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register source. */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;	/* 64b reg: zero-extend */
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix.
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg == VCPU_SREG_SS)
			toggle_interruptibility(ctxt, X86_SHADOW_INT_MOV_SS);

		if (c->modrm_reg <= 5) {
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* Disable writeback. */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) {	/* nop */
			c->dst.type = OP_NONE;
			break;
		}
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.type = OP_REG;
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				    (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							     : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				    (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							     : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* Disable writeback. */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* Disable writeback. */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				    (ctxt->eflags & EFLG_DF) ? -c->src.bytes
							     : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				    (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							     : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						es_base(ctxt),
						c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				    (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							     : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						seg_override_base(ctxt, c),
						c->regs[VCPU_REGS_RSI]),
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				    (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							     : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.type = OP_REG;
		c->dst.ptr = &c->eip;
		c->dst.bytes = c->op_bytes;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xcb:		/* ret far */
		rc = emulate_ret_far(ctxt, ops);
		if (rc)
			goto done;
		break;
	case 0xd0 ... 0xd1:	/* Grp2 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2 */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4:	/* inb */
	case 0xe5:	/* in */
		port = c->src.val;
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb */
	case 0xe7: /* out */
		port = c->src.val;
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel = c->src.val;
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far */
		if (kvm_load_segment_descriptor(ctxt->vcpu, c->src2.val, 9,
						VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = c->src.val;
		break;
	case 0xeb:
	      jmp:		/* jmp rel short */
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				    (c->d & ByteOp) ? 1 : c->op_bytes,
				    port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		break;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc */
		/* complement carry flag from eflags reg */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3 */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		toggle_interruptibility(ctxt, X86_SHADOW_INT_STI);
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}
writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit shadow register state. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;
twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3) {
				switch (c->modrm_rm) {
				case 1:
					rc = kvm_fix_hypercall(ctxt->vcpu);
					if (rc)
						goto done;
					break;
				default:
					goto cannot_emulate;
				}
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg*/
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06:
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE;
		break;
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xa4: /* shld imm8, r, r/m */
	case 0xa5: /* shld cl, r, r/m */
		emulate_2op_cl("shld", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xac: /* shrd imm8, r, r/m */
	case 0xad: /* shrd cl, r, r/m */
		emulate_2op_cl("shrd", c->src2, c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						: (u16) c->src.val;
		break;
	case 0xba:		/* Grp8 */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
						(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
						  (u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}