tcg-i386: Tidy initialization of tcg_target_call_clobber_regs.
[qemu/mdroth.git] / tcg / i386 / tcg-target.c
blob4e33b25ef8487f99eb1e3ea636fe91984f0ce3fd
1 /*
2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
25 #ifndef NDEBUG
26 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
27 "%eax",
28 "%ecx",
29 "%edx",
30 "%ebx",
31 "%esp",
32 "%ebp",
33 "%esi",
34 "%edi",
36 #endif
38 static const int tcg_target_reg_alloc_order[] = {
39 TCG_REG_EBX,
40 TCG_REG_ESI,
41 TCG_REG_EDI,
42 TCG_REG_EBP,
43 TCG_REG_ECX,
44 TCG_REG_EDX,
45 TCG_REG_EAX,
48 static const int tcg_target_call_iarg_regs[3] = { TCG_REG_EAX, TCG_REG_EDX, TCG_REG_ECX };
49 static const int tcg_target_call_oarg_regs[2] = { TCG_REG_EAX, TCG_REG_EDX };
51 static uint8_t *tb_ret_addr;
53 static void patch_reloc(uint8_t *code_ptr, int type,
54 tcg_target_long value, tcg_target_long addend)
56 value += addend;
57 switch(type) {
58 case R_386_32:
59 *(uint32_t *)code_ptr = value;
60 break;
61 case R_386_PC32:
62 *(uint32_t *)code_ptr = value - (long)code_ptr;
63 break;
64 case R_386_PC8:
65 value -= (long)code_ptr;
66 if (value != (int8_t)value) {
67 tcg_abort();
69 *(uint8_t *)code_ptr = value;
70 break;
71 default:
72 tcg_abort();
76 /* maximum number of register used for input function arguments */
77 static inline int tcg_target_get_call_iarg_regs_count(int flags)
79 flags &= TCG_CALL_TYPE_MASK;
80 switch(flags) {
81 case TCG_CALL_TYPE_STD:
82 return 0;
83 case TCG_CALL_TYPE_REGPARM_1:
84 case TCG_CALL_TYPE_REGPARM_2:
85 case TCG_CALL_TYPE_REGPARM:
86 return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
87 default:
88 tcg_abort();
92 /* parse target specific constraints */
93 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
95 const char *ct_str;
97 ct_str = *pct_str;
98 switch(ct_str[0]) {
99 case 'a':
100 ct->ct |= TCG_CT_REG;
101 tcg_regset_set_reg(ct->u.regs, TCG_REG_EAX);
102 break;
103 case 'b':
104 ct->ct |= TCG_CT_REG;
105 tcg_regset_set_reg(ct->u.regs, TCG_REG_EBX);
106 break;
107 case 'c':
108 ct->ct |= TCG_CT_REG;
109 tcg_regset_set_reg(ct->u.regs, TCG_REG_ECX);
110 break;
111 case 'd':
112 ct->ct |= TCG_CT_REG;
113 tcg_regset_set_reg(ct->u.regs, TCG_REG_EDX);
114 break;
115 case 'S':
116 ct->ct |= TCG_CT_REG;
117 tcg_regset_set_reg(ct->u.regs, TCG_REG_ESI);
118 break;
119 case 'D':
120 ct->ct |= TCG_CT_REG;
121 tcg_regset_set_reg(ct->u.regs, TCG_REG_EDI);
122 break;
123 case 'q':
124 ct->ct |= TCG_CT_REG;
125 tcg_regset_set32(ct->u.regs, 0, 0xf);
126 break;
127 case 'r':
128 ct->ct |= TCG_CT_REG;
129 tcg_regset_set32(ct->u.regs, 0, 0xff);
130 break;
132 /* qemu_ld/st address constraint */
133 case 'L':
134 ct->ct |= TCG_CT_REG;
135 tcg_regset_set32(ct->u.regs, 0, 0xff);
136 tcg_regset_reset_reg(ct->u.regs, TCG_REG_EAX);
137 tcg_regset_reset_reg(ct->u.regs, TCG_REG_EDX);
138 break;
139 default:
140 return -1;
142 ct_str++;
143 *pct_str = ct_str;
144 return 0;
147 /* test if a constant matches the constraint */
148 static inline int tcg_target_const_match(tcg_target_long val,
149 const TCGArgConstraint *arg_ct)
151 int ct;
152 ct = arg_ct->ct;
153 if (ct & TCG_CT_CONST)
154 return 1;
155 else
156 return 0;
/* /r field values for the 0x81/0x83 group-1 ALU opcodes.  */
#define ARITH_ADD 0
#define ARITH_OR  1
#define ARITH_ADC 2
#define ARITH_SBB 3
#define ARITH_AND 4
#define ARITH_SUB 5
#define ARITH_XOR 6
#define ARITH_CMP 7

/* /r field values for the 0xc1/0xd1/0xd3 shift/rotate group.  */
#define SHIFT_ROL 0
#define SHIFT_ROR 1
#define SHIFT_SHL 4
#define SHIFT_SHR 5
#define SHIFT_SAR 7

/* Condition-code nibbles for Jcc/SETcc; JCC_JMP marks an
   unconditional jump.  */
#define JCC_JMP (-1)
#define JCC_JO  0x0
#define JCC_JNO 0x1
#define JCC_JB  0x2
#define JCC_JAE 0x3
#define JCC_JE  0x4
#define JCC_JNE 0x5
#define JCC_JBE 0x6
#define JCC_JA  0x7
#define JCC_JS  0x8
#define JCC_JNS 0x9
#define JCC_JP  0xa
#define JCC_JNP 0xb
#define JCC_JL  0xc
#define JCC_JGE 0xd
#define JCC_JLE 0xe
#define JCC_JG  0xf

#define P_EXT 0x100 /* 0x0f opcode prefix */
194 static const uint8_t tcg_cond_to_jcc[10] = {
195 [TCG_COND_EQ] = JCC_JE,
196 [TCG_COND_NE] = JCC_JNE,
197 [TCG_COND_LT] = JCC_JL,
198 [TCG_COND_GE] = JCC_JGE,
199 [TCG_COND_LE] = JCC_JLE,
200 [TCG_COND_GT] = JCC_JG,
201 [TCG_COND_LTU] = JCC_JB,
202 [TCG_COND_GEU] = JCC_JAE,
203 [TCG_COND_LEU] = JCC_JBE,
204 [TCG_COND_GTU] = JCC_JA,
207 static inline void tcg_out_opc(TCGContext *s, int opc)
209 if (opc & P_EXT)
210 tcg_out8(s, 0x0f);
211 tcg_out8(s, opc);
214 static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
216 tcg_out_opc(s, opc);
217 tcg_out8(s, 0xc0 | (r << 3) | rm);
220 /* rm == -1 means no register index */
221 static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
222 int32_t offset)
224 tcg_out_opc(s, opc);
225 if (rm == -1) {
226 tcg_out8(s, 0x05 | (r << 3));
227 tcg_out32(s, offset);
228 } else if (offset == 0 && rm != TCG_REG_EBP) {
229 if (rm == TCG_REG_ESP) {
230 tcg_out8(s, 0x04 | (r << 3));
231 tcg_out8(s, 0x24);
232 } else {
233 tcg_out8(s, 0x00 | (r << 3) | rm);
235 } else if ((int8_t)offset == offset) {
236 if (rm == TCG_REG_ESP) {
237 tcg_out8(s, 0x44 | (r << 3));
238 tcg_out8(s, 0x24);
239 } else {
240 tcg_out8(s, 0x40 | (r << 3) | rm);
242 tcg_out8(s, offset);
243 } else {
244 if (rm == TCG_REG_ESP) {
245 tcg_out8(s, 0x84 | (r << 3));
246 tcg_out8(s, 0x24);
247 } else {
248 tcg_out8(s, 0x80 | (r << 3) | rm);
250 tcg_out32(s, offset);
254 static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
256 if (arg != ret)
257 tcg_out_modrm(s, 0x8b, ret, arg);
260 static inline void tcg_out_movi(TCGContext *s, TCGType type,
261 int ret, int32_t arg)
263 if (arg == 0) {
264 /* xor r0,r0 */
265 tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret);
266 } else {
267 tcg_out8(s, 0xb8 + ret);
268 tcg_out32(s, arg);
272 static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
273 int arg1, tcg_target_long arg2)
275 /* movl */
276 tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2);
279 static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
280 int arg1, tcg_target_long arg2)
282 /* movl */
283 tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2);
286 static inline void tgen_arithi(TCGContext *s, int c, int r0, int32_t val, int cf)
288 if (!cf && ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1))) {
289 /* inc */
290 tcg_out_opc(s, 0x40 + r0);
291 } else if (!cf && ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1))) {
292 /* dec */
293 tcg_out_opc(s, 0x48 + r0);
294 } else if (val == (int8_t)val) {
295 tcg_out_modrm(s, 0x83, c, r0);
296 tcg_out8(s, val);
297 } else if (c == ARITH_AND && val == 0xffu && r0 < 4) {
298 /* movzbl */
299 tcg_out_modrm(s, 0xb6 | P_EXT, r0, r0);
300 } else if (c == ARITH_AND && val == 0xffffu) {
301 /* movzwl */
302 tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
303 } else {
304 tcg_out_modrm(s, 0x81, c, r0);
305 tcg_out32(s, val);
309 static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
311 if (val != 0)
312 tgen_arithi(s, ARITH_ADD, reg, val, 0);
315 /* Use SMALL != 0 to force a short forward branch. */
316 static void tcg_out_jxx(TCGContext *s, int opc, int label_index, int small)
318 int32_t val, val1;
319 TCGLabel *l = &s->labels[label_index];
321 if (l->has_value) {
322 val = l->u.value - (tcg_target_long)s->code_ptr;
323 val1 = val - 2;
324 if ((int8_t)val1 == val1) {
325 if (opc == -1) {
326 tcg_out8(s, 0xeb);
327 } else {
328 tcg_out8(s, 0x70 + opc);
330 tcg_out8(s, val1);
331 } else {
332 if (small) {
333 tcg_abort();
335 if (opc == -1) {
336 tcg_out8(s, 0xe9);
337 tcg_out32(s, val - 5);
338 } else {
339 tcg_out8(s, 0x0f);
340 tcg_out8(s, 0x80 + opc);
341 tcg_out32(s, val - 6);
344 } else if (small) {
345 if (opc == -1) {
346 tcg_out8(s, 0xeb);
347 } else {
348 tcg_out8(s, 0x70 + opc);
350 tcg_out_reloc(s, s->code_ptr, R_386_PC8, label_index, -1);
351 s->code_ptr += 1;
352 } else {
353 if (opc == -1) {
354 tcg_out8(s, 0xe9);
355 } else {
356 tcg_out8(s, 0x0f);
357 tcg_out8(s, 0x80 + opc);
359 tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
360 s->code_ptr += 4;
364 static void tcg_out_cmp(TCGContext *s, TCGArg arg1, TCGArg arg2,
365 int const_arg2)
367 if (const_arg2) {
368 if (arg2 == 0) {
369 /* test r, r */
370 tcg_out_modrm(s, 0x85, arg1, arg1);
371 } else {
372 tgen_arithi(s, ARITH_CMP, arg1, arg2, 0);
374 } else {
375 tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3), arg2, arg1);
379 static void tcg_out_brcond(TCGContext *s, TCGCond cond,
380 TCGArg arg1, TCGArg arg2, int const_arg2,
381 int label_index, int small)
383 tcg_out_cmp(s, arg1, arg2, const_arg2);
384 tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index, small);
387 /* XXX: we implement it at the target level to avoid having to
388 handle cross basic blocks temporaries */
389 static void tcg_out_brcond2(TCGContext *s, const TCGArg *args,
390 const int *const_args, int small)
392 int label_next;
393 label_next = gen_new_label();
394 switch(args[4]) {
395 case TCG_COND_EQ:
396 tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
397 label_next, 1);
398 tcg_out_brcond(s, TCG_COND_EQ, args[1], args[3], const_args[3],
399 args[5], small);
400 break;
401 case TCG_COND_NE:
402 tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
403 args[5], small);
404 tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3],
405 args[5], small);
406 break;
407 case TCG_COND_LT:
408 tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
409 args[5], small);
410 tcg_out_jxx(s, JCC_JNE, label_next, 1);
411 tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
412 args[5], small);
413 break;
414 case TCG_COND_LE:
415 tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
416 args[5], small);
417 tcg_out_jxx(s, JCC_JNE, label_next, 1);
418 tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
419 args[5], small);
420 break;
421 case TCG_COND_GT:
422 tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
423 args[5], small);
424 tcg_out_jxx(s, JCC_JNE, label_next, 1);
425 tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
426 args[5], small);
427 break;
428 case TCG_COND_GE:
429 tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
430 args[5], small);
431 tcg_out_jxx(s, JCC_JNE, label_next, 1);
432 tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
433 args[5], small);
434 break;
435 case TCG_COND_LTU:
436 tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
437 args[5], small);
438 tcg_out_jxx(s, JCC_JNE, label_next, 1);
439 tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
440 args[5], small);
441 break;
442 case TCG_COND_LEU:
443 tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
444 args[5], small);
445 tcg_out_jxx(s, JCC_JNE, label_next, 1);
446 tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
447 args[5], small);
448 break;
449 case TCG_COND_GTU:
450 tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
451 args[5], small);
452 tcg_out_jxx(s, JCC_JNE, label_next, 1);
453 tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
454 args[5], small);
455 break;
456 case TCG_COND_GEU:
457 tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
458 args[5], small);
459 tcg_out_jxx(s, JCC_JNE, label_next, 1);
460 tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
461 args[5], small);
462 break;
463 default:
464 tcg_abort();
466 tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
469 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg dest,
470 TCGArg arg1, TCGArg arg2, int const_arg2)
472 tcg_out_cmp(s, arg1, arg2, const_arg2);
473 /* setcc */
474 tcg_out_modrm(s, 0x90 | tcg_cond_to_jcc[cond] | P_EXT, 0, dest);
475 tgen_arithi(s, ARITH_AND, dest, 0xff, 0);
478 static void tcg_out_setcond2(TCGContext *s, const TCGArg *args,
479 const int *const_args)
481 TCGArg new_args[6];
482 int label_true, label_over;
484 memcpy(new_args, args+1, 5*sizeof(TCGArg));
486 if (args[0] == args[1] || args[0] == args[2]
487 || (!const_args[3] && args[0] == args[3])
488 || (!const_args[4] && args[0] == args[4])) {
489 /* When the destination overlaps with one of the argument
490 registers, don't do anything tricky. */
491 label_true = gen_new_label();
492 label_over = gen_new_label();
494 new_args[5] = label_true;
495 tcg_out_brcond2(s, new_args, const_args+1, 1);
497 tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);
498 tcg_out_jxx(s, JCC_JMP, label_over, 1);
499 tcg_out_label(s, label_true, (tcg_target_long)s->code_ptr);
501 tcg_out_movi(s, TCG_TYPE_I32, args[0], 1);
502 tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
503 } else {
504 /* When the destination does not overlap one of the arguments,
505 clear the destination first, jump if cond false, and emit an
506 increment in the true case. This results in smaller code. */
508 tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);
510 label_over = gen_new_label();
511 new_args[4] = tcg_invert_cond(new_args[4]);
512 new_args[5] = label_over;
513 tcg_out_brcond2(s, new_args, const_args+1, 1);
515 tgen_arithi(s, ARITH_ADD, args[0], 1, 0);
516 tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path load/store helpers, indexed by log2 of the access size.  */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif

#ifndef CONFIG_USER_ONLY
#define GUEST_BASE 0
#endif
543 /* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
544 EAX. It will be useful once fixed registers globals are less
545 common. */
546 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
547 int opc)
549 int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
550 #if defined(CONFIG_SOFTMMU)
551 uint8_t *label1_ptr, *label2_ptr;
552 #endif
553 #if TARGET_LONG_BITS == 64
554 #if defined(CONFIG_SOFTMMU)
555 uint8_t *label3_ptr;
556 #endif
557 int addr_reg2;
558 #endif
560 data_reg = *args++;
561 if (opc == 3)
562 data_reg2 = *args++;
563 else
564 data_reg2 = 0;
565 addr_reg = *args++;
566 #if TARGET_LONG_BITS == 64
567 addr_reg2 = *args++;
568 #endif
569 mem_index = *args;
570 s_bits = opc & 3;
572 r0 = TCG_REG_EAX;
573 r1 = TCG_REG_EDX;
575 #if defined(CONFIG_SOFTMMU)
576 tcg_out_mov(s, r1, addr_reg);
578 tcg_out_mov(s, r0, addr_reg);
580 tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
581 tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
583 tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
584 tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
586 tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
587 tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
589 tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
590 tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
591 tcg_out8(s, (5 << 3) | r1);
592 tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_read));
594 /* cmp 0(r1), r0 */
595 tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
597 tcg_out_mov(s, r0, addr_reg);
599 #if TARGET_LONG_BITS == 32
600 /* je label1 */
601 tcg_out8(s, 0x70 + JCC_JE);
602 label1_ptr = s->code_ptr;
603 s->code_ptr++;
604 #else
605 /* jne label3 */
606 tcg_out8(s, 0x70 + JCC_JNE);
607 label3_ptr = s->code_ptr;
608 s->code_ptr++;
610 /* cmp 4(r1), addr_reg2 */
611 tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
613 /* je label1 */
614 tcg_out8(s, 0x70 + JCC_JE);
615 label1_ptr = s->code_ptr;
616 s->code_ptr++;
618 /* label3: */
619 *label3_ptr = s->code_ptr - label3_ptr - 1;
620 #endif
622 /* XXX: move that code at the end of the TB */
623 #if TARGET_LONG_BITS == 32
624 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EDX, mem_index);
625 #else
626 tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
627 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
628 #endif
629 tcg_out8(s, 0xe8);
630 tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
631 (tcg_target_long)s->code_ptr - 4);
633 switch(opc) {
634 case 0 | 4:
635 /* movsbl */
636 tcg_out_modrm(s, 0xbe | P_EXT, data_reg, TCG_REG_EAX);
637 break;
638 case 1 | 4:
639 /* movswl */
640 tcg_out_modrm(s, 0xbf | P_EXT, data_reg, TCG_REG_EAX);
641 break;
642 case 0:
643 /* movzbl */
644 tcg_out_modrm(s, 0xb6 | P_EXT, data_reg, TCG_REG_EAX);
645 break;
646 case 1:
647 /* movzwl */
648 tcg_out_modrm(s, 0xb7 | P_EXT, data_reg, TCG_REG_EAX);
649 break;
650 case 2:
651 default:
652 tcg_out_mov(s, data_reg, TCG_REG_EAX);
653 break;
654 case 3:
655 if (data_reg == TCG_REG_EDX) {
656 tcg_out_opc(s, 0x90 + TCG_REG_EDX); /* xchg %edx, %eax */
657 tcg_out_mov(s, data_reg2, TCG_REG_EAX);
658 } else {
659 tcg_out_mov(s, data_reg, TCG_REG_EAX);
660 tcg_out_mov(s, data_reg2, TCG_REG_EDX);
662 break;
665 /* jmp label2 */
666 tcg_out8(s, 0xeb);
667 label2_ptr = s->code_ptr;
668 s->code_ptr++;
670 /* label1: */
671 *label1_ptr = s->code_ptr - label1_ptr - 1;
673 /* add x(r1), r0 */
674 tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
675 offsetof(CPUTLBEntry, addr_read));
676 #else
677 r0 = addr_reg;
678 #endif
680 #ifdef TARGET_WORDS_BIGENDIAN
681 bswap = 1;
682 #else
683 bswap = 0;
684 #endif
685 switch(opc) {
686 case 0:
687 /* movzbl */
688 tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, GUEST_BASE);
689 break;
690 case 0 | 4:
691 /* movsbl */
692 tcg_out_modrm_offset(s, 0xbe | P_EXT, data_reg, r0, GUEST_BASE);
693 break;
694 case 1:
695 /* movzwl */
696 tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, GUEST_BASE);
697 if (bswap) {
698 /* rolw $8, data_reg */
699 tcg_out8(s, 0x66);
700 tcg_out_modrm(s, 0xc1, 0, data_reg);
701 tcg_out8(s, 8);
703 break;
704 case 1 | 4:
705 /* movswl */
706 tcg_out_modrm_offset(s, 0xbf | P_EXT, data_reg, r0, GUEST_BASE);
707 if (bswap) {
708 /* rolw $8, data_reg */
709 tcg_out8(s, 0x66);
710 tcg_out_modrm(s, 0xc1, 0, data_reg);
711 tcg_out8(s, 8);
713 /* movswl data_reg, data_reg */
714 tcg_out_modrm(s, 0xbf | P_EXT, data_reg, data_reg);
716 break;
717 case 2:
718 /* movl (r0), data_reg */
719 tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE);
720 if (bswap) {
721 /* bswap */
722 tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);
724 break;
725 case 3:
726 /* XXX: could be nicer */
727 if (r0 == data_reg) {
728 r1 = TCG_REG_EDX;
729 if (r1 == data_reg)
730 r1 = TCG_REG_EAX;
731 tcg_out_mov(s, r1, r0);
732 r0 = r1;
734 if (!bswap) {
735 tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE);
736 tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, GUEST_BASE + 4);
737 } else {
738 tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE + 4);
739 tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);
741 tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, GUEST_BASE);
742 /* bswap */
743 tcg_out_opc(s, (0xc8 + data_reg2) | P_EXT);
745 break;
746 default:
747 tcg_abort();
750 #if defined(CONFIG_SOFTMMU)
751 /* label2: */
752 *label2_ptr = s->code_ptr - label2_ptr - 1;
753 #endif
757 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
758 int opc)
760 int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
761 #if defined(CONFIG_SOFTMMU)
762 uint8_t *label1_ptr, *label2_ptr;
763 #endif
764 #if TARGET_LONG_BITS == 64
765 #if defined(CONFIG_SOFTMMU)
766 uint8_t *label3_ptr;
767 #endif
768 int addr_reg2;
769 #endif
771 data_reg = *args++;
772 if (opc == 3)
773 data_reg2 = *args++;
774 else
775 data_reg2 = 0;
776 addr_reg = *args++;
777 #if TARGET_LONG_BITS == 64
778 addr_reg2 = *args++;
779 #endif
780 mem_index = *args;
782 s_bits = opc;
784 r0 = TCG_REG_EAX;
785 r1 = TCG_REG_EDX;
787 #if defined(CONFIG_SOFTMMU)
788 tcg_out_mov(s, r1, addr_reg);
790 tcg_out_mov(s, r0, addr_reg);
792 tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
793 tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
795 tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
796 tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
798 tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
799 tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
801 tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
802 tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
803 tcg_out8(s, (5 << 3) | r1);
804 tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_write));
806 /* cmp 0(r1), r0 */
807 tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);
809 tcg_out_mov(s, r0, addr_reg);
811 #if TARGET_LONG_BITS == 32
812 /* je label1 */
813 tcg_out8(s, 0x70 + JCC_JE);
814 label1_ptr = s->code_ptr;
815 s->code_ptr++;
816 #else
817 /* jne label3 */
818 tcg_out8(s, 0x70 + JCC_JNE);
819 label3_ptr = s->code_ptr;
820 s->code_ptr++;
822 /* cmp 4(r1), addr_reg2 */
823 tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);
825 /* je label1 */
826 tcg_out8(s, 0x70 + JCC_JE);
827 label1_ptr = s->code_ptr;
828 s->code_ptr++;
830 /* label3: */
831 *label3_ptr = s->code_ptr - label3_ptr - 1;
832 #endif
834 /* XXX: move that code at the end of the TB */
835 #if TARGET_LONG_BITS == 32
836 if (opc == 3) {
837 tcg_out_mov(s, TCG_REG_EDX, data_reg);
838 tcg_out_mov(s, TCG_REG_ECX, data_reg2);
839 tcg_out8(s, 0x6a); /* push Ib */
840 tcg_out8(s, mem_index);
841 tcg_out8(s, 0xe8);
842 tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
843 (tcg_target_long)s->code_ptr - 4);
844 tcg_out_addi(s, TCG_REG_ESP, 4);
845 } else {
846 switch(opc) {
847 case 0:
848 /* movzbl */
849 tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_EDX, data_reg);
850 break;
851 case 1:
852 /* movzwl */
853 tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_EDX, data_reg);
854 break;
855 case 2:
856 tcg_out_mov(s, TCG_REG_EDX, data_reg);
857 break;
859 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
860 tcg_out8(s, 0xe8);
861 tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
862 (tcg_target_long)s->code_ptr - 4);
864 #else
865 if (opc == 3) {
866 tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
867 tcg_out8(s, 0x6a); /* push Ib */
868 tcg_out8(s, mem_index);
869 tcg_out_opc(s, 0x50 + data_reg2); /* push */
870 tcg_out_opc(s, 0x50 + data_reg); /* push */
871 tcg_out8(s, 0xe8);
872 tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
873 (tcg_target_long)s->code_ptr - 4);
874 tcg_out_addi(s, TCG_REG_ESP, 12);
875 } else {
876 tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
877 switch(opc) {
878 case 0:
879 /* movzbl */
880 tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_ECX, data_reg);
881 break;
882 case 1:
883 /* movzwl */
884 tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_ECX, data_reg);
885 break;
886 case 2:
887 tcg_out_mov(s, TCG_REG_ECX, data_reg);
888 break;
890 tcg_out8(s, 0x6a); /* push Ib */
891 tcg_out8(s, mem_index);
892 tcg_out8(s, 0xe8);
893 tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
894 (tcg_target_long)s->code_ptr - 4);
895 tcg_out_addi(s, TCG_REG_ESP, 4);
897 #endif
899 /* jmp label2 */
900 tcg_out8(s, 0xeb);
901 label2_ptr = s->code_ptr;
902 s->code_ptr++;
904 /* label1: */
905 *label1_ptr = s->code_ptr - label1_ptr - 1;
907 /* add x(r1), r0 */
908 tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
909 offsetof(CPUTLBEntry, addr_write));
910 #else
911 r0 = addr_reg;
912 #endif
914 #ifdef TARGET_WORDS_BIGENDIAN
915 bswap = 1;
916 #else
917 bswap = 0;
918 #endif
919 switch(opc) {
920 case 0:
921 /* movb */
922 tcg_out_modrm_offset(s, 0x88, data_reg, r0, GUEST_BASE);
923 break;
924 case 1:
925 if (bswap) {
926 tcg_out_mov(s, r1, data_reg);
927 tcg_out8(s, 0x66); /* rolw $8, %ecx */
928 tcg_out_modrm(s, 0xc1, 0, r1);
929 tcg_out8(s, 8);
930 data_reg = r1;
932 /* movw */
933 tcg_out8(s, 0x66);
934 tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
935 break;
936 case 2:
937 if (bswap) {
938 tcg_out_mov(s, r1, data_reg);
939 /* bswap data_reg */
940 tcg_out_opc(s, (0xc8 + r1) | P_EXT);
941 data_reg = r1;
943 /* movl */
944 tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
945 break;
946 case 3:
947 if (bswap) {
948 tcg_out_mov(s, r1, data_reg2);
949 /* bswap data_reg */
950 tcg_out_opc(s, (0xc8 + r1) | P_EXT);
951 tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE);
952 tcg_out_mov(s, r1, data_reg);
953 /* bswap data_reg */
954 tcg_out_opc(s, (0xc8 + r1) | P_EXT);
955 tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE + 4);
956 } else {
957 tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
958 tcg_out_modrm_offset(s, 0x89, data_reg2, r0, GUEST_BASE + 4);
960 break;
961 default:
962 tcg_abort();
965 #if defined(CONFIG_SOFTMMU)
966 /* label2: */
967 *label2_ptr = s->code_ptr - label2_ptr - 1;
968 #endif
971 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
972 const TCGArg *args, const int *const_args)
974 int c;
976 switch(opc) {
977 case INDEX_op_exit_tb:
978 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EAX, args[0]);
979 tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
980 tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
981 break;
982 case INDEX_op_goto_tb:
983 if (s->tb_jmp_offset) {
984 /* direct jump method */
985 tcg_out8(s, 0xe9); /* jmp im */
986 s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
987 tcg_out32(s, 0);
988 } else {
989 /* indirect jump method */
990 /* jmp Ev */
991 tcg_out_modrm_offset(s, 0xff, 4, -1,
992 (tcg_target_long)(s->tb_next + args[0]));
994 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
995 break;
996 case INDEX_op_call:
997 if (const_args[0]) {
998 tcg_out8(s, 0xe8);
999 tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
1000 } else {
1001 tcg_out_modrm(s, 0xff, 2, args[0]);
1003 break;
1004 case INDEX_op_jmp:
1005 if (const_args[0]) {
1006 tcg_out8(s, 0xe9);
1007 tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
1008 } else {
1009 tcg_out_modrm(s, 0xff, 4, args[0]);
1011 break;
1012 case INDEX_op_br:
1013 tcg_out_jxx(s, JCC_JMP, args[0], 0);
1014 break;
1015 case INDEX_op_movi_i32:
1016 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1017 break;
1018 case INDEX_op_ld8u_i32:
1019 /* movzbl */
1020 tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
1021 break;
1022 case INDEX_op_ld8s_i32:
1023 /* movsbl */
1024 tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
1025 break;
1026 case INDEX_op_ld16u_i32:
1027 /* movzwl */
1028 tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
1029 break;
1030 case INDEX_op_ld16s_i32:
1031 /* movswl */
1032 tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
1033 break;
1034 case INDEX_op_ld_i32:
1035 /* movl */
1036 tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
1037 break;
1038 case INDEX_op_st8_i32:
1039 /* movb */
1040 tcg_out_modrm_offset(s, 0x88, args[0], args[1], args[2]);
1041 break;
1042 case INDEX_op_st16_i32:
1043 /* movw */
1044 tcg_out8(s, 0x66);
1045 tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
1046 break;
1047 case INDEX_op_st_i32:
1048 /* movl */
1049 tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
1050 break;
1051 case INDEX_op_sub_i32:
1052 c = ARITH_SUB;
1053 goto gen_arith;
1054 case INDEX_op_and_i32:
1055 c = ARITH_AND;
1056 goto gen_arith;
1057 case INDEX_op_or_i32:
1058 c = ARITH_OR;
1059 goto gen_arith;
1060 case INDEX_op_xor_i32:
1061 c = ARITH_XOR;
1062 goto gen_arith;
1063 case INDEX_op_add_i32:
1064 c = ARITH_ADD;
1065 gen_arith:
1066 if (const_args[2]) {
1067 tgen_arithi(s, c, args[0], args[2], 0);
1068 } else {
1069 tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
1071 break;
1072 case INDEX_op_mul_i32:
1073 if (const_args[2]) {
1074 int32_t val;
1075 val = args[2];
1076 if (val == (int8_t)val) {
1077 tcg_out_modrm(s, 0x6b, args[0], args[0]);
1078 tcg_out8(s, val);
1079 } else {
1080 tcg_out_modrm(s, 0x69, args[0], args[0]);
1081 tcg_out32(s, val);
1083 } else {
1084 tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
1086 break;
1087 case INDEX_op_mulu2_i32:
1088 tcg_out_modrm(s, 0xf7, 4, args[3]);
1089 break;
1090 case INDEX_op_div2_i32:
1091 tcg_out_modrm(s, 0xf7, 7, args[4]);
1092 break;
1093 case INDEX_op_divu2_i32:
1094 tcg_out_modrm(s, 0xf7, 6, args[4]);
1095 break;
1096 case INDEX_op_shl_i32:
1097 c = SHIFT_SHL;
1098 gen_shift32:
1099 if (const_args[2]) {
1100 if (args[2] == 1) {
1101 tcg_out_modrm(s, 0xd1, c, args[0]);
1102 } else {
1103 tcg_out_modrm(s, 0xc1, c, args[0]);
1104 tcg_out8(s, args[2]);
1106 } else {
1107 tcg_out_modrm(s, 0xd3, c, args[0]);
1109 break;
1110 case INDEX_op_shr_i32:
1111 c = SHIFT_SHR;
1112 goto gen_shift32;
1113 case INDEX_op_sar_i32:
1114 c = SHIFT_SAR;
1115 goto gen_shift32;
1116 case INDEX_op_rotl_i32:
1117 c = SHIFT_ROL;
1118 goto gen_shift32;
1119 case INDEX_op_rotr_i32:
1120 c = SHIFT_ROR;
1121 goto gen_shift32;
1123 case INDEX_op_add2_i32:
1124 if (const_args[4])
1125 tgen_arithi(s, ARITH_ADD, args[0], args[4], 1);
1126 else
1127 tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
1128 if (const_args[5])
1129 tgen_arithi(s, ARITH_ADC, args[1], args[5], 1);
1130 else
1131 tcg_out_modrm(s, 0x01 | (ARITH_ADC << 3), args[5], args[1]);
1132 break;
1133 case INDEX_op_sub2_i32:
1134 if (const_args[4])
1135 tgen_arithi(s, ARITH_SUB, args[0], args[4], 1);
1136 else
1137 tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
1138 if (const_args[5])
1139 tgen_arithi(s, ARITH_SBB, args[1], args[5], 1);
1140 else
1141 tcg_out_modrm(s, 0x01 | (ARITH_SBB << 3), args[5], args[1]);
1142 break;
1143 case INDEX_op_brcond_i32:
1144 tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
1145 args[3], 0);
1146 break;
1147 case INDEX_op_brcond2_i32:
1148 tcg_out_brcond2(s, args, const_args, 0);
1149 break;
1151 case INDEX_op_bswap16_i32:
1152 tcg_out8(s, 0x66);
1153 tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
1154 tcg_out8(s, 8);
1155 break;
1156 case INDEX_op_bswap32_i32:
1157 tcg_out_opc(s, (0xc8 + args[0]) | P_EXT);
1158 break;
1160 case INDEX_op_neg_i32:
1161 tcg_out_modrm(s, 0xf7, 3, args[0]);
1162 break;
1164 case INDEX_op_not_i32:
1165 tcg_out_modrm(s, 0xf7, 2, args[0]);
1166 break;
1168 case INDEX_op_ext8s_i32:
1169 tcg_out_modrm(s, 0xbe | P_EXT, args[0], args[1]);
1170 break;
1171 case INDEX_op_ext16s_i32:
1172 tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
1173 break;
1174 case INDEX_op_ext8u_i32:
1175 tcg_out_modrm(s, 0xb6 | P_EXT, args[0], args[1]);
1176 break;
1177 case INDEX_op_ext16u_i32:
1178 tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
1179 break;
1181 case INDEX_op_setcond_i32:
1182 tcg_out_setcond(s, args[3], args[0], args[1], args[2], const_args[2]);
1183 break;
1184 case INDEX_op_setcond2_i32:
1185 tcg_out_setcond2(s, args, const_args);
1186 break;
1188 case INDEX_op_qemu_ld8u:
1189 tcg_out_qemu_ld(s, args, 0);
1190 break;
1191 case INDEX_op_qemu_ld8s:
1192 tcg_out_qemu_ld(s, args, 0 | 4);
1193 break;
1194 case INDEX_op_qemu_ld16u:
1195 tcg_out_qemu_ld(s, args, 1);
1196 break;
1197 case INDEX_op_qemu_ld16s:
1198 tcg_out_qemu_ld(s, args, 1 | 4);
1199 break;
1200 case INDEX_op_qemu_ld32:
1201 tcg_out_qemu_ld(s, args, 2);
1202 break;
1203 case INDEX_op_qemu_ld64:
1204 tcg_out_qemu_ld(s, args, 3);
1205 break;
1207 case INDEX_op_qemu_st8:
1208 tcg_out_qemu_st(s, args, 0);
1209 break;
1210 case INDEX_op_qemu_st16:
1211 tcg_out_qemu_st(s, args, 1);
1212 break;
1213 case INDEX_op_qemu_st32:
1214 tcg_out_qemu_st(s, args, 2);
1215 break;
1216 case INDEX_op_qemu_st64:
1217 tcg_out_qemu_st(s, args, 3);
1218 break;
1220 default:
1221 tcg_abort();
/* Operand-constraint table for the i386 backend: for each opcode, one
   constraint string per operand, outputs first.  Letters used here:
     "r"        any 32-bit general register
     "q"        byte-addressable register (%eax/%ecx/%edx/%ebx)
     "a", "d"   specifically %eax / %edx (mul/div fixed operands)
     "0", "1"   must alias operand 0 / 1 (x86 two-address instructions)
     "i"        immediate operand allowed
     "L"        register usable around the qemu_ld/st slow-path helpers
   NOTE(review): the exact meaning of each letter (including "c" in the
   shift-count constraints and "cb" for qemu_st8) is defined by
   target_parse_constraint() elsewhere in this file -- confirm there.  */
static const TCGTargetOpDef x86_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    /* st8 needs a byte-addressable source register.  */
    { INDEX_op_st8_i32, { "q", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    /* Two-address ALU ops: destination must alias the first source.  */
    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    /* Shifts/rotates: variable count is register-constrained ("c").  */
    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },

    /* bswap/neg/not are emitted as read-modify-write on operand 0.  */
    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },

    /* 8-bit extensions read a byte register; 16-bit ones take any reg.  */
    { INDEX_op_ext8s_i32, { "r", "q" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8u_i32, { "r", "q"} },
    { INDEX_op_ext16u_i32, { "r", "r"} },

    /* setcond writes via setcc, which needs a byte-addressable dest.  */
    { INDEX_op_setcond_i32, { "q", "r", "ri" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },

    /* Guest addresses take one operand when the guest is 32-bit,
       two (low/high halves) when it is 64-bit.  */
#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif
    { -1 },
};
1307 static int tcg_target_callee_save_regs[] = {
1308 /* TCG_REG_EBP, */ /* currently used for the global env, so no
1309 need to save */
1310 TCG_REG_EBX,
1311 TCG_REG_ESI,
1312 TCG_REG_EDI,
/* Emit a one-byte "push %reg" (opcode 0x50 + register number).  */
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x50 + reg);
}
/* Emit a one-byte "pop %reg" (opcode 0x58 + register number).  */
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x58 + reg);
}
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }
    /* reserve some stack space */
    /* The extra 4 bytes account for the return address already pushed
       by whoever called into the generated code.  */
    push_size = 4 + ARRAY_SIZE(tcg_target_callee_save_regs) * 4;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    /* Round the whole frame up so %esp stays TCG_TARGET_STACK_ALIGN
       aligned inside translated code.  */
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);

    /* Dispatch: the TB entry point is passed in %eax.  */
    tcg_out_modrm(s, 0xff, 4, TCG_REG_EAX); /* jmp *%eax */

    /* TB epilogue */
    /* Every TB exit jumps back here; remember the address so exit_tb
       can be patched to reach it.  */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_ESP, stack_addend);
    /* restore the callee saved registers in reverse push order */
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
1354 void tcg_target_init(TCGContext *s)
1356 #if !defined(CONFIG_USER_ONLY)
1357 /* fail safe */
1358 if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
1359 tcg_abort();
1360 #endif
1362 tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);
1364 tcg_regset_clear(tcg_target_call_clobber_regs);
1365 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EAX);
1366 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EDX);
1367 tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_ECX);
1369 tcg_regset_clear(s->reserved_regs);
1370 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);
1372 tcg_add_target_add_op_defs(x86_op_defs);