/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%eax",
    "%ecx",
    "%edx",
    "%ebx",
    "%esp",
    "%ebp",
    "%esi",
    "%edi",
};
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
    TCG_REG_EBP,
    TCG_REG_ECX,
    TCG_REG_EDX,
    TCG_REG_EAX,
};
static const int tcg_target_call_iarg_regs[3] = { TCG_REG_EAX, TCG_REG_EDX, TCG_REG_ECX };
static const int tcg_target_call_oarg_regs[2] = { TCG_REG_EAX, TCG_REG_EDX };
static uint8_t *tb_ret_addr;
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_386_32:
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        *(uint32_t *)code_ptr = value - (long)code_ptr;
        break;
    case R_386_PC8:
        value -= (long)code_ptr;
        if (value != (int8_t)value) {
            tcg_abort();
        }
        *(uint8_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    flags &= TCG_CALL_TYPE_MASK;
    switch(flags) {
    case TCG_CALL_TYPE_STD:
        return 0;
    case TCG_CALL_TYPE_REGPARM_1:
    case TCG_CALL_TYPE_REGPARM_2:
    case TCG_CALL_TYPE_REGPARM:
        return flags - TCG_CALL_TYPE_REGPARM_1 + 1;
    default:
        tcg_abort();
    }
}
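
/* Illustrative note (not part of the original source): the REGPARM call
   types mirror gcc's -mregparm ABI, which passes the first one, two or
   three integer arguments in EAX, EDX and ECX, i.e. in the same order as
   tcg_target_call_iarg_regs above; TCG_CALL_TYPE_STD is the plain
   stack-based cdecl convention, hence zero register arguments. */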
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ECX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_ESI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_EDI);
        break;
    case 'q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xff);
        break;

        /* qemu_ld/st address constraint */
    case 'L':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_EAX);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_EDX);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
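
/* Illustrative note (not part of the original source): these single-letter
   constraints feed the register allocator; 'q' restricts an operand to the
   four byte-addressable registers (mask 0xf = EAX, ECX, EDX, EBX), 'r'
   allows any of the eight, and 'L' is 'r' minus EAX and EDX, which the
   qemu_ld/st slow path clobbers when calling the MMU helpers. */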
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else
        return 0;
}
#define P_EXT 0x100 /* 0x0f opcode prefix */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
static inline void tcg_out_opc(TCGContext *s, int opc)
{
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc);
}
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc);
    tcg_out8(s, 0xc0 | (r << 3) | rm);
}
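
/* Illustrative note (not part of the original source): the byte built here
   is an x86 ModRM byte, laid out as mod(2 bits) | reg(3) | rm(3); mod = 11
   selects register-direct operands.  E.g. a sketch:
   tcg_out_modrm(s, 0x8b, TCG_REG_EBX, TCG_REG_EAX) emits 8b d8, i.e.
   "movl %eax, %ebx": opcode 0x8b (mov r32, r/m32) followed by ModRM
   0xd8 = 0xc0 | (EBX=3 << 3) | (EAX=0). */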
/* rm == -1 means no register index */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    tcg_out_opc(s, opc);
    if (rm == -1) {
        tcg_out8(s, 0x05 | (r << 3));
        tcg_out32(s, offset);
    } else if (offset == 0 && rm != TCG_REG_EBP) {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x04 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | (r << 3) | rm);
        }
    } else if ((int8_t)offset == offset) {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x44 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | (r << 3) | rm);
        }
        tcg_out8(s, offset);
    } else {
        if (rm == TCG_REG_ESP) {
            tcg_out8(s, 0x84 | (r << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | (r << 3) | rm);
        }
        tcg_out32(s, offset);
    }
}
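
/* Illustrative note (not part of the original source): the mod field picks
   the displacement size: mod=00 means no displacement (except rm=EBP, which
   would mean disp32-absolute, hence the special case above), mod=01 means
   disp8 and mod=10 disp32; rm=ESP always requires a trailing SIB byte
   (0x24 = base ESP, no index).  E.g. a sketch:
   tcg_out_modrm_offset(s, 0x8b, TCG_REG_EAX, TCG_REG_ESI, 8) emits
   8b 46 08, i.e. "movl 8(%esi), %eax". */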
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    if (arg != ret)
        tcg_out_modrm(s, 0x8b, ret, arg);
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, int32_t arg)
{
    if (arg == 0) {
        /* xor r0,r0 */
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret);
    } else {
        tcg_out8(s, 0xb8 + ret);
        tcg_out32(s, arg);
    }
}
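
/* Illustrative note (not part of the original source): "xor r,r" is the
   usual 2-byte zeroing idiom (here 0x31 = 0x01 | (ARITH_XOR << 3)), versus
   5 bytes for "movl $0, r" (b8+r followed by a 32-bit immediate). */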
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    /* movl */
    tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2);
}
static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    /* movl */
    tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2);
}
static inline void tgen_arithi(TCGContext *s, int c, int r0, int32_t val, int cf)
{
    if (!cf && ((c == ARITH_ADD && val == 1) || (c == ARITH_SUB && val == -1))) {
        /* inc */
        tcg_out_opc(s, 0x40 + r0);
    } else if (!cf && ((c == ARITH_ADD && val == -1) || (c == ARITH_SUB && val == 1))) {
        /* dec */
        tcg_out_opc(s, 0x48 + r0);
    } else if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu && r0 < 4) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
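
/* Illustrative note (not part of the original source): opcode 0x83 is the
   ALU-group form with a sign-extended imm8 and 0x81 the imm32 form; the ALU
   operation (ADD=0 ... CMP=7) goes in the ModRM reg field.  E.g. a sketch:
   tgen_arithi(s, ARITH_ADD, TCG_REG_ESP, 16, 0) emits 83 c4 10, i.e.
   "addl $16, %esp".  The inc/dec and movzbl/movzwl branches above are pure
   size optimizations with identical results, except that inc/dec do not
   update the carry flag, hence the !cf guard. */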
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0)
        tgen_arithi(s, ARITH_ADD, reg, val, 0);
}
/* Use SMALL != 0 to force a short forward branch. */
static void tcg_out_jxx(TCGContext *s, int opc, int label_index, int small)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1) {
                tcg_out8(s, 0xeb);
            } else {
                tcg_out8(s, 0x70 + opc);
            }
            tcg_out8(s, val1);
        } else {
            if (small) {
                tcg_abort();
            }
            if (opc == -1) {
                tcg_out8(s, 0xe9);
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc);
                tcg_out32(s, val - 6);
            }
        }
    } else if (small) {
        if (opc == -1) {
            tcg_out8(s, 0xeb);
        } else {
            tcg_out8(s, 0x70 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC8, label_index, -1);
        s->code_ptr += 1;
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
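
/* Illustrative note (not part of the original source): a short Jcc is
   2 bytes (70+cc rel8) and a near Jcc 6 bytes (0f 80+cc rel32), which is
   where the "val - 2" and "val - 6" adjustments come from; the displacement
   is measured from the end of the instruction.  An unconditional jmp is
   eb rel8 (2 bytes) or e9 rel32 (5 bytes), hence "val - 5".  For labels not
   yet resolved, a relocation (R_386_PC8 / R_386_PC32) is recorded and
   patch_reloc() above fills the field in later. */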
static void tcg_out_cmp(TCGContext *s, TCGArg arg1, TCGArg arg2,
                        int const_arg2)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85, arg1, arg1);
        } else {
            tgen_arithi(s, ARITH_CMP, arg1, arg2, 0);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3), arg2, arg1);
    }
}
static void tcg_out_brcond(TCGContext *s, TCGCond cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int small)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index, small);
}
/* XXX: we implement it at the target level to avoid having to
   handle cross-basic-block temporaries */
static void tcg_out_brcond2(TCGContext *s, const TCGArg *args,
                            const int *const_args, int small)
{
    int label_next;
    label_next = gen_new_label();
    switch(args[4]) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
                       label_next, 1);
        tcg_out_brcond(s, TCG_COND_EQ, args[1], args[3], const_args[3],
                       args[5], small);
        break;
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, args[0], args[2], const_args[2],
                       args[5], small);
        tcg_out_brcond(s, TCG_COND_NE, args[1], args[3], const_args[3],
                       args[5], small);
        break;
    case TCG_COND_LT:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GT:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_LEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GTU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, args[1], args[3], const_args[3],
                       args[5], small);
        tcg_out_jxx(s, JCC_JNE, label_next, 1);
        tcg_out_brcond(s, TCG_COND_GEU, args[0], args[2], const_args[2],
                       args[5], small);
        break;
    default:
        tcg_abort();
    }
    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
}
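
/* Illustrative note (not part of the original source): a 64-bit comparison
   on this 32-bit target is decomposed into two 32-bit compares: the high
   words decide the signed/unsigned ordering, and only when they are equal
   (the JNE fall-through to label_next above) are the low words compared,
   always unsigned. */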
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGArg dest,
                            TCGArg arg1, TCGArg arg2, int const_arg2)
{
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    /* setcc */
    tcg_out_modrm(s, 0x90 | tcg_cond_to_jcc[cond] | P_EXT, 0, dest);
    tgen_arithi(s, ARITH_AND, dest, 0xff, 0);
}
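
/* Illustrative note (not part of the original source): setcc (0f 90+cc)
   writes only the low byte of dest, which is why dest is constrained to a
   byte-addressable register ("q" in x86_op_defs below) and why the upper
   24 bits are cleared afterwards; tgen_arithi turns that "and $0xff" into
   a movzbl when it can. */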
static void tcg_out_setcond2(TCGContext *s, const TCGArg *args,
                             const int *const_args)
{
    TCGArg new_args[6];
    int label_true, label_over;

    memcpy(new_args, args+1, 5*sizeof(TCGArg));

    if (args[0] == args[1] || args[0] == args[2]
        || (!const_args[3] && args[0] == args[3])
        || (!const_args[4] && args[0] == args[4])) {
        /* When the destination overlaps with one of the argument
           registers, don't do anything tricky. */
        label_true = gen_new_label();
        label_over = gen_new_label();

        new_args[5] = label_true;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);
        tcg_out_jxx(s, JCC_JMP, label_over, 1);
        tcg_out_label(s, label_true, (tcg_target_long)s->code_ptr);

        tcg_out_movi(s, TCG_TYPE_I32, args[0], 1);
        tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
    } else {
        /* When the destination does not overlap one of the arguments,
           clear the destination first, jump if cond false, and emit an
           increment in the true case. This results in smaller code. */
        tcg_out_movi(s, TCG_TYPE_I32, args[0], 0);

        label_over = gen_new_label();
        new_args[4] = tcg_invert_cond(new_args[4]);
        new_args[5] = label_over;
        tcg_out_brcond2(s, new_args, const_args+1, 1);

        tgen_arithi(s, ARITH_ADD, args[0], 1, 0);
        tcg_out_label(s, label_over, (tcg_target_long)s->code_ptr);
    }
}
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};

#endif

#ifndef CONFIG_USER_ONLY
#define GUEST_BASE 0
#endif
/* XXX: qemu_ld and qemu_st could be modified to clobber only EDX and
   EAX. It will be useful once fixed registers globals are less
   common. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_EAX;
    r1 = TCG_REG_EDX;
#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
    tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
    tcg_out8(s, (5 << 3) | r1);
    tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_read));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);

    tcg_out_mov(s, r0, addr_reg);
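
/* Illustrative note (not part of the original source): the sequence above
   computes, roughly,
     r1 = (addr >> (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS))
          & ((CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
   i.e. the byte offset of the TLB entry for addr, which the lea adds to
   env (kept in %ebp), while
     r0 = addr & (TARGET_PAGE_MASK | ((1 << s_bits) - 1))
   keeps the page bits plus the low alignment bits, so the compare against
   tlb_table[mem_index][idx].addr_read fails both on a TLB miss and on an
   unaligned access. */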
#if TARGET_LONG_BITS == 32
    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
#else
    /* jne label3 */
    tcg_out8(s, 0x70 + JCC_JNE);
    label3_ptr = s->code_ptr;
    s->code_ptr++;

    /* cmp 4(r1), addr_reg2 */
    tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* label3: */
    *label3_ptr = s->code_ptr - label3_ptr - 1;
#endif
    /* XXX: move that code at the end of the TB */
#if TARGET_LONG_BITS == 32
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EDX, mem_index);
#else
    tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
#endif
    tcg_out8(s, 0xe8);
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
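
/* Illustrative note (not part of the original source): 0xe8 is "call
   rel32"; the displacement is relative to the end of the 5-byte
   instruction, hence the extra "- 4" after the address of the 32-bit
   field. */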
    switch(opc) {
    case 0 | 4:
        /* movsbl */
        tcg_out_modrm(s, 0xbe | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 1 | 4:
        /* movswl */
        tcg_out_modrm(s, 0xbf | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, data_reg, TCG_REG_EAX);
        break;
    case 2:
    default:
        tcg_out_mov(s, data_reg, TCG_REG_EAX);
        break;
    case 3:
        if (data_reg == TCG_REG_EDX) {
            tcg_out_opc(s, 0x90 + TCG_REG_EDX); /* xchg %edx, %eax */
            tcg_out_mov(s, data_reg2, TCG_REG_EAX);
        } else {
            tcg_out_mov(s, data_reg, TCG_REG_EAX);
            tcg_out_mov(s, data_reg2, TCG_REG_EDX);
        }
        break;
    }
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, GUEST_BASE);
        break;
    case 0 | 4:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, data_reg, r0, GUEST_BASE);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, GUEST_BASE);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, data_reg, r0, GUEST_BASE);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswl data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT, data_reg, data_reg);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);
        }
        break;
    case 3:
        /* XXX: could be nicer */
        if (r0 == data_reg) {
            r1 = TCG_REG_EDX;
            if (r1 == addr_reg)
                r1 = TCG_REG_EAX;
            tcg_out_mov(s, r1, r0);
            r0 = r1;
        }
        if (!bswap) {
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE);
            tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, GUEST_BASE + 4);
        } else {
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, GUEST_BASE + 4);
            tcg_out_opc(s, (0xc8 + data_reg) | P_EXT);

            tcg_out_modrm_offset(s, 0x8b, data_reg2, r0, GUEST_BASE);
            /* bswap */
            tcg_out_opc(s, (0xc8 + data_reg2) | P_EXT);
        }
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
#endif
    int addr_reg2;
#endif

    data_reg = *args++;
    if (opc == 3)
        data_reg2 = *args++;
    else
        data_reg2 = 0;
    addr_reg = *args++;
#if TARGET_LONG_BITS == 64
    addr_reg2 = *args++;
#endif
    mem_index = *args;
    s_bits = opc;

    r0 = TCG_REG_EAX;
    r1 = TCG_REG_EDX;
#if defined(CONFIG_SOFTMMU)
    tcg_out_mov(s, r1, addr_reg);

    tcg_out_mov(s, r0, addr_reg);

    tcg_out_modrm(s, 0xc1, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    tcg_out_opc(s, 0x8d); /* lea offset(r1, %ebp), r1 */
    tcg_out8(s, 0x80 | (r1 << 3) | 0x04);
    tcg_out8(s, (5 << 3) | r1);
    tcg_out32(s, offsetof(CPUState, tlb_table[mem_index][0].addr_write));

    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b, r0, r1, 0);

    tcg_out_mov(s, r0, addr_reg);
#if TARGET_LONG_BITS == 32
    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;
#else
    /* jne label3 */
    tcg_out8(s, 0x70 + JCC_JNE);
    label3_ptr = s->code_ptr;
    s->code_ptr++;

    /* cmp 4(r1), addr_reg2 */
    tcg_out_modrm_offset(s, 0x3b, addr_reg2, r1, 4);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* label3: */
    *label3_ptr = s->code_ptr - label3_ptr - 1;
#endif
    /* XXX: move that code at the end of the TB */
#if TARGET_LONG_BITS == 32
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, data_reg);
        tcg_out_mov(s, TCG_REG_ECX, data_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    } else {
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_EDX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_EDX, data_reg);
            break;
        }
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_ECX, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
    }
#else
    if (opc == 3) {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out_opc(s, 0x50 + data_reg2); /* push */
        tcg_out_opc(s, 0x50 + data_reg); /* push */
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 12);
    } else {
        tcg_out_mov(s, TCG_REG_EDX, addr_reg2);
        switch(opc) {
        case 0:
            /* movzbl */
            tcg_out_modrm(s, 0xb6 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 1:
            /* movzwl */
            tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_ECX, data_reg);
            break;
        case 2:
            tcg_out_mov(s, TCG_REG_ECX, data_reg);
            break;
        }
        tcg_out8(s, 0x6a); /* push Ib */
        tcg_out8(s, mem_index);
        tcg_out8(s, 0xe8);
        tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
                  (tcg_target_long)s->code_ptr - 4);
        tcg_out_addi(s, TCG_REG_ESP, 4);
    }
#endif
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88, data_reg, r0, GUEST_BASE);
        break;
    case 1:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
        break;
    case 2:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg2);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE);
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT);
            tcg_out_modrm_offset(s, 0x89, r1, r0, GUEST_BASE + 4);
        } else {
            tcg_out_modrm_offset(s, 0x89, data_reg, r0, GUEST_BASE);
            tcg_out_modrm_offset(s, 0x89, data_reg2, r0, GUEST_BASE + 4);
        }
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_EAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next + args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9);
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0], 0);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        /* movb */
        tcg_out_modrm_offset(s, 0x88, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith:
        if (const_args[2]) {
            tgen_arithi(s, c, args[0], args[2], 0);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;
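
    /* Illustrative note (not part of the original source): the eight
       classic ALU operations share one encoding family, opcode
       0x01 + (op << 3) for the "r/m32, r32" direction (add=01, or=09,
       and=21, sub=29, xor=31, ...), which is why a single gen_arith tail
       can serve all of them here. */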
    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                /* imul $im8, reg, reg */
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                /* imul $im32, reg, reg */
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_modrm(s, 0xf7, 4, args[3]);
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]);
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]);
        break;
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_ADD, args[0], args[4], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_ADD << 3), args[4], args[0]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_ADC, args[1], args[5], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_ADC << 3), args[5], args[1]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tgen_arithi(s, ARITH_SUB, args[0], args[4], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_SUB << 3), args[4], args[0]);
        }
        if (const_args[5]) {
            tgen_arithi(s, ARITH_SBB, args[1], args[5], 1);
        } else {
            tcg_out_modrm(s, 0x01 | (ARITH_SBB << 3), args[5], args[1]);
        }
        break;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args, const_args, 0);
        break;
    case INDEX_op_bswap16_i32:
        tcg_out8(s, 0x66);
        tcg_out_modrm(s, 0xc1, SHIFT_ROL, args[0]);
        tcg_out8(s, 8);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_opc(s, (0xc8 + args[0]) | P_EXT);
        break;
    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]);
        break;
    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]);
        break;
    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8u_i32:
        tcg_out_modrm(s, 0xb6 | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext16u_i32:
        tcg_out_modrm(s, 0xb7 | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2], const_args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args, const_args);
        break;
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
static const TCGTargetOpDef x86_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "q", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_mulu2_i32, { "a", "d", "a", "r" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_add2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_sub2_i32, { "r", "r", "0", "1", "ri", "ri" } },
    { INDEX_op_brcond2_i32, { "r", "r", "ri", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "0" } },
    { INDEX_op_bswap32_i32, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "q" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8u_i32, { "r", "q" } },
    { INDEX_op_ext16u_i32, { "r", "r" } },

    { INDEX_op_setcond_i32, { "q", "r", "ri" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "ri", "ri" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
#else
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },

    { INDEX_op_qemu_st8, { "cb", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
#endif
    { -1 },
};
static int tcg_target_callee_save_regs[] = {
    /*  TCG_REG_EBP, */ /* currently used for the global env, so no
                           need to save */
    TCG_REG_EBX,
    TCG_REG_ESI,
    TCG_REG_EDI,
};
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x50 + reg);
}
static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, 0x58 + reg);
}
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }
    /* reserve some stack space */
    push_size = 4 + ARRAY_SIZE(tcg_target_callee_save_regs) * 4;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_ESP, -stack_addend);

    tcg_out_modrm(s, 0xff, 4, TCG_REG_EAX); /* jmp *%eax */

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_ESP, stack_addend);
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
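
/* Illustrative note (not part of the original source): push_size counts
   the return address (4 bytes) plus the saved registers, so frame_size
   rounds the whole frame, including the TCG_STATIC_CALL_ARGS_SIZE scratch
   area for helper calls, up to TCG_TARGET_STACK_ALIGN; only the difference
   (stack_addend) still has to be subtracted from %esp on entry and added
   back before the final ret. */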
void tcg_target_init(TCGContext *s)
{
#if !defined(CONFIG_USER_ONLY)
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();
#endif

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xff);

    tcg_regset_clear(tcg_target_call_clobber_regs);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EAX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_EDX);
    tcg_regset_set_reg(tcg_target_call_clobber_regs, TCG_REG_ECX);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ESP);

    tcg_add_target_add_op_defs(x86_op_defs);
}