/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
/* register names, in hardware encoding order (TCG_REG_RAX == 0) */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "%rax", "%rcx", "%rdx", "%rbx", "%rsp", "%rbp", "%rsi", "%rdi",
    "%r8",  "%r9",  "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
};
/* call-clobbered registers are handed out first, callee-saved ones last */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_RAX,
    TCG_REG_R10,
    TCG_REG_R11,

    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
};
static const int tcg_target_call_iarg_regs[6] = {
    TCG_REG_RDI,
    TCG_REG_RSI,
    TCG_REG_RDX,
    TCG_REG_RCX,
    TCG_REG_R8,
    TCG_REG_R9,
};

static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_RAX,
    TCG_REG_RDX
};

static uint8_t *tb_ret_addr;
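/* tb_ret_addr holds the address of the common TB epilogue emitted by
   tcg_target_qemu_prologue() below; INDEX_op_exit_tb jumps back to it to
   leave generated code with the return value in %rax. */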
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_X86_64_32:
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_X86_64_32S:
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    case R_386_PC32:
        value -= (long)code_ptr;
        if (value != (int32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
        break;
    default:
        tcg_abort();
    }
}
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 6;
}
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'a':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RAX);
        break;
    case 'b':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RBX);
        break;
    case 'c':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RCX);
        break;
    case 'd':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDX);
        break;
    case 'S':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RSI);
        break;
    case 'D':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'q':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xf);
        break;
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        break;
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RSI);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_RDI);
        break;
    case 'e':
        ct->ct |= TCG_CT_CONST_S32;
        break;
    case 'Z':
        ct->ct |= TCG_CT_CONST_U32;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_S32) && val == (int32_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_U32) && val == (uint32_t)val)
        return 1;
    else
        return 0;
}
#define P_EXT   0x100 /* 0x0f opcode prefix */
#define P_REXW  0x200 /* set rex.w = 1 */
#define P_REXB  0x400 /* force rex use for byte registers */
static const uint8_t tcg_cond_to_jcc[10] = {
    [TCG_COND_EQ] = JCC_JE,
    [TCG_COND_NE] = JCC_JNE,
    [TCG_COND_LT] = JCC_JL,
    [TCG_COND_GE] = JCC_JGE,
    [TCG_COND_LE] = JCC_JLE,
    [TCG_COND_GT] = JCC_JG,
    [TCG_COND_LTU] = JCC_JB,
    [TCG_COND_GEU] = JCC_JAE,
    [TCG_COND_LEU] = JCC_JBE,
    [TCG_COND_GTU] = JCC_JA,
};
static inline void tcg_out_opc(TCGContext *s, int opc, int r, int rm, int x)
{
    int rex;
    rex = ((opc >> 6) & 0x8) | ((r >> 1) & 0x4) |
        ((x >> 2) & 2) | ((rm >> 3) & 1);
    if (rex || (opc & P_REXB)) {
        tcg_out8(s, rex | 0x40);
    }
    if (opc & P_EXT)
        tcg_out8(s, 0x0f);
    tcg_out8(s, opc & 0xff);
}
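/* The REX prefix has the form 0100WRXB.  tcg_out_opc() assembles it from
   the opcode flags and the high bits of the register numbers: P_REXW
   (0x200 >> 6) supplies the W bit (0x8), bit 3 of 'r' becomes R (0x4),
   bit 3 of 'x' becomes X (0x2) and bit 3 of 'rm' becomes B (0x1).  The
   prefix is emitted only when one of those bits is set, or when P_REXB
   forces an empty REX so that %spl, %bpl, %sil and %dil are encodable as
   byte registers. */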
static inline void tcg_out_modrm(TCGContext *s, int opc, int r, int rm)
{
    tcg_out_opc(s, opc, r, rm, 0);
    tcg_out8(s, 0xc0 | ((r & 7) << 3) | (rm & 7));
}
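/* Worked example: tcg_out_mov() below emits tcg_out_modrm(s, 0x8b | P_REXW,
   TCG_REG_RDI, TCG_REG_RSI).  P_REXW yields the 0x48 prefix, 0x8b is the
   mov r64,r/m64 opcode, and the ModRM byte is 0xc0 | (7 << 3) | 6 = 0xfe,
   so the emitted bytes 48 8b fe are movq %rsi, %rdi. */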
/* rm < 0 means no register index plus (-rm - 1 immediate bytes) */
static inline void tcg_out_modrm_offset(TCGContext *s, int opc, int r, int rm,
                                        tcg_target_long offset)
{
    if (rm < 0) {
        tcg_target_long val;
        tcg_out_opc(s, opc, r, 0, 0);
        val = offset - ((tcg_target_long)s->code_ptr + 5 + (-rm - 1));
        if (val == (int32_t)val) {
            /* eip relative */
            tcg_out8(s, 0x05 | ((r & 7) << 3));
            tcg_out32(s, val);
        } else if (offset == (int32_t)offset) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x25); /* sib */
            tcg_out32(s, offset);
        } else {
            tcg_abort();
        }
    } else if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x04 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x00 | ((r & 7) << 3) | (rm & 7));
        }
    } else if ((int8_t)offset == offset) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x44 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x40 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out8(s, offset);
    } else {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, 0x84 | ((r & 7) << 3));
            tcg_out8(s, 0x24);
        } else {
            tcg_out8(s, 0x80 | ((r & 7) << 3) | (rm & 7));
        }
        tcg_out32(s, offset);
    }
}
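/* The mod field selects the displacement size: 0x00 none, 0x40 a
   sign-extended 8-bit displacement, 0x80 a 32-bit one.  Two encodings are
   special-cased above: r/m = 5 (%rbp) with mod = 0 means RIP-relative in
   64-bit mode, so %rbp always gets an explicit displacement, and r/m = 4
   (%rsp, and %r12 which shares the low three bits) is an SIB escape,
   hence the extra 0x24 byte meaning "no index, base = %rsp/%r12". */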
#if defined(CONFIG_SOFTMMU)
/* XXX: incomplete. index must be different from ESP */
static void tcg_out_modrm_offset2(TCGContext *s, int opc, int r, int rm,
                                  int index, int shift,
                                  tcg_target_long offset)
{
    int mod;

    if (rm == -1)
        tcg_abort();
    if (offset == 0 && (rm & 7) != TCG_REG_RBP) {
        mod = 0;
    } else if (offset == (int8_t)offset) {
        mod = 0x40;
    } else if (offset == (int32_t)offset) {
        mod = 0x80;
    } else {
        tcg_abort();
    }
    if (index == -1) {
        tcg_out_opc(s, opc, r, rm, 0);
        if ((rm & 7) == TCG_REG_RSP) {
            tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
            tcg_out8(s, 0x04 | (rm & 7));
        } else {
            tcg_out8(s, mod | ((r & 7) << 3) | (rm & 7));
        }
    } else {
        tcg_out_opc(s, opc, r, rm, index);
        tcg_out8(s, mod | ((r & 7) << 3) | 0x04);
        tcg_out8(s, (shift << 6) | ((index & 7) << 3) | (rm & 7));
    }
    if (mod == 0x40) {
        tcg_out8(s, offset);
    } else if (mod == 0x80) {
        tcg_out32(s, offset);
    }
}
#endif
static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
{
    tcg_out_modrm(s, 0x8b | P_REXW, ret, arg);
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
{
    if (arg == 0) {
        tcg_out_modrm(s, 0x01 | (ARITH_XOR << 3), ret, ret); /* xor r0,r0 */
    } else if (arg == (uint32_t)arg || type == TCG_TYPE_I32) {
        tcg_out_opc(s, 0xb8 + (ret & 7), 0, ret, 0);
        tcg_out32(s, arg);
    } else if (arg == (int32_t)arg) {
        tcg_out_modrm(s, 0xc7 | P_REXW, 0, ret);
        tcg_out32(s, arg);
    } else {
        tcg_out_opc(s, (0xb8 + (ret & 7)) | P_REXW, 0, ret, 0);
        tcg_out32(s, arg);
        tcg_out32(s, arg >> 32);
    }
}
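/* Example encodings chosen by tcg_out_movi() for %rax: 0 becomes 31 c0
   (xorl %eax, %eax - a 32-bit write clears the high half); 0x12345678
   becomes b8 78 56 34 12 (movl imm32, which zero extends); a negative
   value fitting in 32 bits uses the sign-extending 48 c7 c0 imm32 form;
   anything else falls back to the 10-byte movabs, 48 b8 imm64. */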
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x8b, ret, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x8b | P_REXW, ret, arg1, arg2); /* movq */
}

static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
{
    if (type == TCG_TYPE_I32)
        tcg_out_modrm_offset(s, 0x89, arg, arg1, arg2); /* movl */
    else
        tcg_out_modrm_offset(s, 0x89 | P_REXW, arg, arg1, arg2); /* movq */
}
static inline void tgen_arithi32(TCGContext *s, int c, int r0, int32_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, r0, r0);
    } else {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    }
}
static inline void tgen_arithi64(TCGContext *s, int c, int r0, int64_t val)
{
    if (val == (int8_t)val) {
        tcg_out_modrm(s, 0x83 | P_REXW, c, r0);
        tcg_out8(s, val);
    } else if (c == ARITH_AND && val == 0xffu) {
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffu) {
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, r0, r0);
    } else if (c == ARITH_AND && val == 0xffffffffu) {
        /* 32-bit mov zero extends */
        tcg_out_modrm(s, 0x8b, r0, r0);
    } else if (val == (int32_t)val) {
        tcg_out_modrm(s, 0x81 | P_REXW, c, r0);
        tcg_out32(s, val);
    } else if (c == ARITH_AND && val == (uint32_t)val) {
        tcg_out_modrm(s, 0x81, c, r0);
        tcg_out32(s, val);
    } else {
        tcg_abort();
    }
}
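/* Example: tgen_arithi64(s, ARITH_ADD, TCG_REG_RAX, 1) takes the imm8
   path and emits 48 83 c0 01 (addq $1, %rax).  Opcode 0x83 is the
   sign-extended 8-bit immediate form of the 0x81 group; the operation is
   selected by the reg field of the ModRM byte ('c' here). */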
static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val != 0)
        tgen_arithi64(s, ARITH_ADD, reg, val);
}
static void tcg_out_jxx(TCGContext *s, int opc, int label_index)
{
    int32_t val, val1;
    TCGLabel *l = &s->labels[label_index];

    if (l->has_value) {
        val = l->u.value - (tcg_target_long)s->code_ptr;
        val1 = val - 2;
        if ((int8_t)val1 == val1) {
            if (opc == -1)
                tcg_out8(s, 0xeb); /* jmp rel8 */
            else
                tcg_out8(s, 0x70 + opc); /* jcc rel8 */
            tcg_out8(s, val1);
        } else {
            if (opc == -1) {
                tcg_out8(s, 0xe9); /* jmp rel32 */
                tcg_out32(s, val - 5);
            } else {
                tcg_out8(s, 0x0f);
                tcg_out8(s, 0x80 + opc); /* jcc rel32 */
                tcg_out32(s, val - 6);
            }
        }
    } else {
        if (opc == -1) {
            tcg_out8(s, 0xe9);
        } else {
            tcg_out8(s, 0x0f);
            tcg_out8(s, 0x80 + opc);
        }
        tcg_out_reloc(s, s->code_ptr, R_386_PC32, label_index, -4);
        s->code_ptr += 4;
    }
}
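/* Branch encodings: a resolved, nearby label gets the 2-byte short form
   (eb/70+cc rel8); otherwise the near form e9/0f 80+cc rel32 is used.
   Displacements count from the end of the instruction, which is why the
   rel32 forms subtract 5 and 6 (their total lengths) from 'val' above.
   Unresolved labels always use the near form and leave a R_386_PC32
   relocation covering the 4 displacement bytes. */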
static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
                           int label_index, int rexw)
{
    if (const_arg2) {
        if (arg2 == 0) {
            /* test r, r */
            tcg_out_modrm(s, 0x85 | rexw, arg1, arg1);
        } else {
            if (rexw)
                tgen_arithi64(s, ARITH_CMP, arg1, arg2);
            else
                tgen_arithi32(s, ARITH_CMP, arg1, arg2);
        }
    } else {
        tcg_out_modrm(s, 0x01 | (ARITH_CMP << 3) | rexw, arg2, arg1);
    }
    tcg_out_jxx(s, tcg_cond_to_jcc[cond], label_index);
}
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};

#endif
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;
    s_bits = opc & 3;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov addr_reg, r1 */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_read));
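    /* After the lea, r1 = &env->tlb_table[mem_index][(addr >>
       TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1)].addr_read: shifting by
       TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS and masking with
       (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS yields the byte offset of
       the TLB entry directly.  r0 keeps the page bits of the address plus
       the low (1 << s_bits) - 1 bits, so an unaligned access fails the
       compare below and also takes the slow path. */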
    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* TLB miss: call the load helper */
    /* XXX: move that code at the end of the TB */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RSI, mem_index);
    tcg_out8(s, 0xe8); /* call */
    tcg_out32(s, (tcg_target_long)qemu_ld_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
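    /* e8 is call rel32; the displacement is relative to the end of the
       5-byte instruction.  s->code_ptr has already advanced past the e8
       byte here, so subtracting 4 accounts for the displacement bytes
       themselves. */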
    switch(opc) {
    case 0 | 4:
        /* movsbq */
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1 | 4:
        /* movswq */
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2 | 4:
        /* movslq */
        tcg_out_modrm(s, 0x63 | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 0:
        /* movzbq */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 1:
        /* movzwq */
        tcg_out_modrm(s, 0xb7 | P_EXT | P_REXW, data_reg, TCG_REG_RAX);
        break;
    case 2:
    default:
        /* movl */
        tcg_out_modrm(s, 0x8b, data_reg, TCG_REG_RAX);
        break;
    case 3:
        tcg_out_mov(s, data_reg, TCG_REG_RAX);
        break;
    }

    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: fast path */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_read));
#else
    r0 = addr_reg;
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, data_reg, r0, 0);
        break;
    case 0 | 4:
        /* movsbX */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | rexw, data_reg, r0, 0);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
        if (bswap) {
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);
        }
        break;
    case 1 | 4:
        if (bswap) {
            /* movzwl */
            tcg_out_modrm_offset(s, 0xb7 | P_EXT, data_reg, r0, 0);
            /* rolw $8, data_reg */
            tcg_out8(s, 0x66);
            tcg_out_modrm(s, 0xc1, 0, data_reg);
            tcg_out8(s, 8);

            /* movswX data_reg, data_reg */
            tcg_out_modrm(s, 0xbf | P_EXT | rexw, data_reg, data_reg);
        } else {
            /* movswX */
            tcg_out_modrm_offset(s, 0xbf | P_EXT | rexw, data_reg, r0, 0);
        }
        break;
    case 2:
        /* movl (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
        }
        break;
    case 2 | 4:
        if (bswap) {
            /* movl (r0), data_reg */
            tcg_out_modrm_offset(s, 0x8b, data_reg, r0, 0);
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT, 0, data_reg, 0);
            /* movslq */
            tcg_out_modrm(s, 0x63 | P_REXW, data_reg, data_reg);
        } else {
            /* movslq */
            tcg_out_modrm_offset(s, 0x63 | P_REXW, data_reg, r0, 0);
        }
        break;
    case 3:
        /* movq (r0), data_reg */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, data_reg, r0, 0);
        if (bswap) {
            /* bswap */
            tcg_out_opc(s, (0xc8 + (data_reg & 7)) | P_EXT | P_REXW, 0, data_reg, 0);
        }
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
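/* label1_ptr/label2_ptr point at the displacement byte of a short branch
   emitted with its operand left blank (s->code_ptr++).  The patch value
   target - ptr - 1 is correct because a rel8 displacement is counted from
   the end of the 2-byte instruction, one byte past the operand. */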
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_reg, data_reg, r0, r1, mem_index, s_bits, bswap, rexw;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
#endif

    data_reg = *args++;
    addr_reg = *args++;
    mem_index = *args;

    s_bits = opc;

    r0 = TCG_REG_RDI;
    r1 = TCG_REG_RSI;

#if TARGET_LONG_BITS == 32
    rexw = 0;
#else
    rexw = P_REXW;
#endif
#if defined(CONFIG_SOFTMMU)
    /* mov addr_reg, r1 */
    tcg_out_modrm(s, 0x8b | rexw, r1, addr_reg);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    tcg_out_modrm(s, 0xc1 | rexw, 5, r1); /* shr $x, r1 */
    tcg_out8(s, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);

    tcg_out_modrm(s, 0x81 | rexw, 4, r0); /* andl $x, r0 */
    tcg_out32(s, TARGET_PAGE_MASK | ((1 << s_bits) - 1));

    tcg_out_modrm(s, 0x81, 4, r1); /* andl $x, r1 */
    tcg_out32(s, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* lea offset(r1, env), r1 */
    tcg_out_modrm_offset2(s, 0x8d | P_REXW, r1, r1, TCG_AREG0, 0,
                          offsetof(CPUState, tlb_table[mem_index][0].addr_write));
    /* cmp 0(r1), r0 */
    tcg_out_modrm_offset(s, 0x3b | rexw, r0, r1, 0);

    /* mov addr_reg, r0 */
    tcg_out_modrm(s, 0x8b | rexw, r0, addr_reg);

    /* je label1 */
    tcg_out8(s, 0x70 + JCC_JE);
    label1_ptr = s->code_ptr;
    s->code_ptr++;

    /* XXX: move that code at the end of the TB */
    switch(opc) {
    case 0:
        /* movzbl */
        tcg_out_modrm(s, 0xb6 | P_EXT | P_REXB, TCG_REG_RSI, data_reg);
        break;
    case 1:
        /* movzwl */
        tcg_out_modrm(s, 0xb7 | P_EXT, TCG_REG_RSI, data_reg);
        break;
    case 2:
        /* movl */
        tcg_out_modrm(s, 0x8b, TCG_REG_RSI, data_reg);
        break;
    default:
    case 3:
        tcg_out_mov(s, TCG_REG_RSI, data_reg);
        break;
    }
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_RDX, mem_index);
    tcg_out8(s, 0xe8); /* call */
    tcg_out32(s, (tcg_target_long)qemu_st_helpers[s_bits] -
              (tcg_target_long)s->code_ptr - 4);
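    /* The helper is called as st_helper(addr, data, mem_index): the
       address is already in %rdi (r0), the data was zero-extended or
       moved into %rsi, and mem_index is loaded into %rdx - the first
       three tcg_target_call_iarg_regs.  This is also why the 'L'
       constraint removes RSI and RDI from the allocatable set: qemu_ld/st
       inputs must not live in registers the slow path clobbers. */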
    /* jmp label2 */
    tcg_out8(s, 0xeb);
    label2_ptr = s->code_ptr;
    s->code_ptr++;

    /* label1: fast path */
    *label1_ptr = s->code_ptr - label1_ptr - 1;

    /* add x(r1), r0 */
    tcg_out_modrm_offset(s, 0x03 | P_REXW, r0, r1, offsetof(CPUTLBEntry, addend) -
                         offsetof(CPUTLBEntry, addr_write));
#else
    r0 = addr_reg;
#endif
#ifdef TARGET_WORDS_BIGENDIAN
    bswap = 1;
#else
    bswap = 0;
#endif
    switch(opc) {
    case 0:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, data_reg, r0, 0);
        break;
    case 1:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            tcg_out8(s, 0x66); /* rolw $8, %ecx */
            tcg_out_modrm(s, 0xc1, 0, r1);
            tcg_out8(s, 8);
            data_reg = r1;
        }
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 2:
        if (bswap) {
            tcg_out_modrm(s, 0x8b, r1, data_reg); /* movl */
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT, 0, r1, 0);
            data_reg = r1;
        }
        /* movl */
        tcg_out_modrm_offset(s, 0x89, data_reg, r0, 0);
        break;
    case 3:
        if (bswap) {
            tcg_out_mov(s, r1, data_reg);
            /* bswap data_reg */
            tcg_out_opc(s, (0xc8 + r1) | P_EXT | P_REXW, 0, r1, 0);
            data_reg = r1;
        }
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, data_reg, r0, 0);
        break;
    default:
        tcg_abort();
    }
#if defined(CONFIG_SOFTMMU)
    /* label2: */
    *label2_ptr = s->code_ptr - label2_ptr - 1;
#endif
}
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
{
    int c;

    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RAX, args[0]);
        tcg_out8(s, 0xe9); /* jmp tb_ret_addr */
        tcg_out32(s, tb_ret_addr - s->code_ptr - 4);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out8(s, 0xe9); /* jmp im */
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* indirect jump method */
            /* jmp Ev */
            tcg_out_modrm_offset(s, 0xff, 4, -1,
                                 (tcg_target_long)(s->tb_next +
                                                   args[0]));
        }
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
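    /* With the direct jump method the 32-bit operand of the e9 jump is a
       placeholder that is patched when TBs are chained (see
       tb_set_jmp_target() in exec-all.h); tb_jmp_offset records where the
       operand lives.  The indirect method jumps through the pointer at
       s->tb_next[args[0]], which can be updated without rewriting code. */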
    case INDEX_op_call:
        if (const_args[0]) {
            tcg_out8(s, 0xe8); /* call im */
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            /* call *reg */
            tcg_out_modrm(s, 0xff, 2, args[0]);
        }
        break;
    case INDEX_op_jmp:
        if (const_args[0]) {
            tcg_out8(s, 0xe9); /* jmp im */
            tcg_out32(s, args[0] - (tcg_target_long)s->code_ptr - 4);
        } else {
            /* jmp *reg */
            tcg_out_modrm(s, 0xff, 4, args[0]);
        }
        break;
    case INDEX_op_br:
        tcg_out_jxx(s, JCC_JMP, args[0]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
        break;
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        /* movzbl */
        tcg_out_modrm_offset(s, 0xb6 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        /* movsbl */
        tcg_out_modrm_offset(s, 0xbe | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i64:
        /* movsbq */
        tcg_out_modrm_offset(s, 0xbe | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        /* movzwl */
        tcg_out_modrm_offset(s, 0xb7 | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        /* movswl */
        tcg_out_modrm_offset(s, 0xbf | P_EXT, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i64:
        /* movswq */
        tcg_out_modrm_offset(s, 0xbf | P_EXT | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32u_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x8b, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld32s_i64:
        /* movslq */
        tcg_out_modrm_offset(s, 0x63 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x8b | P_REXW, args[0], args[1], args[2]);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        /* movb */
        tcg_out_modrm_offset(s, 0x88 | P_REXB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        /* movw */
        tcg_out8(s, 0x66);
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        /* movl */
        tcg_out_modrm_offset(s, 0x89, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i64:
        /* movq */
        tcg_out_modrm_offset(s, 0x89 | P_REXW, args[0], args[1], args[2]);
        break;
    case INDEX_op_sub_i32:
        c = ARITH_SUB;
        goto gen_arith32;
    case INDEX_op_and_i32:
        c = ARITH_AND;
        goto gen_arith32;
    case INDEX_op_or_i32:
        c = ARITH_OR;
        goto gen_arith32;
    case INDEX_op_xor_i32:
        c = ARITH_XOR;
        goto gen_arith32;
    case INDEX_op_add_i32:
        c = ARITH_ADD;
    gen_arith32:
        if (const_args[2]) {
            tgen_arithi32(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3), args[2], args[0]);
        }
        break;

    case INDEX_op_sub_i64:
        c = ARITH_SUB;
        goto gen_arith64;
    case INDEX_op_and_i64:
        c = ARITH_AND;
        goto gen_arith64;
    case INDEX_op_or_i64:
        c = ARITH_OR;
        goto gen_arith64;
    case INDEX_op_xor_i64:
        c = ARITH_XOR;
        goto gen_arith64;
    case INDEX_op_add_i64:
        c = ARITH_ADD;
    gen_arith64:
        if (const_args[2]) {
            tgen_arithi64(s, c, args[0], args[2]);
        } else {
            tcg_out_modrm(s, 0x01 | (c << 3) | P_REXW, args[2], args[0]);
        }
        break;
    case INDEX_op_mul_i32:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT, args[0], args[2]);
        }
        break;
    case INDEX_op_mul_i64:
        if (const_args[2]) {
            int32_t val;
            val = args[2];
            if (val == (int8_t)val) {
                tcg_out_modrm(s, 0x6b | P_REXW, args[0], args[0]);
                tcg_out8(s, val);
            } else {
                tcg_out_modrm(s, 0x69 | P_REXW, args[0], args[0]);
                tcg_out32(s, val);
            }
        } else {
            tcg_out_modrm(s, 0xaf | P_EXT | P_REXW, args[0], args[2]);
        }
        break;
    case INDEX_op_div2_i32:
        tcg_out_modrm(s, 0xf7, 7, args[4]); /* idivl */
        break;
    case INDEX_op_divu2_i32:
        tcg_out_modrm(s, 0xf7, 6, args[4]); /* divl */
        break;
    case INDEX_op_div2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 7, args[4]); /* idivq */
        break;
    case INDEX_op_divu2_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 6, args[4]); /* divq */
        break;
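    /* div2/divu2 take the dividend in the EDX:EAX (RDX:RAX) pair and
       return the quotient in EAX and the remainder in EDX.  The "a", "d",
       "0", "1" constraints in x86_64_op_defs pin the operands to those
       registers, so only the divisor (args[4]) needs encoding here:
       f7 /7 is idiv, f7 /6 is div. */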
    case INDEX_op_shl_i32:
        c = SHIFT_SHL;
    gen_shift32:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            /* shift by %cl */
            tcg_out_modrm(s, 0xd3, c, args[0]);
        }
        break;
    case INDEX_op_shr_i32:
        c = SHIFT_SHR;
        goto gen_shift32;
    case INDEX_op_sar_i32:
        c = SHIFT_SAR;
        goto gen_shift32;
    case INDEX_op_rotl_i32:
        c = SHIFT_ROL;
        goto gen_shift32;
    case INDEX_op_rotr_i32:
        c = SHIFT_ROR;
        goto gen_shift32;

    case INDEX_op_shl_i64:
        c = SHIFT_SHL;
    gen_shift64:
        if (const_args[2]) {
            if (args[2] == 1) {
                tcg_out_modrm(s, 0xd1 | P_REXW, c, args[0]);
            } else {
                tcg_out_modrm(s, 0xc1 | P_REXW, c, args[0]);
                tcg_out8(s, args[2]);
            }
        } else {
            /* shift by %cl */
            tcg_out_modrm(s, 0xd3 | P_REXW, c, args[0]);
        }
        break;
    case INDEX_op_shr_i64:
        c = SHIFT_SHR;
        goto gen_shift64;
    case INDEX_op_sar_i64:
        c = SHIFT_SAR;
        goto gen_shift64;
    case INDEX_op_rotl_i64:
        c = SHIFT_ROL;
        goto gen_shift64;
    case INDEX_op_rotr_i64:
        c = SHIFT_ROR;
        goto gen_shift64;
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], 0);
        break;
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
                       args[3], P_REXW);
        break;

    case INDEX_op_bswap_i32:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT, 0, args[0], 0);
        break;
    case INDEX_op_bswap_i64:
        tcg_out_opc(s, (0xc8 + (args[0] & 7)) | P_EXT | P_REXW, 0, args[0], 0);
        break;

    case INDEX_op_neg_i32:
        tcg_out_modrm(s, 0xf7, 3, args[0]); /* negl */
        break;
    case INDEX_op_neg_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 3, args[0]); /* negq */
        break;

    case INDEX_op_not_i32:
        tcg_out_modrm(s, 0xf7, 2, args[0]); /* notl */
        break;
    case INDEX_op_not_i64:
        tcg_out_modrm(s, 0xf7 | P_REXW, 2, args[0]); /* notq */
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXB, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_modrm(s, 0xbf | P_EXT, args[0], args[1]);
        break;
    case INDEX_op_ext8s_i64:
        tcg_out_modrm(s, 0xbe | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i64:
        tcg_out_modrm(s, 0xbf | P_EXT | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_ext32s_i64:
        tcg_out_modrm(s, 0x63 | P_REXW, args[0], args[1]);
        break;
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
static int tcg_target_callee_save_regs[] = {
    TCG_REG_RBP,
    TCG_REG_RBX,
    TCG_REG_R12,
    TCG_REG_R13,
    /* TCG_REG_R14, */ /* currently used for the global env, so no
                          need to save */
    TCG_REG_R15,
};
static inline void tcg_out_push(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x50 + (reg & 7)), 0, reg, 0);
}

static inline void tcg_out_pop(TCGContext *s, int reg)
{
    tcg_out_opc(s, (0x58 + (reg & 7)), 0, reg, 0);
}
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size, push_size, stack_addend;

    /* TB prologue */
    /* save all callee saved registers */
    for(i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_push(s, tcg_target_callee_save_regs[i]);
    }
    /* reserve some stack space */
    push_size = 8 + ARRAY_SIZE(tcg_target_callee_save_regs) * 8;
    frame_size = push_size + TCG_STATIC_CALL_ARGS_SIZE;
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
        ~(TCG_TARGET_STACK_ALIGN - 1);
    stack_addend = frame_size - push_size;
    tcg_out_addi(s, TCG_REG_RSP, -stack_addend);

    tcg_out_modrm(s, 0xff, 4, TCG_REG_RDI); /* jmp *%rdi */

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    tcg_out_addi(s, TCG_REG_RSP, stack_addend);
    for(i = ARRAY_SIZE(tcg_target_callee_save_regs) - 1; i >= 0; i--) {
        tcg_out_pop(s, tcg_target_callee_save_regs[i]);
    }
    tcg_out8(s, 0xc3); /* ret */
}
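/* Frame arithmetic example, assuming TCG_STATIC_CALL_ARGS_SIZE is 128 and
   TCG_TARGET_STACK_ALIGN is 16: the return address plus five saved
   registers give push_size = 8 + 5 * 8 = 48; frame_size = 48 + 128 = 176
   is already 16-byte aligned, so stack_addend = 128 and the prologue drops
   %rsp by 128 before jumping to the TB code pointer passed in %rdi. */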
static const TCGTargetOpDef x86_64_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_jmp, { "ri" } }, /* XXX: might need a specific constant constraint */
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "0", "ri" } },
    { INDEX_op_mul_i32, { "r", "0", "ri" } },
    { INDEX_op_div2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "0", "ri" } },
    { INDEX_op_and_i32, { "r", "0", "ri" } },
    { INDEX_op_or_i32, { "r", "0", "ri" } },
    { INDEX_op_xor_i32, { "r", "0", "ri" } },

    { INDEX_op_shl_i32, { "r", "0", "ci" } },
    { INDEX_op_shr_i32, { "r", "0", "ci" } },
    { INDEX_op_sar_i32, { "r", "0", "ci" } },
    { INDEX_op_rotl_i32, { "r", "0", "ci" } },
    { INDEX_op_rotr_i32, { "r", "0", "ci" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },

    { INDEX_op_add_i64, { "r", "0", "re" } },
    { INDEX_op_mul_i64, { "r", "0", "re" } },
    { INDEX_op_div2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "a", "d", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "0", "re" } },
    { INDEX_op_and_i64, { "r", "0", "reZ" } },
    { INDEX_op_or_i64, { "r", "0", "re" } },
    { INDEX_op_xor_i64, { "r", "0", "re" } },

    { INDEX_op_shl_i64, { "r", "0", "ci" } },
    { INDEX_op_shr_i64, { "r", "0", "ci" } },
    { INDEX_op_sar_i64, { "r", "0", "ci" } },
    { INDEX_op_rotl_i64, { "r", "0", "ci" } },
    { INDEX_op_rotr_i64, { "r", "0", "ci" } },

    { INDEX_op_brcond_i64, { "r", "re" } },

    { INDEX_op_bswap_i32, { "r", "0" } },
    { INDEX_op_bswap_i64, { "r", "0" } },

    { INDEX_op_neg_i32, { "r", "0" } },
    { INDEX_op_neg_i64, { "r", "0" } },

    { INDEX_op_not_i32, { "r", "0" } },
    { INDEX_op_not_i64, { "r", "0" } },

    { INDEX_op_ext8s_i32, { "r", "r" } },
    { INDEX_op_ext16s_i32, { "r", "r" } },
    { INDEX_op_ext8s_i64, { "r", "r" } },
    { INDEX_op_ext16s_i64, { "r", "r" } },
    { INDEX_op_ext32s_i64, { "r", "r" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },

    { -1 },
};
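/* Constraint key for the table above (see target_parse_constraint): "r"
   is any of the 16 registers and "q" one of the first four; single
   letters such as "a", "c", "d" pin a specific register; a digit aliases
   the operand to an output (x86 ops are two-address); "e" and "Z" accept
   sign- and zero-extendable 32-bit immediates; "L" is "r" minus RSI/RDI,
   which the qemu_ld/st slow path clobbers. */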
void tcg_target_init(TCGContext *s)
{
    /* fail safe */
    if ((1 << CPU_TLB_ENTRY_BITS) != sizeof(CPUTLBEntry))
        tcg_abort();

    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffff);
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     (1 << TCG_REG_RDI) |
                     (1 << TCG_REG_RSI) |
                     (1 << TCG_REG_RDX) |
                     (1 << TCG_REG_RCX) |
                     (1 << TCG_REG_R8) |
                     (1 << TCG_REG_R9) |
                     (1 << TCG_REG_RAX) |
                     (1 << TCG_REG_R10) |
                     (1 << TCG_REG_R11));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RSP);

    tcg_add_target_add_op_defs(x86_64_op_defs);
}
);