/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */
/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Single bit n. */
#define BIT(n) (1 << (n))
/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
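/* Example: BITS(15, 8) expands to (((0xffffffffU << 16) >> 24) << 8),
   i.e. 0x0000ff00, the mask with bits 15..8 set. */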
/* Used for function call generation. */
#define TCG_REG_CALL_STACK              TCG_REG_R4
#define TCG_TARGET_STACK_ALIGN          16
#define TCG_TARGET_CALL_STACK_OFFSET    0
/* TODO: documentation. */
static uint8_t *tb_ret_addr;
/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
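/* These expand to the generic TCG constraint strings: "r" accepts any
   register, "ri" a register or immediate.  'L' and 'S' are the target
   specific qemu_ld/qemu_st constraints parsed by target_parse_constraint()
   below.  Where a 64 bit value or a wide guest address needs two host
   registers, the constraint letter is emitted twice. */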
/* TODO: documentation. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_jmp, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif

    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};
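/* Note (assumption about the generic code): the terminating { -1 } entry
   marks the end of the list for tcg_add_target_add_op_defs(), which is
   called from tcg_target_init() below. */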
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};
#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
#else
# error Too few input registers available
#endif
#endif
};
static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};
#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}
/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif
/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}
/* Write 64 bit value. */
static void tcg_out64(TCGContext *s, uint64_t v)
{
    *(uint64_t *)s->code_ptr = v;
    s->code_ptr += sizeof(v);
}
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
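/* Every bytecode instruction therefore starts with a two byte header: the
   opcode and a length byte, reserved here as 0 and patched by the callers
   with "old_code_ptr[1] = s->code_ptr - old_code_ptr" once all operands are
   written.  Illustrative layout for "add_i32 r2, r0, r1" with register
   operands:
     byte 0: INDEX_op_add_i32
     byte 1: total instruction length (here 5)
     byte 2: 2        (output register)
     bytes 3, 4: 0, 1 (input registers) */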
/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}
/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
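/* Illustrative operand encoding: a register operand costs one byte, while a
   constant is the marker byte TCG_CONST followed by the immediate, e.g.
   tcg_out_ri32(s, 1, 0x12345678) emits TCG_CONST 78 56 34 12 on a little
   endian host. */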
#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
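/* Backward references emit the known label address directly; forward
   references reserve a native sized word and record a relocation which
   patch_reloc() above fills in once the label value is set. */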
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (uint32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
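/* Illustrative result: a 32 bit load of r2 from r5 + 0x10 becomes the byte
   sequence INDEX_op_ld_i32, <length>, 2, 5, 10 00 00 00 (little endian
   host), where <length> (here 8) is fixed up at the end of this function. */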
static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
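/* Note: a 64 bit constant whose value fits in 32 bits (unsigned) is
   deliberately emitted as the shorter movi_i32 bytecode; the interpreter
   is expected to zero extend the 32 bit immediate. */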
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_jmp:
        TODO();
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (uint32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
#ifdef CONFIG_TCG_PASS_AREG0
        tcg_out_r(s, TCG_AREG0);
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
#ifdef CONFIG_TCG_PASS_AREG0
        tcg_out_r(s, TCG_AREG0);
#endif
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}
/* Maximum number of registers used for input function arguments. */
static int tcg_target_get_call_iarg_regs_count(int flags)
{
    return ARRAY_SIZE(tcg_target_call_iarg_regs);
}
static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        cpu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);
    tcg_set_frame(s, TCG_AREG0, offsetof(CPUArchState, temp_buf),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
/* Generate global QEMU prologue and epilogue code. */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    tb_ret_addr = s->code_ptr;
}