/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"
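
/*
 * Operand constraints.  Every TCI operand lives in a register, so each
 * opcode below maps to a C_Ox_Iy(...) constraint set: x register ('r')
 * outputs and y register inputs.
 */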

static TCGConstraintSetIndex tcg_target_op_def(TCGOpcode op)
{
    switch (op) {
    case INDEX_op_goto_ptr:
        return C_O0_I1(r);

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        return C_O1_I1(r, r);

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
        return C_O0_I2(r, r);

    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        return C_O1_I2(r, r, r);

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return C_O0_I2(r, r);

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return C_O2_I4(r, r, r, r, r, r);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        return C_O0_I4(r, r, r, r);
#endif

    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        return C_O2_I2(r, r, r, r);

    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        return C_O1_I4(r, r, r, r, r);

    case INDEX_op_qemu_ld_a32_i32:
        return C_O1_I1(r, r);
    case INDEX_op_qemu_ld_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O1_I2(r, r, r);
    case INDEX_op_qemu_ld_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I1(r, r, r);
    case INDEX_op_qemu_ld_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O1_I1(r, r) : C_O2_I2(r, r, r, r);
    case INDEX_op_qemu_st_a32_i32:
        return C_O0_I2(r, r);
    case INDEX_op_qemu_st_a64_i32:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a32_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I3(r, r, r);
    case INDEX_op_qemu_st_a64_i64:
        return TCG_TARGET_REG_BITS == 64 ? C_O0_I2(r, r) : C_O0_I4(r, r, r, r);

    default:
        g_assert_not_reached();
    }
}

static const int tcg_target_reg_alloc_order[] = {
    /* Either 2 or 4 of these are call clobbered, so use them last. */

/* No call arguments via registers.  All will be stored on the "stack". */
static const int tcg_target_call_iarg_regs[] = { };

static TCGReg tcg_target_call_oarg_reg(TCGCallReturnKind kind, int slot)
{
    tcg_debug_assert(kind == TCG_CALL_RET_NORMAL);
    tcg_debug_assert(slot >= 0 && slot < 128 / TCG_TARGET_REG_BITS);
    return TCG_REG_R0 + slot;
}

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {

static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    intptr_t diff = value - (intptr_t)(code_ptr + 1);

    tcg_debug_assert(addend == 0);
    tcg_debug_assert(type == 20);

    if (diff == sextract32(diff, 0, type)) {
        tcg_patch32(code_ptr, deposit32(*code_ptr, 32 - type, type, diff));
        return true;
    }
    return false;
}
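
/*
 * The 20-bit pc-relative displacement is measured in bytes from the end
 * of the 4-byte branch word and is deposited into bits [12:31].  As an
 * illustrative example: a target 64 bytes past the end of the branch
 * gives diff == 64, which fits in 20 bits and is patched in; a diff that
 * does not fit leaves the word unpatched and patch_reloc returns false.
 */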

static void stack_bounds_check(TCGReg base, intptr_t offset)
{
    if (base == TCG_REG_CALL_STACK) {
        tcg_debug_assert(offset >= 0);
        tcg_debug_assert(offset < (TCG_STATIC_CALL_ARGS_SIZE +
                                   TCG_STATIC_FRAME_SIZE));
    }
}
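
/*
 * Each TCI instruction is emitted as a single 32-bit word: the opcode in
 * the low 8 bits, followed by 4-bit register fields and/or immediate
 * fields as deposited below.  The tcg_out_op_* suffixes name the operand
 * kinds: r = register, i = 20-bit signed immediate, s = 16-bit signed
 * offset, b = 6-bit bit-field position/length, c = condition, l = label,
 * m = memop index, p = pointer, v = no operands.
 */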

static void tcg_out_op_l(TCGContext *s, TCGOpcode op, TCGLabel *l0)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l0, 0);
    insn = deposit32(insn, 0, 8, op);
    tcg_out32(s, insn);
}

static void tcg_out_op_p(TCGContext *s, TCGOpcode op, void *p0)
{
    tcg_insn_unit insn = 0;
    intptr_t diff;

    /* Special case for exit_tb: map null -> 0. */
    if (p0 == NULL) {
        diff = 0;
    } else {
        diff = p0 - (void *)(s->code_ptr + 1);
        tcg_debug_assert(diff != 0);
        if (diff != sextract32(diff, 0, 20)) {
            tcg_raise_tb_overflow(s);
        }
    }
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 12, 20, diff);
    tcg_out32(s, insn);
}

static void tcg_out_op_r(TCGContext *s, TCGOpcode op, TCGReg r0)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_v(TCGContext *s, TCGOpcode op)
{
    tcg_out32(s, (uint8_t)op);
}

static void tcg_out_op_ri(TCGContext *s, TCGOpcode op, TCGReg r0, int32_t i1)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i1 == sextract32(i1, 0, 20));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 20, i1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rl(TCGContext *s, TCGOpcode op, TCGReg r0, TCGLabel *l1)
{
    tcg_insn_unit insn = 0;

    tcg_out_reloc(s, s->code_ptr, 20, l1, 0);
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    tcg_out32(s, insn);
}

static void tcg_out_op_rr(TCGContext *s, TCGOpcode op, TCGReg r0, TCGReg r1)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrm(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGArg m2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(m2 == extract32(m2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, m2);
    tcg_out32(s, insn);
}
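
/*
 * In tcg_out_op_rrm above, the 16-bit m2 field carries the memop index of
 * a qemu_ld/qemu_st operation (see the qemu_* cases in tcg_out_op below).
 */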

static void tcg_out_op_rrr(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, TCGReg r2)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrs(TCGContext *s, TCGOpcode op,
                           TCGReg r0, TCGReg r1, intptr_t i2)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(i2 == sextract32(i2, 0, 16));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 16, i2);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                            TCGReg r1, uint8_t b2, uint8_t b3)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b2 == extract32(b2, 0, 6));
    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 6, b2);
    insn = deposit32(insn, 22, 6, b3);
    tcg_out32(s, insn);
}
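
/*
 * The 6-bit b* fields above (and in tcg_out_op_rrrbb below) hold the
 * bit-field position and length used by extract, sextract and deposit.
 */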

static void tcg_out_op_rrrc(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGCond c3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, c3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrbb(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, uint8_t b3, uint8_t b4)
{
    tcg_insn_unit insn = 0;

    tcg_debug_assert(b3 == extract32(b3, 0, 6));
    tcg_debug_assert(b4 == extract32(b4, 0, 6));
    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 6, b3);
    insn = deposit32(insn, 26, 6, b4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrr(TCGContext *s, TCGOpcode op, TCGReg r0,
                             TCGReg r1, TCGReg r2, TCGReg r3, TCGReg r4)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrr(TCGContext *s, TCGOpcode op,
                            TCGReg r0, TCGReg r1, TCGReg r2, TCGReg r3)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrc(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGCond c5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, c5);
    tcg_out32(s, insn);
}

static void tcg_out_op_rrrrrr(TCGContext *s, TCGOpcode op,
                              TCGReg r0, TCGReg r1, TCGReg r2,
                              TCGReg r3, TCGReg r4, TCGReg r5)
{
    tcg_insn_unit insn = 0;

    insn = deposit32(insn, 0, 8, op);
    insn = deposit32(insn, 8, 4, r0);
    insn = deposit32(insn, 12, 4, r1);
    insn = deposit32(insn, 16, 4, r2);
    insn = deposit32(insn, 20, 4, r3);
    insn = deposit32(insn, 24, 4, r4);
    insn = deposit32(insn, 28, 4, r5);
    tcg_out32(s, insn);
}

static void tcg_out_ldst(TCGContext *s, TCGOpcode op, TCGReg val,
                         TCGReg base, intptr_t offset)
{
    stack_bounds_check(base, offset);
    if (offset != sextract32(offset, 0, 16)) {
        /* Fold an offset that does not fit the 16-bit field into TCG_REG_TMP. */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP, offset);
        tcg_out_op_rrr(s, (TCG_TARGET_REG_BITS == 32
                           ? INDEX_op_add_i32 : INDEX_op_add_i64),
                       TCG_REG_TMP, TCG_REG_TMP, base);
        base = TCG_REG_TMP;
        offset = 0;
    }
    tcg_out_op_rrs(s, op, val, base, offset);
}
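
/*
 * Loads, moves and stores below dispatch on the TCG type: TCG_TYPE_I32
 * uses the _i32 opcode, and TCG_TYPE_I64 (only when the host is 64-bit)
 * uses the _i64 opcode.
 */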

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_ld_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_ld_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
        break;
#endif
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg)
{
    switch (type) {
    case TCG_TYPE_I32:
#if TCG_TARGET_REG_BITS == 64
        arg = (int32_t)arg;
        /* fall through */
    case TCG_TYPE_I64:
#endif
        break;
    default:
        g_assert_not_reached();
    }

    if (arg == sextract32(arg, 0, 20)) {
        tcg_out_op_ri(s, INDEX_op_tci_movi, ret, arg);
    } else {
        tcg_insn_unit insn = 0;

        new_pool_label(s, arg, 20, s->code_ptr, 0);
        insn = deposit32(insn, 0, 8, INDEX_op_tci_movl);
        insn = deposit32(insn, 8, 4, ret);
        tcg_out32(s, insn);
    }
}
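
/*
 * Immediates that fit in 20 signed bits are encoded directly with
 * tci_movi; anything wider is placed in the constant pool and loaded
 * through a pc-relative tci_movl.
 */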

static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
    }
}

static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
    }
}

static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
    tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32s(s, rd, rs);
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_ext32u(s, rd, rs);
}

static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
{
    return false;
}

static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
    /* This function is only used for passing structs by reference. */
    g_assert_not_reached();
}

static void tcg_out_call(TCGContext *s, const tcg_insn_unit *func,
                         const TCGHelperInfo *info)
{
    ffi_cif *cif = info->cif;
    tcg_insn_unit insn = 0;
    uint8_t which;

    if (cif->rtype == &ffi_type_void) {
        which = 0;
    } else {
        tcg_debug_assert(cif->rtype->size == 4 ||
                         cif->rtype->size == 8 ||
                         cif->rtype->size == 16);
        which = ctz32(cif->rtype->size) - 1;
    }
    new_pool_l2(s, 20, s->code_ptr, 0, (uintptr_t)func, (uintptr_t)cif);
    insn = deposit32(insn, 0, 8, INDEX_op_call);
    insn = deposit32(insn, 8, 4, which);
    tcg_out32(s, insn);
}
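
/*
 * The call target and its ffi_cif are stored as a pair in the constant
 * pool; the 4-bit "which" field selects the return-value size for the
 * interpreter: 0 for void, else ctz32(size) - 1 for 4/8/16-byte results.
 */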

#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i64):    \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)                             \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x)                          \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif

static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg)
{
    tcg_out_op_p(s, INDEX_op_exit_tb, (void *)arg);
}

static void tcg_out_goto_tb(TCGContext *s, int which)
{
    /* indirect jump method. */
    tcg_out_op_p(s, INDEX_op_goto_tb, (void *)get_jmp_target_addr(s, which));
    set_jmp_reset_offset(s, which);
}

void tb_target_set_jmp_target(const TranslationBlock *tb, int n,
                              uintptr_t jmp_rx, uintptr_t jmp_rw)
{
    /* Always indirect, nothing to do */
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg args[TCG_MAX_OP_ARGS],
                       const int const_args[TCG_MAX_OP_ARGS])
{
    TCGOpcode exts;

    switch (opc) {
    case INDEX_op_goto_ptr:
        tcg_out_op_r(s, opc, args[0]);
        break;

    case INDEX_op_br:
        tcg_out_op_l(s, opc, arg_label(args[0]));
        break;

    CASE_32_64(setcond)
        tcg_out_op_rrrc(s, opc, args[0], args[1], args[2], args[3]);
        break;

    CASE_32_64(movcond)
    case INDEX_op_setcond2_i32:
        tcg_out_op_rrrrrc(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

    case INDEX_op_ld_i32:
    case INDEX_op_st_i32:
        tcg_out_ldst(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(andc)     /* Optional (TCG_TARGET_HAS_andc_*). */
    CASE_32_64(orc)      /* Optional (TCG_TARGET_HAS_orc_*). */
    CASE_32_64(eqv)      /* Optional (TCG_TARGET_HAS_eqv_*). */
    CASE_32_64(nand)     /* Optional (TCG_TARGET_HAS_nand_*). */
    CASE_32_64(nor)      /* Optional (TCG_TARGET_HAS_nor_*). */
    CASE_32_64(rotl)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(rotr)     /* Optional (TCG_TARGET_HAS_rot_*). */
    CASE_32_64(div)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(divu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(rem)      /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(remu)     /* Optional (TCG_TARGET_HAS_div_*). */
    CASE_32_64(clz)      /* Optional (TCG_TARGET_HAS_clz_*). */
    CASE_32_64(ctz)      /* Optional (TCG_TARGET_HAS_ctz_*). */
        tcg_out_op_rrr(s, opc, args[0], args[1], args[2]);
        break;

    CASE_32_64(deposit)  /* Optional (TCG_TARGET_HAS_deposit_*). */
        {
            TCGArg pos = args[3], len = args[4];
            TCGArg max = opc == INDEX_op_deposit_i32 ? 32 : 64;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrrbb(s, opc, args[0], args[1], args[2], pos, len);
        }
        break;

    CASE_32_64(extract)  /* Optional (TCG_TARGET_HAS_extract_*). */
    CASE_32_64(sextract) /* Optional (TCG_TARGET_HAS_sextract_*). */
        {
            TCGArg pos = args[2], len = args[3];
            TCGArg max = tcg_op_defs[opc].flags & TCG_OPF_64BIT ? 64 : 32;

            tcg_debug_assert(pos < max);
            tcg_debug_assert(pos + len <= max);

            tcg_out_op_rrbb(s, opc, args[0], args[1], pos, len);
        }
        break;

    CASE_32_64(brcond)
        tcg_out_op_rrrc(s, (opc == INDEX_op_brcond_i32
                            ? INDEX_op_setcond_i32 : INDEX_op_setcond_i64),
                        TCG_REG_TMP, args[0], args[1], args[2]);
        tcg_out_op_rl(s, opc, TCG_REG_TMP, arg_label(args[3]));
        break;

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
    CASE_32_64(ctpop)    /* Optional (TCG_TARGET_HAS_ctpop_*). */
    case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
    case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        break;

    case INDEX_op_bswap16_i32: /* Optional (TCG_TARGET_HAS_bswap16_i32). */
        exts = INDEX_op_ext16s_i32;
        goto do_bswap;
    case INDEX_op_bswap16_i64: /* Optional (TCG_TARGET_HAS_bswap16_i64). */
        exts = INDEX_op_ext16s_i64;
        goto do_bswap;
    case INDEX_op_bswap32_i64: /* Optional (TCG_TARGET_HAS_bswap32_i64). */
        exts = INDEX_op_ext32s_i64;
    do_bswap:
        /* The base tci bswaps zero-extend, and ignore high bits. */
        tcg_out_op_rr(s, opc, args[0], args[1]);
        if (args[2] & TCG_BSWAP_OS) {
            tcg_out_op_rr(s, exts, args[0], args[0]);
        }
        break;

    CASE_32_64(add2)
    CASE_32_64(sub2)
        tcg_out_op_rrrrrr(s, opc, args[0], args[1], args[2],
                          args[3], args[4], args[5]);
        break;

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_op_rrrrrc(s, INDEX_op_setcond2_i32, TCG_REG_TMP,
                          args[0], args[1], args[2], args[3], args[4]);
        tcg_out_op_rl(s, INDEX_op_brcond_i32, TCG_REG_TMP, arg_label(args[5]));
        break;
#endif

    CASE_32_64(mulu2)
    CASE_32_64(muls2)
        tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_qemu_ld_a32_i32:
    case INDEX_op_qemu_st_a32_i32:
        tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        break;
    case INDEX_op_qemu_ld_a64_i32:
    case INDEX_op_qemu_st_a64_i32:
    case INDEX_op_qemu_ld_a32_i64:
    case INDEX_op_qemu_st_a32_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[3]);
            tcg_out_op_rrrr(s, opc, args[0], args[1], args[2], TCG_REG_TMP);
        }
        break;
    case INDEX_op_qemu_ld_a64_i64:
    case INDEX_op_qemu_st_a64_i64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_op_rrm(s, opc, args[0], args[1], args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_TMP, args[4]);
            tcg_out_op_rrrrr(s, opc, args[0], args[1],
                             args[2], args[3], TCG_REG_TMP);
        }
        break;

    case INDEX_op_mb:
        tcg_out_op_v(s, opc);
        break;

    case INDEX_op_mov_i32:   /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_call:      /* Always emitted via tcg_out_call. */
    case INDEX_op_exit_tb:   /* Always emitted via tcg_out_exit_tb. */
    case INDEX_op_goto_tb:   /* Always emitted via tcg_out_goto_tb. */
    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_extrl_i64_i32:
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
                       intptr_t offset)
{
    switch (type) {
    case TCG_TYPE_I32:
        tcg_out_ldst(s, INDEX_op_st_i32, val, base, offset);
        break;
#if TCG_TARGET_REG_BITS == 64
    case TCG_TYPE_I64:
        tcg_out_ldst(s, INDEX_op_st_i64, val, base, offset);
        break;
#endif
    default:
        g_assert_not_reached();
    }
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static bool tcg_target_const_match(int64_t val, int ct,
                                   TCGType type, TCGCond cond, int vece)
{
    return ct & TCG_CT_CONST;
}
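
/*
 * TCI defines no constant operand constraints of its own, so any value
 * satisfies only the generic TCG_CT_CONST constraint; all other operands
 * must be placed in registers.
 */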

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    memset(p, 0, sizeof(*p) * count);
}

static void tcg_target_init(TCGContext *s)
{
    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I32] = BIT(TCG_TARGET_NB_REGS) - 1;
    /* Registers available for 64 bit operations. */
    tcg_target_available_regs[TCG_TYPE_I64] = BIT(TCG_TARGET_NB_REGS) - 1;
    /*
     * The interpreter "registers" are in the local stack frame and
     * cannot be clobbered by the called helper functions.  However,
     * the interpreter assumes a 128-bit return value and assigns to
     * the return value registers.
     */
    tcg_target_call_clobber_regs =
        MAKE_64BIT_MASK(TCG_REG_R0, 128 / TCG_TARGET_REG_BITS);

    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);

    /* The call arguments come first, followed by the temp storage. */
    tcg_set_frame(s, TCG_REG_CALL_STACK, TCG_STATIC_CALL_ARGS_SIZE,
                  TCG_STATIC_FRAME_SIZE);
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}

static void tcg_out_tb_start(TCGContext *s)
{
    /* nothing to do */
}

bool tcg_target_has_memory_bswap(MemOp memop)
{
    return true;
}