/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Single bit n. */
#define BIT(n) (1 << (n))

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
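/* For example, BITS(7, 4) expands to (((0xffffffffU << 24) >> 28) << 4),
   i.e. 0x000000f0, a mask with bits 7..4 set. */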

/* Used for function call generation. */
#define TCG_REG_CALL_STACK              TCG_REG_R4
#define TCG_TARGET_STACK_ALIGN          16
#define TCG_TARGET_CALL_STACK_OFFSET    0

/* TODO: documentation. */
static uint8_t *tb_ret_addr;

/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
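/* On a 32-bit host a 64-bit value occupies a register pair, so R64 names two
   "r" slots there; likewise, L and S double up when the guest address
   (TARGET_LONG_BITS) is wider than a host register. */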

/* TODO: documentation. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_jmp, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};
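/* The { -1 } entry above is the sentinel that terminates the list; it is
   consumed by tcg_add_target_add_op_defs(), called from tcg_target_init()
   below. */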

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}

/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}

/* Write 64 bit value. */
static void tcg_out64(TCGContext *s, uint64_t v)
{
    *(uint64_t *)s->code_ptr = v;
    s->code_ptr += sizeof(v);
}

/* Write opcode; the second byte reserves space for the instruction length,
   which each tcg_out_* routine patches in once its operands are written. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
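
/* A sketch of the bytecode layout implied by the emitters below (shown here
   for illustration):
 *
 *   byte 0:  opcode
 *   byte 1:  total instruction length in bytes (written as 0 above, patched
 *            later via old_code_ptr[1] at the end of each tcg_out_* function)
 *   byte 2+: operands (registers as single bytes; constants as a TCG_CONST
 *            marker byte followed by the immediate data)
 */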

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
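
/* A forward-referenced label has no value yet, so the else branch above
   records a relocation and skips a hole of sizeof(tcg_target_ulong) bytes;
   patch_reloc() fills in the address once the label is bound. */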

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (uint32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
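
/* Note: a 64-bit constant whose value also fits in 32 bits is encoded as
   movi_i32 above, which keeps the bytecode four bytes shorter per
   instruction. */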

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_jmp:
        TODO();
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (uint32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
#ifdef CONFIG_TCG_PASS_AREG0
        tcg_out_r(s, TCG_AREG0);
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
#ifdef CONFIG_TCG_PASS_AREG0
        tcg_out_r(s, TCG_AREG0);
#endif
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

/* Maximum number of registers used for input function arguments. */
static int tcg_target_get_call_iarg_regs_count(int flags)
{
    return ARRAY_SIZE(tcg_target_call_iarg_regs);
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        cpu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);
    tcg_set_frame(s, TCG_AREG0, offsetof(CPUArchState, temp_buf),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    tb_ret_addr = s->code_ptr;
}