/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
/* TODO list:
 * - See TODO comments in code.
 */
/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)
/* Single bit n. */
#define BIT(n) (1 << (n))

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
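/* Example: BITS(7, 4) selects bits 7..4, i.e. 0xf0:
   0xffffffffU << 24 == 0xff000000, >> 28 == 0xf, << 4 == 0xf0. */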
/* Used for function call generation. */
#define TCG_REG_CALL_STACK              TCG_REG_R4
#define TCG_TARGET_STACK_ALIGN          16
#define TCG_TARGET_CALL_STACK_OFFSET    0
/* Code pointer recorded by tcg_target_qemu_prologue() below.
   TODO: more documentation. */
static uint8_t *tb_ret_addr;
/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
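/* When guest addresses are wider than host registers
   (TARGET_LONG_BITS > TCG_TARGET_REG_BITS), a guest address occupies two
   host registers, which is why the qemu_ld/st address constraints L and S
   are doubled in that case (likewise R64 for 64 bit data on 32 bit hosts). */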
/* Operand constraints for all TCG opcodes supported by this backend. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_call, { RI } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_mov_i32, { R, R } },
    { INDEX_op_movi_i32, { R } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { R, R } },
    { INDEX_op_movi_i64, { R } },

    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};
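/* The final { -1 } entry terminates the table for
   tcg_add_target_add_op_defs(), called from tcg_target_init() below. */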
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};
#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};
static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};
#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    *(tcg_target_long *)code_ptr = value;
}
/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif
/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    *(tcg_target_ulong *)s->code_ptr = v;
    s->code_ptr += sizeof(tcg_target_ulong);
}
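/* Note: tcg_out_i() above and tcg_out64() below store through cast
   pointers, so they assume the host allows unaligned writes into the
   code buffer (operands follow a two-byte opcode header). */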
/* Write 64 bit value. */
static void tcg_out64(TCGContext *s, uint64_t v)
{
    *(uint64_t *)s->code_ptr = v;
    s->code_ptr += sizeof(v);
}
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}
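/* Every bytecode instruction thus starts with a two-byte header: the
   opcode followed by a size byte that is 0 for now.  The emitters below
   patch the real instruction size into old_code_ptr[1] once all operands
   have been written, so a consumer can step from one instruction to the
   next without decoding the operands. */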
/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}
/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif
/* Write label. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}
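/* For a forward reference, tcg_out_reloc() records this position and we
   reserve space for the address; patch_reloc() above fills it in later,
   which is why it only accepts relocations of type
   sizeof(tcg_target_long) with addend 0. */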
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (uint32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
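/* A TCG_TYPE_I64 constant that survives truncation to 32 bits (the
   arg == arg32 test above) is deliberately encoded in the shorter
   movi_i32 form. */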
static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_call:
        tcg_out_ri(s, const_args[0], args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_movi_i32:
        TODO(); /* Handled by tcg_out_movi? */
        break;
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (uint32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i64:
        TODO();
        break;
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        /* TODO: Implementation of rotl_i64, rotr_i64 missing in tci.c. */
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_end:
        TODO();
        break;
    default:
        fprintf(stderr, "Missing: %s\n", tcg_op_defs[opc].name);
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       tcg_target_long arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}
/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}
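/* Every constant matches: the bytecode format can embed full native-size
   immediates (see tcg_out_ri() above), so no value range needs to be
   enforced here. */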
static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        cpu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);
    tcg_set_frame(s, TCG_AREG0, offsetof(CPUArchState, temp_buf),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}
/* Generate global QEMU prologue and epilogue code. */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    tb_ret_addr = s->code_ptr;
}
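/* Presumably no real prologue/epilogue instructions are needed here
   because the bytecode is interpreted rather than executed natively;
   only the return address marker tb_ret_addr is recorded. */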