/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif

static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero", "at",   "v0",   "v1",
    "a0",   "a1",   "a2",   "a3",
    "t0",   "t1",   "t2",   "t3",
    "t4",   "t5",   "t6",   "t7",
    "s0",   "s1",   "s2",   "s3",
    "s4",   "s5",   "s6",   "s7",
    "t8",   "t9",   "k0",   "k1",
    "gp",   "sp",   "fp",   "ra",
};

/* check if we really need so many registers :P */
static const TCGReg tcg_target_reg_alloc_order[] = {
    TCG_REG_S0,
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,
    TCG_REG_T7,
    TCG_REG_T8,
    TCG_REG_T9,
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_V0,
    TCG_REG_V1
};

static const TCGReg tcg_target_call_iarg_regs[4] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3
};

static const TCGReg tcg_target_call_oarg_regs[2] = {
    TCG_REG_V0,
    TCG_REG_V1
};

static uint8_t *tb_ret_addr;

static inline uint32_t reloc_lo16_val(void *pc, tcg_target_long target)
{
    return target & 0xffff;
}

static inline void reloc_lo16(void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_lo16_val(pc, target);
}

static inline uint32_t reloc_hi16_val(void *pc, tcg_target_long target)
{
    return (target >> 16) & 0xffff;
}

static inline void reloc_hi16(void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_hi16_val(pc, target);
}

static inline uint32_t reloc_pc16_val(void *pc, tcg_target_long target)
{
    int32_t disp;

    disp = target - (tcg_target_long) pc - 4;
    if (disp != (disp << 14) >> 14) {
        tcg_abort();
    }

    return (disp >> 2) & 0xffff;
}

static inline void reloc_pc16(void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_pc16_val(pc, target);
}

static inline uint32_t reloc_26_val(void *pc, tcg_target_long target)
{
    if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
        tcg_abort();
    }

    return (target >> 2) & 0x3ffffff;
}

static inline void reloc_pc26(void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
                       | reloc_26_val(pc, target);
}
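
/*
 * The reloc_* helpers above rewrite the immediate field of an instruction
 * that has already been emitted; patch_reloc() below is the hook the common
 * TCG code uses once the final target of a relocation is known.
 */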
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    switch (type) {
    case R_MIPS_LO16:
        reloc_lo16(code_ptr, value);
        break;
    case R_MIPS_HI16:
        reloc_hi16(code_ptr, value);
        break;
    case R_MIPS_PC16:
        reloc_pc16(code_ptr, value);
        break;
    case R_MIPS_26:
        reloc_pc26(code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        break;
    case 'C':
        ct->ct |= TCG_CT_REG;
        tcg_regset_clear(ct->u.regs);
        tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
        break;
    case 'L': /* qemu_ld output arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
        break;
    case 'l': /* qemu_ld input arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if (TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
        break;
    case 'S': /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if (TARGET_LONG_BITS == 32)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           ZERO is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
        return 1;
    else
        return 0;
}

/* instruction opcodes */
enum {
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_BLEZ     = 0x06 << 26,
    OPC_BGTZ     = 0x07 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_ROTR     = OPC_SPECIAL | (0x01 << 21) | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_ROTRV    = OPC_SPECIAL | (0x01 << 6) | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MOVZ     = OPC_SPECIAL | 0x0A,
    OPC_MOVN     = OPC_SPECIAL | 0x0B,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    OPC_REGIMM   = 0x01 << 26,
    OPC_BLTZ     = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ     = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL2 = 0x1c << 26,
    OPC_MUL      = OPC_SPECIAL2 | 0x002,

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS      = OPC_SPECIAL3 | 0x004,
    OPC_WSBH     = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};

static inline void tcg_out_opc_reg(TCGContext *s, int opc,
                                   TCGReg rd, TCGReg rs, TCGReg rt)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    tcg_out32(s, inst);
}

static inline void tcg_out_opc_imm(TCGContext *s, int opc,
                                   TCGReg rt, TCGReg rs, TCGArg imm)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (imm & 0xffff);
    tcg_out32(s, inst);
}

static inline void tcg_out_opc_br(TCGContext *s, int opc,
                                  TCGReg rt, TCGReg rs)
{
    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);

    tcg_out_opc_imm(s, opc, rt, rs, offset);
}
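
/*
 * Note: the 16-bit offset of a branch emitted by tcg_out_opc_br() above is
 * filled in later through reloc_pc16()/tcg_out_reloc().  MIPS branches have
 * a delay slot, so the callers below always emit a nop (or another useful
 * instruction) right after the branch.
 */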

static inline void tcg_out_opc_sa(TCGContext *s, int opc,
                                  TCGReg rd, TCGReg rt, TCGArg sa)
{
    int32_t inst;

    inst = opc;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    inst |= (sa & 0x1F) << 6;
    tcg_out32(s, inst);
}

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, 0);
}

static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    /* Simple reg-reg move, optimising out the 'do nothing' case */
    if (ret != arg) {
        tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
    }
}

static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    if (arg == (int16_t)arg) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
    } else if (arg == (uint16_t)arg) {
        tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
    } else {
        tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
        tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
    }
}
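
/*
 * For example, tcg_out_movi() builds a constant that fits neither in a
 * signed nor in an unsigned 16-bit immediate, say 0x12345678, with a
 * two-instruction sequence:
 *     lui  reg, 0x1234
 *     ori  reg, reg, 0x5678
 */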

static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
#else
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
#else
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
#else
    /* ret and arg must be different and can't be register at */
    if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
    tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
#endif
}

static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
#endif
}

static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
                                TCGReg arg1, TCGArg arg2)
{
    if (arg2 == (int16_t) arg2) {
        tcg_out_opc_imm(s, opc, arg, arg1, arg2);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
        tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
}

static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
{
    if (val == (int16_t)val) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
        tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
    }
}

/* Helper routines for marshalling helper function arguments into
 * the correct registers and stack.
 * arg_num is where we want to put this argument, and is updated to be ready
 * for the next call. arg is the argument itself. Note that arg_num 0..3 are
 * real registers, 4+ on stack.
 *
 * We provide routines for arguments which are: immediate, 32 bit
 * value in register, 16 and 8 bit values in register (which must be zero
 * extended before use) and 64 bit value in a lo:hi register pair.
 */
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM)                               \
    static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM)            \
    {                                                                          \
        if (*arg_num < 4) {                                                    \
            DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
        } else {                                                               \
            DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT);                      \
            tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
        }                                                                      \
        (*arg_num)++;                                                          \
    }
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
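
/*
 * For example, with the helpers above, marshalling (env, addr, mem_index)
 * for a 32-bit guest places env in a0, addr in a1 and mem_index in a2;
 * anything past the fourth argument slot is written to the outgoing
 * argument area on the stack at sp + 4 * arg_num, as the o32 ABI expects.
 */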

/* We don't use the macro for this one to avoid an unnecessary reg-reg
   move when storing to the stack. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
                                           TCGReg arg)
{
    if (*arg_num < 4) {
        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
    } else {
        tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}

static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
                                           TCGReg arg_low, TCGReg arg_high)
{
    (*arg_num) = (*arg_num + 1) & ~1;

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}

static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
                           TCGArg arg2, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
        break;
    case TCG_COND_NE:
        tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
        break;
    case TCG_COND_LT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    default:
        tcg_abort();
        break;
    }
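
    /*
     * The branch emitted above still has a zero offset: patch it directly if
     * the label is already resolved, otherwise record a R_MIPS_PC16
     * relocation against it.  s->code_ptr - 4 points back at the branch
     * instruction, and the trailing nop fills its delay slot.
     */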
    if (l->has_value) {
        reloc_pc16(s->code_ptr - 4, l->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
    }
    tcg_out_nop(s);
}

/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
                            TCGArg arg2, TCGArg arg3, TCGArg arg4,
                            int label_index)
{
    void *label_ptr;

    switch (cond) {
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
        tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
        return;
    case TCG_COND_EQ:
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
        break;
    default:
        tcg_abort();
    }

    label_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
    tcg_out_nop(s);

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
        break;
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
        break;
    default:
        tcg_abort();
    }

    reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
}

static void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg c1, TCGArg c2, TCGArg v)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_NE:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    default:
        tcg_abort();
        break;
    }
}

static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg arg1, TCGArg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (arg1 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
        } else if (arg2 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        }
        break;
    case TCG_COND_NE:
        if (arg1 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
        } else if (arg2 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        tcg_abort();
        break;
    }
}

/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_NE:
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
        break;
    default:
        tcg_abort();
        break;
    }

    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);

    switch (cond) {
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
        break;
    default:
        tcg_abort();
    }

    tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}

#if defined(CONFIG_SOFTMMU)

#include "exec/softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};
#endif
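
/*
 * The softmmu slow paths below index these tables with s_bits, the log2 of
 * the access size, so entry 0 is the byte helper and entry 3 the 64-bit
 * helper.
 */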
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    void *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
# if TARGET_LONG_BITS == 64
    uint8_t *label3_ptr;
    TCGReg addr_regh;
    int addr_memh;
# endif
#endif
    data_regl = *args++;
    if (opc == 3) {
        data_regh = *args++;
    } else {
        data_regh = 0;
    }
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc & 3;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }
#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl,
                   TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0,
                    (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0,
                 TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
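
    /*
     * At this point A0 holds env plus the scaled TLB index (the tlb_table
     * offset is folded into the load displacement), AT holds the selected
     * entry's addr_read comparator, and T0 holds the guest address masked
     * down to its page together with the low alignment bits.  The compares
     * below branch to the fast path on a TLB hit and fall through into the
     * slow-path helper call on a miss.
     */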
# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9,
                 (tcg_target_long)qemu_ld_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
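    /*
     * Per the o32 PIC calling convention the helper is reached through T9,
     * since a PIC callee expects its own address in $t9.  The nop emitted
     * next fills the jalr delay slot.
     */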
    tcg_out_nop(s);

    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
        break;
    case 1:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
        break;
    case 2:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    case 3:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    default:
        tcg_abort();
    }

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
    }
#endif

    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
        break;
    case 0 | 4:
        tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 1 | 4:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
            tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
    int addr_memh;
# endif
    TCGReg addr_regh;
#endif

    data_regl = *args++;
    if (opc == 3) {
        data_regh = *args++;
    } else {
        data_regh = 0;
    }
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }

#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl,
                   TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0,
                    (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0,
                 TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    switch (opc) {
    case 0:
        tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
        break;
    case 1:
        tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
        break;
    case 2:
        tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
        break;
    case 3:
        tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
        break;
    default:
        tcg_abort();
    }
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9,
                 (tcg_target_long)qemu_st_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
    }
#endif

    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
            tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
            tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
        tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_abort();
        } else {
            /* indirect jump method */
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT,
                         (tcg_target_long)(s->tb_next + args[0]));
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
            tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_br:
        tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
        break;

    case INDEX_op_mov_i32:
        tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
        }
        /* Carry out of the low half: AT < args[2] iff the addition wrapped. */
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_sub_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
        }
        /* Borrow from the low half: args[2] < AT iff the subtraction wrapped. */
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;

    case INDEX_op_mul_i32:
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 1)
        tcg_out_opc_reg(s, OPC_MUL, args[0], args[1], args[2]);
#else
        tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
#endif
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
        break;
    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_nor_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
        break;
    case INDEX_op_not_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
        }
        break;
    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
        }
        break;

    /* The bswap routines do not work on non-R2 CPUs.  In that case
       we let TCG generate the corresponding code. */
    case INDEX_op_bswap16_i32:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0], args[1]);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
        tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
                        ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
        break;

    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}

static const TCGTargetOpDef mips_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "C" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },

    { INDEX_op_and_i32, { "r", "rZ", "rI" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_not_i32, { "r", "rZ" } },
    { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },

    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },

    { INDEX_op_deposit_i32, { "r", "0", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rZ", "0" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "L", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
#else
    { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_RA,       /* should be last for ABI compliance */
};

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size;

    /* reserve some stack space, also for TCG temps. */
    frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                 + TCG_STATIC_CALL_ARGS_SIZE
                 + CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                  + TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* TB prologue */
    tcg_out_addi(s, TCG_REG_SP, -frame_size);
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* Call generated code */
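    /*
     * The move of the env pointer into TCG_AREG0 sits in the delay slot of
     * the jr below, so it still executes before the generated code starts
     * running.
     */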
    tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    tb_ret_addr = s->code_ptr;

    /* TB epilogue */
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
    tcg_out_addi(s, TCG_REG_SP, frame_size);
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
    tcg_regset_set(tcg_target_call_clobber_regs,
                   (1 << TCG_REG_V0) |
                   (1 << TCG_REG_V1) |
                   (1 << TCG_REG_A0) |
                   (1 << TCG_REG_A1) |
                   (1 << TCG_REG_A2) |
                   (1 << TCG_REG_A3) |
                   (1 << TCG_REG_T0) |
                   (1 << TCG_REG_T1) |
                   (1 << TCG_REG_T2) |
                   (1 << TCG_REG_T3) |
                   (1 << TCG_REG_T4) |
                   (1 << TCG_REG_T5) |
                   (1 << TCG_REG_T6) |
                   (1 << TCG_REG_T7) |
                   (1 << TCG_REG_T8) |
                   (1 << TCG_REG_T9));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA);   /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);   /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);   /* global pointer */

    tcg_add_target_add_op_defs(mips_op_defs);
}