/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif

#ifndef NDEBUG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero",
    "at",
    "v0",
    "v1",
    "a0",
    "a1",
    "a2",
    "a3",
    "t0",
    "t1",
    "t2",
    "t3",
    "t4",
    "t5",
    "t6",
    "t7",
    "s0",
    "s1",
    "s2",
    "s3",
    "s4",
    "s5",
    "s6",
    "s7",
    "t8",
    "t9",
    "k0",
    "k1",
    "gp",
    "sp",
    "fp",
    "ra",
};
#endif

/* check if we really need so many registers :P */
static const TCGReg tcg_target_reg_alloc_order[] = {
    TCG_REG_S0,
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,
    TCG_REG_T7,
    TCG_REG_T8,
    TCG_REG_T9,
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_V0,
    TCG_REG_V1
};

static const TCGReg tcg_target_call_iarg_regs[4] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3
};

static const TCGReg tcg_target_call_oarg_regs[2] = {
    TCG_REG_V0,
    TCG_REG_V1
};

static uint8_t *tb_ret_addr;

static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
{
    return target & 0xffff;
}

static inline void reloc_lo16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_lo16_val(pc, target);
}

static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
{
    return (target >> 16) & 0xffff;
}

static inline void reloc_hi16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_hi16_val(pc, target);
}

static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
{
    int32_t disp;

    disp = target - (tcg_target_long) pc - 4;
    if (disp != (disp << 14) >> 14) {
        tcg_abort ();
    }

    return (disp >> 2) & 0xffff;
}

static inline void reloc_pc16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_pc16_val(pc, target);
}

static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
{
    if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
        tcg_abort ();
    }

    return (target >> 2) & 0x3ffffff;
}

static inline void reloc_pc26 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
                       | reloc_26_val(pc, target);
}

static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    value += addend;
    switch(type) {
    case R_MIPS_LO16:
        reloc_lo16(code_ptr, value);
        break;
    case R_MIPS_HI16:
        reloc_hi16(code_ptr, value);
        break;
    case R_MIPS_PC16:
        reloc_pc16(code_ptr, value);
        break;
    case R_MIPS_26:
        reloc_pc26(code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}
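
/*
 * Added note (informal sketch of how the relocations above are used): a
 * forward conditional branch is emitted with whatever happens to be in its
 * 16-bit offset field and a R_MIPS_PC16 relocation is queued for it (see
 * tcg_out_brcond below).  Once the label address is known, patch_reloc()
 * rewrites only those low 16 bits with the word displacement measured from
 * the delay slot (pc + 4), as computed by reloc_pc16_val().
 */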

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        break;
    case 'C':
        ct->ct |= TCG_CT_REG;
        tcg_regset_clear(ct->u.regs);
        tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
        break;
    case 'L': /* qemu_ld output arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
        break;
    case 'l': /* qemu_ld input arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if (TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
        break;
    case 'S': /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if (TARGET_LONG_BITS == 32)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           ZERO is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
        return 1;
    else
        return 0;
}

/* instruction opcodes */
enum {
    OPC_BEQ = 0x04 << 26,
    OPC_BNE = 0x05 << 26,
    OPC_BLEZ = 0x06 << 26,
    OPC_BGTZ = 0x07 << 26,
    OPC_ADDIU = 0x09 << 26,
    OPC_SLTI = 0x0A << 26,
    OPC_SLTIU = 0x0B << 26,
    OPC_ANDI = 0x0C << 26,
    OPC_ORI = 0x0D << 26,
    OPC_XORI = 0x0E << 26,
    OPC_LUI = 0x0F << 26,
    OPC_LB = 0x20 << 26,
    OPC_LH = 0x21 << 26,
    OPC_LW = 0x23 << 26,
    OPC_LBU = 0x24 << 26,
    OPC_LHU = 0x25 << 26,
    OPC_LWU = 0x27 << 26,
    OPC_SB = 0x28 << 26,
    OPC_SH = 0x29 << 26,
    OPC_SW = 0x2B << 26,

    OPC_SPECIAL = 0x00 << 26,
    OPC_SLL = OPC_SPECIAL | 0x00,
    OPC_SRL = OPC_SPECIAL | 0x02,
    OPC_ROTR = OPC_SPECIAL | (0x01 << 21) | 0x02,
    OPC_SRA = OPC_SPECIAL | 0x03,
    OPC_SLLV = OPC_SPECIAL | 0x04,
    OPC_SRLV = OPC_SPECIAL | 0x06,
    OPC_ROTRV = OPC_SPECIAL | (0x01 << 6) | 0x06,
    OPC_SRAV = OPC_SPECIAL | 0x07,
    OPC_JR = OPC_SPECIAL | 0x08,
    OPC_JALR = OPC_SPECIAL | 0x09,
    OPC_MOVZ = OPC_SPECIAL | 0x0A,
    OPC_MOVN = OPC_SPECIAL | 0x0B,
    OPC_MFHI = OPC_SPECIAL | 0x10,
    OPC_MFLO = OPC_SPECIAL | 0x12,
    OPC_MULT = OPC_SPECIAL | 0x18,
    OPC_MULTU = OPC_SPECIAL | 0x19,
    OPC_DIV = OPC_SPECIAL | 0x1A,
    OPC_DIVU = OPC_SPECIAL | 0x1B,
    OPC_ADDU = OPC_SPECIAL | 0x21,
    OPC_SUBU = OPC_SPECIAL | 0x23,
    OPC_AND = OPC_SPECIAL | 0x24,
    OPC_OR = OPC_SPECIAL | 0x25,
    OPC_XOR = OPC_SPECIAL | 0x26,
    OPC_NOR = OPC_SPECIAL | 0x27,
    OPC_SLT = OPC_SPECIAL | 0x2A,
    OPC_SLTU = OPC_SPECIAL | 0x2B,

    OPC_REGIMM = 0x01 << 26,
    OPC_BLTZ = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL2 = 0x1c << 26,
    OPC_MUL = OPC_SPECIAL2 | 0x002,

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS = OPC_SPECIAL3 | 0x004,
    OPC_WSBH = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB = OPC_SPECIAL3 | 0x420,
    OPC_SEH = OPC_SPECIAL3 | 0x620,
};
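
/*
 * Added illustrative example (not from the original source): with the R-type
 * emitter below, tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0,
 * TCG_REG_A1) assembles "addu $v0, $a0, $a1", i.e. 0x00851021:
 * rs=$a0(4)<<21 | rt=$a1(5)<<16 | rd=$v0(2)<<11 | funct 0x21.
 */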

/*
 * Type reg
 */
static inline void tcg_out_opc_reg(TCGContext *s, int opc,
                                   TCGReg rd, TCGReg rs, TCGReg rt)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    tcg_out32(s, inst);
}

/*
 * Type immediate
 */
static inline void tcg_out_opc_imm(TCGContext *s, int opc,
                                   TCGReg rt, TCGReg rs, TCGArg imm)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (imm & 0xffff);
    tcg_out32(s, inst);
}
367 * Type branch
369 static inline void tcg_out_opc_br(TCGContext *s, int opc,
370 TCGReg rt, TCGReg rs)
372 /* We pay attention here to not modify the branch target by reading
373 the existing value and using it again. This ensure that caches and
374 memory are kept coherent during retranslation. */
375 uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);
377 tcg_out_opc_imm(s, opc, rt, rs, offset);
381 * Type sa
383 static inline void tcg_out_opc_sa(TCGContext *s, int opc,
384 TCGReg rd, TCGReg rt, TCGArg sa)
386 int32_t inst;
388 inst = opc;
389 inst |= (rt & 0x1F) << 16;
390 inst |= (rd & 0x1F) << 11;
391 inst |= (sa & 0x1F) << 6;
392 tcg_out32(s, inst);
396 static inline void tcg_out_nop(TCGContext *s)
398 tcg_out32(s, 0);
401 static inline void tcg_out_mov(TCGContext *s, TCGType type,
402 TCGReg ret, TCGReg arg)
404 /* Simple reg-reg move, optimising out the 'do nothing' case */
405 if (ret != arg) {
406 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
410 static inline void tcg_out_movi(TCGContext *s, TCGType type,
411 TCGReg reg, tcg_target_long arg)
413 if (arg == (int16_t)arg) {
414 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
415 } else if (arg == (uint16_t)arg) {
416 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
417 } else {
418 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
419 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
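
/*
 * Added example (not in the original comments): tcg_out_movi(s,
 * TCG_TYPE_I32, TCG_REG_T0, 0x12345678) fits neither 16-bit form, so it
 * emits "lui $t0, 0x1234" followed by "ori $t0, $t0, 0x5678"; small
 * constants collapse to a single addiu/ori against $zero.
 */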

static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
#else
    /* ret and arg can't be the at register */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
#else
    /* ret and arg can't be the at register */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
    tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
#else
    /* ret and arg must be different and can't be the at register */
    if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
    tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#endif
}

static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
#endif
}

static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 2)
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
#endif
}

static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
                                TCGReg arg1, TCGArg arg2)
{
    if (arg2 == (int16_t) arg2) {
        tcg_out_opc_imm(s, opc, arg, arg1, arg2);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
        tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
    }
}
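
/*
 * Added note: MIPS load/store instructions only take a signed 16-bit
 * displacement, so offsets outside [-0x8000, 0x7fff] are first materialised
 * into the reserved $at scratch register, added to the base, and the access
 * is then done with a zero displacement.
 */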

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
}

static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
{
    if (val == (int16_t)val) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
        tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
    }
}

/* Helper routines for marshalling helper function arguments into
 * the correct registers and stack.
 * arg_num is where we want to put this argument, and is updated to be ready
 * for the next call. arg is the argument itself. Note that arg_num 0..3 go
 * in real registers, 4 and up go on the stack.
 *
 * We provide routines for arguments which are: immediate, 32 bit
 * value in register, 16 and 8 bit values in register (which must be zero
 * extended before use) and 64 bit value in a lo:hi register pair.
 */
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
{ \
    if (*arg_num < 4) { \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
    } else { \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
        tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
    } \
    (*arg_num)++; \
}
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG

/* We don't use the macro for this one to avoid an unnecessary reg-reg
   move when storing to the stack. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
                                           TCGReg arg)
{
    if (*arg_num < 4) {
        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
    } else {
        tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}

static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
                                           TCGReg arg_low, TCGReg arg_high)
{
    (*arg_num) = (*arg_num + 1) & ~1;

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}
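
/*
 * Added background note (o32 calling convention, which this code assumes):
 * the first four argument words travel in $a0-$a3 and anything beyond that
 * lives in the caller's outgoing stack area at 4 * arg_num($sp); 64-bit
 * arguments occupy an even register/slot pair, which is why
 * tcg_out_call_iarg_reg64() rounds arg_num up before splitting the value
 * into its low and high halves.
 */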

static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
                           TCGArg arg2, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
        break;
    case TCG_COND_NE:
        tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
        break;
    case TCG_COND_LT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LE:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GT:
        if (arg2 == 0) {
            tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
            tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        }
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    default:
        tcg_abort();
        break;
    }
    if (l->has_value) {
        reloc_pc16(s->code_ptr - 4, l->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
    }
    tcg_out_nop(s);
}
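
/*
 * Added note: conditions without a direct branch encoding are lowered to an
 * slt/sltu into $at followed by a beq/bne against $zero.  The 16-bit branch
 * offset is either patched immediately (label already resolved) or recorded
 * as a R_MIPS_PC16 relocation, and a nop fills the mandatory branch delay
 * slot.
 */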

/* XXX: we implement it at the target level to avoid having to
   handle temporaries that cross basic blocks. */
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
                            TCGArg arg2, TCGArg arg3, TCGArg arg4,
                            int label_index)
{
    void *label_ptr;

    switch(cond) {
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
        tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
        return;
    case TCG_COND_EQ:
        break;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
        break;
    default:
        tcg_abort();
    }

    label_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
    tcg_out_nop(s);

    switch(cond) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
        break;
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
        break;
    default:
        tcg_abort();
    }

    reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
}

static void tcg_out_movcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg c1, TCGArg c2, TCGArg v)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_NE:
        if (c1 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c2);
        } else if (c2 == 0) {
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, c1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, TCG_REG_AT, c1, c2);
            tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c1, c2);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVZ, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, c2, c1);
        tcg_out_opc_reg(s, OPC_MOVN, ret, v, TCG_REG_AT);
        break;
    default:
        tcg_abort();
        break;
    }
}

static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg arg1, TCGArg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (arg1 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
        } else if (arg2 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        }
        break;
    case TCG_COND_NE:
        if (arg1 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
        } else if (arg2 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        tcg_abort();
        break;
    }
}

/* XXX: we implement it at the target level to avoid having to
   handle temporaries that cross basic blocks. */
static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_NE:
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
        break;
    default:
        tcg_abort();
        break;
    }

    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);

    switch(cond) {
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
        break;
    default:
        tcg_abort();
    }

    tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
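
/*
 * Added note: for the ordered 64-bit comparisons the result above is built
 * as (high words strictly ordered) OR (high words equal AND low words
 * ordered as unsigned), with $at and $t0 holding the two intermediate
 * flags.
 */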

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};
#endif

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    void *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
# if TARGET_LONG_BITS == 64
    uint8_t *label3_ptr;
    TCGReg addr_regh;
    int addr_memh;
# endif
#endif
    data_regl = *args++;
    if (opc == 3)
        data_regh = *args++;
    else
        data_regh = 0;
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc & 3;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }
#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
        break;
    case 1:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
        break;
    case 2:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    case 3:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    default:
        tcg_abort();
    }

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
    }
#endif

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
        break;
    case 0 | 4:
        tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 1 | 4:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
            tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}
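
/*
 * Added summary of the softmmu load above: the TLB index is derived from
 * the guest address, the tag word(s) are compared against it, and on a hit
 * the code branches to the fast path, which adds the TLB addend to form the
 * host address for the inline load; on a miss it falls through to the call
 * to qemu_ld_helpers[s_bits] and then branches over the fast path.
 */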

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
    int arg_num;
    int mem_index, s_bits;
    int addr_meml;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
    TCGReg addr_regh;
    int addr_memh;
# endif
#endif
    data_regl = *args++;
    if (opc == 3) {
        data_regh = *args++;
    } else {
        data_regh = 0;
    }
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#  if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
#  else
    addr_memh = 4;
    addr_meml = 0;
#  endif
# else
    addr_meml = 0;
# endif
    mem_index = *args;
    s_bits = opc;
#endif

    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }

#if defined(CONFIG_SOFTMMU)
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path */
    arg_num = 0;
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    switch(opc) {
    case 0:
        tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
        break;
    case 1:
        tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
        break;
    case 2:
        tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
        break;
    case 3:
        tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
        break;
    default:
        tcg_abort();
    }
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
    }
#endif

    switch(opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
            tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
            tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}
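
/*
 * Added note: the store path mirrors the load path, except that it compares
 * against tlb_table[...].addr_write and marshals the data value (masked to
 * 8/16 bits, or passed as a lo:hi pair for 64-bit stores) into the helper's
 * argument registers before calling qemu_st_helpers[s_bits].
 */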

static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    switch(opc) {
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
        tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_abort();
        } else {
            /* indirect jump method */
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
            tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_br:
        tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
        break;

    case INDEX_op_mov_i32:
        tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_sub_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_mul_i32:
#if defined(__mips_isa_rev) && (__mips_isa_rev >= 1)
        tcg_out_opc_reg(s, OPC_MUL, args[0], args[1], args[2]);
#else
        tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
#endif
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
        break;
    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_nor_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
        break;
    case INDEX_op_not_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_rotl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
        }
        break;
    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
        }
        break;

    /* The bswap routines do not work on non-R2 CPUs.  In that case
       we let TCG generate the corresponding code. */
    case INDEX_op_bswap16_i32:
        tcg_out_bswap16(s, args[0], args[1]);
        break;
    case INDEX_op_bswap32_i32:
        tcg_out_bswap32(s, args[0], args[1]);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);
        break;

    case INDEX_op_deposit_i32:
        tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
                        ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
        break;

    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
        break;

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}

static const TCGTargetOpDef mips_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "C" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },

    { INDEX_op_and_i32, { "r", "rZ", "rI" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_not_i32, { "r", "rZ" } },
    { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },

    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },

    { INDEX_op_deposit_i32, { "r", "0", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rZ", "0" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "L", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
#else
    { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
#endif
    { -1 },
};

static int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_FP,
    TCG_REG_RA,       /* should be last for ABI compliance */
};

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size;

    /* reserve some stack space, also for TCG temps. */
    frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                 + TCG_STATIC_CALL_ARGS_SIZE
                 + CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                  + TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* TB prologue */
    tcg_out_addi(s, TCG_REG_SP, -frame_size);
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* Call generated code */
    tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
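    /* Added note: the move above is emitted after the jr so that it sits in
       the branch delay slot; env is therefore copied into TCG_AREG0 before
       the first instruction of the translated block runs. */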
    tb_ret_addr = s->code_ptr;

    /* TB epilogue */
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
    tcg_out_addi(s, TCG_REG_SP, frame_size);
}

static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
    tcg_regset_set(tcg_target_call_clobber_regs,
                   (1 << TCG_REG_V0) |
                   (1 << TCG_REG_V1) |
                   (1 << TCG_REG_A0) |
                   (1 << TCG_REG_A1) |
                   (1 << TCG_REG_A2) |
                   (1 << TCG_REG_A3) |
                   (1 << TCG_REG_T1) |
                   (1 << TCG_REG_T2) |
                   (1 << TCG_REG_T3) |
                   (1 << TCG_REG_T4) |
                   (1 << TCG_REG_T5) |
                   (1 << TCG_REG_T6) |
                   (1 << TCG_REG_T7) |
                   (1 << TCG_REG_T8) |
                   (1 << TCG_REG_T9));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA);   /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);   /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);   /* global pointer */

    tcg_add_target_add_op_defs(mips_op_defs);
}