4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env
, cpu_regwptr
;
44 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
45 static TCGv_i32 cpu_cc_op
;
46 static TCGv_i32 cpu_psr
;
47 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
54 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
56 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
57 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
58 static TCGv_i32 cpu_softint
;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32
;
65 static TCGv_i64 cpu_tmp64
;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
69 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
70 static target_ulong gen_opc_jump_pc
[2];
72 #include "gen-icount.h"
74 typedef struct DisasContext
{
75 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
76 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
81 int address_mask_32bit
;
83 uint32_t cc_op
; /* current CC operation */
84 struct TranslationBlock
*tb
;
88 // This function uses non-native bit order
89 #define GET_FIELD(X, FROM, TO) \
90 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92 // This function uses the order in the manuals, i.e. bit 0 is 2^0
93 #define GET_FIELD_SP(X, FROM, TO) \
94 GET_FIELD(X, 31 - (TO), 31 - (FROM))
96 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
97 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
100 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
101 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
107 #define UA2005_HTRAP_MASK 0xff
108 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of x to a full 32-bit signed value.
   Used by GET_FIELDs/GET_FIELD_SPs to widen immediate instruction fields.
   NOTE(review): the extracted text had lost the "len = 32 - len;" line;
   without it the shift pair does not sign-extend a len-bit field.  The
   left shift is done on unsigned to avoid signed-overflow UB. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((unsigned int)x << len)) >> len;
}
116 #define IS_IMM (insn & (1<<13))
118 /* floating point registers moves */
119 static void gen_op_load_fpr_DT0(unsigned int src
)
121 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
122 offsetof(CPU_DoubleU
, l
.upper
));
123 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
124 offsetof(CPU_DoubleU
, l
.lower
));
127 static void gen_op_load_fpr_DT1(unsigned int src
)
129 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
130 offsetof(CPU_DoubleU
, l
.upper
));
131 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
132 offsetof(CPU_DoubleU
, l
.lower
));
135 static void gen_op_store_DT0_fpr(unsigned int dst
)
137 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
138 offsetof(CPU_DoubleU
, l
.upper
));
139 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
140 offsetof(CPU_DoubleU
, l
.lower
));
143 static void gen_op_load_fpr_QT0(unsigned int src
)
145 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
146 offsetof(CPU_QuadU
, l
.upmost
));
147 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
148 offsetof(CPU_QuadU
, l
.upper
));
149 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
150 offsetof(CPU_QuadU
, l
.lower
));
151 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
152 offsetof(CPU_QuadU
, l
.lowest
));
155 static void gen_op_load_fpr_QT1(unsigned int src
)
157 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
158 offsetof(CPU_QuadU
, l
.upmost
));
159 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
160 offsetof(CPU_QuadU
, l
.upper
));
161 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
162 offsetof(CPU_QuadU
, l
.lower
));
163 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
164 offsetof(CPU_QuadU
, l
.lowest
));
167 static void gen_op_store_QT0_fpr(unsigned int dst
)
169 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
170 offsetof(CPU_QuadU
, l
.upmost
));
171 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
172 offsetof(CPU_QuadU
, l
.upper
));
173 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
174 offsetof(CPU_QuadU
, l
.lower
));
175 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
176 offsetof(CPU_QuadU
, l
.lowest
));
180 #ifdef CONFIG_USER_ONLY
181 #define supervisor(dc) 0
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) 0
186 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
187 #ifdef TARGET_SPARC64
188 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
193 #ifdef TARGET_SPARC64
195 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
197 #define AM_CHECK(dc) (1)
201 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
203 #ifdef TARGET_SPARC64
205 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
209 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
212 tcg_gen_movi_tl(tn
, 0);
214 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
216 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
220 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
225 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
227 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
231 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
232 target_ulong pc
, target_ulong npc
)
234 TranslationBlock
*tb
;
237 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
238 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num
);
242 tcg_gen_movi_tl(cpu_pc
, pc
);
243 tcg_gen_movi_tl(cpu_npc
, npc
);
244 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc
, pc
);
248 tcg_gen_movi_tl(cpu_npc
, npc
);
254 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
256 tcg_gen_extu_i32_tl(reg
, src
);
257 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
258 tcg_gen_andi_tl(reg
, reg
, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
263 tcg_gen_extu_i32_tl(reg
, src
);
264 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
265 tcg_gen_andi_tl(reg
, reg
, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
270 tcg_gen_extu_i32_tl(reg
, src
);
271 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
272 tcg_gen_andi_tl(reg
, reg
, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
277 tcg_gen_extu_i32_tl(reg
, src
);
278 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
279 tcg_gen_andi_tl(reg
, reg
, 0x1);
282 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
288 l1
= gen_new_label();
290 r_temp
= tcg_temp_new();
291 tcg_gen_xor_tl(r_temp
, src1
, src2
);
292 tcg_gen_not_tl(r_temp
, r_temp
);
293 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
294 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
295 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
296 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
297 r_const
= tcg_const_i32(TT_TOVF
);
298 gen_helper_raise_exception(r_const
);
299 tcg_temp_free_i32(r_const
);
301 tcg_temp_free(r_temp
);
304 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
309 l1
= gen_new_label();
310 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
311 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
312 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
313 r_const
= tcg_const_i32(TT_TOVF
);
314 gen_helper_raise_exception(r_const
);
315 tcg_temp_free_i32(r_const
);
319 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
321 tcg_gen_mov_tl(cpu_cc_src
, src1
);
322 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
323 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
324 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
327 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
329 tcg_gen_mov_tl(cpu_cc_src
, src1
);
330 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
331 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
332 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
335 static TCGv_i32
gen_add32_carry32(void)
337 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
339 /* Carry is computed from a previous add: (dst < src) */
340 #if TARGET_LONG_BITS == 64
341 cc_src1_32
= tcg_temp_new_i32();
342 cc_src2_32
= tcg_temp_new_i32();
343 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_dst
);
344 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src
);
346 cc_src1_32
= cpu_cc_dst
;
347 cc_src2_32
= cpu_cc_src
;
350 carry_32
= tcg_temp_new_i32();
351 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
353 #if TARGET_LONG_BITS == 64
354 tcg_temp_free_i32(cc_src1_32
);
355 tcg_temp_free_i32(cc_src2_32
);
361 static TCGv_i32
gen_sub32_carry32(void)
363 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
365 /* Carry is computed from a previous borrow: (src1 < src2) */
366 #if TARGET_LONG_BITS == 64
367 cc_src1_32
= tcg_temp_new_i32();
368 cc_src2_32
= tcg_temp_new_i32();
369 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_src
);
370 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src2
);
372 cc_src1_32
= cpu_cc_src
;
373 cc_src2_32
= cpu_cc_src2
;
376 carry_32
= tcg_temp_new_i32();
377 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
379 #if TARGET_LONG_BITS == 64
380 tcg_temp_free_i32(cc_src1_32
);
381 tcg_temp_free_i32(cc_src2_32
);
387 static void gen_op_addx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
388 TCGv src2
, int update_cc
)
396 /* Carry is known to be zero. Fall back to plain ADD. */
398 gen_op_add_cc(dst
, src1
, src2
);
400 tcg_gen_add_tl(dst
, src1
, src2
);
407 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
409 /* For 32-bit hosts, we can re-use the host's hardware carry
410 generation by using an ADD2 opcode. We discard the low
411 part of the output. Ideally we'd combine this operation
412 with the add that generated the carry in the first place. */
413 TCGv dst_low
= tcg_temp_new();
414 tcg_gen_op6_i32(INDEX_op_add2_i32
, dst_low
, dst
,
415 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
416 tcg_temp_free(dst_low
);
420 carry_32
= gen_add32_carry32();
426 carry_32
= gen_sub32_carry32();
430 /* We need external help to produce the carry. */
431 carry_32
= tcg_temp_new_i32();
432 gen_helper_compute_C_icc(carry_32
);
436 #if TARGET_LONG_BITS == 64
437 carry
= tcg_temp_new();
438 tcg_gen_extu_i32_i64(carry
, carry_32
);
443 tcg_gen_add_tl(dst
, src1
, src2
);
444 tcg_gen_add_tl(dst
, dst
, carry
);
446 tcg_temp_free_i32(carry_32
);
447 #if TARGET_LONG_BITS == 64
448 tcg_temp_free(carry
);
451 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
455 tcg_gen_mov_tl(cpu_cc_src
, src1
);
456 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
457 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
458 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
459 dc
->cc_op
= CC_OP_ADDX
;
463 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
465 tcg_gen_mov_tl(cpu_cc_src
, src1
);
466 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
467 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
468 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
471 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
473 tcg_gen_mov_tl(cpu_cc_src
, src1
);
474 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
475 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
476 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
477 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
478 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
481 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
487 l1
= gen_new_label();
489 r_temp
= tcg_temp_new();
490 tcg_gen_xor_tl(r_temp
, src1
, src2
);
491 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
492 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
493 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
494 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
495 r_const
= tcg_const_i32(TT_TOVF
);
496 gen_helper_raise_exception(r_const
);
497 tcg_temp_free_i32(r_const
);
499 tcg_temp_free(r_temp
);
502 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
504 tcg_gen_mov_tl(cpu_cc_src
, src1
);
505 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
507 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
508 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
509 dc
->cc_op
= CC_OP_LOGIC
;
511 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
512 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
513 dc
->cc_op
= CC_OP_SUB
;
515 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
518 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
520 tcg_gen_mov_tl(cpu_cc_src
, src1
);
521 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
522 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
523 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
526 static void gen_op_subx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
527 TCGv src2
, int update_cc
)
535 /* Carry is known to be zero. Fall back to plain SUB. */
537 gen_op_sub_cc(dst
, src1
, src2
);
539 tcg_gen_sub_tl(dst
, src1
, src2
);
546 carry_32
= gen_add32_carry32();
552 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
554 /* For 32-bit hosts, we can re-use the host's hardware carry
555 generation by using a SUB2 opcode. We discard the low
556 part of the output. Ideally we'd combine this operation
557 with the add that generated the carry in the first place. */
558 TCGv dst_low
= tcg_temp_new();
559 tcg_gen_op6_i32(INDEX_op_sub2_i32
, dst_low
, dst
,
560 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
561 tcg_temp_free(dst_low
);
565 carry_32
= gen_sub32_carry32();
569 /* We need external help to produce the carry. */
570 carry_32
= tcg_temp_new_i32();
571 gen_helper_compute_C_icc(carry_32
);
575 #if TARGET_LONG_BITS == 64
576 carry
= tcg_temp_new();
577 tcg_gen_extu_i32_i64(carry
, carry_32
);
582 tcg_gen_sub_tl(dst
, src1
, src2
);
583 tcg_gen_sub_tl(dst
, dst
, carry
);
585 tcg_temp_free_i32(carry_32
);
586 #if TARGET_LONG_BITS == 64
587 tcg_temp_free(carry
);
590 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
594 tcg_gen_mov_tl(cpu_cc_src
, src1
);
595 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
596 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
597 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
598 dc
->cc_op
= CC_OP_SUBX
;
602 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
604 tcg_gen_mov_tl(cpu_cc_src
, src1
);
605 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
606 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
607 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
610 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
612 tcg_gen_mov_tl(cpu_cc_src
, src1
);
613 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
614 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
615 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
616 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
617 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
620 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
625 l1
= gen_new_label();
626 r_temp
= tcg_temp_new();
632 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
633 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
634 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
635 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
636 tcg_gen_movi_tl(cpu_cc_src2
, 0);
640 // env->y = (b2 << 31) | (env->y >> 1);
641 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
642 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
643 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
644 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
645 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
646 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
649 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
650 gen_mov_reg_V(r_temp
, cpu_psr
);
651 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
652 tcg_temp_free(r_temp
);
654 // T0 = (b1 << 31) | (T0 >> 1);
656 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
657 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
658 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
660 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
662 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
665 static inline void gen_op_multiply(TCGv dst
, TCGv src1
, TCGv src2
, int sign_ext
)
667 TCGv_i32 r_src1
, r_src2
;
668 TCGv_i64 r_temp
, r_temp2
;
670 r_src1
= tcg_temp_new_i32();
671 r_src2
= tcg_temp_new_i32();
673 tcg_gen_trunc_tl_i32(r_src1
, src1
);
674 tcg_gen_trunc_tl_i32(r_src2
, src2
);
676 r_temp
= tcg_temp_new_i64();
677 r_temp2
= tcg_temp_new_i64();
680 tcg_gen_ext_i32_i64(r_temp
, r_src2
);
681 tcg_gen_ext_i32_i64(r_temp2
, r_src1
);
683 tcg_gen_extu_i32_i64(r_temp
, r_src2
);
684 tcg_gen_extu_i32_i64(r_temp2
, r_src1
);
687 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
689 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
690 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
691 tcg_temp_free_i64(r_temp
);
692 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
694 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
696 tcg_temp_free_i64(r_temp2
);
698 tcg_temp_free_i32(r_src1
);
699 tcg_temp_free_i32(r_src2
);
702 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
704 /* zero-extend truncated operands before multiplication */
705 gen_op_multiply(dst
, src1
, src2
, 0);
708 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
710 /* sign-extend truncated operands before multiplication */
711 gen_op_multiply(dst
, src1
, src2
, 1);
714 #ifdef TARGET_SPARC64
715 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
720 l1
= gen_new_label();
721 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
722 r_const
= tcg_const_i32(TT_DIV_ZERO
);
723 gen_helper_raise_exception(r_const
);
724 tcg_temp_free_i32(r_const
);
728 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
732 l1
= gen_new_label();
733 l2
= gen_new_label();
734 tcg_gen_mov_tl(cpu_cc_src
, src1
);
735 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
736 gen_trap_ifdivzero_tl(cpu_cc_src2
);
737 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
738 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
739 tcg_gen_movi_i64(dst
, INT64_MIN
);
742 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
748 static inline void gen_op_eval_ba(TCGv dst
)
750 tcg_gen_movi_tl(dst
, 1);
754 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
756 gen_mov_reg_Z(dst
, src
);
760 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
762 gen_mov_reg_N(cpu_tmp0
, src
);
763 gen_mov_reg_V(dst
, src
);
764 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
765 gen_mov_reg_Z(cpu_tmp0
, src
);
766 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
770 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
772 gen_mov_reg_V(cpu_tmp0
, src
);
773 gen_mov_reg_N(dst
, src
);
774 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
778 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
780 gen_mov_reg_Z(cpu_tmp0
, src
);
781 gen_mov_reg_C(dst
, src
);
782 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
786 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
788 gen_mov_reg_C(dst
, src
);
792 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
794 gen_mov_reg_V(dst
, src
);
798 static inline void gen_op_eval_bn(TCGv dst
)
800 tcg_gen_movi_tl(dst
, 0);
804 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
806 gen_mov_reg_N(dst
, src
);
810 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
812 gen_mov_reg_Z(dst
, src
);
813 tcg_gen_xori_tl(dst
, dst
, 0x1);
817 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
819 gen_mov_reg_N(cpu_tmp0
, src
);
820 gen_mov_reg_V(dst
, src
);
821 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
822 gen_mov_reg_Z(cpu_tmp0
, src
);
823 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
824 tcg_gen_xori_tl(dst
, dst
, 0x1);
828 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
830 gen_mov_reg_V(cpu_tmp0
, src
);
831 gen_mov_reg_N(dst
, src
);
832 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
833 tcg_gen_xori_tl(dst
, dst
, 0x1);
837 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
839 gen_mov_reg_Z(cpu_tmp0
, src
);
840 gen_mov_reg_C(dst
, src
);
841 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
842 tcg_gen_xori_tl(dst
, dst
, 0x1);
846 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
848 gen_mov_reg_C(dst
, src
);
849 tcg_gen_xori_tl(dst
, dst
, 0x1);
853 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
855 gen_mov_reg_N(dst
, src
);
856 tcg_gen_xori_tl(dst
, dst
, 0x1);
860 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
862 gen_mov_reg_V(dst
, src
);
863 tcg_gen_xori_tl(dst
, dst
, 0x1);
867 FPSR bit field FCC1 | FCC0:
873 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
874 unsigned int fcc_offset
)
876 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
877 tcg_gen_andi_tl(reg
, reg
, 0x1);
880 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
881 unsigned int fcc_offset
)
883 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
884 tcg_gen_andi_tl(reg
, reg
, 0x1);
888 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
889 unsigned int fcc_offset
)
891 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
892 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
893 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
896 // 1 or 2: FCC0 ^ FCC1
897 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
898 unsigned int fcc_offset
)
900 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
901 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
902 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
906 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
907 unsigned int fcc_offset
)
909 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
913 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
914 unsigned int fcc_offset
)
916 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
917 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
918 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
919 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
923 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
924 unsigned int fcc_offset
)
926 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
930 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
931 unsigned int fcc_offset
)
933 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
934 tcg_gen_xori_tl(dst
, dst
, 0x1);
935 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
936 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
940 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
941 unsigned int fcc_offset
)
943 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
944 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
945 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
949 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
950 unsigned int fcc_offset
)
952 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
953 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
954 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
955 tcg_gen_xori_tl(dst
, dst
, 0x1);
958 // 0 or 3: !(FCC0 ^ FCC1)
959 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
960 unsigned int fcc_offset
)
962 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
963 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
964 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
965 tcg_gen_xori_tl(dst
, dst
, 0x1);
969 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
970 unsigned int fcc_offset
)
972 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
973 tcg_gen_xori_tl(dst
, dst
, 0x1);
976 // !1: !(FCC0 & !FCC1)
977 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
978 unsigned int fcc_offset
)
980 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
981 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
982 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
983 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
984 tcg_gen_xori_tl(dst
, dst
, 0x1);
988 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
989 unsigned int fcc_offset
)
991 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
992 tcg_gen_xori_tl(dst
, dst
, 0x1);
995 // !2: !(!FCC0 & FCC1)
996 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
997 unsigned int fcc_offset
)
999 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1000 tcg_gen_xori_tl(dst
, dst
, 0x1);
1001 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1002 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1003 tcg_gen_xori_tl(dst
, dst
, 0x1);
1006 // !3: !(FCC0 & FCC1)
1007 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1008 unsigned int fcc_offset
)
1010 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1011 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1012 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1013 tcg_gen_xori_tl(dst
, dst
, 0x1);
1016 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1017 target_ulong pc2
, TCGv r_cond
)
1021 l1
= gen_new_label();
1023 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1025 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1028 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1031 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1032 target_ulong pc2
, TCGv r_cond
)
1036 l1
= gen_new_label();
1038 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1040 gen_goto_tb(dc
, 0, pc2
, pc1
);
1043 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1046 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1051 l1
= gen_new_label();
1052 l2
= gen_new_label();
1054 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1056 tcg_gen_movi_tl(cpu_npc
, npc1
);
1060 tcg_gen_movi_tl(cpu_npc
, npc2
);
1064 /* call this function before using the condition register as it may
1065 have been set for a jump */
1066 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1068 if (dc
->npc
== JUMP_PC
) {
1069 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1070 dc
->npc
= DYNAMIC_PC
;
1074 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1076 if (dc
->npc
== JUMP_PC
) {
1077 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1078 dc
->npc
= DYNAMIC_PC
;
1079 } else if (dc
->npc
!= DYNAMIC_PC
) {
1080 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1084 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1086 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1087 /* flush pending conditional evaluations before exposing cpu state */
1088 if (dc
->cc_op
!= CC_OP_FLAGS
) {
1089 dc
->cc_op
= CC_OP_FLAGS
;
1090 gen_helper_compute_psr();
1095 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1097 if (dc
->npc
== JUMP_PC
) {
1098 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1099 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1100 dc
->pc
= DYNAMIC_PC
;
1101 } else if (dc
->npc
== DYNAMIC_PC
) {
1102 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1103 dc
->pc
= DYNAMIC_PC
;
1109 static inline void gen_op_next_insn(void)
1111 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1112 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1115 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1120 #ifdef TARGET_SPARC64
1128 switch (dc
->cc_op
) {
1132 gen_helper_compute_psr();
1133 dc
->cc_op
= CC_OP_FLAGS
;
1138 gen_op_eval_bn(r_dst
);
1141 gen_op_eval_be(r_dst
, r_src
);
1144 gen_op_eval_ble(r_dst
, r_src
);
1147 gen_op_eval_bl(r_dst
, r_src
);
1150 gen_op_eval_bleu(r_dst
, r_src
);
1153 gen_op_eval_bcs(r_dst
, r_src
);
1156 gen_op_eval_bneg(r_dst
, r_src
);
1159 gen_op_eval_bvs(r_dst
, r_src
);
1162 gen_op_eval_ba(r_dst
);
1165 gen_op_eval_bne(r_dst
, r_src
);
1168 gen_op_eval_bg(r_dst
, r_src
);
1171 gen_op_eval_bge(r_dst
, r_src
);
1174 gen_op_eval_bgu(r_dst
, r_src
);
1177 gen_op_eval_bcc(r_dst
, r_src
);
1180 gen_op_eval_bpos(r_dst
, r_src
);
1183 gen_op_eval_bvc(r_dst
, r_src
);
1188 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1190 unsigned int offset
;
1210 gen_op_eval_bn(r_dst
);
1213 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1216 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1219 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1222 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1225 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1228 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1231 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1234 gen_op_eval_ba(r_dst
);
1237 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1240 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1243 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1246 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1249 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1252 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1255 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1260 #ifdef TARGET_SPARC64
1262 static const int gen_tcg_cond_reg
[8] = {
1273 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1277 l1
= gen_new_label();
1278 tcg_gen_movi_tl(r_dst
, 0);
1279 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1280 tcg_gen_movi_tl(r_dst
, 1);
1285 /* XXX: potentially incorrect if dynamic npc */
1286 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1289 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1290 target_ulong target
= dc
->pc
+ offset
;
1293 /* unconditional not taken */
1295 dc
->pc
= dc
->npc
+ 4;
1296 dc
->npc
= dc
->pc
+ 4;
1299 dc
->npc
= dc
->pc
+ 4;
1301 } else if (cond
== 0x8) {
1302 /* unconditional taken */
1305 dc
->npc
= dc
->pc
+ 4;
1309 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1312 flush_cond(dc
, r_cond
);
1313 gen_cond(r_cond
, cc
, cond
, dc
);
1315 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1319 dc
->jump_pc
[0] = target
;
1320 dc
->jump_pc
[1] = dc
->npc
+ 4;
1326 /* XXX: potentially incorrect if dynamic npc */
1327 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1330 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1331 target_ulong target
= dc
->pc
+ offset
;
1334 /* unconditional not taken */
1336 dc
->pc
= dc
->npc
+ 4;
1337 dc
->npc
= dc
->pc
+ 4;
1340 dc
->npc
= dc
->pc
+ 4;
1342 } else if (cond
== 0x8) {
1343 /* unconditional taken */
1346 dc
->npc
= dc
->pc
+ 4;
1350 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1353 flush_cond(dc
, r_cond
);
1354 gen_fcond(r_cond
, cc
, cond
);
1356 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1360 dc
->jump_pc
[0] = target
;
1361 dc
->jump_pc
[1] = dc
->npc
+ 4;
1367 #ifdef TARGET_SPARC64
1368 /* XXX: potentially incorrect if dynamic npc */
1369 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1370 TCGv r_cond
, TCGv r_reg
)
1372 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1373 target_ulong target
= dc
->pc
+ offset
;
1375 flush_cond(dc
, r_cond
);
1376 gen_cond_reg(r_cond
, cond
, r_reg
);
1378 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1382 dc
->jump_pc
[0] = target
;
1383 dc
->jump_pc
[1] = dc
->npc
+ 4;
1388 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1392 gen_helper_fcmps(r_rs1
, r_rs2
);
1395 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1398 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1401 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
1406 static inline void gen_op_fcmpd(int fccno
)
1413 gen_helper_fcmpd_fcc1();
1416 gen_helper_fcmpd_fcc2();
1419 gen_helper_fcmpd_fcc3();
1424 static inline void gen_op_fcmpq(int fccno
)
1431 gen_helper_fcmpq_fcc1();
1434 gen_helper_fcmpq_fcc2();
1437 gen_helper_fcmpq_fcc3();
1442 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1446 gen_helper_fcmpes(r_rs1
, r_rs2
);
1449 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1452 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1455 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
1460 static inline void gen_op_fcmped(int fccno
)
1464 gen_helper_fcmped();
1467 gen_helper_fcmped_fcc1();
1470 gen_helper_fcmped_fcc2();
1473 gen_helper_fcmped_fcc3();
1478 static inline void gen_op_fcmpeq(int fccno
)
1482 gen_helper_fcmpeq();
1485 gen_helper_fcmpeq_fcc1();
1488 gen_helper_fcmpeq_fcc2();
1491 gen_helper_fcmpeq_fcc3();
1498 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1500 gen_helper_fcmps(r_rs1
, r_rs2
);
1503 static inline void gen_op_fcmpd(int fccno
)
1508 static inline void gen_op_fcmpq(int fccno
)
1513 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1515 gen_helper_fcmpes(r_rs1
, r_rs2
);
1518 static inline void gen_op_fcmped(int fccno
)
1520 gen_helper_fcmped();
1523 static inline void gen_op_fcmpeq(int fccno
)
1525 gen_helper_fcmpeq();
1529 static inline void gen_op_fpexception_im(int fsr_flags
)
1533 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1534 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1535 r_const
= tcg_const_i32(TT_FP_EXCP
);
1536 gen_helper_raise_exception(r_const
);
1537 tcg_temp_free_i32(r_const
);
1540 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1542 #if !defined(CONFIG_USER_ONLY)
1543 if (!dc
->fpu_enabled
) {
1546 save_state(dc
, r_cond
);
1547 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1548 gen_helper_raise_exception(r_const
);
1549 tcg_temp_free_i32(r_const
);
1557 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1559 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1562 static inline void gen_clear_float_exceptions(void)
1564 gen_helper_clear_float_exceptions();
1568 #ifdef TARGET_SPARC64
1569 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1575 r_asi
= tcg_temp_new_i32();
1576 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1578 asi
= GET_FIELD(insn
, 19, 26);
1579 r_asi
= tcg_const_i32(asi
);
1584 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1587 TCGv_i32 r_asi
, r_size
, r_sign
;
1589 r_asi
= gen_get_asi(insn
, addr
);
1590 r_size
= tcg_const_i32(size
);
1591 r_sign
= tcg_const_i32(sign
);
1592 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1593 tcg_temp_free_i32(r_sign
);
1594 tcg_temp_free_i32(r_size
);
1595 tcg_temp_free_i32(r_asi
);
1598 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1600 TCGv_i32 r_asi
, r_size
;
1602 r_asi
= gen_get_asi(insn
, addr
);
1603 r_size
= tcg_const_i32(size
);
1604 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1605 tcg_temp_free_i32(r_size
);
1606 tcg_temp_free_i32(r_asi
);
1609 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1611 TCGv_i32 r_asi
, r_size
, r_rd
;
1613 r_asi
= gen_get_asi(insn
, addr
);
1614 r_size
= tcg_const_i32(size
);
1615 r_rd
= tcg_const_i32(rd
);
1616 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1617 tcg_temp_free_i32(r_rd
);
1618 tcg_temp_free_i32(r_size
);
1619 tcg_temp_free_i32(r_asi
);
1622 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1624 TCGv_i32 r_asi
, r_size
, r_rd
;
1626 r_asi
= gen_get_asi(insn
, addr
);
1627 r_size
= tcg_const_i32(size
);
1628 r_rd
= tcg_const_i32(rd
);
1629 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1630 tcg_temp_free_i32(r_rd
);
1631 tcg_temp_free_i32(r_size
);
1632 tcg_temp_free_i32(r_asi
);
1635 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1637 TCGv_i32 r_asi
, r_size
, r_sign
;
1639 r_asi
= gen_get_asi(insn
, addr
);
1640 r_size
= tcg_const_i32(4);
1641 r_sign
= tcg_const_i32(0);
1642 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1643 tcg_temp_free_i32(r_sign
);
1644 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1645 tcg_temp_free_i32(r_size
);
1646 tcg_temp_free_i32(r_asi
);
1647 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1650 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1652 TCGv_i32 r_asi
, r_rd
;
1654 r_asi
= gen_get_asi(insn
, addr
);
1655 r_rd
= tcg_const_i32(rd
);
1656 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1657 tcg_temp_free_i32(r_rd
);
1658 tcg_temp_free_i32(r_asi
);
1661 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1663 TCGv_i32 r_asi
, r_size
;
1665 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1666 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1667 r_asi
= gen_get_asi(insn
, addr
);
1668 r_size
= tcg_const_i32(8);
1669 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1670 tcg_temp_free_i32(r_size
);
1671 tcg_temp_free_i32(r_asi
);
1674 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1680 r_val1
= tcg_temp_new();
1681 gen_movl_reg_TN(rd
, r_val1
);
1682 r_asi
= gen_get_asi(insn
, addr
);
1683 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1684 tcg_temp_free_i32(r_asi
);
1685 tcg_temp_free(r_val1
);
1688 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1693 gen_movl_reg_TN(rd
, cpu_tmp64
);
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1696 tcg_temp_free_i32(r_asi
);
1699 #elif !defined(CONFIG_USER_ONLY)
1701 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1704 TCGv_i32 r_asi
, r_size
, r_sign
;
1706 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1707 r_size
= tcg_const_i32(size
);
1708 r_sign
= tcg_const_i32(sign
);
1709 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1710 tcg_temp_free(r_sign
);
1711 tcg_temp_free(r_size
);
1712 tcg_temp_free(r_asi
);
1713 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1716 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1718 TCGv_i32 r_asi
, r_size
;
1720 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1721 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1722 r_size
= tcg_const_i32(size
);
1723 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1724 tcg_temp_free(r_size
);
1725 tcg_temp_free(r_asi
);
1728 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1730 TCGv_i32 r_asi
, r_size
, r_sign
;
1733 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1734 r_size
= tcg_const_i32(4);
1735 r_sign
= tcg_const_i32(0);
1736 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1737 tcg_temp_free(r_sign
);
1738 r_val
= tcg_temp_new_i64();
1739 tcg_gen_extu_tl_i64(r_val
, dst
);
1740 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1741 tcg_temp_free_i64(r_val
);
1742 tcg_temp_free(r_size
);
1743 tcg_temp_free(r_asi
);
1744 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1747 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1749 TCGv_i32 r_asi
, r_size
, r_sign
;
1751 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1752 r_size
= tcg_const_i32(8);
1753 r_sign
= tcg_const_i32(0);
1754 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1755 tcg_temp_free(r_sign
);
1756 tcg_temp_free(r_size
);
1757 tcg_temp_free(r_asi
);
1758 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1759 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1760 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1761 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1762 gen_movl_TN_reg(rd
, hi
);
1765 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1767 TCGv_i32 r_asi
, r_size
;
1769 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1770 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1771 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1772 r_size
= tcg_const_i32(8);
1773 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1774 tcg_temp_free(r_size
);
1775 tcg_temp_free(r_asi
);
1779 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1780 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1783 TCGv_i32 r_asi
, r_size
;
1785 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1787 r_val
= tcg_const_i64(0xffULL
);
1788 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1789 r_size
= tcg_const_i32(1);
1790 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1791 tcg_temp_free_i32(r_size
);
1792 tcg_temp_free_i32(r_asi
);
1793 tcg_temp_free_i64(r_val
);
1797 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1802 rs1
= GET_FIELD(insn
, 13, 17);
1804 tcg_gen_movi_tl(def
, 0);
1805 } else if (rs1
< 8) {
1806 r_rs1
= cpu_gregs
[rs1
];
1808 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1813 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1817 if (IS_IMM
) { /* immediate */
1818 target_long simm
= GET_FIELDs(insn
, 19, 31);
1819 tcg_gen_movi_tl(def
, simm
);
1820 } else { /* register */
1821 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
1823 tcg_gen_movi_tl(def
, 0);
1824 } else if (rs2
< 8) {
1825 r_rs2
= cpu_gregs
[rs2
];
1827 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1833 #ifdef TARGET_SPARC64
1834 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
1836 TCGv_i32 r_tl
= tcg_temp_new_i32();
1838 /* load env->tl into r_tl */
1839 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
1841 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1842 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
1844 /* calculate offset to current trap state from env->ts, reuse r_tl */
1845 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
1846 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUState
, ts
));
1848 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1850 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
1851 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
1852 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
1853 tcg_temp_free_ptr(r_tl_tmp
);
1856 tcg_temp_free_i32(r_tl
);
1860 #define CHECK_IU_FEATURE(dc, FEATURE) \
1861 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1863 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1864 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1867 /* before an instruction, dc->pc must be static */
1868 static void disas_sparc_insn(DisasContext
* dc
)
1870 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1871 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
1874 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1875 tcg_gen_debug_insn_start(dc
->pc
);
1876 insn
= ldl_code(dc
->pc
);
1877 opc
= GET_FIELD(insn
, 0, 1);
1879 rd
= GET_FIELD(insn
, 2, 6);
1881 cpu_tmp1
= cpu_src1
= tcg_temp_new();
1882 cpu_tmp2
= cpu_src2
= tcg_temp_new();
1885 case 0: /* branches/sethi */
1887 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1890 #ifdef TARGET_SPARC64
1891 case 0x1: /* V9 BPcc */
1895 target
= GET_FIELD_SP(insn
, 0, 18);
1896 target
= sign_extend(target
, 19);
1898 cc
= GET_FIELD_SP(insn
, 20, 21);
1900 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1902 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1907 case 0x3: /* V9 BPr */
1909 target
= GET_FIELD_SP(insn
, 0, 13) |
1910 (GET_FIELD_SP(insn
, 20, 21) << 14);
1911 target
= sign_extend(target
, 16);
1913 cpu_src1
= get_src1(insn
, cpu_src1
);
1914 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1917 case 0x5: /* V9 FBPcc */
1919 int cc
= GET_FIELD_SP(insn
, 20, 21);
1920 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1922 target
= GET_FIELD_SP(insn
, 0, 18);
1923 target
= sign_extend(target
, 19);
1925 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1929 case 0x7: /* CBN+x */
1934 case 0x2: /* BN+x */
1936 target
= GET_FIELD(insn
, 10, 31);
1937 target
= sign_extend(target
, 22);
1939 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1942 case 0x6: /* FBN+x */
1944 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1946 target
= GET_FIELD(insn
, 10, 31);
1947 target
= sign_extend(target
, 22);
1949 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1952 case 0x4: /* SETHI */
1954 uint32_t value
= GET_FIELD(insn
, 10, 31);
1957 r_const
= tcg_const_tl(value
<< 10);
1958 gen_movl_TN_reg(rd
, r_const
);
1959 tcg_temp_free(r_const
);
1962 case 0x0: /* UNIMPL */
1971 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1974 r_const
= tcg_const_tl(dc
->pc
);
1975 gen_movl_TN_reg(15, r_const
);
1976 tcg_temp_free(r_const
);
1978 gen_mov_pc_npc(dc
, cpu_cond
);
1982 case 2: /* FPU & Logical Operations */
1984 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1985 if (xop
== 0x3a) { /* generate trap */
1988 cpu_src1
= get_src1(insn
, cpu_src1
);
1990 rs2
= GET_FIELD(insn
, 25, 31);
1991 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
1993 rs2
= GET_FIELD(insn
, 27, 31);
1995 gen_movl_reg_TN(rs2
, cpu_src2
);
1996 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
1998 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2001 cond
= GET_FIELD(insn
, 3, 6);
2002 if (cond
== 0x8) { /* Trap Always */
2003 save_state(dc
, cpu_cond
);
2004 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2006 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2008 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2009 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2010 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2013 dc
->def
->features
& CPU_FEATURE_TA0_SHUTDOWN
) {
2015 gen_helper_shutdown();
2018 gen_helper_raise_exception(cpu_tmp32
);
2020 } else if (cond
!= 0) {
2021 TCGv r_cond
= tcg_temp_new();
2023 #ifdef TARGET_SPARC64
2025 int cc
= GET_FIELD_SP(insn
, 11, 12);
2027 save_state(dc
, cpu_cond
);
2029 gen_cond(r_cond
, 0, cond
, dc
);
2031 gen_cond(r_cond
, 1, cond
, dc
);
2035 save_state(dc
, cpu_cond
);
2036 gen_cond(r_cond
, 0, cond
, dc
);
2038 l1
= gen_new_label();
2039 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2041 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2043 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2045 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2046 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2047 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2048 gen_helper_raise_exception(cpu_tmp32
);
2051 tcg_temp_free(r_cond
);
2057 } else if (xop
== 0x28) {
2058 rs1
= GET_FIELD(insn
, 13, 17);
2061 #ifndef TARGET_SPARC64
2062 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2063 manual, rdy on the microSPARC
2065 case 0x0f: /* stbar in the SPARCv8 manual,
2066 rdy on the microSPARC II */
2067 case 0x10 ... 0x1f: /* implementation-dependent in the
2068 SPARCv8 manual, rdy on the
2071 if (rs1
== 0x11 && dc
->def
->features
& CPU_FEATURE_ASR17
) {
2074 /* Read Asr17 for a Leon3 monoprocessor */
2075 r_const
= tcg_const_tl((1 << 8)
2076 | (dc
->def
->nwindows
- 1));
2077 gen_movl_TN_reg(rd
, r_const
);
2078 tcg_temp_free(r_const
);
2082 gen_movl_TN_reg(rd
, cpu_y
);
2084 #ifdef TARGET_SPARC64
2085 case 0x2: /* V9 rdccr */
2086 gen_helper_compute_psr();
2087 gen_helper_rdccr(cpu_dst
);
2088 gen_movl_TN_reg(rd
, cpu_dst
);
2090 case 0x3: /* V9 rdasi */
2091 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2092 gen_movl_TN_reg(rd
, cpu_dst
);
2094 case 0x4: /* V9 rdtick */
2098 r_tickptr
= tcg_temp_new_ptr();
2099 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2100 offsetof(CPUState
, tick
));
2101 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2102 tcg_temp_free_ptr(r_tickptr
);
2103 gen_movl_TN_reg(rd
, cpu_dst
);
2106 case 0x5: /* V9 rdpc */
2110 r_const
= tcg_const_tl(dc
->pc
);
2111 gen_movl_TN_reg(rd
, r_const
);
2112 tcg_temp_free(r_const
);
2115 case 0x6: /* V9 rdfprs */
2116 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2117 gen_movl_TN_reg(rd
, cpu_dst
);
2119 case 0xf: /* V9 membar */
2120 break; /* no effect */
2121 case 0x13: /* Graphics Status */
2122 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2124 gen_movl_TN_reg(rd
, cpu_gsr
);
2126 case 0x16: /* Softint */
2127 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2128 gen_movl_TN_reg(rd
, cpu_dst
);
2130 case 0x17: /* Tick compare */
2131 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2133 case 0x18: /* System tick */
2137 r_tickptr
= tcg_temp_new_ptr();
2138 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2139 offsetof(CPUState
, stick
));
2140 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2141 tcg_temp_free_ptr(r_tickptr
);
2142 gen_movl_TN_reg(rd
, cpu_dst
);
2145 case 0x19: /* System tick compare */
2146 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2148 case 0x10: /* Performance Control */
2149 case 0x11: /* Performance Instrumentation Counter */
2150 case 0x12: /* Dispatch Control */
2151 case 0x14: /* Softint set, WO */
2152 case 0x15: /* Softint clear, WO */
2157 #if !defined(CONFIG_USER_ONLY)
2158 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2159 #ifndef TARGET_SPARC64
2160 if (!supervisor(dc
))
2162 gen_helper_compute_psr();
2163 dc
->cc_op
= CC_OP_FLAGS
;
2164 gen_helper_rdpsr(cpu_dst
);
2166 CHECK_IU_FEATURE(dc
, HYPV
);
2167 if (!hypervisor(dc
))
2169 rs1
= GET_FIELD(insn
, 13, 17);
2172 // gen_op_rdhpstate();
2175 // gen_op_rdhtstate();
2178 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2181 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2184 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2186 case 31: // hstick_cmpr
2187 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2193 gen_movl_TN_reg(rd
, cpu_dst
);
2195 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2196 if (!supervisor(dc
))
2198 #ifdef TARGET_SPARC64
2199 rs1
= GET_FIELD(insn
, 13, 17);
2205 r_tsptr
= tcg_temp_new_ptr();
2206 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2207 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2208 offsetof(trap_state
, tpc
));
2209 tcg_temp_free_ptr(r_tsptr
);
2216 r_tsptr
= tcg_temp_new_ptr();
2217 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2218 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2219 offsetof(trap_state
, tnpc
));
2220 tcg_temp_free_ptr(r_tsptr
);
2227 r_tsptr
= tcg_temp_new_ptr();
2228 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2229 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2230 offsetof(trap_state
, tstate
));
2231 tcg_temp_free_ptr(r_tsptr
);
2238 r_tsptr
= tcg_temp_new_ptr();
2239 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2240 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2241 offsetof(trap_state
, tt
));
2242 tcg_temp_free_ptr(r_tsptr
);
2243 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2250 r_tickptr
= tcg_temp_new_ptr();
2251 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2252 offsetof(CPUState
, tick
));
2253 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2254 gen_movl_TN_reg(rd
, cpu_tmp0
);
2255 tcg_temp_free_ptr(r_tickptr
);
2259 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2262 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2263 offsetof(CPUSPARCState
, pstate
));
2264 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2267 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2268 offsetof(CPUSPARCState
, tl
));
2269 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2272 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2273 offsetof(CPUSPARCState
, psrpil
));
2274 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2277 gen_helper_rdcwp(cpu_tmp0
);
2280 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2281 offsetof(CPUSPARCState
, cansave
));
2282 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2284 case 11: // canrestore
2285 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2286 offsetof(CPUSPARCState
, canrestore
));
2287 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2289 case 12: // cleanwin
2290 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2291 offsetof(CPUSPARCState
, cleanwin
));
2292 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2294 case 13: // otherwin
2295 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2296 offsetof(CPUSPARCState
, otherwin
));
2297 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2300 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2301 offsetof(CPUSPARCState
, wstate
));
2302 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2304 case 16: // UA2005 gl
2305 CHECK_IU_FEATURE(dc
, GL
);
2306 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2307 offsetof(CPUSPARCState
, gl
));
2308 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2310 case 26: // UA2005 strand status
2311 CHECK_IU_FEATURE(dc
, HYPV
);
2312 if (!hypervisor(dc
))
2314 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2317 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2324 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2326 gen_movl_TN_reg(rd
, cpu_tmp0
);
2328 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2329 #ifdef TARGET_SPARC64
2330 save_state(dc
, cpu_cond
);
2331 gen_helper_flushw();
2333 if (!supervisor(dc
))
2335 gen_movl_TN_reg(rd
, cpu_tbr
);
2339 } else if (xop
== 0x34) { /* FPU Operations */
2340 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2342 gen_op_clear_ieee_excp_and_FTT();
2343 rs1
= GET_FIELD(insn
, 13, 17);
2344 rs2
= GET_FIELD(insn
, 27, 31);
2345 xop
= GET_FIELD(insn
, 18, 26);
2346 save_state(dc
, cpu_cond
);
2348 case 0x1: /* fmovs */
2349 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2351 case 0x5: /* fnegs */
2352 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2354 case 0x9: /* fabss */
2355 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2357 case 0x29: /* fsqrts */
2358 CHECK_FPU_FEATURE(dc
, FSQRT
);
2359 gen_clear_float_exceptions();
2360 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2361 gen_helper_check_ieee_exceptions();
2362 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2364 case 0x2a: /* fsqrtd */
2365 CHECK_FPU_FEATURE(dc
, FSQRT
);
2366 gen_op_load_fpr_DT1(DFPREG(rs2
));
2367 gen_clear_float_exceptions();
2368 gen_helper_fsqrtd();
2369 gen_helper_check_ieee_exceptions();
2370 gen_op_store_DT0_fpr(DFPREG(rd
));
2372 case 0x2b: /* fsqrtq */
2373 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2374 gen_op_load_fpr_QT1(QFPREG(rs2
));
2375 gen_clear_float_exceptions();
2376 gen_helper_fsqrtq();
2377 gen_helper_check_ieee_exceptions();
2378 gen_op_store_QT0_fpr(QFPREG(rd
));
2380 case 0x41: /* fadds */
2381 gen_clear_float_exceptions();
2382 gen_helper_fadds(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2383 gen_helper_check_ieee_exceptions();
2384 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2386 case 0x42: /* faddd */
2387 gen_op_load_fpr_DT0(DFPREG(rs1
));
2388 gen_op_load_fpr_DT1(DFPREG(rs2
));
2389 gen_clear_float_exceptions();
2391 gen_helper_check_ieee_exceptions();
2392 gen_op_store_DT0_fpr(DFPREG(rd
));
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2396 gen_op_load_fpr_QT0(QFPREG(rs1
));
2397 gen_op_load_fpr_QT1(QFPREG(rs2
));
2398 gen_clear_float_exceptions();
2400 gen_helper_check_ieee_exceptions();
2401 gen_op_store_QT0_fpr(QFPREG(rd
));
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 gen_helper_fsubs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2406 gen_helper_check_ieee_exceptions();
2407 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2409 case 0x46: /* fsubd */
2410 gen_op_load_fpr_DT0(DFPREG(rs1
));
2411 gen_op_load_fpr_DT1(DFPREG(rs2
));
2412 gen_clear_float_exceptions();
2414 gen_helper_check_ieee_exceptions();
2415 gen_op_store_DT0_fpr(DFPREG(rd
));
2417 case 0x47: /* fsubq */
2418 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2419 gen_op_load_fpr_QT0(QFPREG(rs1
));
2420 gen_op_load_fpr_QT1(QFPREG(rs2
));
2421 gen_clear_float_exceptions();
2423 gen_helper_check_ieee_exceptions();
2424 gen_op_store_QT0_fpr(QFPREG(rd
));
2426 case 0x49: /* fmuls */
2427 CHECK_FPU_FEATURE(dc
, FMUL
);
2428 gen_clear_float_exceptions();
2429 gen_helper_fmuls(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2430 gen_helper_check_ieee_exceptions();
2431 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2433 case 0x4a: /* fmuld */
2434 CHECK_FPU_FEATURE(dc
, FMUL
);
2435 gen_op_load_fpr_DT0(DFPREG(rs1
));
2436 gen_op_load_fpr_DT1(DFPREG(rs2
));
2437 gen_clear_float_exceptions();
2439 gen_helper_check_ieee_exceptions();
2440 gen_op_store_DT0_fpr(DFPREG(rd
));
2442 case 0x4b: /* fmulq */
2443 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2444 CHECK_FPU_FEATURE(dc
, FMUL
);
2445 gen_op_load_fpr_QT0(QFPREG(rs1
));
2446 gen_op_load_fpr_QT1(QFPREG(rs2
));
2447 gen_clear_float_exceptions();
2449 gen_helper_check_ieee_exceptions();
2450 gen_op_store_QT0_fpr(QFPREG(rd
));
2452 case 0x4d: /* fdivs */
2453 gen_clear_float_exceptions();
2454 gen_helper_fdivs(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2455 gen_helper_check_ieee_exceptions();
2456 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2458 case 0x4e: /* fdivd */
2459 gen_op_load_fpr_DT0(DFPREG(rs1
));
2460 gen_op_load_fpr_DT1(DFPREG(rs2
));
2461 gen_clear_float_exceptions();
2463 gen_helper_check_ieee_exceptions();
2464 gen_op_store_DT0_fpr(DFPREG(rd
));
2466 case 0x4f: /* fdivq */
2467 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2468 gen_op_load_fpr_QT0(QFPREG(rs1
));
2469 gen_op_load_fpr_QT1(QFPREG(rs2
));
2470 gen_clear_float_exceptions();
2472 gen_helper_check_ieee_exceptions();
2473 gen_op_store_QT0_fpr(QFPREG(rd
));
2475 case 0x69: /* fsmuld */
2476 CHECK_FPU_FEATURE(dc
, FSMULD
);
2477 gen_clear_float_exceptions();
2478 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2479 gen_helper_check_ieee_exceptions();
2480 gen_op_store_DT0_fpr(DFPREG(rd
));
2482 case 0x6e: /* fdmulq */
2483 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2484 gen_op_load_fpr_DT0(DFPREG(rs1
));
2485 gen_op_load_fpr_DT1(DFPREG(rs2
));
2486 gen_clear_float_exceptions();
2487 gen_helper_fdmulq();
2488 gen_helper_check_ieee_exceptions();
2489 gen_op_store_QT0_fpr(QFPREG(rd
));
2491 case 0xc4: /* fitos */
2492 gen_clear_float_exceptions();
2493 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2494 gen_helper_check_ieee_exceptions();
2495 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2497 case 0xc6: /* fdtos */
2498 gen_op_load_fpr_DT1(DFPREG(rs2
));
2499 gen_clear_float_exceptions();
2500 gen_helper_fdtos(cpu_tmp32
);
2501 gen_helper_check_ieee_exceptions();
2502 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2504 case 0xc7: /* fqtos */
2505 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2506 gen_op_load_fpr_QT1(QFPREG(rs2
));
2507 gen_clear_float_exceptions();
2508 gen_helper_fqtos(cpu_tmp32
);
2509 gen_helper_check_ieee_exceptions();
2510 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2512 case 0xc8: /* fitod */
2513 gen_helper_fitod(cpu_fpr
[rs2
]);
2514 gen_op_store_DT0_fpr(DFPREG(rd
));
2516 case 0xc9: /* fstod */
2517 gen_helper_fstod(cpu_fpr
[rs2
]);
2518 gen_op_store_DT0_fpr(DFPREG(rd
));
2520 case 0xcb: /* fqtod */
2521 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2522 gen_op_load_fpr_QT1(QFPREG(rs2
));
2523 gen_clear_float_exceptions();
2525 gen_helper_check_ieee_exceptions();
2526 gen_op_store_DT0_fpr(DFPREG(rd
));
2528 case 0xcc: /* fitoq */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 gen_helper_fitoq(cpu_fpr
[rs2
]);
2531 gen_op_store_QT0_fpr(QFPREG(rd
));
2533 case 0xcd: /* fstoq */
2534 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2535 gen_helper_fstoq(cpu_fpr
[rs2
]);
2536 gen_op_store_QT0_fpr(QFPREG(rd
));
2538 case 0xce: /* fdtoq */
2539 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2540 gen_op_load_fpr_DT1(DFPREG(rs2
));
2542 gen_op_store_QT0_fpr(QFPREG(rd
));
2544 case 0xd1: /* fstoi */
2545 gen_clear_float_exceptions();
2546 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2547 gen_helper_check_ieee_exceptions();
2548 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2550 case 0xd2: /* fdtoi */
2551 gen_op_load_fpr_DT1(DFPREG(rs2
));
2552 gen_clear_float_exceptions();
2553 gen_helper_fdtoi(cpu_tmp32
);
2554 gen_helper_check_ieee_exceptions();
2555 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2557 case 0xd3: /* fqtoi */
2558 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2559 gen_op_load_fpr_QT1(QFPREG(rs2
));
2560 gen_clear_float_exceptions();
2561 gen_helper_fqtoi(cpu_tmp32
);
2562 gen_helper_check_ieee_exceptions();
2563 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2565 #ifdef TARGET_SPARC64
2566 case 0x2: /* V9 fmovd */
2567 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2568 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2569 cpu_fpr
[DFPREG(rs2
) + 1]);
2571 case 0x3: /* V9 fmovq */
2572 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2573 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2574 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2575 cpu_fpr
[QFPREG(rs2
) + 1]);
2576 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2577 cpu_fpr
[QFPREG(rs2
) + 2]);
2578 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2579 cpu_fpr
[QFPREG(rs2
) + 3]);
2581 case 0x6: /* V9 fnegd */
2582 gen_op_load_fpr_DT1(DFPREG(rs2
));
2584 gen_op_store_DT0_fpr(DFPREG(rd
));
2586 case 0x7: /* V9 fnegq */
2587 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2588 gen_op_load_fpr_QT1(QFPREG(rs2
));
2590 gen_op_store_QT0_fpr(QFPREG(rd
));
2592 case 0xa: /* V9 fabsd */
2593 gen_op_load_fpr_DT1(DFPREG(rs2
));
2595 gen_op_store_DT0_fpr(DFPREG(rd
));
2597 case 0xb: /* V9 fabsq */
2598 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2599 gen_op_load_fpr_QT1(QFPREG(rs2
));
2601 gen_op_store_QT0_fpr(QFPREG(rd
));
2603 case 0x81: /* V9 fstox */
2604 gen_clear_float_exceptions();
2605 gen_helper_fstox(cpu_fpr
[rs2
]);
2606 gen_helper_check_ieee_exceptions();
2607 gen_op_store_DT0_fpr(DFPREG(rd
));
2609 case 0x82: /* V9 fdtox */
2610 gen_op_load_fpr_DT1(DFPREG(rs2
));
2611 gen_clear_float_exceptions();
2613 gen_helper_check_ieee_exceptions();
2614 gen_op_store_DT0_fpr(DFPREG(rd
));
2616 case 0x83: /* V9 fqtox */
2617 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2618 gen_op_load_fpr_QT1(QFPREG(rs2
));
2619 gen_clear_float_exceptions();
2621 gen_helper_check_ieee_exceptions();
2622 gen_op_store_DT0_fpr(DFPREG(rd
));
2624 case 0x84: /* V9 fxtos */
2625 gen_op_load_fpr_DT1(DFPREG(rs2
));
2626 gen_clear_float_exceptions();
2627 gen_helper_fxtos(cpu_tmp32
);
2628 gen_helper_check_ieee_exceptions();
2629 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2631 case 0x88: /* V9 fxtod */
2632 gen_op_load_fpr_DT1(DFPREG(rs2
));
2633 gen_clear_float_exceptions();
2635 gen_helper_check_ieee_exceptions();
2636 gen_op_store_DT0_fpr(DFPREG(rd
));
2638 case 0x8c: /* V9 fxtoq */
2639 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2640 gen_op_load_fpr_DT1(DFPREG(rs2
));
2641 gen_clear_float_exceptions();
2643 gen_helper_check_ieee_exceptions();
2644 gen_op_store_QT0_fpr(QFPREG(rd
));
2650 } else if (xop
== 0x35) { /* FPU Operations */
2651 #ifdef TARGET_SPARC64
2654 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2656 gen_op_clear_ieee_excp_and_FTT();
2657 rs1
= GET_FIELD(insn
, 13, 17);
2658 rs2
= GET_FIELD(insn
, 27, 31);
2659 xop
= GET_FIELD(insn
, 18, 26);
2660 save_state(dc
, cpu_cond
);
2661 #ifdef TARGET_SPARC64
2662 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2665 l1
= gen_new_label();
2666 cond
= GET_FIELD_SP(insn
, 14, 17);
2667 cpu_src1
= get_src1(insn
, cpu_src1
);
2668 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2670 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2673 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2676 l1
= gen_new_label();
2677 cond
= GET_FIELD_SP(insn
, 14, 17);
2678 cpu_src1
= get_src1(insn
, cpu_src1
);
2679 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2681 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2682 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2685 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2688 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2689 l1
= gen_new_label();
2690 cond
= GET_FIELD_SP(insn
, 14, 17);
2691 cpu_src1
= get_src1(insn
, cpu_src1
);
2692 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2694 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2695 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2696 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2697 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2703 #ifdef TARGET_SPARC64
2704 #define FMOVSCC(fcc) \
2709 l1 = gen_new_label(); \
2710 r_cond = tcg_temp_new(); \
2711 cond = GET_FIELD_SP(insn, 14, 17); \
2712 gen_fcond(r_cond, fcc, cond); \
2713 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2715 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2716 gen_set_label(l1); \
2717 tcg_temp_free(r_cond); \
2719 #define FMOVDCC(fcc) \
2724 l1 = gen_new_label(); \
2725 r_cond = tcg_temp_new(); \
2726 cond = GET_FIELD_SP(insn, 14, 17); \
2727 gen_fcond(r_cond, fcc, cond); \
2728 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2730 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2731 cpu_fpr[DFPREG(rs2)]); \
2732 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2733 cpu_fpr[DFPREG(rs2) + 1]); \
2734 gen_set_label(l1); \
2735 tcg_temp_free(r_cond); \
2737 #define FMOVQCC(fcc) \
2742 l1 = gen_new_label(); \
2743 r_cond = tcg_temp_new(); \
2744 cond = GET_FIELD_SP(insn, 14, 17); \
2745 gen_fcond(r_cond, fcc, cond); \
2746 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2748 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2749 cpu_fpr[QFPREG(rs2)]); \
2750 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2751 cpu_fpr[QFPREG(rs2) + 1]); \
2752 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2753 cpu_fpr[QFPREG(rs2) + 2]); \
2754 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2755 cpu_fpr[QFPREG(rs2) + 3]); \
2756 gen_set_label(l1); \
2757 tcg_temp_free(r_cond); \
2759 case 0x001: /* V9 fmovscc %fcc0 */
2762 case 0x002: /* V9 fmovdcc %fcc0 */
2765 case 0x003: /* V9 fmovqcc %fcc0 */
2766 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2769 case 0x041: /* V9 fmovscc %fcc1 */
2772 case 0x042: /* V9 fmovdcc %fcc1 */
2775 case 0x043: /* V9 fmovqcc %fcc1 */
2776 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2779 case 0x081: /* V9 fmovscc %fcc2 */
2782 case 0x082: /* V9 fmovdcc %fcc2 */
2785 case 0x083: /* V9 fmovqcc %fcc2 */
2786 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2789 case 0x0c1: /* V9 fmovscc %fcc3 */
2792 case 0x0c2: /* V9 fmovdcc %fcc3 */
2795 case 0x0c3: /* V9 fmovqcc %fcc3 */
2796 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2802 #define FMOVSCC(icc) \
2807 l1 = gen_new_label(); \
2808 r_cond = tcg_temp_new(); \
2809 cond = GET_FIELD_SP(insn, 14, 17); \
2810 gen_cond(r_cond, icc, cond, dc); \
2811 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2813 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2814 gen_set_label(l1); \
2815 tcg_temp_free(r_cond); \
2817 #define FMOVDCC(icc) \
2822 l1 = gen_new_label(); \
2823 r_cond = tcg_temp_new(); \
2824 cond = GET_FIELD_SP(insn, 14, 17); \
2825 gen_cond(r_cond, icc, cond, dc); \
2826 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2828 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2829 cpu_fpr[DFPREG(rs2)]); \
2830 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2831 cpu_fpr[DFPREG(rs2) + 1]); \
2832 gen_set_label(l1); \
2833 tcg_temp_free(r_cond); \
2835 #define FMOVQCC(icc) \
2840 l1 = gen_new_label(); \
2841 r_cond = tcg_temp_new(); \
2842 cond = GET_FIELD_SP(insn, 14, 17); \
2843 gen_cond(r_cond, icc, cond, dc); \
2844 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2846 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2847 cpu_fpr[QFPREG(rs2)]); \
2848 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2849 cpu_fpr[QFPREG(rs2) + 1]); \
2850 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2851 cpu_fpr[QFPREG(rs2) + 2]); \
2852 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2853 cpu_fpr[QFPREG(rs2) + 3]); \
2854 gen_set_label(l1); \
2855 tcg_temp_free(r_cond); \
2858 case 0x101: /* V9 fmovscc %icc */
2861 case 0x102: /* V9 fmovdcc %icc */
2863 case 0x103: /* V9 fmovqcc %icc */
2864 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2867 case 0x181: /* V9 fmovscc %xcc */
2870 case 0x182: /* V9 fmovdcc %xcc */
2873 case 0x183: /* V9 fmovqcc %xcc */
2874 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2881 case 0x51: /* fcmps, V9 %fcc */
2882 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2884 case 0x52: /* fcmpd, V9 %fcc */
2885 gen_op_load_fpr_DT0(DFPREG(rs1
));
2886 gen_op_load_fpr_DT1(DFPREG(rs2
));
2887 gen_op_fcmpd(rd
& 3);
2889 case 0x53: /* fcmpq, V9 %fcc */
2890 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2891 gen_op_load_fpr_QT0(QFPREG(rs1
));
2892 gen_op_load_fpr_QT1(QFPREG(rs2
));
2893 gen_op_fcmpq(rd
& 3);
2895 case 0x55: /* fcmpes, V9 %fcc */
2896 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2898 case 0x56: /* fcmped, V9 %fcc */
2899 gen_op_load_fpr_DT0(DFPREG(rs1
));
2900 gen_op_load_fpr_DT1(DFPREG(rs2
));
2901 gen_op_fcmped(rd
& 3);
2903 case 0x57: /* fcmpeq, V9 %fcc */
2904 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2905 gen_op_load_fpr_QT0(QFPREG(rs1
));
2906 gen_op_load_fpr_QT1(QFPREG(rs2
));
2907 gen_op_fcmpeq(rd
& 3);
2912 } else if (xop
== 0x2) {
2915 rs1
= GET_FIELD(insn
, 13, 17);
2917 // or %g0, x, y -> mov T0, x; mov y, T0
2918 if (IS_IMM
) { /* immediate */
2921 simm
= GET_FIELDs(insn
, 19, 31);
2922 r_const
= tcg_const_tl(simm
);
2923 gen_movl_TN_reg(rd
, r_const
);
2924 tcg_temp_free(r_const
);
2925 } else { /* register */
2926 rs2
= GET_FIELD(insn
, 27, 31);
2927 gen_movl_reg_TN(rs2
, cpu_dst
);
2928 gen_movl_TN_reg(rd
, cpu_dst
);
2931 cpu_src1
= get_src1(insn
, cpu_src1
);
2932 if (IS_IMM
) { /* immediate */
2933 simm
= GET_FIELDs(insn
, 19, 31);
2934 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
2935 gen_movl_TN_reg(rd
, cpu_dst
);
2936 } else { /* register */
2937 // or x, %g0, y -> mov T1, x; mov y, T1
2938 rs2
= GET_FIELD(insn
, 27, 31);
2940 gen_movl_reg_TN(rs2
, cpu_src2
);
2941 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2942 gen_movl_TN_reg(rd
, cpu_dst
);
2944 gen_movl_TN_reg(rd
, cpu_src1
);
2947 #ifdef TARGET_SPARC64
2948 } else if (xop
== 0x25) { /* sll, V9 sllx */
2949 cpu_src1
= get_src1(insn
, cpu_src1
);
2950 if (IS_IMM
) { /* immediate */
2951 simm
= GET_FIELDs(insn
, 20, 31);
2952 if (insn
& (1 << 12)) {
2953 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2955 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
2957 } else { /* register */
2958 rs2
= GET_FIELD(insn
, 27, 31);
2959 gen_movl_reg_TN(rs2
, cpu_src2
);
2960 if (insn
& (1 << 12)) {
2961 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2963 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2965 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2967 gen_movl_TN_reg(rd
, cpu_dst
);
2968 } else if (xop
== 0x26) { /* srl, V9 srlx */
2969 cpu_src1
= get_src1(insn
, cpu_src1
);
2970 if (IS_IMM
) { /* immediate */
2971 simm
= GET_FIELDs(insn
, 20, 31);
2972 if (insn
& (1 << 12)) {
2973 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2975 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2976 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
2978 } else { /* register */
2979 rs2
= GET_FIELD(insn
, 27, 31);
2980 gen_movl_reg_TN(rs2
, cpu_src2
);
2981 if (insn
& (1 << 12)) {
2982 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2983 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2985 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2986 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2987 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
2990 gen_movl_TN_reg(rd
, cpu_dst
);
2991 } else if (xop
== 0x27) { /* sra, V9 srax */
2992 cpu_src1
= get_src1(insn
, cpu_src1
);
2993 if (IS_IMM
) { /* immediate */
2994 simm
= GET_FIELDs(insn
, 20, 31);
2995 if (insn
& (1 << 12)) {
2996 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
2998 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
2999 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3000 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3002 } else { /* register */
3003 rs2
= GET_FIELD(insn
, 27, 31);
3004 gen_movl_reg_TN(rs2
, cpu_src2
);
3005 if (insn
& (1 << 12)) {
3006 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3007 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3009 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3010 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3011 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3012 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3015 gen_movl_TN_reg(rd
, cpu_dst
);
3017 } else if (xop
< 0x36) {
3019 cpu_src1
= get_src1(insn
, cpu_src1
);
3020 cpu_src2
= get_src2(insn
, cpu_src2
);
3021 switch (xop
& ~0x10) {
3024 simm
= GET_FIELDs(insn
, 19, 31);
3026 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3027 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3028 dc
->cc_op
= CC_OP_ADD
;
3030 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3034 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3035 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3036 dc
->cc_op
= CC_OP_ADD
;
3038 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3044 simm
= GET_FIELDs(insn
, 19, 31);
3045 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3047 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3050 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3051 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3052 dc
->cc_op
= CC_OP_LOGIC
;
3057 simm
= GET_FIELDs(insn
, 19, 31);
3058 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3060 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3063 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3064 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3065 dc
->cc_op
= CC_OP_LOGIC
;
3070 simm
= GET_FIELDs(insn
, 19, 31);
3071 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3073 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3076 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3077 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3078 dc
->cc_op
= CC_OP_LOGIC
;
3083 simm
= GET_FIELDs(insn
, 19, 31);
3085 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3087 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3091 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3092 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3093 dc
->cc_op
= CC_OP_SUB
;
3095 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3099 case 0x5: /* andn */
3101 simm
= GET_FIELDs(insn
, 19, 31);
3102 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3104 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3107 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3108 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3109 dc
->cc_op
= CC_OP_LOGIC
;
3114 simm
= GET_FIELDs(insn
, 19, 31);
3115 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3117 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3120 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3121 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3122 dc
->cc_op
= CC_OP_LOGIC
;
3125 case 0x7: /* xorn */
3127 simm
= GET_FIELDs(insn
, 19, 31);
3128 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3130 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3131 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3134 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3135 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3136 dc
->cc_op
= CC_OP_LOGIC
;
3139 case 0x8: /* addx, V9 addc */
3140 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3143 #ifdef TARGET_SPARC64
3144 case 0x9: /* V9 mulx */
3146 simm
= GET_FIELDs(insn
, 19, 31);
3147 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3149 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3153 case 0xa: /* umul */
3154 CHECK_IU_FEATURE(dc
, MUL
);
3155 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3157 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3158 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3159 dc
->cc_op
= CC_OP_LOGIC
;
3162 case 0xb: /* smul */
3163 CHECK_IU_FEATURE(dc
, MUL
);
3164 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3166 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3167 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3168 dc
->cc_op
= CC_OP_LOGIC
;
3171 case 0xc: /* subx, V9 subc */
3172 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3175 #ifdef TARGET_SPARC64
3176 case 0xd: /* V9 udivx */
3177 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3178 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3179 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3180 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3183 case 0xe: /* udiv */
3184 CHECK_IU_FEATURE(dc
, DIV
);
3186 gen_helper_udiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3187 dc
->cc_op
= CC_OP_DIV
;
3189 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3192 case 0xf: /* sdiv */
3193 CHECK_IU_FEATURE(dc
, DIV
);
3195 gen_helper_sdiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3196 dc
->cc_op
= CC_OP_DIV
;
3198 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3204 gen_movl_TN_reg(rd
, cpu_dst
);
3206 cpu_src1
= get_src1(insn
, cpu_src1
);
3207 cpu_src2
= get_src2(insn
, cpu_src2
);
3209 case 0x20: /* taddcc */
3210 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3211 gen_movl_TN_reg(rd
, cpu_dst
);
3212 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3213 dc
->cc_op
= CC_OP_TADD
;
3215 case 0x21: /* tsubcc */
3216 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3217 gen_movl_TN_reg(rd
, cpu_dst
);
3218 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3219 dc
->cc_op
= CC_OP_TSUB
;
3221 case 0x22: /* taddcctv */
3222 save_state(dc
, cpu_cond
);
3223 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3224 gen_movl_TN_reg(rd
, cpu_dst
);
3225 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3226 dc
->cc_op
= CC_OP_TADDTV
;
3228 case 0x23: /* tsubcctv */
3229 save_state(dc
, cpu_cond
);
3230 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3231 gen_movl_TN_reg(rd
, cpu_dst
);
3232 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3233 dc
->cc_op
= CC_OP_TSUBTV
;
3235 case 0x24: /* mulscc */
3236 gen_helper_compute_psr();
3237 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3238 gen_movl_TN_reg(rd
, cpu_dst
);
3239 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3240 dc
->cc_op
= CC_OP_ADD
;
3242 #ifndef TARGET_SPARC64
3243 case 0x25: /* sll */
3244 if (IS_IMM
) { /* immediate */
3245 simm
= GET_FIELDs(insn
, 20, 31);
3246 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3247 } else { /* register */
3248 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3249 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3251 gen_movl_TN_reg(rd
, cpu_dst
);
3253 case 0x26: /* srl */
3254 if (IS_IMM
) { /* immediate */
3255 simm
= GET_FIELDs(insn
, 20, 31);
3256 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3257 } else { /* register */
3258 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3259 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3261 gen_movl_TN_reg(rd
, cpu_dst
);
3263 case 0x27: /* sra */
3264 if (IS_IMM
) { /* immediate */
3265 simm
= GET_FIELDs(insn
, 20, 31);
3266 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3267 } else { /* register */
3268 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3269 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3271 gen_movl_TN_reg(rd
, cpu_dst
);
3278 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3279 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3281 #ifndef TARGET_SPARC64
3282 case 0x01 ... 0x0f: /* undefined in the
3286 case 0x10 ... 0x1f: /* implementation-dependent
3292 case 0x2: /* V9 wrccr */
3293 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3294 gen_helper_wrccr(cpu_dst
);
3295 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3296 dc
->cc_op
= CC_OP_FLAGS
;
3298 case 0x3: /* V9 wrasi */
3299 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3300 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3301 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3303 case 0x6: /* V9 wrfprs */
3304 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3305 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3306 save_state(dc
, cpu_cond
);
3311 case 0xf: /* V9 sir, nop if user */
3312 #if !defined(CONFIG_USER_ONLY)
3313 if (supervisor(dc
)) {
3318 case 0x13: /* Graphics Status */
3319 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3321 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3323 case 0x14: /* Softint set */
3324 if (!supervisor(dc
))
3326 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3327 gen_helper_set_softint(cpu_tmp64
);
3329 case 0x15: /* Softint clear */
3330 if (!supervisor(dc
))
3332 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3333 gen_helper_clear_softint(cpu_tmp64
);
3335 case 0x16: /* Softint write */
3336 if (!supervisor(dc
))
3338 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3339 gen_helper_write_softint(cpu_tmp64
);
3341 case 0x17: /* Tick compare */
3342 #if !defined(CONFIG_USER_ONLY)
3343 if (!supervisor(dc
))
3349 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3351 r_tickptr
= tcg_temp_new_ptr();
3352 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3353 offsetof(CPUState
, tick
));
3354 gen_helper_tick_set_limit(r_tickptr
,
3356 tcg_temp_free_ptr(r_tickptr
);
3359 case 0x18: /* System tick */
3360 #if !defined(CONFIG_USER_ONLY)
3361 if (!supervisor(dc
))
3367 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3369 r_tickptr
= tcg_temp_new_ptr();
3370 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3371 offsetof(CPUState
, stick
));
3372 gen_helper_tick_set_count(r_tickptr
,
3374 tcg_temp_free_ptr(r_tickptr
);
3377 case 0x19: /* System tick compare */
3378 #if !defined(CONFIG_USER_ONLY)
3379 if (!supervisor(dc
))
3385 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3387 r_tickptr
= tcg_temp_new_ptr();
3388 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3389 offsetof(CPUState
, stick
));
3390 gen_helper_tick_set_limit(r_tickptr
,
3392 tcg_temp_free_ptr(r_tickptr
);
3396 case 0x10: /* Performance Control */
3397 case 0x11: /* Performance Instrumentation
3399 case 0x12: /* Dispatch Control */
3406 #if !defined(CONFIG_USER_ONLY)
3407 case 0x31: /* wrpsr, V9 saved, restored */
3409 if (!supervisor(dc
))
3411 #ifdef TARGET_SPARC64
3417 gen_helper_restored();
3419 case 2: /* UA2005 allclean */
3420 case 3: /* UA2005 otherw */
3421 case 4: /* UA2005 normalw */
3422 case 5: /* UA2005 invalw */
3428 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3429 gen_helper_wrpsr(cpu_dst
);
3430 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3431 dc
->cc_op
= CC_OP_FLAGS
;
3432 save_state(dc
, cpu_cond
);
3439 case 0x32: /* wrwim, V9 wrpr */
3441 if (!supervisor(dc
))
3443 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3444 #ifdef TARGET_SPARC64
3450 r_tsptr
= tcg_temp_new_ptr();
3451 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3452 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3453 offsetof(trap_state
, tpc
));
3454 tcg_temp_free_ptr(r_tsptr
);
3461 r_tsptr
= tcg_temp_new_ptr();
3462 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3463 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3464 offsetof(trap_state
, tnpc
));
3465 tcg_temp_free_ptr(r_tsptr
);
3472 r_tsptr
= tcg_temp_new_ptr();
3473 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3474 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3475 offsetof(trap_state
,
3477 tcg_temp_free_ptr(r_tsptr
);
3484 r_tsptr
= tcg_temp_new_ptr();
3485 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3486 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3487 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3488 offsetof(trap_state
, tt
));
3489 tcg_temp_free_ptr(r_tsptr
);
3496 r_tickptr
= tcg_temp_new_ptr();
3497 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3498 offsetof(CPUState
, tick
));
3499 gen_helper_tick_set_count(r_tickptr
,
3501 tcg_temp_free_ptr(r_tickptr
);
3505 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3509 TCGv r_tmp
= tcg_temp_local_new();
3511 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3512 save_state(dc
, cpu_cond
);
3513 gen_helper_wrpstate(r_tmp
);
3514 tcg_temp_free(r_tmp
);
3515 dc
->npc
= DYNAMIC_PC
;
3520 TCGv r_tmp
= tcg_temp_local_new();
3522 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3523 save_state(dc
, cpu_cond
);
3524 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3525 tcg_temp_free(r_tmp
);
3526 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3527 offsetof(CPUSPARCState
, tl
));
3528 dc
->npc
= DYNAMIC_PC
;
3532 gen_helper_wrpil(cpu_tmp0
);
3535 gen_helper_wrcwp(cpu_tmp0
);
3538 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3539 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3540 offsetof(CPUSPARCState
,
3543 case 11: // canrestore
3544 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3545 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3546 offsetof(CPUSPARCState
,
3549 case 12: // cleanwin
3550 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3551 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3552 offsetof(CPUSPARCState
,
3555 case 13: // otherwin
3556 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3557 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3558 offsetof(CPUSPARCState
,
3562 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3563 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3564 offsetof(CPUSPARCState
,
3567 case 16: // UA2005 gl
3568 CHECK_IU_FEATURE(dc
, GL
);
3569 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3570 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3571 offsetof(CPUSPARCState
, gl
));
3573 case 26: // UA2005 strand status
3574 CHECK_IU_FEATURE(dc
, HYPV
);
3575 if (!hypervisor(dc
))
3577 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3583 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3584 if (dc
->def
->nwindows
!= 32)
3585 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3586 (1 << dc
->def
->nwindows
) - 1);
3587 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3591 case 0x33: /* wrtbr, UA2005 wrhpr */
3593 #ifndef TARGET_SPARC64
3594 if (!supervisor(dc
))
3596 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3598 CHECK_IU_FEATURE(dc
, HYPV
);
3599 if (!hypervisor(dc
))
3601 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3604 // XXX gen_op_wrhpstate();
3605 save_state(dc
, cpu_cond
);
3611 // XXX gen_op_wrhtstate();
3614 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3617 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3619 case 31: // hstick_cmpr
3623 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3624 r_tickptr
= tcg_temp_new_ptr();
3625 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3626 offsetof(CPUState
, hstick
));
3627 gen_helper_tick_set_limit(r_tickptr
,
3629 tcg_temp_free_ptr(r_tickptr
);
3632 case 6: // hver readonly
3640 #ifdef TARGET_SPARC64
3641 case 0x2c: /* V9 movcc */
3643 int cc
= GET_FIELD_SP(insn
, 11, 12);
3644 int cond
= GET_FIELD_SP(insn
, 14, 17);
3648 r_cond
= tcg_temp_new();
3649 if (insn
& (1 << 18)) {
3651 gen_cond(r_cond
, 0, cond
, dc
);
3653 gen_cond(r_cond
, 1, cond
, dc
);
3657 gen_fcond(r_cond
, cc
, cond
);
3660 l1
= gen_new_label();
3662 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3663 if (IS_IMM
) { /* immediate */
3666 simm
= GET_FIELD_SPs(insn
, 0, 10);
3667 r_const
= tcg_const_tl(simm
);
3668 gen_movl_TN_reg(rd
, r_const
);
3669 tcg_temp_free(r_const
);
3671 rs2
= GET_FIELD_SP(insn
, 0, 4);
3672 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3673 gen_movl_TN_reg(rd
, cpu_tmp0
);
3676 tcg_temp_free(r_cond
);
3679 case 0x2d: /* V9 sdivx */
3680 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3681 gen_movl_TN_reg(rd
, cpu_dst
);
3683 case 0x2e: /* V9 popc */
3685 cpu_src2
= get_src2(insn
, cpu_src2
);
3686 gen_helper_popc(cpu_dst
, cpu_src2
);
3687 gen_movl_TN_reg(rd
, cpu_dst
);
3689 case 0x2f: /* V9 movr */
3691 int cond
= GET_FIELD_SP(insn
, 10, 12);
3694 cpu_src1
= get_src1(insn
, cpu_src1
);
3696 l1
= gen_new_label();
3698 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3700 if (IS_IMM
) { /* immediate */
3703 simm
= GET_FIELD_SPs(insn
, 0, 9);
3704 r_const
= tcg_const_tl(simm
);
3705 gen_movl_TN_reg(rd
, r_const
);
3706 tcg_temp_free(r_const
);
3708 rs2
= GET_FIELD_SP(insn
, 0, 4);
3709 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3710 gen_movl_TN_reg(rd
, cpu_tmp0
);
3720 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3721 #ifdef TARGET_SPARC64
3722 int opf
= GET_FIELD_SP(insn
, 5, 13);
3723 rs1
= GET_FIELD(insn
, 13, 17);
3724 rs2
= GET_FIELD(insn
, 27, 31);
3725 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3729 case 0x000: /* VIS I edge8cc */
3730 case 0x001: /* VIS II edge8n */
3731 case 0x002: /* VIS I edge8lcc */
3732 case 0x003: /* VIS II edge8ln */
3733 case 0x004: /* VIS I edge16cc */
3734 case 0x005: /* VIS II edge16n */
3735 case 0x006: /* VIS I edge16lcc */
3736 case 0x007: /* VIS II edge16ln */
3737 case 0x008: /* VIS I edge32cc */
3738 case 0x009: /* VIS II edge32n */
3739 case 0x00a: /* VIS I edge32lcc */
3740 case 0x00b: /* VIS II edge32ln */
3743 case 0x010: /* VIS I array8 */
3744 CHECK_FPU_FEATURE(dc
, VIS1
);
3745 cpu_src1
= get_src1(insn
, cpu_src1
);
3746 gen_movl_reg_TN(rs2
, cpu_src2
);
3747 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3748 gen_movl_TN_reg(rd
, cpu_dst
);
3750 case 0x012: /* VIS I array16 */
3751 CHECK_FPU_FEATURE(dc
, VIS1
);
3752 cpu_src1
= get_src1(insn
, cpu_src1
);
3753 gen_movl_reg_TN(rs2
, cpu_src2
);
3754 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3755 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3756 gen_movl_TN_reg(rd
, cpu_dst
);
3758 case 0x014: /* VIS I array32 */
3759 CHECK_FPU_FEATURE(dc
, VIS1
);
3760 cpu_src1
= get_src1(insn
, cpu_src1
);
3761 gen_movl_reg_TN(rs2
, cpu_src2
);
3762 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3763 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3764 gen_movl_TN_reg(rd
, cpu_dst
);
3766 case 0x018: /* VIS I alignaddr */
3767 CHECK_FPU_FEATURE(dc
, VIS1
);
3768 cpu_src1
= get_src1(insn
, cpu_src1
);
3769 gen_movl_reg_TN(rs2
, cpu_src2
);
3770 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3771 gen_movl_TN_reg(rd
, cpu_dst
);
3773 case 0x019: /* VIS II bmask */
3774 case 0x01a: /* VIS I alignaddrl */
3777 case 0x020: /* VIS I fcmple16 */
3778 CHECK_FPU_FEATURE(dc
, VIS1
);
3779 gen_op_load_fpr_DT0(DFPREG(rs1
));
3780 gen_op_load_fpr_DT1(DFPREG(rs2
));
3781 gen_helper_fcmple16();
3782 gen_op_store_DT0_fpr(DFPREG(rd
));
3784 case 0x022: /* VIS I fcmpne16 */
3785 CHECK_FPU_FEATURE(dc
, VIS1
);
3786 gen_op_load_fpr_DT0(DFPREG(rs1
));
3787 gen_op_load_fpr_DT1(DFPREG(rs2
));
3788 gen_helper_fcmpne16();
3789 gen_op_store_DT0_fpr(DFPREG(rd
));
3791 case 0x024: /* VIS I fcmple32 */
3792 CHECK_FPU_FEATURE(dc
, VIS1
);
3793 gen_op_load_fpr_DT0(DFPREG(rs1
));
3794 gen_op_load_fpr_DT1(DFPREG(rs2
));
3795 gen_helper_fcmple32();
3796 gen_op_store_DT0_fpr(DFPREG(rd
));
3798 case 0x026: /* VIS I fcmpne32 */
3799 CHECK_FPU_FEATURE(dc
, VIS1
);
3800 gen_op_load_fpr_DT0(DFPREG(rs1
));
3801 gen_op_load_fpr_DT1(DFPREG(rs2
));
3802 gen_helper_fcmpne32();
3803 gen_op_store_DT0_fpr(DFPREG(rd
));
3805 case 0x028: /* VIS I fcmpgt16 */
3806 CHECK_FPU_FEATURE(dc
, VIS1
);
3807 gen_op_load_fpr_DT0(DFPREG(rs1
));
3808 gen_op_load_fpr_DT1(DFPREG(rs2
));
3809 gen_helper_fcmpgt16();
3810 gen_op_store_DT0_fpr(DFPREG(rd
));
3812 case 0x02a: /* VIS I fcmpeq16 */
3813 CHECK_FPU_FEATURE(dc
, VIS1
);
3814 gen_op_load_fpr_DT0(DFPREG(rs1
));
3815 gen_op_load_fpr_DT1(DFPREG(rs2
));
3816 gen_helper_fcmpeq16();
3817 gen_op_store_DT0_fpr(DFPREG(rd
));
3819 case 0x02c: /* VIS I fcmpgt32 */
3820 CHECK_FPU_FEATURE(dc
, VIS1
);
3821 gen_op_load_fpr_DT0(DFPREG(rs1
));
3822 gen_op_load_fpr_DT1(DFPREG(rs2
));
3823 gen_helper_fcmpgt32();
3824 gen_op_store_DT0_fpr(DFPREG(rd
));
3826 case 0x02e: /* VIS I fcmpeq32 */
3827 CHECK_FPU_FEATURE(dc
, VIS1
);
3828 gen_op_load_fpr_DT0(DFPREG(rs1
));
3829 gen_op_load_fpr_DT1(DFPREG(rs2
));
3830 gen_helper_fcmpeq32();
3831 gen_op_store_DT0_fpr(DFPREG(rd
));
3833 case 0x031: /* VIS I fmul8x16 */
3834 CHECK_FPU_FEATURE(dc
, VIS1
);
3835 gen_op_load_fpr_DT0(DFPREG(rs1
));
3836 gen_op_load_fpr_DT1(DFPREG(rs2
));
3837 gen_helper_fmul8x16();
3838 gen_op_store_DT0_fpr(DFPREG(rd
));
3840 case 0x033: /* VIS I fmul8x16au */
3841 CHECK_FPU_FEATURE(dc
, VIS1
);
3842 gen_op_load_fpr_DT0(DFPREG(rs1
));
3843 gen_op_load_fpr_DT1(DFPREG(rs2
));
3844 gen_helper_fmul8x16au();
3845 gen_op_store_DT0_fpr(DFPREG(rd
));
3847 case 0x035: /* VIS I fmul8x16al */
3848 CHECK_FPU_FEATURE(dc
, VIS1
);
3849 gen_op_load_fpr_DT0(DFPREG(rs1
));
3850 gen_op_load_fpr_DT1(DFPREG(rs2
));
3851 gen_helper_fmul8x16al();
3852 gen_op_store_DT0_fpr(DFPREG(rd
));
3854 case 0x036: /* VIS I fmul8sux16 */
3855 CHECK_FPU_FEATURE(dc
, VIS1
);
3856 gen_op_load_fpr_DT0(DFPREG(rs1
));
3857 gen_op_load_fpr_DT1(DFPREG(rs2
));
3858 gen_helper_fmul8sux16();
3859 gen_op_store_DT0_fpr(DFPREG(rd
));
3861 case 0x037: /* VIS I fmul8ulx16 */
3862 CHECK_FPU_FEATURE(dc
, VIS1
);
3863 gen_op_load_fpr_DT0(DFPREG(rs1
));
3864 gen_op_load_fpr_DT1(DFPREG(rs2
));
3865 gen_helper_fmul8ulx16();
3866 gen_op_store_DT0_fpr(DFPREG(rd
));
3868 case 0x038: /* VIS I fmuld8sux16 */
3869 CHECK_FPU_FEATURE(dc
, VIS1
);
3870 gen_op_load_fpr_DT0(DFPREG(rs1
));
3871 gen_op_load_fpr_DT1(DFPREG(rs2
));
3872 gen_helper_fmuld8sux16();
3873 gen_op_store_DT0_fpr(DFPREG(rd
));
3875 case 0x039: /* VIS I fmuld8ulx16 */
3876 CHECK_FPU_FEATURE(dc
, VIS1
);
3877 gen_op_load_fpr_DT0(DFPREG(rs1
));
3878 gen_op_load_fpr_DT1(DFPREG(rs2
));
3879 gen_helper_fmuld8ulx16();
3880 gen_op_store_DT0_fpr(DFPREG(rd
));
3882 case 0x03a: /* VIS I fpack32 */
3883 case 0x03b: /* VIS I fpack16 */
3884 case 0x03d: /* VIS I fpackfix */
3885 case 0x03e: /* VIS I pdist */
3888 case 0x048: /* VIS I faligndata */
3889 CHECK_FPU_FEATURE(dc
, VIS1
);
3890 gen_op_load_fpr_DT0(DFPREG(rs1
));
3891 gen_op_load_fpr_DT1(DFPREG(rs2
));
3892 gen_helper_faligndata();
3893 gen_op_store_DT0_fpr(DFPREG(rd
));
3895 case 0x04b: /* VIS I fpmerge */
3896 CHECK_FPU_FEATURE(dc
, VIS1
);
3897 gen_op_load_fpr_DT0(DFPREG(rs1
));
3898 gen_op_load_fpr_DT1(DFPREG(rs2
));
3899 gen_helper_fpmerge();
3900 gen_op_store_DT0_fpr(DFPREG(rd
));
3902 case 0x04c: /* VIS II bshuffle */
3905 case 0x04d: /* VIS I fexpand */
3906 CHECK_FPU_FEATURE(dc
, VIS1
);
3907 gen_op_load_fpr_DT0(DFPREG(rs1
));
3908 gen_op_load_fpr_DT1(DFPREG(rs2
));
3909 gen_helper_fexpand();
3910 gen_op_store_DT0_fpr(DFPREG(rd
));
3912 case 0x050: /* VIS I fpadd16 */
3913 CHECK_FPU_FEATURE(dc
, VIS1
);
3914 gen_op_load_fpr_DT0(DFPREG(rs1
));
3915 gen_op_load_fpr_DT1(DFPREG(rs2
));
3916 gen_helper_fpadd16();
3917 gen_op_store_DT0_fpr(DFPREG(rd
));
3919 case 0x051: /* VIS I fpadd16s */
3920 CHECK_FPU_FEATURE(dc
, VIS1
);
3921 gen_helper_fpadd16s(cpu_fpr
[rd
],
3922 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3924 case 0x052: /* VIS I fpadd32 */
3925 CHECK_FPU_FEATURE(dc
, VIS1
);
3926 gen_op_load_fpr_DT0(DFPREG(rs1
));
3927 gen_op_load_fpr_DT1(DFPREG(rs2
));
3928 gen_helper_fpadd32();
3929 gen_op_store_DT0_fpr(DFPREG(rd
));
3931 case 0x053: /* VIS I fpadd32s */
3932 CHECK_FPU_FEATURE(dc
, VIS1
);
3933 gen_helper_fpadd32s(cpu_fpr
[rd
],
3934 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3936 case 0x054: /* VIS I fpsub16 */
3937 CHECK_FPU_FEATURE(dc
, VIS1
);
3938 gen_op_load_fpr_DT0(DFPREG(rs1
));
3939 gen_op_load_fpr_DT1(DFPREG(rs2
));
3940 gen_helper_fpsub16();
3941 gen_op_store_DT0_fpr(DFPREG(rd
));
3943 case 0x055: /* VIS I fpsub16s */
3944 CHECK_FPU_FEATURE(dc
, VIS1
);
3945 gen_helper_fpsub16s(cpu_fpr
[rd
],
3946 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3948 case 0x056: /* VIS I fpsub32 */
3949 CHECK_FPU_FEATURE(dc
, VIS1
);
3950 gen_op_load_fpr_DT0(DFPREG(rs1
));
3951 gen_op_load_fpr_DT1(DFPREG(rs2
));
3952 gen_helper_fpsub32();
3953 gen_op_store_DT0_fpr(DFPREG(rd
));
3955 case 0x057: /* VIS I fpsub32s */
3956 CHECK_FPU_FEATURE(dc
, VIS1
);
3957 gen_helper_fpsub32s(cpu_fpr
[rd
],
3958 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3960 case 0x060: /* VIS I fzero */
3961 CHECK_FPU_FEATURE(dc
, VIS1
);
3962 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3963 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3965 case 0x061: /* VIS I fzeros */
3966 CHECK_FPU_FEATURE(dc
, VIS1
);
3967 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3969 case 0x062: /* VIS I fnor */
3970 CHECK_FPU_FEATURE(dc
, VIS1
);
3971 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3972 cpu_fpr
[DFPREG(rs2
)]);
3973 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3974 cpu_fpr
[DFPREG(rs2
) + 1]);
3976 case 0x063: /* VIS I fnors */
3977 CHECK_FPU_FEATURE(dc
, VIS1
);
3978 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3980 case 0x064: /* VIS I fandnot2 */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3983 cpu_fpr
[DFPREG(rs2
)]);
3984 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
3985 cpu_fpr
[DFPREG(rs1
) + 1],
3986 cpu_fpr
[DFPREG(rs2
) + 1]);
3988 case 0x065: /* VIS I fandnot2s */
3989 CHECK_FPU_FEATURE(dc
, VIS1
);
3990 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3992 case 0x066: /* VIS I fnot2 */
3993 CHECK_FPU_FEATURE(dc
, VIS1
);
3994 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
3995 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
3996 cpu_fpr
[DFPREG(rs2
) + 1]);
3998 case 0x067: /* VIS I fnot2s */
3999 CHECK_FPU_FEATURE(dc
, VIS1
);
4000 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4002 case 0x068: /* VIS I fandnot1 */
4003 CHECK_FPU_FEATURE(dc
, VIS1
);
4004 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4005 cpu_fpr
[DFPREG(rs1
)]);
4006 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4007 cpu_fpr
[DFPREG(rs2
) + 1],
4008 cpu_fpr
[DFPREG(rs1
) + 1]);
4010 case 0x069: /* VIS I fandnot1s */
4011 CHECK_FPU_FEATURE(dc
, VIS1
);
4012 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4014 case 0x06a: /* VIS I fnot1 */
4015 CHECK_FPU_FEATURE(dc
, VIS1
);
4016 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4017 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4018 cpu_fpr
[DFPREG(rs1
) + 1]);
4020 case 0x06b: /* VIS I fnot1s */
4021 CHECK_FPU_FEATURE(dc
, VIS1
);
4022 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4024 case 0x06c: /* VIS I fxor */
4025 CHECK_FPU_FEATURE(dc
, VIS1
);
4026 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4027 cpu_fpr
[DFPREG(rs2
)]);
4028 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4029 cpu_fpr
[DFPREG(rs1
) + 1],
4030 cpu_fpr
[DFPREG(rs2
) + 1]);
4032 case 0x06d: /* VIS I fxors */
4033 CHECK_FPU_FEATURE(dc
, VIS1
);
4034 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4036 case 0x06e: /* VIS I fnand */
4037 CHECK_FPU_FEATURE(dc
, VIS1
);
4038 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
4039 cpu_fpr
[DFPREG(rs2
)]);
4040 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4041 cpu_fpr
[DFPREG(rs2
) + 1]);
4043 case 0x06f: /* VIS I fnands */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4047 case 0x070: /* VIS I fand */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4050 cpu_fpr
[DFPREG(rs2
)]);
4051 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4052 cpu_fpr
[DFPREG(rs1
) + 1],
4053 cpu_fpr
[DFPREG(rs2
) + 1]);
4055 case 0x071: /* VIS I fands */
4056 CHECK_FPU_FEATURE(dc
, VIS1
);
4057 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4059 case 0x072: /* VIS I fxnor */
4060 CHECK_FPU_FEATURE(dc
, VIS1
);
4061 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4062 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4063 cpu_fpr
[DFPREG(rs1
)]);
4064 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4065 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4066 cpu_fpr
[DFPREG(rs1
) + 1]);
4068 case 0x073: /* VIS I fxnors */
4069 CHECK_FPU_FEATURE(dc
, VIS1
);
4070 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4071 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4073 case 0x074: /* VIS I fsrc1 */
4074 CHECK_FPU_FEATURE(dc
, VIS1
);
4075 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4076 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4077 cpu_fpr
[DFPREG(rs1
) + 1]);
4079 case 0x075: /* VIS I fsrc1s */
4080 CHECK_FPU_FEATURE(dc
, VIS1
);
4081 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4083 case 0x076: /* VIS I fornot2 */
4084 CHECK_FPU_FEATURE(dc
, VIS1
);
4085 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4086 cpu_fpr
[DFPREG(rs2
)]);
4087 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4088 cpu_fpr
[DFPREG(rs1
) + 1],
4089 cpu_fpr
[DFPREG(rs2
) + 1]);
4091 case 0x077: /* VIS I fornot2s */
4092 CHECK_FPU_FEATURE(dc
, VIS1
);
4093 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4095 case 0x078: /* VIS I fsrc2 */
4096 CHECK_FPU_FEATURE(dc
, VIS1
);
4097 gen_op_load_fpr_DT0(DFPREG(rs2
));
4098 gen_op_store_DT0_fpr(DFPREG(rd
));
4100 case 0x079: /* VIS I fsrc2s */
4101 CHECK_FPU_FEATURE(dc
, VIS1
);
4102 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4104 case 0x07a: /* VIS I fornot1 */
4105 CHECK_FPU_FEATURE(dc
, VIS1
);
4106 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4107 cpu_fpr
[DFPREG(rs1
)]);
4108 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4109 cpu_fpr
[DFPREG(rs2
) + 1],
4110 cpu_fpr
[DFPREG(rs1
) + 1]);
4112 case 0x07b: /* VIS I fornot1s */
4113 CHECK_FPU_FEATURE(dc
, VIS1
);
4114 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4116 case 0x07c: /* VIS I for */
4117 CHECK_FPU_FEATURE(dc
, VIS1
);
4118 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4119 cpu_fpr
[DFPREG(rs2
)]);
4120 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4121 cpu_fpr
[DFPREG(rs1
) + 1],
4122 cpu_fpr
[DFPREG(rs2
) + 1]);
4124 case 0x07d: /* VIS I fors */
4125 CHECK_FPU_FEATURE(dc
, VIS1
);
4126 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4128 case 0x07e: /* VIS I fone */
4129 CHECK_FPU_FEATURE(dc
, VIS1
);
4130 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4131 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4133 case 0x07f: /* VIS I fones */
4134 CHECK_FPU_FEATURE(dc
, VIS1
);
4135 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4137 case 0x080: /* VIS I shutdown */
4138 case 0x081: /* VIS II siam */
4147 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4148 #ifdef TARGET_SPARC64
4153 #ifdef TARGET_SPARC64
4154 } else if (xop
== 0x39) { /* V9 return */
4157 save_state(dc
, cpu_cond
);
4158 cpu_src1
= get_src1(insn
, cpu_src1
);
4159 if (IS_IMM
) { /* immediate */
4160 simm
= GET_FIELDs(insn
, 19, 31);
4161 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4162 } else { /* register */
4163 rs2
= GET_FIELD(insn
, 27, 31);
4165 gen_movl_reg_TN(rs2
, cpu_src2
);
4166 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4168 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4170 gen_helper_restore();
4171 gen_mov_pc_npc(dc
, cpu_cond
);
4172 r_const
= tcg_const_i32(3);
4173 gen_helper_check_align(cpu_dst
, r_const
);
4174 tcg_temp_free_i32(r_const
);
4175 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4176 dc
->npc
= DYNAMIC_PC
;
4180 cpu_src1
= get_src1(insn
, cpu_src1
);
4181 if (IS_IMM
) { /* immediate */
4182 simm
= GET_FIELDs(insn
, 19, 31);
4183 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4184 } else { /* register */
4185 rs2
= GET_FIELD(insn
, 27, 31);
4187 gen_movl_reg_TN(rs2
, cpu_src2
);
4188 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4190 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4193 case 0x38: /* jmpl */
4198 r_pc
= tcg_const_tl(dc
->pc
);
4199 gen_movl_TN_reg(rd
, r_pc
);
4200 tcg_temp_free(r_pc
);
4201 gen_mov_pc_npc(dc
, cpu_cond
);
4202 r_const
= tcg_const_i32(3);
4203 gen_helper_check_align(cpu_dst
, r_const
);
4204 tcg_temp_free_i32(r_const
);
4205 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4206 dc
->npc
= DYNAMIC_PC
;
4209 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4210 case 0x39: /* rett, V9 return */
4214 if (!supervisor(dc
))
4216 gen_mov_pc_npc(dc
, cpu_cond
);
4217 r_const
= tcg_const_i32(3);
4218 gen_helper_check_align(cpu_dst
, r_const
);
4219 tcg_temp_free_i32(r_const
);
4220 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4221 dc
->npc
= DYNAMIC_PC
;
4226 case 0x3b: /* flush */
4227 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4231 case 0x3c: /* save */
4232 save_state(dc
, cpu_cond
);
4234 gen_movl_TN_reg(rd
, cpu_dst
);
4236 case 0x3d: /* restore */
4237 save_state(dc
, cpu_cond
);
4238 gen_helper_restore();
4239 gen_movl_TN_reg(rd
, cpu_dst
);
4241 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4242 case 0x3e: /* V9 done/retry */
4246 if (!supervisor(dc
))
4248 dc
->npc
= DYNAMIC_PC
;
4249 dc
->pc
= DYNAMIC_PC
;
4253 if (!supervisor(dc
))
4255 dc
->npc
= DYNAMIC_PC
;
4256 dc
->pc
= DYNAMIC_PC
;
4272 case 3: /* load/store instructions */
4274 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4276 /* flush pending conditional evaluations before exposing
4278 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4279 dc
->cc_op
= CC_OP_FLAGS
;
4280 gen_helper_compute_psr();
4282 cpu_src1
= get_src1(insn
, cpu_src1
);
4283 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4284 rs2
= GET_FIELD(insn
, 27, 31);
4285 gen_movl_reg_TN(rs2
, cpu_src2
);
4286 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4287 } else if (IS_IMM
) { /* immediate */
4288 simm
= GET_FIELDs(insn
, 19, 31);
4289 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4290 } else { /* register */
4291 rs2
= GET_FIELD(insn
, 27, 31);
4293 gen_movl_reg_TN(rs2
, cpu_src2
);
4294 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4296 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4298 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4299 (xop
> 0x17 && xop
<= 0x1d ) ||
4300 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4302 case 0x0: /* ld, V9 lduw, load unsigned word */
4303 gen_address_mask(dc
, cpu_addr
);
4304 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4306 case 0x1: /* ldub, load unsigned byte */
4307 gen_address_mask(dc
, cpu_addr
);
4308 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4310 case 0x2: /* lduh, load unsigned halfword */
4311 gen_address_mask(dc
, cpu_addr
);
4312 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4314 case 0x3: /* ldd, load double word */
4320 save_state(dc
, cpu_cond
);
4321 r_const
= tcg_const_i32(7);
4322 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4323 tcg_temp_free_i32(r_const
);
4324 gen_address_mask(dc
, cpu_addr
);
4325 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4326 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4327 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4328 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4329 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4330 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4331 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4334 case 0x9: /* ldsb, load signed byte */
4335 gen_address_mask(dc
, cpu_addr
);
4336 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4338 case 0xa: /* ldsh, load signed halfword */
4339 gen_address_mask(dc
, cpu_addr
);
4340 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4342 case 0xd: /* ldstub -- XXX: should be atomically */
4346 gen_address_mask(dc
, cpu_addr
);
4347 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4348 r_const
= tcg_const_tl(0xff);
4349 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4350 tcg_temp_free(r_const
);
4353 case 0x0f: /* swap, swap register with memory. Also
4355 CHECK_IU_FEATURE(dc
, SWAP
);
4356 gen_movl_reg_TN(rd
, cpu_val
);
4357 gen_address_mask(dc
, cpu_addr
);
4358 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4359 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4360 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4362 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4363 case 0x10: /* lda, V9 lduwa, load word alternate */
4364 #ifndef TARGET_SPARC64
4367 if (!supervisor(dc
))
4370 save_state(dc
, cpu_cond
);
4371 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4373 case 0x11: /* lduba, load unsigned byte alternate */
4374 #ifndef TARGET_SPARC64
4377 if (!supervisor(dc
))
4380 save_state(dc
, cpu_cond
);
4381 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4383 case 0x12: /* lduha, load unsigned halfword alternate */
4384 #ifndef TARGET_SPARC64
4387 if (!supervisor(dc
))
4390 save_state(dc
, cpu_cond
);
4391 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4393 case 0x13: /* ldda, load double word alternate */
4394 #ifndef TARGET_SPARC64
4397 if (!supervisor(dc
))
4402 save_state(dc
, cpu_cond
);
4403 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4405 case 0x19: /* ldsba, load signed byte alternate */
4406 #ifndef TARGET_SPARC64
4409 if (!supervisor(dc
))
4412 save_state(dc
, cpu_cond
);
4413 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4415 case 0x1a: /* ldsha, load signed halfword alternate */
4416 #ifndef TARGET_SPARC64
4419 if (!supervisor(dc
))
4422 save_state(dc
, cpu_cond
);
4423 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4425 case 0x1d: /* ldstuba -- XXX: should be atomically */
4426 #ifndef TARGET_SPARC64
4429 if (!supervisor(dc
))
4432 save_state(dc
, cpu_cond
);
4433 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4435 case 0x1f: /* swapa, swap reg with alt. memory. Also
4437 CHECK_IU_FEATURE(dc
, SWAP
);
4438 #ifndef TARGET_SPARC64
4441 if (!supervisor(dc
))
4444 save_state(dc
, cpu_cond
);
4445 gen_movl_reg_TN(rd
, cpu_val
);
4446 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4449 #ifndef TARGET_SPARC64
4450 case 0x30: /* ldc */
4451 case 0x31: /* ldcsr */
4452 case 0x33: /* lddc */
4456 #ifdef TARGET_SPARC64
4457 case 0x08: /* V9 ldsw */
4458 gen_address_mask(dc
, cpu_addr
);
4459 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4461 case 0x0b: /* V9 ldx */
4462 gen_address_mask(dc
, cpu_addr
);
4463 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4465 case 0x18: /* V9 ldswa */
4466 save_state(dc
, cpu_cond
);
4467 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4469 case 0x1b: /* V9 ldxa */
4470 save_state(dc
, cpu_cond
);
4471 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4473 case 0x2d: /* V9 prefetch, no effect */
4475 case 0x30: /* V9 ldfa */
4476 save_state(dc
, cpu_cond
);
4477 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4479 case 0x33: /* V9 lddfa */
4480 save_state(dc
, cpu_cond
);
4481 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4483 case 0x3d: /* V9 prefetcha, no effect */
4485 case 0x32: /* V9 ldqfa */
4486 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4487 save_state(dc
, cpu_cond
);
4488 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4494 gen_movl_TN_reg(rd
, cpu_val
);
4495 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4498 } else if (xop
>= 0x20 && xop
< 0x24) {
4499 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4501 save_state(dc
, cpu_cond
);
4503 case 0x20: /* ldf, load fpreg */
4504 gen_address_mask(dc
, cpu_addr
);
4505 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4506 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4508 case 0x21: /* ldfsr, V9 ldxfsr */
4509 #ifdef TARGET_SPARC64
4510 gen_address_mask(dc
, cpu_addr
);
4512 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4513 gen_helper_ldxfsr(cpu_tmp64
);
4515 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4516 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4517 gen_helper_ldfsr(cpu_tmp32
);
4521 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4522 gen_helper_ldfsr(cpu_tmp32
);
4526 case 0x22: /* ldqf, load quad fpreg */
4530 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4531 r_const
= tcg_const_i32(dc
->mem_idx
);
4532 gen_address_mask(dc
, cpu_addr
);
4533 gen_helper_ldqf(cpu_addr
, r_const
);
4534 tcg_temp_free_i32(r_const
);
4535 gen_op_store_QT0_fpr(QFPREG(rd
));
4538 case 0x23: /* lddf, load double fpreg */
4542 r_const
= tcg_const_i32(dc
->mem_idx
);
4543 gen_address_mask(dc
, cpu_addr
);
4544 gen_helper_lddf(cpu_addr
, r_const
);
4545 tcg_temp_free_i32(r_const
);
4546 gen_op_store_DT0_fpr(DFPREG(rd
));
4552 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4553 xop
== 0xe || xop
== 0x1e) {
4554 gen_movl_reg_TN(rd
, cpu_val
);
4556 case 0x4: /* st, store word */
4557 gen_address_mask(dc
, cpu_addr
);
4558 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4560 case 0x5: /* stb, store byte */
4561 gen_address_mask(dc
, cpu_addr
);
4562 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4564 case 0x6: /* sth, store halfword */
4565 gen_address_mask(dc
, cpu_addr
);
4566 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4568 case 0x7: /* std, store double word */
4574 save_state(dc
, cpu_cond
);
4575 gen_address_mask(dc
, cpu_addr
);
4576 r_const
= tcg_const_i32(7);
4577 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4578 tcg_temp_free_i32(r_const
);
4579 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4580 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4581 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4584 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4585 case 0x14: /* sta, V9 stwa, store word alternate */
4586 #ifndef TARGET_SPARC64
4589 if (!supervisor(dc
))
4592 save_state(dc
, cpu_cond
);
4593 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4594 dc
->npc
= DYNAMIC_PC
;
4596 case 0x15: /* stba, store byte alternate */
4597 #ifndef TARGET_SPARC64
4600 if (!supervisor(dc
))
4603 save_state(dc
, cpu_cond
);
4604 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4605 dc
->npc
= DYNAMIC_PC
;
4607 case 0x16: /* stha, store halfword alternate */
4608 #ifndef TARGET_SPARC64
4611 if (!supervisor(dc
))
4614 save_state(dc
, cpu_cond
);
4615 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4616 dc
->npc
= DYNAMIC_PC
;
4618 case 0x17: /* stda, store double word alternate */
4619 #ifndef TARGET_SPARC64
4622 if (!supervisor(dc
))
4628 save_state(dc
, cpu_cond
);
4629 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4633 #ifdef TARGET_SPARC64
4634 case 0x0e: /* V9 stx */
4635 gen_address_mask(dc
, cpu_addr
);
4636 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4638 case 0x1e: /* V9 stxa */
4639 save_state(dc
, cpu_cond
);
4640 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4641 dc
->npc
= DYNAMIC_PC
;
4647 } else if (xop
> 0x23 && xop
< 0x28) {
4648 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4650 save_state(dc
, cpu_cond
);
4652 case 0x24: /* stf, store fpreg */
4653 gen_address_mask(dc
, cpu_addr
);
4654 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4655 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4657 case 0x25: /* stfsr, V9 stxfsr */
4658 #ifdef TARGET_SPARC64
4659 gen_address_mask(dc
, cpu_addr
);
4660 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4662 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4664 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4666 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4667 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4671 #ifdef TARGET_SPARC64
4672 /* V9 stqf, store quad fpreg */
4676 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4677 gen_op_load_fpr_QT0(QFPREG(rd
));
4678 r_const
= tcg_const_i32(dc
->mem_idx
);
4679 gen_address_mask(dc
, cpu_addr
);
4680 gen_helper_stqf(cpu_addr
, r_const
);
4681 tcg_temp_free_i32(r_const
);
4684 #else /* !TARGET_SPARC64 */
4685 /* stdfq, store floating point queue */
4686 #if defined(CONFIG_USER_ONLY)
4689 if (!supervisor(dc
))
4691 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4696 case 0x27: /* stdf, store double fpreg */
4700 gen_op_load_fpr_DT0(DFPREG(rd
));
4701 r_const
= tcg_const_i32(dc
->mem_idx
);
4702 gen_address_mask(dc
, cpu_addr
);
4703 gen_helper_stdf(cpu_addr
, r_const
);
4704 tcg_temp_free_i32(r_const
);
4710 } else if (xop
> 0x33 && xop
< 0x3f) {
4711 save_state(dc
, cpu_cond
);
4713 #ifdef TARGET_SPARC64
4714 case 0x34: /* V9 stfa */
4715 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4717 case 0x36: /* V9 stqfa */
4721 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4722 r_const
= tcg_const_i32(7);
4723 gen_helper_check_align(cpu_addr
, r_const
);
4724 tcg_temp_free_i32(r_const
);
4725 gen_op_load_fpr_QT0(QFPREG(rd
));
4726 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4729 case 0x37: /* V9 stdfa */
4730 gen_op_load_fpr_DT0(DFPREG(rd
));
4731 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4733 case 0x3c: /* V9 casa */
4734 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4735 gen_movl_TN_reg(rd
, cpu_val
);
4737 case 0x3e: /* V9 casxa */
4738 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4739 gen_movl_TN_reg(rd
, cpu_val
);
4742 case 0x34: /* stc */
4743 case 0x35: /* stcsr */
4744 case 0x36: /* stdcq */
4745 case 0x37: /* stdc */
4756 /* default case for non jump instructions */
4757 if (dc
->npc
== DYNAMIC_PC
) {
4758 dc
->pc
= DYNAMIC_PC
;
4760 } else if (dc
->npc
== JUMP_PC
) {
4761 /* we can do a static jump */
4762 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4766 dc
->npc
= dc
->npc
+ 4;
4774 save_state(dc
, cpu_cond
);
4775 r_const
= tcg_const_i32(TT_ILL_INSN
);
4776 gen_helper_raise_exception(r_const
);
4777 tcg_temp_free_i32(r_const
);
4785 save_state(dc
, cpu_cond
);
4786 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4787 gen_helper_raise_exception(r_const
);
4788 tcg_temp_free_i32(r_const
);
4792 #if !defined(CONFIG_USER_ONLY)
4797 save_state(dc
, cpu_cond
);
4798 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4799 gen_helper_raise_exception(r_const
);
4800 tcg_temp_free_i32(r_const
);
4806 save_state(dc
, cpu_cond
);
4807 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4810 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4812 save_state(dc
, cpu_cond
);
4813 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4817 #ifndef TARGET_SPARC64
4822 save_state(dc
, cpu_cond
);
4823 r_const
= tcg_const_i32(TT_NCP_INSN
);
4824 gen_helper_raise_exception(r_const
);
4825 tcg_temp_free(r_const
);
4831 tcg_temp_free(cpu_tmp1
);
4832 tcg_temp_free(cpu_tmp2
);
/* NOTE(review): this extract is elided -- many original lines (braces,
 * declarations of j/lj/num_insns/bp, several statement bodies, the closing
 * brace) are missing, as the jumps in the embedded line numbers show.
 * The code below is kept byte-identical; comments describe only what the
 * visible lines establish. */
/* Translate one SPARC translation block into TCG ops.  'spc' apparently
 * selects the search-PC variant that records an op->guest-PC mapping
 * (see the gen_opc_* stores below) -- TODO confirm against the
 * gen_intermediate_code_pc caller, which passes spc = 1. */
4835 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4836 int spc
, CPUSPARCState
*env
)
/* Per-TB disassembly state lives on the stack and is zeroed first. */
4838 target_ulong pc_start
, last_pc
;
4839 uint16_t *gen_opc_end
;
4840 DisasContext dc1
, *dc
= &dc1
;
4846 memset(dc
, 0, sizeof(DisasContext
));
/* Seed the context: npc comes from the TB's cs_base, condition codes
 * start out dynamic, and the MMU index reflects the current CPU mode. */
4851 dc
->npc
= (target_ulong
) tb
->cs_base
;
4852 dc
->cc_op
= CC_OP_DYNAMIC
;
4853 dc
->mem_idx
= cpu_mmu_index(env
);
/* FPU availability is gated on the CPU model's FLOAT feature bit. */
4855 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4856 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4858 dc
->fpu_enabled
= 0;
4859 #ifdef TARGET_SPARC64
/* V9 only: 32-bit address masking tracks the PSTATE.AM bit. */
4860 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4862 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
4863 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
/* Allocate the temporaries shared with disas_sparc_insn; the
 * tcg_temp_local_new() ones survive across branches inside the TB. */
4865 cpu_tmp0
= tcg_temp_new();
4866 cpu_tmp32
= tcg_temp_new_i32();
4867 cpu_tmp64
= tcg_temp_new_i64();
4869 cpu_dst
= tcg_temp_local_new();
4872 cpu_val
= tcg_temp_local_new();
4873 cpu_addr
= tcg_temp_local_new();
/* icount: cap the number of guest insns translated into this TB. */
4876 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4878 max_insns
= CF_COUNT_MASK
;
/* Translation loop (do/while closed at "4926" below).  Stop at
 * breakpoints, flushing live state first so the debug exception sees a
 * consistent CPU image. */
4881 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
4882 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4883 if (bp
->pc
== dc
->pc
) {
4884 if (dc
->pc
!= pc_start
)
4885 save_state(dc
, cpu_cond
);
/* Search-PC mode: record pc/npc/icount per generated op so a host PC
 * can later be mapped back to a guest pc/npc pair. */
4894 qemu_log("Search PC...\n");
4895 j
= gen_opc_ptr
- gen_opc_buf
;
4899 gen_opc_instr_start
[lj
++] = 0;
4900 gen_opc_pc
[lj
] = dc
->pc
;
4901 gen_opc_npc
[lj
] = dc
->npc
;
4902 gen_opc_instr_start
[lj
] = 1;
4903 gen_opc_icount
[lj
] = num_insns
;
4906 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
/* Decode and translate exactly one guest instruction. */
4909 disas_sparc_insn(dc
);
4914 /* if the next PC is different, we abort now */
4915 if (dc
->pc
!= (last_pc
+ 4))
4917 /* if we reach a page boundary, we stop generation so that the
4918 PC of a TT_TFAULT exception is always in the right page */
4919 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4921 /* if single step mode, we generate only one instruction and
4922 generate an exception */
4923 if (dc
->singlestep
) {
4926 } while ((gen_opc_ptr
< gen_opc_end
) &&
4927 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4928 num_insns
< max_insns
);
/* Release the temporaries allocated above. */
4931 tcg_temp_free(cpu_addr
);
4932 tcg_temp_free(cpu_val
);
4933 tcg_temp_free(cpu_dst
);
4934 tcg_temp_free_i64(cpu_tmp64
);
4935 tcg_temp_free_i32(cpu_tmp32
);
4936 tcg_temp_free(cpu_tmp0
);
4937 if (tb
->cflags
& CF_LAST_IO
)
/* Epilogue: if both PC and NPC are compile-time constants we can chain
 * directly to the successor TB; otherwise store them to the CPU state. */
4940 if (dc
->pc
!= DYNAMIC_PC
&&
4941 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4942 /* static PC and NPC: we can use direct chaining */
4943 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4945 if (dc
->pc
!= DYNAMIC_PC
)
4946 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4947 save_npc(dc
, cpu_cond
);
4951 gen_icount_end(tb
, num_insns
);
4952 *gen_opc_ptr
= INDEX_op_end
;
/* Search-PC mode: pad the mapping table and publish the delayed-branch
 * jump targets consumed by restore_state_to_opc. */
4954 j
= gen_opc_ptr
- gen_opc_buf
;
4957 gen_opc_instr_start
[lj
++] = 0;
4961 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4962 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4964 tb
->size
= last_pc
+ 4 - pc_start
;
4965 tb
->icount
= num_insns
;
/* Optional disassembly trace of the guest code just translated. */
4968 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
4969 qemu_log("--------------\n");
4970 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
4971 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
4977 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4979 gen_intermediate_code_internal(tb
, 0, env
);
4982 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4984 gen_intermediate_code_internal(tb
, 1, env
);
/* NOTE(review): this extract is elided -- the gregnames rows g1..g7, the
 * temp-name string arguments of most tcg_global_mem_new* calls, several
 * #else/#endif lines and the closing brace are missing.  Code kept
 * byte-identical; comments describe only what the visible lines show. */
/* One-time translator setup: registers every SPARC CPU-state field used
 * by the translator (pc, npc, psr/xcc, fsr, window registers, FP
 * registers, hypervisor/timer registers on V9) as a named TCG global
 * backed by memory at its offset inside CPUState. */
4987 void gen_intermediate_code_init(CPUSPARCState
*env
)
/* Debug names for the integer and FP register globals created below. */
4991 static const char * const gregnames
[8] = {
4992 NULL
, // g0 not used
5001 static const char * const fregnames
[64] = {
5002 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5003 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5004 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5005 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5006 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5007 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5008 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5009 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5012 /* init various static tables */
/* env itself is pinned in a host register (TCG_AREG0); everything else
 * is a memory-backed global at its CPUState offset. */
5016 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5017 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5018 offsetof(CPUState
, regwptr
),
5020 #ifdef TARGET_SPARC64
/* SPARC64-only state: extended condition codes, ASI, FPRS, GSR, the
 * tick comparators and the hypervisor/softint registers. */
5021 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
5023 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
5025 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
5027 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
5029 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5030 offsetof(CPUState
, tick_cmpr
),
5032 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5033 offsetof(CPUState
, stick_cmpr
),
5035 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5036 offsetof(CPUState
, hstick_cmpr
),
5038 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
5040 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
5042 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
5044 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5045 offsetof(CPUState
, ssr
), "ssr");
5046 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5047 offsetof(CPUState
, version
), "ver");
5048 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5049 offsetof(CPUState
, softint
),
/* Common state: window-invalid mask, condition temp, lazy cc sources
 * and op, psr, fsr, pc/npc and the Y register. */
5052 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
5055 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
5057 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
5059 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5060 offsetof(CPUState
, cc_src2
),
5062 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
5064 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
5066 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
5068 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
5070 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
5072 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5074 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5075 #ifndef CONFIG_USER_ONLY
/* Trap base register exists only in system emulation. */
5076 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
/* Global registers g1..g7 (g0 is hardwired to zero, hence i = 1) and
 * all FP registers become TCG globals as well. */
5079 for (i
= 1; i
< 8; i
++)
5080 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5081 offsetof(CPUState
, gregs
[i
]),
5083 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5084 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5085 offsetof(CPUState
, fpr
[i
]),
5088 /* register helpers */
5090 #define GEN_HELPER 2
5095 void restore_state_to_opc(CPUState
*env
, TranslationBlock
*tb
, int pc_pos
)
5098 env
->pc
= gen_opc_pc
[pc_pos
];
5099 npc
= gen_opc_npc
[pc_pos
];
5101 /* dynamic NPC: already stored */
5102 } else if (npc
== 2) {
5103 /* jump PC: use 'cond' and the jump targets of the translation */
5105 env
->npc
= gen_opc_jump_pc
[0];
5107 env
->npc
= gen_opc_jump_pc
[1];
5113 /* flush pending conditional evaluations before exposing cpu state */
5114 if (CC_OP
!= CC_OP_FLAGS
) {
5115 helper_compute_psr();