/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
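/* Worked example: GET_FIELD counts bits from the MSB, so
   GET_FIELD(insn, 0, 1) is (insn >> 30) & 3 -- the top two bits,
   i.e. the SPARC "op" field -- and is equivalent to
   GET_FIELD_SP(insn, 30, 31) in the manual's LSB-first numbering. */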
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
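/* Note: SPARC V9 encodes double/quad FP register numbers above 31 by
   setting bit 0 of the 5-bit field, so DFPREG/QFPREG fold that bit
   into bit 5 of the index; pre-V9 targets simply align the register
   number down to the pair/quad boundary. */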
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
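/* Note on the moves above: the CPU_DoubleU/CPU_QuadU unions in the env
   expose the wide FP temporaries as 32-bit words (l.upper/l.lower, plus
   l.upmost/l.lowest for quads), so a double occupies an even/odd pair of
   f registers and a quad spans four consecutive registers. */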
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
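/* Register-access split: %g0 always reads as zero and ignores writes,
   the other globals live in TCG globals (cpu_gregs), and the windowed
   registers (%o/%l/%i, numbers 8..31) are reached indirectly through
   cpu_regwptr, which tracks the current register window. */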
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
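/* TB-chaining contract assumed here: exiting with (long)tb + tb_num
   hands the execution loop the TB pointer tagged with the jump-slot
   index so the direct jump emitted by tcg_gen_goto_tb can be patched
   to point at the next TB; exiting with 0 means "no chaining". */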
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
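/* Signed-overflow rule used above: an addition overflows iff both
   operands have the same sign and the result's sign differs, i.e.
   ~(src1 ^ src2) & (src1 ^ dst) has the sign bit set (bit 31 for icc,
   bit 63 for xcc); that bit is then shifted down onto PSR_OVF. */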
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_add_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_add_cc2(dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_add_cc2(dst);
}
static inline void gen_op_addx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_addx_cc2(dst);
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_addx_cc2(dst);
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* old op:
   if (src1 < T1)
       env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_sub_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_sub_cc2(dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_sub_cc2(dst);
}
static inline void gen_op_subx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_subx_cc2(dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_subx_cc2(dst);
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
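/* Tagged arithmetic note: TADDcc/TSUBcc expect both operands to have
   their two low (tag) bits clear; gen_cc_V_tag merely sets PSR_OVF on a
   tag violation, while the ...TV variants use gen_tag_tv plus
   gen_add_tv/gen_sub_tv to raise a TT_TOVF trap instead. */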
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
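/* MULScc performs one step of a shift-and-add multiply: if the LSB of
   %y is clear the addend is forced to zero, %y shifts right taking the
   partial product's LSB, and N ^ V from the previous step shifts into
   bit 31 of the partial product before the add updates the flags. */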
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
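/* The INT64_MIN / -1 check above covers the one signed-division case
   that overflows: the true quotient (2^63) is unrepresentable, so the
   result is pinned to INT64_MIN instead of performing the division. */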
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
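/* Annulled-branch handling: for a taken branch, gen_branch_a executes
   the delay slot at pc2 with npc set to the target pc1; when the
   condition is false the delay slot is annulled and execution resumes
   directly at pc2 + 4. */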
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
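/* SPARC keeps an architectural (pc, npc) pair because every control
   transfer is delayed by one instruction; dc->pc/dc->npc track that
   pair at translation time, degrading to DYNAMIC_PC (value known only
   at run time) or JUMP_PC (one of jump_pc[0]/jump_pc[1], selected by
   the condition) when they are not compile-time constants. */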
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
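/* ASI selection: when the instruction's immediate bit is set the access
   uses the current %asi register, so a runtime copy of cpu_asi is made;
   otherwise the 8-bit ASI comes from the instruction's asi field and can
   be emitted as a TCG constant. */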
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
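/* LDSTUB semantics: the sequence above loads one unsigned byte through
   the selected ASI and then stores 0xff to the same address -- the SPARC
   atomic test-and-set primitive (atomicity here relies on the translator
   emitting both steps without an intervening exception point). */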
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(); // const
    cpu_src2 = tcg_temp_new(); // const

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:
        /*CALL*/ {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rd %y */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
#endif
2452 } else if (xop
== 0x34) { /* FPU Operations */
2453 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2455 gen_op_clear_ieee_excp_and_FTT();
2456 rs1
= GET_FIELD(insn
, 13, 17);
2457 rs2
= GET_FIELD(insn
, 27, 31);
2458 xop
= GET_FIELD(insn
, 18, 26);
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
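                /* Single-precision helpers take their TCG operands
                   directly; the double and quad helpers instead communicate
                   through the dt0/dt1 and qt0/qt1 staging slots in the CPU
                   state, filled and drained by gen_op_load_fpr_* and
                   gen_op_store_*_fpr. DFPREG folds the V9 high register bit
                   into bit 5 of the index, so e.g. encoded rd = 3 names
                   double register %f34, i.e. cpu_fpr index 34. */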
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
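                /* Every inexact FP operation is bracketed by
                   gen_clear_float_exceptions() and
                   gen_helper_check_ieee_exceptions(), which converts the
                   softfloat status accumulated inside the helper into FSR
                   exception fields (or a trap) before the result is
                   committed. */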
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
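                /* The conversion opcodes follow the f<src>to<dst> naming
                   scheme: fitos is int to single, fdtos double to single,
                   and so on. The widening conversions (fitod, fstod, fitoq,
                   fstoq, fdtoq) are exact and therefore skip the IEEE
                   exception bracket used by the narrowing ones. */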
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
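                /* The fmov*r forms above compile to a compare-and-branch:
                   gen_tcg_cond_reg[cond] tests the integer source register
                   against zero and branches around the plain register copy,
                   so no conditional-select TCG op is required. */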
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_fcond(r_cond, fcc, cond);                       \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);         \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
#define FMOVDCC(fcc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_fcond(r_cond, fcc, cond);                       \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                \
                                    cpu_fpr[DFPREG(rs2)]);              \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],            \
                                    cpu_fpr[DFPREG(rs2) + 1]);          \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
#define FMOVQCC(fcc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_fcond(r_cond, fcc, cond);                       \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                \
                                    cpu_fpr[QFPREG(rs2)]);              \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],            \
                                    cpu_fpr[QFPREG(rs2) + 1]);          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],            \
                                    cpu_fpr[QFPREG(rs2) + 2]);          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],            \
                                    cpu_fpr[QFPREG(rs2) + 3]);          \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
                case 0x001: /* V9 fmovscc %fcc0 */
                    FMOVSCC(0);
                    break;
                case 0x002: /* V9 fmovdcc %fcc0 */
                    FMOVDCC(0);
                    break;
                case 0x003: /* V9 fmovqcc %fcc0 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x041: /* V9 fmovscc %fcc1 */
                    FMOVSCC(1);
                    break;
                case 0x042: /* V9 fmovdcc %fcc1 */
                    FMOVDCC(1);
                    break;
                case 0x043: /* V9 fmovqcc %fcc1 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
                case 0x081: /* V9 fmovscc %fcc2 */
                    FMOVSCC(2);
                    break;
                case 0x082: /* V9 fmovdcc %fcc2 */
                    FMOVDCC(2);
                    break;
                case 0x083: /* V9 fmovqcc %fcc2 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(2);
                    break;
                case 0x0c1: /* V9 fmovscc %fcc3 */
                    FMOVSCC(3);
                    break;
                case 0x0c2: /* V9 fmovdcc %fcc3 */
                    FMOVDCC(3);
                    break;
                case 0x0c3: /* V9 fmovqcc %fcc3 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(3);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_cond(r_cond, icc, cond);                        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);         \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
#define FMOVDCC(icc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_cond(r_cond, icc, cond);                        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                \
                                    cpu_fpr[DFPREG(rs2)]);              \
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],            \
                                    cpu_fpr[DFPREG(rs2) + 1]);          \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
#define FMOVQCC(icc)                                                    \
                {                                                       \
                    TCGv r_cond;                                        \
                    int l1;                                             \
                                                                        \
                    l1 = gen_new_label();                               \
                    r_cond = tcg_temp_new();                            \
                    cond = GET_FIELD_SP(insn, 14, 17);                  \
                    gen_cond(r_cond, icc, cond);                        \
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,             \
                                       0, l1);                          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                \
                                    cpu_fpr[QFPREG(rs2)]);              \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],            \
                                    cpu_fpr[QFPREG(rs2) + 1]);          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],            \
                                    cpu_fpr[QFPREG(rs2) + 2]);          \
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],            \
                                    cpu_fpr[QFPREG(rs2) + 3]);          \
                    gen_set_label(l1);                                  \
                    tcg_temp_free(r_cond);                              \
                }
                case 0x101: /* V9 fmovscc %icc */
                    FMOVSCC(0);
                    break;
                case 0x102: /* V9 fmovdcc %icc */
                    FMOVDCC(0);
                    break;
                case 0x103: /* V9 fmovqcc %icc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x181: /* V9 fmovscc %xcc */
                    FMOVSCC(1);
                    break;
                case 0x182: /* V9 fmovdcc %xcc */
                    FMOVDCC(1);
                    break;
                case 0x183: /* V9 fmovqcc %xcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
                case 0x51: /* fcmps, V9 %fcc */
                    gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x52: /* fcmpd, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmpd(rd & 3);
                    break;
                case 0x53: /* fcmpq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpq(rd & 3);
                    break;
                case 0x55: /* fcmpes, V9 %fcc */
                    gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x56: /* fcmped, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmped(rd & 3);
                    break;
                case 0x57: /* fcmpeq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpeq(rd & 3);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x2) {
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
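                /* Using %g0 as a source reduces "or" to a move, SPARC's
                   canonical mov synthesis, so the rs1 == 0 and rs2 == 0
                   paths above copy the operand or immediate directly and
                   skip the TCG or op. */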
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
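            /* Bit 12 of the instruction selects the V9 64-bit shift: sllx,
               srlx and srax take a 6-bit count, while the 32-bit forms mask
               the count to 5 bits and, for right shifts, first truncate the
               source to its low 32 bits. */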
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x8: /* addx, V9 addc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                            } else {
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                            } else {
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
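                    /* When the condition codes are not updated, addx
                       materializes the PSR carry flag into cpu_tmp0 with
                       gen_mov_reg_C() and folds it into plain adds; the
                       gen_op_*_cc helpers are only needed when the flags
                       must be recomputed as well. */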
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0xc: /* subx, V9 subc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                            } else {
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                            } else {
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_div_cc(cpu_dst);
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_div_cc(cpu_dst);
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
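                    /* taddcctv and tsubcctv can raise a tag-overflow trap,
                       so save_state() is called first to present the
                       exception handler with a consistent pc/npc pair. */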
                    case 0x24: /* mulscc */
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
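                    /* The wr forms below compute rs1 ^ rs2 (or rs1 ^
                       simm13) because SPARC defines wr as an XOR of its two
                       source operands rather than a plain move. */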
                    case 0x30:
                        {
                            switch (rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc))
                                    ; // XXX
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
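                            /* The softint set/clear/write trio goes through
                               helpers because updating SOFTINT may require
                               re-evaluating pending interrupts, which
                               cannot be expressed as inline TCG ops. */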
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 7: // tl
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                break;
                            case 8: // pil
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        psrpil));
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
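                    /* On 32-bit CPUs the WIM write above is masked with
                       (1 << nwindows) - 1 so that bits for unimplemented
                       register windows always read back as zero. */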
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {   /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {   /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
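                        /* For movcc, bit 18 of the instruction selects the
                           integer condition codes (cc 0 is %icc, cc 2 is
                           %xcc) handled by gen_cond(); otherwise cc picks
                           one of the %fccN sets via gen_fcond(). */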
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
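                /* array16 and array32 reuse the array8 helper and simply
                   scale the computed address left by 1 or 2 bits. */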
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
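                /* The "s" (single) VIS forms operate on one 32-bit register
                   directly; the 64-bit forms go through the dt0/dt1 staging
                   slots because a VIS double spans a pair of cpu_fpr
                   entries. */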
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
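                /* The 64-bit VIS logical ops are emitted as two independent
                   32-bit TCG ops, one per half of the DFPREG pair; there is
                   no 64-bit FP register file to operate on directly. */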
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
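                /* Delayed control transfer: gen_mov_pc_npc() promotes the
                   old npc to pc, the computed target is placed in cpu_npc,
                   and dc->npc = DYNAMIC_PC records that the next PC is no
                   longer known at translation time. */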
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
        }
        break;
    case 3:     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
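                /* At this point cpu_addr holds the effective address: rs1 +
                   simm13 for the immediate form, rs1 + rs2 for the register
                   form, and rs1 alone for casa/casxa, which address memory
                   through a single register. */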
                switch (xop) {
                case 0x0:   /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:   /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:   /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:   /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:   /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:   /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:   /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:  /* swap, swap register with memory. Also
                               atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
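                /* ldd above splits the 64-bit memory word across an
                   even/odd register pair: the low word goes to rd + 1 and
                   the high word ends up in rd via cpu_val and the common
                   writeback below. */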
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:  /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:  /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:  /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:  /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:  /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:  /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:  /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:  /* swapa, swap reg with alt. memory. Also
                               atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:  /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:  /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                        break;
                    }
#endif
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    gen_helper_ldfsr(cpu_tmp32);
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
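                /* std mirrors ldd: tcg_gen_concat_tl_i64() rebuilds the
                   64-bit value from the odd register (low word, in
                   cpu_tmp0) and the even register (high word, in cpu_val)
                   before issuing a single 64-bit store. */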
4717 } else if (xop
> 0x23 && xop
< 0x28) {
4718 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4720 save_state(dc
, cpu_cond
);
4722 case 0x24: /* stf, store fpreg */
4723 gen_address_mask(dc
, cpu_addr
);
4724 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4725 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
            case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                gen_address_mask(dc, cpu_addr);
                tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                if (rd == 1)
                    tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                else
                    tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                break;
            case 0x26:
#ifdef TARGET_SPARC64
                /* V9 stqf, store quad fpreg */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    gen_helper_stqf(cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                }
                break;
#else /* !TARGET_SPARC64 */
                /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                goto illegal_insn;
#else
                if (!supervisor(dc))
                    goto priv_insn;
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                goto nfq_insn;
#endif
#endif
            case 0x27: /* stdf, store double fpreg */
                {
                    TCGv_i32 r_const;

                    gen_op_load_fpr_DT0(DFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    gen_helper_stdf(cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                }
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop > 0x33 && xop < 0x3f) {
            save_state(dc, cpu_cond);
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x34: /* V9 stfa */
                gen_stf_asi(cpu_addr, insn, 4, rd);
                break;
            case 0x36: /* V9 stqfa */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    r_const = tcg_const_i32(7);
                    gen_helper_check_align(cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                }
                break;
            case 0x37: /* V9 stdfa */
                gen_op_load_fpr_DT0(DFPREG(rd));
                gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                break;
            case 0x3c: /* V9 casa */
                gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
                break;
            case 0x3e: /* V9 casxa */
                gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
                break;
#else
            case 0x34: /* stc */
            case 0x35: /* stcsr */
            case 0x36: /* stdcq */
            case 0x37: /* stdc */
                goto ncp_insn;
#endif
            default:
                goto illegal_insn;
            }
        } else
            goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
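    /* A conditional branch leaves npc set to JUMP_PC with both candidate
       targets in jump_pc[]; gen_branch2() above emits the run-time compare
       on cpu_cond that selects between them. Otherwise we either propagate
       a dynamic npc or simply slide the pc/npc delay-slot pair forward by
       one instruction. */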
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}
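
/* Translator main loop: decode guest instructions at dc->pc one at a
   time, emitting TCG ops, until the instruction stream ends (branch or
   trap), a page boundary or breakpoint is hit, or the op buffer and
   icount budgets checked at the bottom of the loop run out. */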
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    /* loads and stores */
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
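    /* cpu_dst, cpu_val and cpu_addr are local temporaries: unlike the
       plain temporaries above, TCG preserves their values across the
       branches and labels that some generated sequences contain. */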
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
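    /* Loop stop conditions: room must remain in the opcode buffer, the
       block must stay within a page-sized limit (with a 32-byte margin),
       and the icount budget for this TB must not be exceeded. */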
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
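    /* gen_goto_tb() can patch a direct jump to the next TB only when
       both pc and npc are known at translation time; otherwise the
       pc/npc state is written back and control returns to the main
       loop for a full TB lookup. */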
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
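
/* The _pc variant (spc != 0) re-translates a TB solely to fill the
   gen_opc_* side tables, which gen_pc_load() below uses to recover the
   guest pc/npc pair from a host code pointer after a fault. */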
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}