/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv_i32 cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];
#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
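
/* Note (added for exposition, not in the original source): GET_FIELD numbers
   bits MSB-first, so bit 0 is the top bit of the 32-bit word. For example,
   GET_FIELD(insn, 0, 1) == (insn >> 30) & 3 extracts the two `op` bits; for
   insn = 0x8a102005 that yields 2. GET_FIELD_SP flips back to the manuals'
   LSB-first numbering. */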
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
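
/* Note (added for exposition; interpretation based on the SPARC V9 register
   model): V9 doubles the FP file to 64 single halves, and the low bit of a
   double/quad register number selects the upper bank, so it is folded into
   bit 5: e.g. DFPREG(3) == ((3 & 1) << 5) | (3 & 0x1e) == 34, i.e. the
   f34/f35 pair. On 32-bit SPARC the odd bit is simply masked off. */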
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
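
/* Illustration (added, not in the original source): sign_extend(0x1fff, 13)
   shifts the 13-bit field up by 32 - 13 = 19 bits and arithmetic-shifts it
   back, yielding -1. This relies on the arithmetic right shift of signed
   ints that every supported host compiler provides. */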
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
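
/* Note (added for exposition): %g0-%g7 live in the cpu_gregs[] TCG globals,
   while the windowed registers (%o/%l/%i, numbers 8..31) are reached through
   the cpu_regwptr pointer, so a register-window change only has to update
   one pointer instead of moving 24 values. %g0 reads as zero and writes to
   it are dropped above. */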
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
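
/* Note (added for exposition): tcg_gen_exit_tb((long)tb + tb_num) returns a
   TB pointer tagged with the jump-slot index (0 or 1) to the execution loop,
   which lets it patch this exit so the two blocks chain directly. That is
   only safe when both target PCs stay on the page being translated, hence
   the TARGET_PAGE_MASK test above. */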
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
    if (!T0)
        env->psr |= PSR_ZERO;
    if ((int32_t) T0 < 0)
        env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
    if (T0 < src1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
    if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* old op:
    if (src1 < T1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
    if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
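
/* Note (added for exposition): MULScc is one step of SPARC's iterative
   multiply. Each step shifts a multiplier bit out of %y, zeroes the addend
   when that bit is 0, rotates N ^ V into the top of the shifted partial
   product, and performs a flag-setting add. Issuing it 32 times emulates a
   full 32x32 multiply on CPUs that lack a hardware multiplier. */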
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
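
/* Note (added for exposition): the two brcondi lines above special-case the
   only signed 64-bit division that overflows, INT64_MIN / -1 (the exact
   quotient 2^63 is unrepresentable); the result is pinned to INT64_MIN
   instead of letting the host's divide instruction fault. */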
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
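
/* Note (added for exposition; the exact shift constants are defined
   elsewhere and are an assumption here): fcc_offset selects which of the
   four condition-code fields is read. With offset 0 these helpers pick
   fcc0 out of the low FSR word; the V9-only fcc1..fcc3 fields live higher
   in the FSR and are reached through the offsets computed in gen_fcond()
   below. */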
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
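
/* Note (added for exposition): while npc == JUMP_PC the two possible
   next-PC values exist only in dc->jump_pc[]; gen_generic_branch()
   materializes the choice into cpu_npc using the saved condition, after
   which npc has to be treated as DYNAMIC_PC (known only at run time). */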
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
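
/* Note (added for exposition): SPARC's branch delay slots are modelled with
   the (pc, npc) pair; advancing one instruction is pc = npc, npc += 4, which
   is exactly what gen_op_next_insn() emits at translation time. */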
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
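
/* Note (added for exposition): with the immediate bit set, the ASI comes
   from the %asi register at run time (hence the temp copy); otherwise the
   8-bit ASI is a translation-time constant taken from the instruction
   (fields 19..26 in GET_FIELD's MSB-first numbering). */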
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
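
/* Note (added for exposition): IS_IMM tests insn bit 13 (the `i` field).
   GET_FIELDs(insn, 19, 31) then sign-extends the 13-bit simm13, so an
   encoded 0x1fff becomes -1. Register operands 1..7 alias the cpu_gregs
   globals directly and need no load; windowed registers go through
   cpu_regwptr. */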
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(); // const
    cpu_src2 = tcg_temp_new(); // const

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 18);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
== 0x34) { /* FPU Operations */
2381 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2383 gen_op_clear_ieee_excp_and_FTT();
2384 rs1
= GET_FIELD(insn
, 13, 17);
2385 rs2
= GET_FIELD(insn
, 27, 31);
2386 xop
= GET_FIELD(insn
, 18, 26);
            case 0x1: /* fmovs */
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x5: /* fnegs */
                gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x9: /* fabss */
                gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x29: /* fsqrts */
                CHECK_FPU_FEATURE(dc, FSQRT);
                gen_clear_float_exceptions();
                gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x2a: /* fsqrtd */
                CHECK_FPU_FEATURE(dc, FSQRT);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsqrtd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x2b: /* fsqrtq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsqrtq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x41: /* fadds */
                gen_clear_float_exceptions();
                gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x42: /* faddd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x43: /* faddq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_faddq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x45: /* fsubs */
                gen_clear_float_exceptions();
                gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x46: /* fsubd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x47: /* fsubq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fsubq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x49: /* fmuls */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_clear_float_exceptions();
                gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4a: /* fmuld */
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmuld();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4b: /* fmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                CHECK_FPU_FEATURE(dc, FMUL);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x4d: /* fdivs */
                gen_clear_float_exceptions();
                gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x4e: /* fdivd */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivd();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x4f: /* fdivq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdivq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x69: /* fsmuld */
                CHECK_FPU_FEATURE(dc, FSMULD);
                gen_clear_float_exceptions();
                gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdmulq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xc4: /* fitos */
                gen_clear_float_exceptions();
                gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc7: /* fqtos */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xc8: /* fitod */
                gen_helper_fitod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_helper_fstod(cpu_fpr[rs2]);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcc: /* fitoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fitoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xcd: /* fstoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_helper_fstoq(cpu_fpr[rs2]);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xce: /* fdtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fdtoq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xd1: /* fstoi */
                gen_clear_float_exceptions();
                gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0xd3: /* fqtoi */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtoi(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x3: /* V9 fmovq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
                                cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                cpu_fpr[QFPREG(rs2) + 3]);
                break;
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fnegd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fnegq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fabsd();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_helper_fabsq();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
            case 0x81: /* V9 fstox */
                gen_clear_float_exceptions();
                gen_helper_fstox(cpu_fpr[rs2]);
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fdtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fqtox();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtos(cpu_tmp32);
                gen_helper_check_ieee_exceptions();
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtod();
                gen_helper_check_ieee_exceptions();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                gen_helper_fxtoq();
                gen_helper_check_ieee_exceptions();
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                int l1;

                CHECK_FPU_FEATURE(dc, FLOAT128);
                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                   0, l1);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
                gen_set_label(l1);
                break;
            }
#endif
            switch (xop) {
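/* The FMOV*CC macros below all use the same branch-over-move pattern as
   the fmovr variants above: evaluate the condition into a temporary,
   branch past the register copy when it is zero, and place a label
   after the move. */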
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_fcond(r_cond, fcc, cond);               \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
#define FMOVDCC(fcc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_fcond(r_cond, fcc, cond);               \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],        \
                                cpu_fpr[DFPREG(rs2)]);      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                cpu_fpr[DFPREG(rs2) + 1]);  \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
#define FMOVQCC(fcc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_fcond(r_cond, fcc, cond);               \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],        \
                                cpu_fpr[QFPREG(rs2)]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                cpu_fpr[QFPREG(rs2) + 1]);  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                cpu_fpr[QFPREG(rs2) + 2]);  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                cpu_fpr[QFPREG(rs2) + 3]);  \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVSCC(0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVDCC(0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVSCC(1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVDCC(1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVSCC(2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVDCC(2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(2);
                break;
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVSCC(3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVDCC(3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(3);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_cond(r_cond, icc, cond);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
#define FMOVDCC(icc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_cond(r_cond, icc, cond);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],        \
                                cpu_fpr[DFPREG(rs2)]);      \
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                cpu_fpr[DFPREG(rs2) + 1]);  \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
#define FMOVQCC(icc)                                        \
            {                                               \
                TCGv r_cond;                                \
                int l1;                                     \
                                                            \
                l1 = gen_new_label();                       \
                r_cond = tcg_temp_new();                    \
                cond = GET_FIELD_SP(insn, 14, 17);          \
                gen_cond(r_cond, icc, cond);                \
                tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                   0, l1);                  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],        \
                                cpu_fpr[QFPREG(rs2)]);      \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                cpu_fpr[QFPREG(rs2) + 1]);  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                cpu_fpr[QFPREG(rs2) + 2]);  \
                tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                cpu_fpr[QFPREG(rs2) + 3]);  \
                gen_set_label(l1);                          \
                tcg_temp_free(r_cond);                      \
            }
            case 0x101: /* V9 fmovscc %icc */
                FMOVSCC(0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVDCC(0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(0);
                break;
            case 0x181: /* V9 fmovscc %xcc */
                FMOVSCC(1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVDCC(1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                FMOVQCC(1);
                break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
                CHECK_FPU_FEATURE(dc, FLOAT128);
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x2) {
            // clr/mov shortcut
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    TCGv r_const;

                    rs2 = GET_FIELDs(insn, 19, 31);
                    r_const = tcg_const_tl((int)rs2);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else
                        gen_movl_TN_reg(rd, cpu_src1);
                }
            }
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                }
                tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                }
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
#endif
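        /* For the xop < 0x20 group handled next, bit 4 of xop selects
           the condition-code setting variant of each ALU operation,
           hence the dispatch on xop & ~0x10 followed by
           "if (xop & 0x10)" checks. */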
        } else if (xop < 0x36) {
            cpu_src1 = get_src1(insn, cpu_src1);
            cpu_src2 = get_src2(insn, cpu_src2);
            if (xop < 0x20) {
                switch (xop & ~0x10) {
                case 0x0: /* add */
                    if (xop & 0x10)
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                    else
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    break;
                case 0x1: /* and */
                    tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x2: /* or */
                    tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x3: /* xor */
                    tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x4: /* sub */
                    if (xop & 0x10)
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                    else
                        tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                    break;
                case 0x5: /* andn */
                    tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x6: /* orn */
                    tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x7: /* xorn */
                    tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                    tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x8: /* addx, V9 addc */
                    if (xop & 0x10)
                        gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                    else {
                        gen_mov_reg_C(cpu_tmp0, cpu_psr);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    break;
#endif
                case 0xa: /* umul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xb: /* smul */
                    CHECK_IU_FEATURE(dc, MUL);
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xc: /* subx, V9 subc */
                    if (xop & 0x10)
                        gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                    else {
                        gen_mov_reg_C(cpu_tmp0, cpu_psr);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                        tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                    tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                    gen_trap_ifdivzero_tl(cpu_cc_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                case 0xf: /* sdiv */
                    CHECK_IU_FEATURE(dc, DIV);
                    gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
                switch (xop) {
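                /* The tagged add/subtract variants with trap-on-overflow
                   below (taddcctv/tsubcctv) call save_state() first
                   because the ccTV helpers can raise a tag-overflow trap
                   at runtime. */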
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x24: /* mulscc */
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    if (IS_IMM) { /* immediate */
                        rs2 = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    if (IS_IMM) { /* immediate */
                        rs2 = GET_FIELDs(insn, 20, 31);
                        tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    if (IS_IMM) { /* immediate */
                        rs2 = GET_FIELDs(insn, 20, 31);
                        tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                    } else { /* register */
                        tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch(rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC
                                               II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8
                                               manual, nop on the
                                               microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_dst);
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc))
                                ; // XXX
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_tmp64);
                            break;
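                        /* The timer ASRs below share one pattern: compute
                           the new value, load a pointer to the timer
                           state out of env, and hand both to a tick
                           helper that reprograms the timer. */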
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            gen_helper_saved();
                            break;
                        case 1:
                            gen_helper_restored();
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        gen_helper_wrpsr(cpu_dst);
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state, tnpc));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                              offsetof(trap_state,
                                                       tstate));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv_ptr r_tsptr;

                                r_tsptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                               offsetof(trap_state, tt));
                                tcg_temp_free_ptr(r_tsptr);
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv_ptr r_tickptr;

                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_tmp0);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            gen_helper_wrpstate(cpu_tmp0);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 7: // tl
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            break;
                        case 8: // pil
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    psrpil));
                            break;
                        case 9: // cwp
                            gen_helper_wrcwp(cpu_tmp0);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    wstate));
                            break;
                        case 16: // UA2005 gl
                            CHECK_IU_FEATURE(dc, GL);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        if (dc->def->nwindows != 32)
                            tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                            (1 << dc->def->nwindows) - 1);
                        tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                    }
                    break;
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                        CHECK_IU_FEATURE(dc, HYPV);
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                            break;
                        case 5: // htba
                            tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_hstick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new();
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            rs2 = GET_FIELD_SPs(insn, 0, 10);
                            r_const = tcg_const_tl((int)rs2);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        tcg_temp_free(r_cond);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                           cpu_src1, 0, l1);
                        if (IS_IMM) {   /* immediate */
                            TCGv r_const;

                            rs2 = GET_FIELD_SPs(insn, 0, 9);
                            r_const = tcg_const_tl((int)rs2);
                            gen_movl_TN_reg(rd, r_const);
                            tcg_temp_free(r_const);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_tmp0);
                            gen_movl_TN_reg(rd, cpu_tmp0);
                        }
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
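            /* The VIS operations below work either on the two 32-bit
               halves of a double FP register (hence the paired
               cpu_fpr[DFPREG(x)] and DFPREG(x) + 1 accesses) or on
               operands staged through DT0/DT1; opcode groups that are
               not implemented fall through to the illegal-instruction
               path. */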
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                CHECK_FPU_FEATURE(dc, VIS1);
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmple32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpne32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpgt32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fcmpeq32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16au();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8x16al();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmul8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8sux16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fmuld8ulx16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_faligndata();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpmerge();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fexpand();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x050: /* VIS I fpadd16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x052: /* VIS I fpadd32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpadd32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpadd32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x054: /* VIS I fpsub16 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub16();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub16s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x056: /* VIS I fpsub32 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_helper_fpsub32();
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_helper_fpsub32s(cpu_fpr[rd],
                                    cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x060: /* VIS I fzero */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                break;
            case 0x061: /* VIS I fzeros */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], 0);
                break;
            case 0x062: /* VIS I fnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x063: /* VIS I fnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x064: /* VIS I fandnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x065: /* VIS I fandnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x066: /* VIS I fnot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x067: /* VIS I fnot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x068: /* VIS I fandnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                 cpu_fpr[DFPREG(rs1)]);
                tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1],
                                 cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x069: /* VIS I fandnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x06a: /* VIS I fnot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x06b: /* VIS I fnot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x06c: /* VIS I fxor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x06d: /* VIS I fxors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x06e: /* VIS I fnand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                 cpu_fpr[DFPREG(rs2)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                 cpu_fpr[DFPREG(rs2) + 1]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32);
                break;
            case 0x06f: /* VIS I fnands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                break;
            case 0x070: /* VIS I fand */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x071: /* VIS I fands */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x072: /* VIS I fxnor */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x073: /* VIS I fxnors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                break;
            case 0x074: /* VIS I fsrc1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x075: /* VIS I fsrc1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                break;
            case 0x076: /* VIS I fornot2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                cpu_fpr[DFPREG(rs2)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs1) + 1],
                                cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x077: /* VIS I fornot2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x078: /* VIS I fsrc2 */
                CHECK_FPU_FEATURE(dc, VIS1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                break;
            case 0x07a: /* VIS I fornot1 */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                cpu_fpr[DFPREG(rs1)]);
                tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                cpu_fpr[DFPREG(rs2) + 1],
                                cpu_fpr[DFPREG(rs1) + 1]);
                break;
            case 0x07b: /* VIS I fornot1s */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                break;
            case 0x07c: /* VIS I for */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                               cpu_fpr[DFPREG(rs2)]);
                tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                               cpu_fpr[DFPREG(rs1) + 1],
                               cpu_fpr[DFPREG(rs2) + 1]);
                break;
            case 0x07d: /* VIS I fors */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                break;
            case 0x07e: /* VIS I fone */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                break;
            case 0x07f: /* VIS I fones */
                CHECK_FPU_FEATURE(dc, VIS1);
                tcg_gen_movi_i32(cpu_fpr[rd], -1);
                break;
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            TCGv_i32 r_const;

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            gen_helper_restore();
            gen_mov_pc_npc(dc, cpu_cond);
            r_const = tcg_const_i32(3);
            gen_helper_check_align(cpu_dst, r_const);
            tcg_temp_free_i32(r_const);
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38: /* jmpl */
                {
                    TCGv r_pc;
                    TCGv_i32 r_const;

                    r_pc = tcg_const_tl(dc->pc);
                    gen_movl_TN_reg(rd, r_pc);
                    tcg_temp_free(r_pc);
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39: /* rett, V9 return */
                {
                    TCGv_i32 r_const;

                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    gen_helper_rett();
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                    goto unimp_flush;
                gen_helper_flush(cpu_dst);
                break;
            case 0x3c: /* save */
                save_state(dc, cpu_cond);
                gen_helper_save();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d: /* restore */
                save_state(dc, cpu_cond);
                gen_helper_restore();
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e: /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_done();
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        gen_helper_retry();
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
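    /* Loads and stores share one effective-address computation below:
       rs1 + simm13 or rs1 + rs2, except for casa/casxa, which take the
       address from rs1 alone and the comparison value from rs2. */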
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
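            /* The xop ranges below separate plain integer loads, FP
               loads (0x20-0x23), integer stores, FP stores (0x24-0x27)
               and the ASI store/compare-and-swap group (0x34-0x3e). */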
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0: /* load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1: /* load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2: /* load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3: /* load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9: /* load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa: /* load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
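                /* Note the XXX markers below: ldstub and the swap cases
                   are emitted as a separate load and store, not as the
                   atomic read-modify-write the architecture specifies. */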
                case 0xd: /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f: /* swap register with memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10: /* load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11: /* load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12: /* load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13: /* load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19: /* load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a: /* load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f: /* swap reg with alt. memory. Also
                              atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20: /* load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21: /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else
#endif
                    {
                        tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
                    break;
                case 0x22: /* load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23: /* load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}
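/* Translation driver: decode instructions into the current translation
   block until a branch is emitted, a page boundary or opcode-buffer
   limit is reached, or single-stepping asks for one instruction at a
   time. */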
4755 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4756 int spc
, CPUSPARCState
*env
)
4758 target_ulong pc_start
, last_pc
;
4759 uint16_t *gen_opc_end
;
4760 DisasContext dc1
, *dc
= &dc1
;
4766 memset(dc
, 0, sizeof(DisasContext
));
4771 dc
->npc
= (target_ulong
) tb
->cs_base
;
4772 dc
->mem_idx
= cpu_mmu_index(env
);
4774 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4775 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4777 dc
->fpu_enabled
= 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    /* temporaries for loads and stores */
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
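        /* One pass per guest instruction; the TB-size, page-boundary and
           icount limits are checked at the bottom of the loop. */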
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
4812 qemu_log("Search PC...\n");
4813 j
= gen_opc_ptr
- gen_opc_buf
;
4817 gen_opc_instr_start
[lj
++] = 0;
4818 gen_opc_pc
[lj
] = dc
->pc
;
4819 gen_opc_npc
[lj
] = dc
->npc
;
4820 gen_opc_instr_start
[lj
] = 1;
4821 gen_opc_icount
[lj
] = num_insns
;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
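    /* Translation ends when the opcode buffer is nearly full, when the TB
       approaches a page boundary (32-byte safety margin), or when the
       icount budget for this TB is exhausted. */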

 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
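    /* If both PC and NPC are known at translation time the TB can chain
       directly to its successor; otherwise the run-time PC/NPC state must
       be written back before returning to the main loop. */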
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
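
/* spc selects between a normal translation pass (0) and a "search PC"
   pass (1), which additionally records per-instruction PC/NPC/icount data
   so that a fault inside a TB can be mapped back to precise guest state. */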

void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}

void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
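
        /* The name strings only affect diagnostics (TCG register dumps).
           cpu_gregs[0] is deliberately left unset: %g0 always reads as
           zero and is never accessed through a TCG global. */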

        /* register helpers */
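        /* Including helper.h with GEN_HELPER defined as 2 expands the
           DEF_HELPER macros into helper-registration code (rather than
           prototypes or gen_helper_* wrappers), so every helper becomes
           known to TCG. */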
#define GEN_HELPER 2
#include "helper.h"
    }
}

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    /* gen_opc_npc[] encodes DYNAMIC_PC as 1 and JUMP_PC as 2 (see the
       defines at the top of this file) */
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}