/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
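/* Example: GET_FIELD(insn, 0, 1) extracts the two most significant bits
   (the SPARC "op" field), while GET_FIELD_SP(insn, 0, 18) extracts bits
   18..0 in the manual's numbering (e.g. a 19-bit disp19 field). The *_s
   variants additionally sign-extend the extracted field. */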
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
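/* Note: cpu_fpr[] models the FPU register file as 32-bit single-precision
   halves, so double- and quad-precision helpers operate on the env->dt0/dt1
   and env->qt0/qt1 staging slots; the gen_op_{load,store}_fpr_* functions
   below copy register pairs/quads to and from those slots. */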
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
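/* %g0-%g7 live in TCG globals (cpu_gregs); the windowed registers are
   reached indirectly through cpu_regwptr, which points into the current
   register window, hence the (reg - 8) offset. %g0 always reads as zero. */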
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
*dc
, TCGv cond
)
907 if (dc
->npc
== JUMP_PC
) {
908 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
909 dc
->npc
= DYNAMIC_PC
;
910 } else if (dc
->npc
!= DYNAMIC_PC
) {
911 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        tcg_gen_movi_tl(def, 0);
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        tcg_gen_movi_tl(def, simm);
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            tcg_gen_movi_tl(def, 0);
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
#ifdef TARGET_SPARC64
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
#endif
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    target_long simm;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                if (rd) { // nop
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new();
                    int l1;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond, dc);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond, dc);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

                    if ((dc->def->features & CPU_FEATURE_HYPV) &&
                        supervisor(dc))
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
                    else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
                    tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    gen_helper_raise_exception(cpu_tmp32);

                    gen_set_label(l1);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    gen_movl_TN_reg(rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_compute_psr();
                    gen_helper_rdccr(cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    gen_movl_TN_reg(rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_movl_TN_reg(rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_movl_TN_reg(rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                gen_helper_compute_psr();
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_rdpsr(cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                gen_helper_flushw();
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_movl_TN_reg(rd, cpu_tbr);
#endif
                break;
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x1: /* fmovs */
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x5: /* fnegs */
                    gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x9: /* fabss */
                    gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_clear_float_exceptions();
                    gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsqrtq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_clear_float_exceptions();
                    gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_faddq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_clear_float_exceptions();
                    gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fsubq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_clear_float_exceptions();
                    gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmuld();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_clear_float_exceptions();
                    gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivd();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdivq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_clear_float_exceptions();
                    gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdmulq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_clear_float_exceptions();
                    gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xc8: /* fitod */
                    gen_helper_fitod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_helper_fstod(cpu_fpr[rs2]);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fitoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_helper_fstoq(cpu_fpr[rs2]);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fdtoq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_clear_float_exceptions();
                    gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtoi(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fnegd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fnegq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fabsd();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_helper_fabsq();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_clear_float_exceptions();
                    gen_helper_fstox(cpu_fpr[rs2]);
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fdtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fqtox();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtos(cpu_tmp32);
                    gen_helper_check_ieee_exceptions();
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtod();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    gen_helper_fxtoq();
                    gen_helper_check_ieee_exceptions();
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc, cpu_cond);
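                /* The V9 fmovsr/fmovdr/fmovqr variants below are FP moves
                   conditional on an integer register test: the generated
                   code simply branches over the move when
                   gen_tcg_cond_reg[cond] fails on rs1. */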
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
                                    cpu_fpr[QFPREG(rs2) + 1]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
                                    cpu_fpr[QFPREG(rs2) + 2]);
                    tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
                                    cpu_fpr[QFPREG(rs2) + 3]);
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
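                /* FMOV[SDQ]CC expand to the same branch-over-move pattern
                   for moves predicated on a floating-point condition code;
                   they are redefined further down for the %icc/%xcc
                   integer-flag variants. */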
#ifdef TARGET_SPARC64
#define FMOVSCC(fcc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_fcond(r_cond, fcc, cond);       \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
#define FMOVDCC(fcc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_fcond(r_cond, fcc, cond);       \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],        \
                                        cpu_fpr[DFPREG(rs2)]);      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                        cpu_fpr[DFPREG(rs2) + 1]);  \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
#define FMOVQCC(fcc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_fcond(r_cond, fcc, cond);       \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],        \
                                        cpu_fpr[QFPREG(rs2)]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                        cpu_fpr[QFPREG(rs2) + 1]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                        cpu_fpr[QFPREG(rs2) + 2]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                        cpu_fpr[QFPREG(rs2) + 3]);  \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
                case 0x001: /* V9 fmovscc %fcc0 */
                    FMOVSCC(0);
                    break;
                case 0x002: /* V9 fmovdcc %fcc0 */
                    FMOVDCC(0);
                    break;
                case 0x003: /* V9 fmovqcc %fcc0 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x041: /* V9 fmovscc %fcc1 */
                    FMOVSCC(1);
                    break;
                case 0x042: /* V9 fmovdcc %fcc1 */
                    FMOVDCC(1);
                    break;
                case 0x043: /* V9 fmovqcc %fcc1 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
                case 0x081: /* V9 fmovscc %fcc2 */
                    FMOVSCC(2);
                    break;
                case 0x082: /* V9 fmovdcc %fcc2 */
                    FMOVDCC(2);
                    break;
                case 0x083: /* V9 fmovqcc %fcc2 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(2);
                    break;
                case 0x0c1: /* V9 fmovscc %fcc3 */
                    FMOVSCC(3);
                    break;
                case 0x0c2: /* V9 fmovdcc %fcc3 */
                    FMOVDCC(3);
                    break;
                case 0x0c3: /* V9 fmovqcc %fcc3 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(3);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#define FMOVSCC(icc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_cond(r_cond, icc, cond, dc);    \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
#define FMOVDCC(icc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_cond(r_cond, icc, cond, dc);    \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],        \
                                        cpu_fpr[DFPREG(rs2)]);      \
                        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],    \
                                        cpu_fpr[DFPREG(rs2) + 1]);  \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
#define FMOVQCC(icc)                                        \
                    {                                       \
                        TCGv r_cond;                        \
                        int l1;                             \
                                                            \
                        l1 = gen_new_label();               \
                        r_cond = tcg_temp_new();            \
                        cond = GET_FIELD_SP(insn, 14, 17);  \
                        gen_cond(r_cond, icc, cond, dc);    \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,     \
                                           0, l1);                  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],        \
                                        cpu_fpr[QFPREG(rs2)]);      \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],    \
                                        cpu_fpr[QFPREG(rs2) + 1]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],    \
                                        cpu_fpr[QFPREG(rs2) + 2]);  \
                        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],    \
                                        cpu_fpr[QFPREG(rs2) + 3]);  \
                        gen_set_label(l1);                  \
                        tcg_temp_free(r_cond);              \
                    }
                case 0x101: /* V9 fmovscc %icc */
                    FMOVSCC(0);
                    break;
                case 0x102: /* V9 fmovdcc %icc */
                    FMOVDCC(0);
                    break;
                case 0x103: /* V9 fmovqcc %icc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(0);
                    break;
                case 0x181: /* V9 fmovscc %xcc */
                    FMOVSCC(1);
                    break;
                case 0x182: /* V9 fmovdcc %xcc */
                    FMOVDCC(1);
                    break;
                case 0x183: /* V9 fmovqcc %xcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVQCC(1);
                    break;
#undef FMOVSCC
#undef FMOVDCC
#undef FMOVQCC
#endif
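                /* FP compares stage their operands through DT0/DT1 (QT0/QT1
                   for quad) and the helper updates the %fcc field selected
                   by rd & 3 in the FSR. */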
                case 0x51: /* fcmps, V9 %fcc */
                    gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x52: /* fcmpd, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmpd(rd & 3);
                    break;
                case 0x53: /* fcmpq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpq(rd & 3);
                    break;
                case 0x55: /* fcmpes, V9 %fcc */
                    gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x56: /* fcmped, V9 %fcc */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_op_fcmped(rd & 3);
                    break;
                case 0x57: /* fcmpeq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpeq(rd & 3);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x2) {
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        simm = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl(simm);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
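            /* For the V9 shifts below, bit 12 of the instruction selects
               the 64-bit form (sllx/srlx/srax, 6-bit shift count) versus
               the 32-bit form (5-bit count, operating on the low word). */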
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop & ~0x10) {
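                    /* Condition codes are evaluated lazily: the "cc"
                       variants record the operation kind in
                       cpu_cc_op/dc->cc_op and leave the operands for
                       gen_helper_compute_psr to fold when the flags are
                       actually needed. */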
                    case 0x0: /* add */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_op_addi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                                dc->cc_op = CC_OP_ADD;
                            } else {
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x1: /* and */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) { /* subcc */
                                gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
                            } else {
                                tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
                            }
                        } else {
                            if (xop & 0x10) { /* subcc */
                                gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                                dc->cc_op = CC_OP_SUB;
                            } else {
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                            }
                        }
                        break;
                    case 0x5: /* andn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
                        } else {
                            tcg_gen_not_tl(cpu_tmp0, cpu_src2);
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
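                    /* addx/subx consume the current carry flag, so the
                       lazy flags are materialized with
                       gen_helper_compute_psr before C is read out of
                       cpu_psr. */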
                    case 0x8: /* addx, V9 addc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
                                dc->cc_op = CC_OP_ADDX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
                        } else {
                            tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        if (IS_IMM) {
                            simm = GET_FIELDs(insn, 19, 31);
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        } else {
                            if (xop & 0x10) {
                                gen_helper_compute_psr();
                                gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
                                dc->cc_op = CC_OP_SUBX;
                            } else {
                                gen_helper_compute_psr();
                                gen_mov_reg_C(cpu_tmp0, cpu_psr);
                                tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                                tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                            }
                        }
                        break;
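                    /* V9 udivx checks the divisor with
                       gen_trap_ifdivzero_tl before emitting the divide;
                       the operands are staged in cpu_cc_src/cpu_cc_src2. */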
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
                            dc->cc_op = CC_OP_DIV;
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    cpu_src2 = get_src2(insn, cpu_src2);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_helper_compute_psr();
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
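                    /* wr %y and the other WRASR targets below write
                       rs1 ^ rs2 (or rs1 ^ simm13), as the SPARC wr
                       instruction is defined as an XOR of its operands. */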
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8 manual, nop
                                                   on the microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc))
                                    ; // XXX
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved();
                                break;
                            case 1:
                                gen_helper_restored();
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                gen_helper_wrpstate(cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new();
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond, dc);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond, dc);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            gen_helper_popc(cpu_dst, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                simm = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl(simm);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
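            /* VIS (xop 0x36): opf selects the UltraSPARC graphics op.
               Double-wide ops stage operands through DT0/DT1 and call a
               helper; entries marked XXX are not implemented and raise
               an illegal instruction trap. */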
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
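                /* VIS partitioned compares produce a per-lane (16- or
                   32-bit) result mask; operands go through DT0/DT1 like
                   the other double-wide ops. */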
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmple32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpne32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpgt32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fcmpeq32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16au();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8x16al();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmul8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8sux16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fmuld8ulx16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_faligndata();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpmerge();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fexpand();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpadd32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpadd32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub16();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub16s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_helper_fpsub32();
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_helper_fpsub32s(cpu_fpr[rd],
                                        cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], 0);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                     cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1],
                                     cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
                                     cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
                                     cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
                    tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
                    tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                    cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
                                    cpu_fpr[DFPREG(rs1)]);
                    tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
                                    cpu_fpr[DFPREG(rs2) + 1],
                                    cpu_fpr[DFPREG(rs1) + 1]);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
                                   cpu_fpr[DFPREG(rs2)]);
                    tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
                                   cpu_fpr[DFPREG(rs1) + 1],
                                   cpu_fpr[DFPREG(rs2) + 1]);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
                    tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_movi_i32(cpu_fpr[rd], -1);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                gen_helper_restore();
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38: /* jmpl */
                    {
                        TCGv r_pc;
                        TCGv_i32 r_const;

                        r_pc = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_pc);
                        tcg_temp_free(r_pc);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39: /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett();
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    gen_helper_flush(cpu_dst);
                    break;
                case 0x3c: /* save */
                    save_state(dc, cpu_cond);
                    gen_helper_save();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d: /* restore */
                    save_state(dc, cpu_cond);
                    gen_helper_restore();
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e: /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done();
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry();
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
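    /* Loads and stores compute the effective address into cpu_addr first
       (rs1 + simm13 or rs1 + rs2); casa/casxa instead take the address
       from rs1 directly. */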
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            /* flush pending conditional evaluations before exposing
               cpu state */
            if (dc->cc_op != CC_OP_FLAGS) {
                dc->cc_op = CC_OP_FLAGS;
                gen_helper_compute_psr();
            }
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
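                /* Integer loads leave the memory result in cpu_val, which
                   is written back to rd after the switch; alternate-space
                   loads that update registers themselves jump to
                   skip_move. */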
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#endif
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_tmp32);
                    }
#else
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    gen_helper_ldfsr(cpu_tmp32);
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_ldqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_lddf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const); // XXX remove
                        tcg_temp_free_i32(r_const);
                        gen_movl_reg_TN(rd + 1, cpu_tmp0);
                        tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc, cpu_cond);
                        gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x24: /* stf, store fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
                    tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
                    if (rd == 1)
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    else
                        tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
#else
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
                    /* V9 stqf, store quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stqf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* stdf, store double fpreg */
                    {
                        TCGv_i32 r_const;

                        gen_op_load_fpr_DT0(DFPREG(rd));
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_helper_stdf(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                save_state(dc, cpu_cond);
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(7);
                        gen_helper_check_align(cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_load_fpr_QT0(QFPREG(rd));
                        gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    }
                    break;
                case 0x37: /* V9 stdfa */
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            } else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
}
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
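    /* Translate one guest instruction at a time until something ends the
       TB: a branch, a page boundary, the opcode buffer filling up, the
       icount budget, or single-step mode. */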
4714 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
4715 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4716 if (bp
->pc
== dc
->pc
) {
4717 if (dc
->pc
!= pc_start
)
4718 save_state(dc
, cpu_cond
);
4727 qemu_log("Search PC...\n");
4728 j
= gen_opc_ptr
- gen_opc_buf
;
4732 gen_opc_instr_start
[lj
++] = 0;
4733 gen_opc_pc
[lj
] = dc
->pc
;
4734 gen_opc_npc
[lj
] = dc
->npc
;
4735 gen_opc_instr_start
[lj
] = 1;
4736 gen_opc_icount
[lj
] = num_insns
;
4739 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
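    /*
     * Translation also stops when the opcode buffer is nearly full,
     * when the block has consumed almost a whole guest page (the
     * 32-byte margin keeps the final instruction and its exit code
     * from running up against the page boundary), or when the icount
     * budget is exhausted.
     */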
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
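            /*
             * gen_goto_tb() emits a goto_tb/exit_tb pair that the
             * TB-linking machinery can later patch into a direct jump
             * to the next translated block, avoiding a trip through
             * the main execution loop.
             */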
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
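        /*
         * The branch targets are saved alongside the gen_opc_* arrays
         * so that gen_pc_load() can reconstruct a JUMP_PC-encoded npc
         * when an exception forces a retranslation of this block.
         */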
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0,
                                           offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
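    /*
     * npc values 1 (DYNAMIC_PC) and 2 (JUMP_PC) mirror the markers used
     * at translation time: a dynamic npc was already written into the
     * CPUState by the generated code, while a JUMP_PC npc is resolved
     * here from env->cond and the recorded pair of branch targets.
     */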
    /* flush pending conditional evaluations before exposing cpu state */
    if (CC_OP != CC_OP_FLAGS) {
        helper_compute_psr();
    }
}
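/*
 * Condition codes are evaluated lazily: CC_OP records which operation
 * last set them, and helper_compute_psr() folds the pending cc_src/
 * cc_src2/cc_dst values into env->psr so that consumers of the exposed
 * state see real flags.
 */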