/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
/* V9 specific: referenced by the xcc flag helpers below */
static TCGv cpu_xcc;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* set when the TB ends in a branch */
    int mem_idx;        /* referenced by the supervisor()/hypervisor() macros */
    int fpu_enabled;    /* referenced by gen_trap_ifnofpu() */
    int address_mask_32bit;
    struct TranslationBlock *tb;
    uint32_t features;  /* referenced by CHECK_IU_FEATURE/CHECK_FPU_FEATURE */
} DisasContext;
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
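
/* Worked example (illustrative): GET_FIELD counts bits from the MSB, so
   GET_FIELD(insn, 2, 6) extracts the 5-bit rd field by shifting right
   31 - 6 = 25 bits and masking with (1 << 5) - 1.  For the instruction
   word 0x03000004 ("sethi 4, %g1") this gives (0x03000004 >> 25) & 0x1f = 1,
   i.e. %g1. */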
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
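
/* Illustrative note: on SPARC64 the odd bit of a double/quad register
   encoding selects the upper register bank, e.g. DFPREG(1) =
   ((1 & 1) << 5) | (1 & 0x1e) = 32, so rd = 1 in a double-precision slot
   names %f32.  On 32-bit SPARC the low bit is simply masked off. */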
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
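
/* Worked example (illustrative): sign_extend(0x1fff, 13) shifts the 13-bit
   immediate up by 32 - 13 = 19 bits and back down arithmetically, giving -1;
   sign_extend(0x0fff, 13) stays 0x0fff. */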
#define IS_IMM (insn & (1<<13))
/* floating point registers moves */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
}

static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
}

static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
}
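
/* Note on the convention above (and in the DT0/DT1 and QT0/QT1 helpers
   below): FP operands are not passed as TCG values.  They are staged
   through the env fields ft0/ft1 (dt0/dt1, qt0/qt1), which the C helpers
   used later in this file consume, leaving their result in ft0/dt0/qt0.
   A typical emitted sequence is, sketched:

       gen_op_load_fpr_FT0(rs1);          // env->ft0 = fpr[rs1]
       gen_op_load_fpr_FT1(rs2);          // env->ft1 = fpr[rs2]
       tcg_gen_helper_0_0(helper_fadds);  // env->ft0 op= env->ft1
       gen_op_store_FT0_fpr(rd);          // fpr[rd] = env->ft0
*/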
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
}
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#endif
#endif
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
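
/* Register access note: the globals %g0-%g7 live in the TCG globals
   cpu_gregs[], while the windowed registers (%o, %l, %i) are reached
   indirectly through cpu_regwptr, which points into the current register
   window; hence the (reg - 8) * sizeof(target_ulong) offset.  Writes to
   %g0 are discarded and reads return 0, matching the architectural zero
   register. */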
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
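
/* Direct TB chaining: when both the target pc and npc stay on the same
   guest page as this TB, tcg_gen_goto_tb()/tcg_gen_exit_tb((long)tb + tb_num)
   let the epilogue patch a direct jump to the next TB, skipping the
   main-loop lookup.  Cross-page jumps must exit with 0 so the lookup (and
   the MMU mapping check) happens again. */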
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
   if (!T0)
        env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
        env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext_i32_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
   if (T0 < src1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
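
/* The overflow rule above is the classic one: an addition overflows iff
   both operands have the same sign and the result's sign differs, i.e.
   ~(src1 ^ src2) & (src1 ^ dst) has the sign bit set.  Worked example
   (illustrative): 0x7fffffff + 1 = 0x80000000; ~(src1 ^ src2) =
   ~0x7ffffffe = 0x80000001 has bit 31 set, and src1 ^ dst = 0xffffffff has
   bit 31 set too, so PSR_OVF is raised. */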
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    TCGv r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
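
/* Tagged arithmetic (TADDcc/TSUBcc) assumes 30-bit tagged integers: the low
   two bits of each operand are a tag and must be zero.  gen_cc_V_tag sets
   the overflow flag when either operand is mis-tagged; the trapping variant
   gen_tag_tv raises a tag-overflow trap (TT_TOVF) instead. */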
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
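
/* ADDX/ADDXcc note: the carry-in is folded in with a separate add
   (src1 + C first, then + src2), so the carry flag has to be accumulated
   from both partial additions; that is why gen_cc_C_add_icc runs once after
   each of the two tcg_gen_add_tl steps above. */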
/* old op:
   if (src1 < T1)
        env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif

/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
        env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_I32);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_ld32u_tl(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_trunc_tl_i32(r_temp2, r_temp);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_brcondi_i32(TCG_COND_NE, r_temp2, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_trunc_tl_i32(r_temp2, cpu_cc_src);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_shli_i32(r_temp2, r_temp2, 31);
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_shri_i32(cpu_tmp32, cpu_tmp32, 1);
    tcg_gen_or_i32(cpu_tmp32, cpu_tmp32, r_temp2);
    tcg_temp_free(r_temp2);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
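
/* MULScc implements one step of the SPARC multiply-step algorithm: if the
   lowest bit of %y is 0 the addend is forced to 0; %y then shifts right one
   bit, receiving the low bit of src1 at its top; src1 itself shifts right
   with (N ^ V), the sign of the running product, inserted at bit 31; the
   conditional add then updates the icc flags.  Issued 32 times plus a final
   cleanup add, this computes a 64-bit product on CPUs without a hardware
   multiplier. */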
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
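
/* SDIVX corner case (the two brcondi calls above): INT64_MIN / -1 would
   overflow 64-bit two's complement, so it is special-cased to yield
   INT64_MIN rather than letting the host divide instruction fault;
   division by zero raises TT_DIV_ZERO first. */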
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
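
/* pc/npc bookkeeping: during translation dc->pc and dc->npc are kept static
   (compile-time constants) whenever possible.  DYNAMIC_PC means the value
   only exists in cpu_pc/cpu_npc at run time; JUMP_PC means npc is one of the
   two statically known values jump_pc[0]/jump_pc[1], selected by the
   condition held in the condition register.  flush_cond/save_npc materialize
   that choice with gen_generic_branch before the condition register gets
   clobbered. */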
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}

static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    /* fcc_offset selects one of %fcc0..%fcc3 within the FSR */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
#endif
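
/* Annul-bit example (illustrative): for an annulling conditional branch such
   as "bne,a target", the delay slot runs only when the branch is taken.
   gen_branch_a encodes exactly that: the taken path goes through the delay
   slot (pc2 = npc) on its way to pc1 = target, while the fall-through label
   skips the annulled slot by resuming at pc2 + 4. */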
#ifdef TARGET_SPARC64
static GenOpFunc * const gen_fcmps[4] = {
    helper_fcmps,
    helper_fcmps_fcc1,
    helper_fcmps_fcc2,
    helper_fcmps_fcc3,
};

static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};

static GenOpFunc * const gen_fcmpes[4] = {
    helper_fcmpes,
    helper_fcmpes_fcc1,
    helper_fcmpes_fcc2,
    helper_fcmpes_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};

static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}

#else

static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
}

static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
#ifdef TARGET_SPARC64
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi, offset;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(r_addr, r_addr, offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    tcg_temp_free(r_temp);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
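
/* ASI (Address Space Identifier) note: V9 alternate-space accesses either
   carry an immediate ASI in the instruction word (register form) or combine
   the %asi register with a displacement (immediate form); gen_get_asi
   returns the ASI as a TCG value either way.  The actual access is done by
   C helpers (helper_ld_asi/helper_st_asi) because ASIs can reach MMU
   registers and other side-effecting address spaces. */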
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    tcg_temp_free(r_temp);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->features & CPU_FEATURE_ ## FEATURE))       \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->features & CPU_FEATURE_ ## FEATURE))       \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;

    if (unlikely(loglevel & CPU_LOG_TB_OP))
        tcg_gen_debug_insn_start(dc->pc);
    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
    cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 18);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv r_const;

                    r_const = tcg_const_tl(value << 10);
                    gen_movl_TN_reg(rd, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:                     /*CALL*/
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv r_const;

            r_const = tcg_const_tl(dc->pc);
            gen_movl_TN_reg(15, r_const);
            tcg_temp_free(r_const);
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    tcg_gen_helper_0_1(helper_trap, cpu_dst);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond);
#endif
                    tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
                    tcg_temp_free(r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, y));
                    gen_movl_TN_reg(rd, cpu_tmp0);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, asi));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
                                           r_tickptr);
                        tcg_temp_free(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, fprs));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, gsr));
                    gen_movl_TN_reg(rd, cpu_tmp0);
                    break;
                case 0x17: /* Tick compare */
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, tick_cmpr));
                    gen_movl_TN_reg(rd, cpu_tmp0);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
                                           r_tickptr);
                        tcg_temp_free(r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, stick_cmpr));
                    gen_movl_TN_reg(rd, cpu_tmp0);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
                case 0x16: /* Softint write */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, hintp));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 5: // htba
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, htba));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 6: // hver
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, hver));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_ld_tl(cpu_dst, cpu_env,
                                  offsetof(CPUSPARCState, hstick_cmpr));
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free(r_tsptr);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
                                           r_tickptr);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                        tcg_temp_free(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, tbr));
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, ssr));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 31: // ver
                    tcg_gen_ld_tl(cpu_tmp0, cpu_env,
                                  offsetof(CPUSPARCState, version));
                    break;
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                               offsetof(CPUSPARCState, wim));
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
#endif
                gen_movl_TN_reg(rd, cpu_tmp0);
                break;
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc, cpu_cond);
                tcg_gen_helper_0_0(helper_flushw);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, tbr));
                gen_movl_TN_reg(rd, cpu_tmp0);
#endif
                break;
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    gen_op_load_fpr_FT0(rs2);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x5: /* fnegs */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fnegs);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x9: /* fabss */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fabss);
                    gen_op_store_FT0_fpr(rd);
                    break;
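                /* Ops that can raise IEEE exceptions are bracketed: clear
                   the accumulated exception flags, run the helper, then
                   check the flags and trap if needed. */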
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrts);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrtd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrtq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x41: /* fadds */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fadds);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_faddd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_faddq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x45: /* fsubs */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubs);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmuls);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmuld);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmulq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x4d: /* fdivs */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivs);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsmuld);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdmulq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xc4: /* fitos */
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fitos);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0xc6: /* fdtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdtos);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fqtos);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0xc8: /* fitod */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fitod);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xc9: /* fstod */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fstod);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fqtod);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fitoq);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fstoq);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fdtoq);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xd1: /* fstoi */
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fstoi);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0xd2: /* fdtoi */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdtoi);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fqtoi);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
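                /* V9-only FPops: whole-register double/quad moves and the
                   64-bit integer conversions (fstox/fxtos and friends). */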
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs2));
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x6: /* V9 fnegd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fnegd);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fnegq);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0xa: /* V9 fabsd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fabsd);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fabsq);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
                case 0x81: /* V9 fstox */
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fstox);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x82: /* V9 fdtox */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdtox);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fqtox);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x84: /* V9 fxtos */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fxtos);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fxtod);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fxtoq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#endif
                default:
                    goto illegal_insn;
                }
                break;
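            /* FPop2 block (xop 0x35): conditional floating-point moves. */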
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
                if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    gen_op_load_fpr_FT0(rs2);
                    gen_op_store_FT0_fpr(rd);
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    int l1;

                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    gen_set_label(l1);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    int l1;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    l1 = gen_new_label();
                    cond = GET_FIELD_SP(insn, 14, 17);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                       0, l1);
                    gen_op_load_fpr_QT0(QFPREG(rs2));
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    gen_set_label(l1);
                    break;
                }
#endif
                switch (xop) {
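                /* FMOVCC expands one fmov{s,d,q}cc variant: evaluate the
                   condition into a temporary and branch over the register
                   copy when the condition is false. */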
#ifdef TARGET_SPARC64
#define FMOVCC(size_FDQ, fcc)                                           \
                    {                                                    \
                        TCGv r_cond;                                     \
                        int l1;                                          \
                                                                         \
                        l1 = gen_new_label();                            \
                        r_cond = tcg_temp_new(TCG_TYPE_TL);              \
                        cond = GET_FIELD_SP(insn, 14, 17);               \
                        gen_fcond(r_cond, fcc, cond);                    \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,          \
                                           0, l1);                       \
                        glue(glue(gen_op_load_fpr_, size_FDQ), T0)       \
                            (glue(size_FDQ, FPREG(rs2)));                \
                        glue(glue(gen_op_store_, size_FDQ), T0_fpr)      \
                            (glue(size_FDQ, FPREG(rd)));                 \
                        gen_set_label(l1);                               \
                        tcg_temp_free(r_cond);                           \
                    }

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(F, 0);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(D, 0);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 0);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(F, 1);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(D, 1);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 1);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(F, 2);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(D, 2);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 2);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(F, 3);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(D, 3);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 3);
                        break;
#undef FMOVCC
#define FMOVCC(size_FDQ, icc)                                            \
                    {                                                    \
                        TCGv r_cond;                                     \
                        int l1;                                          \
                                                                         \
                        l1 = gen_new_label();                            \
                        r_cond = tcg_temp_new(TCG_TYPE_TL);              \
                        cond = GET_FIELD_SP(insn, 14, 17);               \
                        gen_cond(r_cond, icc, cond);                     \
                        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,          \
                                           0, l1);                       \
                        glue(glue(gen_op_load_fpr_, size_FDQ), T0)       \
                            (glue(size_FDQ, FPREG(rs2)));                \
                        glue(glue(gen_op_store_, size_FDQ), T0_fpr)      \
                            (glue(size_FDQ, FPREG(rd)));                 \
                        gen_set_label(l1);                               \
                        tcg_temp_free(r_cond);                           \
                    }

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(F, 0);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(D, 0);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 0);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(F, 1);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(D, 1);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(Q, 1);
                        break;
#undef FMOVCC
#endif
                    case 0x51: /* fcmps, V9 %fcc */
                        gen_op_load_fpr_FT0(rs1);
                        gen_op_load_fpr_FT1(rs2);
                        gen_op_fcmps(rd & 3);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmpd(rd & 3);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        gen_op_load_fpr_FT0(rs1);
                        gen_op_load_fpr_FT1(rs2);
                        gen_op_fcmpes(rd & 3);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        gen_op_load_fpr_DT0(DFPREG(rs1));
                        gen_op_load_fpr_DT1(DFPREG(rs2));
                        gen_op_fcmped(rd & 3);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
                break;
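            /* xop 0x2 is "or"; with %g0 as an operand it degenerates into
               a plain register or immediate move, special-cased here. */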
            } else if (xop == 0x2) {
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    // or %g0, x, y -> mov T0, x; mov y, T0
                    if (IS_IMM) {       /* immediate */
                        TCGv r_const;

                        rs2 = GET_FIELDs(insn, 19, 31);
                        r_const = tcg_const_tl((int)rs2);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        gen_movl_reg_TN(rs2, cpu_dst);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                } else {
                    cpu_src1 = get_src1(insn, cpu_src1);
                    if (IS_IMM) {       /* immediate */
                        rs2 = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    } else {            /* register */
                        // or x, %g0, y -> mov T1, x; mov y, T1
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 != 0) {
                            gen_movl_reg_TN(rs2, cpu_src2);
                            tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        } else
                            gen_movl_TN_reg(rd, cpu_src1);
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    rs2 = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shli_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shl_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    rs2 = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    rs2 = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_movl_TN_reg(rd, cpu_dst);
#endif
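            /* Two-operand ALU group (xop < 0x20): bit 4 of xop selects
               the condition-code-setting variant of each operation. */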
            } else if (xop < 0x36) {
                cpu_src1 = get_src1(insn, cpu_src1);
                cpu_src2 = get_src2(insn, cpu_src2);
                if (xop < 0x20) {
                    switch (xop & ~0x10) {
                    case 0x0:
                        if (xop & 0x10)
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        else
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        break;
                    case 0x1:
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x2:
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x3:
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x4:
                        if (xop & 0x10)
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        else
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        break;
                    case 0x5: /* andn */
                        tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x6: /* orn */
                        tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0x8: /* addx, V9 addc */
                        if (xop & 0x10)
                            gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                        else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10)
                            gen_op_logic_cc(cpu_dst);
                        break;
                    case 0xc: /* subx, V9 subc */
                        if (xop & 0x10)
                            gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                        else {
                            gen_mov_reg_C(cpu_tmp0, cpu_psr);
                            tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
                        tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
                        gen_trap_ifdivzero_tl(cpu_cc_src2);
                        tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
                                           cpu_src2);
                        if (xop & 0x10)
                            gen_op_div_cc(cpu_dst);
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
                                           cpu_src2);
                        if (xop & 0x10)
                            gen_op_div_cc(cpu_dst);
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_movl_TN_reg(rd, cpu_dst);
                } else {
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x22: /* taddcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x23: /* tsubcctv */
                        save_state(dc, cpu_cond);
                        gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x24: /* mulscc */
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            rs2 = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            rs2 = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            rs2 = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
#endif
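                    /* wr%y and the V9 ancillary state registers: the
                       written value is rs1 ^ rs2 (or rs1 ^ simm13), per
                       the architectural definition of wr. */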
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                              offsetof(CPUSPARCState, y));
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8 manual,
                                                   nop on the microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, asi));
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, fprs));
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc))
                                    ; // XXX
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc, cpu_cond))
                                    goto jmp_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                              offsetof(CPUSPARCState, gsr));
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                                  offsetof(CPUSPARCState,
                                                           tick_cmpr));
                                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    tcg_gen_helper_0_2(helper_tick_set_limit,
                                                       r_tickptr, cpu_tmp0);
                                    tcg_temp_free(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    tcg_gen_helper_0_2(helper_tick_set_count,
                                                       r_tickptr, cpu_dst);
                                    tcg_temp_free(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                                  offsetof(CPUSPARCState,
                                                           stick_cmpr));
                                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, stick));
                                    tcg_gen_helper_0_2(helper_tick_set_limit,
                                                       r_tickptr, cpu_tmp0);
                                    tcg_temp_free(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
                            case 0x14: /* Softint set */
                            case 0x15: /* Softint clear */
                            case 0x16: /* Softint write */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                tcg_gen_helper_0_0(helper_saved);
                                break;
                            case 1:
                                tcg_gen_helper_0_0(helper_restored);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
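                    /* wrpr mirrors the rdpr decode above: the same rd
                       values select tpc/tnpc/tstate/..., but store. */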
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv r_tsptr;

                                    r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv r_tsptr;

                                    r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv r_tsptr;

                                    r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv r_tsptr;

                                    r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                                   offsetof(CPUState, tsptr));
                                    tcg_gen_st_i32(cpu_tmp0, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv r_tickptr;

                                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, tick));
                                    tcg_gen_helper_0_2(helper_tick_set_count,
                                                       r_tickptr, cpu_tmp0);
                                    tcg_temp_free(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                              offsetof(CPUSPARCState, tbr));
                                break;
                            case 6: // pstate
                                save_state(dc, cpu_cond);
                                tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                break;
                            case 8: // pil
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        psrpil));
                                break;
                            case 9: // cwp
                                tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, ssr));
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, wim));
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                          offsetof(CPUSPARCState, tbr));
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc, cpu_cond);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, hintp));
                                break;
                            case 5: // htba
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, htba));
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv r_tickptr;

                                    tcg_gen_st_tl(cpu_tmp0, cpu_env,
                                                  offsetof(CPUSPARCState,
                                                           hstick_cmpr));
                                    r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUState, hstick));
                                    tcg_gen_helper_0_2(helper_tick_set_limit,
                                                       r_tickptr, cpu_tmp0);
                                    tcg_temp_free(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
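                    /* V9 conditional moves: movcc on integer/fp condition
                       codes, movr on register contents, plus sdivx and
                       popc. */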
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            TCGv r_cond;
                            int l1;

                            r_cond = tcg_temp_new(TCG_TYPE_TL);
                            if (insn & (1 << 18)) {
                                if (cc == 0)
                                    gen_cond(r_cond, 0, cond);
                                else if (cc == 2)
                                    gen_cond(r_cond, 1, cond);
                                else
                                    goto illegal_insn;
                            } else {
                                gen_fcond(r_cond, cc, cond);
                            }

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                rs2 = GET_FIELD_SPs(insn, 0, 10);
                                r_const = tcg_const_tl((int)rs2);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                            tcg_temp_free(r_cond);
                        }
                        break;
                    case 0x2d: /* V9 sdivx */
                        gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        {
                            cpu_src2 = get_src2(insn, cpu_src2);
                            tcg_gen_helper_1_1(helper_popc, cpu_dst,
                                               cpu_src2);
                            gen_movl_TN_reg(rd, cpu_dst);
                        }
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            int l1;

                            cpu_src1 = get_src1(insn, cpu_src1);

                            l1 = gen_new_label();

                            tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
                                               cpu_src1, 0, l1);
                            if (IS_IMM) {       /* immediate */
                                TCGv r_const;

                                rs2 = GET_FIELD_SPs(insn, 0, 9);
                                r_const = tcg_const_tl((int)rs2);
                                gen_movl_TN_reg(rd, r_const);
                                tcg_temp_free(r_const);
                            } else {
                                rs2 = GET_FIELD_SP(insn, 0, 4);
                                gen_movl_reg_TN(rs2, cpu_tmp0);
                                gen_movl_TN_reg(rd, cpu_tmp0);
                            }
                            gen_set_label(l1);
                        }
                        break;
#endif
                    default:
                        goto illegal_insn;
                    }
                }
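            /* VIS (UltraSPARC multimedia) ops: packed 8/16/32-bit
               arithmetic and logic carried out on the FP register file. */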
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                case 0x001: /* VIS II edge8n */
                case 0x002: /* VIS I edge8lcc */
                case 0x003: /* VIS II edge8ln */
                case 0x004: /* VIS I edge16cc */
                case 0x005: /* VIS II edge16n */
                case 0x006: /* VIS I edge16lcc */
                case 0x007: /* VIS II edge16ln */
                case 0x008: /* VIS I edge32cc */
                case 0x009: /* VIS II edge32n */
                case 0x00a: /* VIS I edge32lcc */
                case 0x00b: /* VIS II edge32ln */
                    // XXX
                    goto illegal_insn;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                       cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                       cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                       cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = get_src1(insn, cpu_src1);
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
                                       cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                case 0x01a: /* VIS I alignaddrl */
                    // XXX
                    goto illegal_insn;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmple16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpne16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmple32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpne32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpgt16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpeq16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpgt32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fcmpeq32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmul8x16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmul8x16au);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmul8x16al);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmul8sux16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmul8ulx16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmuld8sux16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fmuld8ulx16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x03a: /* VIS I fpack32 */
                case 0x03b: /* VIS I fpack16 */
                case 0x03d: /* VIS I fpackfix */
                case 0x03e: /* VIS I pdist */
                    // XXX
                    goto illegal_insn;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_faligndata);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fpmerge);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x04c: /* VIS II bshuffle */
                    // XXX
                    goto illegal_insn;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fexpand);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fpadd16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fpadd16s);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fpadd32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fpadd32s);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fpsub16);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fpsub16s);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fpsub32);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fpsub32s);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_helper_0_0(helper_movl_DT0_0);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_helper_0_0(helper_movl_FT0_0);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fnor);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fnors);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT1(DFPREG(rs1));
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fandnot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT1(rs1);
                    gen_op_load_fpr_FT0(rs2);
                    tcg_gen_helper_0_0(helper_fandnots);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fnot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fnot);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fandnot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fandnots);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT1(DFPREG(rs1));
                    tcg_gen_helper_0_0(helper_fnot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT1(rs1);
                    tcg_gen_helper_0_0(helper_fnot);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fxor);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fxors);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fnand);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fnands);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fand);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fands);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fxnor);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fxnors);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT1(DFPREG(rs1));
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fornot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT1(rs1);
                    gen_op_load_fpr_FT0(rs2);
                    tcg_gen_helper_0_0(helper_fornots);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs2));
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs2);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_fornot);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fornots);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    tcg_gen_helper_0_0(helper_for);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fors);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_helper_0_0(helper_movl_DT0_1);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    tcg_gen_helper_0_0(helper_movl_FT0_1);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
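            /* Control transfers follow: V9 return restores a register
               window and jumps; jmpl writes the return address first. */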
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv r_const;

                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                tcg_gen_helper_0_0(helper_restore);
                gen_mov_pc_npc(dc, cpu_cond);
                r_const = tcg_const_i32(3);
                tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
                tcg_temp_free(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {   /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv r_const;

                        r_const = tcg_const_tl(dc->pc);
                        gen_movl_TN_reg(rd, r_const);
                        tcg_temp_free(r_const);
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        tcg_gen_helper_0_2(helper_check_align, cpu_dst,
                                           r_const);
                        tcg_temp_free(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc, cpu_cond);
                        r_const = tcg_const_i32(3);
                        tcg_gen_helper_0_2(helper_check_align, cpu_dst,
                                           r_const);
                        tcg_temp_free(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        tcg_gen_helper_0_0(helper_rett);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    tcg_gen_helper_0_1(helper_flush, cpu_dst);
                    break;
                case 0x3c:      /* save */
                    save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_save);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_restore);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            tcg_gen_helper_0_0(helper_done);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            tcg_gen_helper_0_0(helper_retry);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
        }
        break;
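    /* Format 3 memory instructions: compute the effective address into
       cpu_addr, then dispatch on xop to the individual load or store. */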
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
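                /* Integer loads; ldd fetches 64 bits and splits them
                   across an even/odd register pair. */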
                case 0x0:       /* load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_const;

                        save_state(dc, cpu_cond);
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9:       /* load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
                    break;
                case 0x19:      /* load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc, cpu_cond);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    save_state(dc, cpu_cond);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
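            /* FP/ASI loads above bypass the integer writeback via the
               skip_move label below. */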
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                save_state(dc, cpu_cond);
                switch (xop) {
                case 0x20:      /* load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, fpr[rd]));
                    break;
                case 0x21:      /* load fsr */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, ft0));
                    tcg_gen_helper_0_0(helper_ldfsr);
                    break;
                case 0x22:      /* load quad fpreg */
                    {
                        TCGv r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
                        tcg_temp_free(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* load double fpreg */
                    {
                        TCGv r_const;

                        r_const = tcg_const_i32(dc->mem_idx);
                        tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
                        tcg_temp_free(r_const);
                        gen_op_store_DT0_fpr(DFPREG(rd));
                    }
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv r_low, r_const;

                        save_state(dc, cpu_cond);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           r_const); // XXX remove
                        tcg_temp_free(r_const);
                        r_low = tcg_temp_new(TCG_TYPE_TL);
                        gen_movl_reg_TN(rd + 1, r_low);
                        tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
                                           r_low);
                        tcg_temp_free(r_low);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc, cpu_cond);
                    gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc, cpu_cond);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
4548 } else if (xop
> 0x23 && xop
< 0x28) {
4549 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4551 save_state(dc
, cpu_cond
);
4553 case 0x24: /* store fpreg */
4554 gen_address_mask(dc
, cpu_addr
);
4555 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
4556 offsetof(CPUState
, fpr
[rd
]));
4557 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4559 case 0x25: /* stfsr, V9 stxfsr */
4560 gen_address_mask(dc
, cpu_addr
);
4561 tcg_gen_helper_0_0(helper_stfsr
);
4562 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
4563 offsetof(CPUState
, ft0
));
4564 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
#ifdef TARGET_SPARC64
                /* V9 stqf, store quad fpreg */
                {
                    TCGv r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
                    tcg_temp_free(r_const);
                }
                break;
#else /* !TARGET_SPARC64 */
                /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                goto illegal_insn;
#else
                if (!supervisor(dc))
                    goto priv_insn;
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                goto nfq_insn;
#endif
#endif
            case 0x27: /* store double fpreg */
                {
                    TCGv r_const;

                    gen_op_load_fpr_DT0(DFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
                    tcg_temp_free(r_const);
                }
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop > 0x33 && xop < 0x3f) {
            save_state(dc, cpu_cond);
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x34: /* V9 stfa */
                gen_op_load_fpr_FT0(rd);
                gen_stf_asi(cpu_addr, insn, 4, rd);
                break;
            case 0x36: /* V9 stqfa */
                {
                    TCGv r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    r_const = tcg_const_i32(7);
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       r_const);
                    tcg_temp_free(r_const);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                }
                break;
            case 0x37: /* V9 stdfa */
                gen_op_load_fpr_DT0(DFPREG(rd));
                gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                break;
            case 0x3c: /* V9 casa */
                gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
                break;
            case 0x3e: /* V9 casxa */
                gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
                gen_movl_TN_reg(rd, cpu_val);
                break;
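                /* casa/casxa are V9 compare-and-swap: the helpers behind
                   gen_cas_asi/gen_casx_asi are expected to compare r[rs2]
                   with the 32-bit (64-bit for casxa) datum at the ASI
                   address, store cpu_val only on a match, and return the
                   old memory value, which lands back in r[rd]. */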
#else
            case 0x34: /* stc */
            case 0x35: /* stcsr */
            case 0x36: /* stdcq */
            case 0x37: /* stdc */
                goto ncp_insn;
#endif
            default:
                goto illegal_insn;
            }
        } else
            goto illegal_insn;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
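    /* Illustration: for a sequential instruction at pc 0x1000 with
       npc 0x1004, the final else leaves pc = 0x1004 and npc = 0x1008
       for the next iteration of the translation loop. */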
 jmp_insn:
    return;
 illegal_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_ILL_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
 unimp_flush:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv r_const;

        save_state(dc, cpu_cond);
        r_const = tcg_const_i32(TT_NCP_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
    }
    return;
#endif
}
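/* Each label above is a shared slow path: it flushes translation state
   with save_state(), raises the matching trap through raise_exception
   (or gen_op_fpexception_im for FP traps), and sets dc->is_br so the
   translation loop stops after this instruction. */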
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->features = env->features;
    if ((dc->features & CPU_FEATURE_FLOAT)) {
        dc->fpu_enabled = cpu_fpu_enabled(env);
#if defined(CONFIG_USER_ONLY)
        dc->features |= CPU_FEATURE_FLOAT128;
#endif
    } else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
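    /* PS_AM in pstate requests 32-bit address masking; the cached flag is
       presumably what gen_address_mask() consults to truncate effective
       addresses to 32 bits. */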
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);

    // loads and stores
    cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
    cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);
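    /* cpu_dst, cpu_val and cpu_addr are *local* temporaries: unlike the
       plain temps above, their values are presumed to survive the
       branches that the load/store generators emit. */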
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
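    /* Three bounds end the loop: the opcode buffer (gen_opc_end), the
       amount of guest code translated from pc_start (kept under a page),
       and the icount budget in max_insns. */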
 exit_gen_loop:
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free(cpu_tmp64);
    tcg_temp_free(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
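            /* gen_goto_tb can emit a direct jump into the next TB when
               both pc and npc are compile-time constants, so chained
               blocks execute without returning to the main loop. */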
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
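/* The second entry point (spc = 1) selects "search pc" mode: the
   translator re-walks a TB and fills the gen_opc_pc/gen_opc_npc side
   tables so that gen_pc_load() below can map a host fault back to the
   guest pc/npc. */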
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#endif
        cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        /* register helpers */

#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
    }
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}
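/* The npc side-table encoding mirrors the translator's constants:
   1 (DYNAMIC_PC) means npc was already stored by generated code, and
   2 (JUMP_PC) means it depends on a conditional jump whose outcome is
   passed in via puc; anything else is the literal next-pc value. */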