target-sparc: Free instruction temporaries.
[qemu/mdroth.git] / target-sparc / translate.c
blob4f25278a7d460872b6c433090dcf90eb1a821217
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
27 #include "cpu.h"
28 #include "exec-all.h"
29 #include "disas.h"
30 #include "helper.h"
31 #include "tcg-op.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
36 #define DEBUG_DISAS
38 #define DYNAMIC_PC 1 /* dynamic pc value */
39 #define JUMP_PC 2 /* dynamic pc value which takes only two values
40 according to jump_pc[T2] */
42 /* global register indexes */
43 static TCGv_ptr cpu_env, cpu_regwptr;
44 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
45 static TCGv_i32 cpu_cc_op;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
70 static target_ulong gen_opc_jump_pc[2];
72 #include "gen-icount.h"
74 typedef struct DisasContext {
75 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
76 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
77 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
78 int is_br;
79 int mem_idx;
80 int fpu_enabled;
81 int address_mask_32bit;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 } DisasContext;
87 // This function uses non-native bit order
88 #define GET_FIELD(X, FROM, TO) \
89 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91 // This function uses the order in the manuals, i.e. bit 0 is 2^0
92 #define GET_FIELD_SP(X, FROM, TO) \
93 GET_FIELD(X, 31 - (TO), 31 - (FROM))
95 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
98 #ifdef TARGET_SPARC64
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101 #else
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
104 #endif
106 #define UA2005_HTRAP_MASK 0xff
107 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full int.
 * E.g. sign_extend(0xf, 4) == -1 and sign_extend(0x7, 4) == 7.
 * The left shift is done on uint32_t: shifting a negative (or
 * overflowing) signed value left is undefined behavior in C
 * (C99 6.5.7), which the original `(x << len) >> len` invoked.
 * The arithmetic right shift of a negative int remains
 * implementation-defined, as before, and is arithmetic on all
 * supported hosts.  */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((uint32_t)x << len)) >> len;
}
115 #define IS_IMM (insn & (1<<13))
117 /* floating point registers moves */
/* Copy the 32-bit FP register pair SRC/SRC+1 into the DT0 double
   staging area in CPUSPARCState (upper word first).  */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Copy FP register pair SRC/SRC+1 into the DT1 double staging area.  */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Copy the DT0 staging area back into FP register pair DST/DST+1.  */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
/* Copy the four 32-bit FP registers SRC..SRC+3 into the QT0 quad
   staging area in CPUSPARCState (most-significant word first).  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* As above, but into the QT1 quad staging area.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* Copy the QT0 staging area back into FP registers DST..DST+3.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
178 /* moves */
179 #ifdef CONFIG_USER_ONLY
180 #define supervisor(dc) 0
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) 0
183 #endif
184 #else
185 #define supervisor(dc) (dc->mem_idx >= 1)
186 #ifdef TARGET_SPARC64
187 #define hypervisor(dc) (dc->mem_idx == 2)
188 #else
189 #endif
190 #endif
192 #ifdef TARGET_SPARC64
193 #ifndef TARGET_ABI32
194 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
195 #else
196 #define AM_CHECK(dc) (1)
197 #endif
198 #endif
/* On sparc64, truncate ADDR in place to 32 bits when the 32-bit
   address mask is in effect (AM_CHECK); compiles to nothing on
   32-bit targets.  */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
/* Load architectural register REG into TN: %g0 always reads as zero,
   %g1-%g7 live in TCG globals, and window registers (>= 8) are
   loaded indirectly through cpu_regwptr.  */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

/* Store TN into architectural register REG; writes to %g0 are
   silently discarded, mirroring the hardware.  */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
/* End the TB with a jump to (PC, NPC).  If both targets lie on the
   same guest page as this TB we can chain directly via goto_tb;
   otherwise fall back to a plain exit so the new TB is looked up.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* exit value encodes TB pointer + slot index for chaining */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
/* Extract single PSR flags into a target-long register as 0/1.
   XXX suboptimal: each does extend + shift + mask.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Emit a tag-overflow trap (TT_TOVF) if DST = SRC1 + SRC2 overflowed
   as a signed 32-bit addition.  Overflow occurred iff the operands
   have the same sign and the result's sign differs (bit 31 of
   ~(src1^src2) & (src1^dst)).  */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
/* For tagged arithmetic: trap with TT_TOVF if either operand has a
   nonzero tag (either of its low two bits set).  */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
/* DST = SRC1 + immediate SRC2, latching operands and result into
   cpu_cc_src/cpu_cc_src2/cpu_cc_dst for lazy flag computation.  */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Register-register variant of the above.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* ADDX with immediate: DST = SRC1 + SRC2 + carry (PSR.C), latching
   operands/result for lazy condition-code evaluation.  */
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* ADDX register-register variant.  */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* Tagged add (TADDcc): same data path as gen_op_add_cc; the tag
   overflow is folded into the condition codes lazily.  */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Tagged add with trap on tag or signed overflow (TADDccTV).  */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* Emit a tag-overflow trap (TT_TOVF) if DST = SRC1 - SRC2 overflowed
   as a signed 32-bit subtraction: overflow iff the operands' signs
   differ and the result's sign differs from SRC1's (bit 31 of
   (src1^src2) & (src1^dst)).  */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
/* DST = SRC1 - immediate SRC2 with flag latching.  Subtracting zero
   degenerates to a move, so the cheaper CC_OP_LOGIC flag recipe is
   recorded instead of CC_OP_SUB; dc->cc_op mirrors cpu_cc_op.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* DST = SRC1 - SRC2, latching operands/result for lazy flags.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBX with immediate: DST = SRC1 - carry (PSR.C) - SRC2.  */
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBX register-register variant.  */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Tagged subtract (TSUBcc).  */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Tagged subtract, trapping on tag or signed overflow (TSUBccTV).  */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* MULScc: one step of the SPARC multiply-step algorithm.  The
   multiplier sits in %y; this shifts %y right (capturing bit 0 of
   SRC1), conditionally zeroes the addend, shifts SRC1 right with
   N^V inserted at the top, and adds.  Flags are latched for lazy
   evaluation.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
/* UMUL: 32x32 -> 64 unsigned multiply.  High 32 bits go to %y; DST
   receives the full 64-bit product on sparc64, the low word on
   sparc32.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

/* SMUL: as gen_op_umul but with sign-extended operands.  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
#ifdef TARGET_SPARC64
/* Trap with TT_DIV_ZERO when DIVISOR is zero.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* SDIVX: signed 64-bit division with division-by-zero trap.  The
   INT64_MIN / -1 case would overflow the host divide instruction,
   so it is special-cased to return INT64_MIN.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
/* Integer branch condition evaluators: each leaves the 0/1 value of
   one SPARC icc/xcc condition in DST, reading flags from the PSR
   image SRC.  The comment above each gives the boolean formula.  */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the FCC field at FCC_OFFSET from the FSR image in
   SRC into REG as 0/1.  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Likewise for FCC1.  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Floating-point branch condition evaluators: each leaves the 0/1
   value of one SPARC fcc condition in DST, reading the FCC pair of
   the selected %fsr field (SRC at FCC_OFFSET).  The comment above
   each lists the FCC encodings (0 =, 1 <, 2 >, 3 unordered) that
   satisfy the condition and the boolean formula used.  */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* Two-way conditional TB exit: go to PC1 when R_COND is nonzero,
   otherwise PC2 (both with npc = pc + 4).  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
/* Annulled conditional branch: when R_COND is nonzero, execute the
   delay slot at PC2 then jump to PC1; otherwise annul the delay
   slot and continue at PC2 + 4.  */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
/* Resolve a pending JUMP_PC: set cpu_npc to NPC1 if R_COND is
   nonzero, NPC2 otherwise, without ending the TB.  */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

/* Materialize the symbolic npc into cpu_npc: resolve a pending
   JUMP_PC via COND, or store a known-constant npc; a DYNAMIC_PC
   is already in cpu_npc and needs nothing.  */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Commit the full translator state (pc, npc, condition codes) to the
   CPU state, e.g. before anything that can raise an exception.  */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
/* Advance pc to the current npc, resolving a symbolic npc (JUMP_PC
   via COND, or DYNAMIC_PC) into the cpu_pc register when needed.  */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Emit the sequential-advance step: pc = npc, npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate integer condition COND against icc (CC == 0) or, on
   sparc64, xcc (CC != 0), leaving 0/1 in R_DST.  Forces lazy
   condition codes into PSR form first.  */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* make sure the flags are up to date before reading them */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate FP condition COND against %fsr condition field CC
   (fcc0-fcc3), leaving 0/1 in R_DST.  The offset maps the selected
   fcc field's bit position relative to fcc0's.  */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the 3-bit BPr rcond encoding to the TCG condition for the
   NOT-taken test; slots 0 and 4 are reserved encodings (unused).
   NOTE(review): the reserved-slot placeholders were dropped by the
   source extraction and are reconstructed here — confirm against
   upstream.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* Evaluate a register-based branch condition (BPr): R_DST = 1 when
   R_SRC satisfies COND, else 0, using the inverted table above.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  COND 0 is
   "never" and 8 is "always"; the annul bit A skips the delay slot
   for untaken (never) or annulled-taken branches.  Conditional
   branches leave the decision pending as a JUMP_PC pair unless
   annulled.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: delay slot is not executed */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc);
   same structure as do_branch but the condition comes from the
   selected %fsr fcc field via gen_fcond.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a register-conditional branch (BPr): always conditional
   (no never/always encodings), condition computed from R_REG via
   gen_cond_reg.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1219 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1221 switch (fccno) {
1222 case 0:
1223 gen_helper_fcmps(r_rs1, r_rs2);
1224 break;
1225 case 1:
1226 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1227 break;
1228 case 2:
1229 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1230 break;
1231 case 3:
1232 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1233 break;
/* Double-precision FP compare (operands in DT0/DT1), per-%fcc helper. */
static inline void gen_op_fcmpd(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpd();
    } else if (fccno == 1) {
        gen_helper_fcmpd_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpd_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpd_fcc3();
    }
}
/* Quad-precision FP compare (operands in QT0/QT1), per-%fcc helper. */
static inline void gen_op_fcmpq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpq();
    } else if (fccno == 1) {
        gen_helper_fcmpq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpq_fcc3();
    }
}
1273 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1275 switch (fccno) {
1276 case 0:
1277 gen_helper_fcmpes(r_rs1, r_rs2);
1278 break;
1279 case 1:
1280 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1281 break;
1282 case 2:
1283 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1284 break;
1285 case 3:
1286 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1287 break;
/* Double-precision FP compare-with-exception, per-%fcc helper. */
static inline void gen_op_fcmped(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmped();
    } else if (fccno == 1) {
        gen_helper_fcmped_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmped_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmped_fcc3();
    }
}
/* Quad-precision FP compare-with-exception, per-%fcc helper. */
static inline void gen_op_fcmpeq(int fccno)
{
    if (fccno == 0) {
        gen_helper_fcmpeq();
    } else if (fccno == 1) {
        gen_helper_fcmpeq_fcc1();
    } else if (fccno == 2) {
        gen_helper_fcmpeq_fcc2();
    } else if (fccno == 3) {
        gen_helper_fcmpeq_fcc3();
    }
}
1327 #else
/* Pre-V9 CPUs have a single condition-code field, so the fccno
   selector is ignored and every compare targets the one %fcc. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

/* Double compare; operands are in DT0/DT1. */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

/* Quad compare; operands are in QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

/* Compare-with-exception variants: trap on any NaN operand. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1358 #endif
1360 static inline void gen_op_fpexception_im(int fsr_flags)
1362 TCGv_i32 r_const;
1364 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1365 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1366 r_const = tcg_const_i32(TT_FP_EXCP);
1367 gen_helper_raise_exception(r_const);
1368 tcg_temp_free_i32(r_const);
1371 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1373 #if !defined(CONFIG_USER_ONLY)
1374 if (!dc->fpu_enabled) {
1375 TCGv_i32 r_const;
1377 save_state(dc, r_cond);
1378 r_const = tcg_const_i32(TT_NFPU_INSN);
1379 gen_helper_raise_exception(r_const);
1380 tcg_temp_free_i32(r_const);
1381 dc->is_br = 1;
1382 return 1;
1384 #endif
1385 return 0;
/* Clear the accumulated IEEE exception bits and the ftt field in %fsr. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

/* Reset the softfloat exception flags before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1398 /* asi moves */
1399 #ifdef TARGET_SPARC64
1400 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1402 int asi;
1403 TCGv_i32 r_asi;
1405 if (IS_IMM) {
1406 r_asi = tcg_temp_new_i32();
1407 tcg_gen_mov_i32(r_asi, cpu_asi);
1408 } else {
1409 asi = GET_FIELD(insn, 19, 26);
1410 r_asi = tcg_const_i32(asi);
1412 return r_asi;
1415 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1416 int sign)
1418 TCGv_i32 r_asi, r_size, r_sign;
1420 r_asi = gen_get_asi(insn, addr);
1421 r_size = tcg_const_i32(size);
1422 r_sign = tcg_const_i32(sign);
1423 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1424 tcg_temp_free_i32(r_sign);
1425 tcg_temp_free_i32(r_size);
1426 tcg_temp_free_i32(r_asi);
1429 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1431 TCGv_i32 r_asi, r_size;
1433 r_asi = gen_get_asi(insn, addr);
1434 r_size = tcg_const_i32(size);
1435 gen_helper_st_asi(addr, src, r_asi, r_size);
1436 tcg_temp_free_i32(r_size);
1437 tcg_temp_free_i32(r_asi);
1440 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1442 TCGv_i32 r_asi, r_size, r_rd;
1444 r_asi = gen_get_asi(insn, addr);
1445 r_size = tcg_const_i32(size);
1446 r_rd = tcg_const_i32(rd);
1447 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1448 tcg_temp_free_i32(r_rd);
1449 tcg_temp_free_i32(r_size);
1450 tcg_temp_free_i32(r_asi);
1453 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1455 TCGv_i32 r_asi, r_size, r_rd;
1457 r_asi = gen_get_asi(insn, addr);
1458 r_size = tcg_const_i32(size);
1459 r_rd = tcg_const_i32(rd);
1460 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1461 tcg_temp_free_i32(r_rd);
1462 tcg_temp_free_i32(r_size);
1463 tcg_temp_free_i32(r_asi);
1466 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1468 TCGv_i32 r_asi, r_size, r_sign;
1470 r_asi = gen_get_asi(insn, addr);
1471 r_size = tcg_const_i32(4);
1472 r_sign = tcg_const_i32(0);
1473 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1474 tcg_temp_free_i32(r_sign);
1475 gen_helper_st_asi(addr, dst, r_asi, r_size);
1476 tcg_temp_free_i32(r_size);
1477 tcg_temp_free_i32(r_asi);
1478 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1481 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1483 TCGv_i32 r_asi, r_rd;
1485 r_asi = gen_get_asi(insn, addr);
1486 r_rd = tcg_const_i32(rd);
1487 gen_helper_ldda_asi(addr, r_asi, r_rd);
1488 tcg_temp_free_i32(r_rd);
1489 tcg_temp_free_i32(r_asi);
1492 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1494 TCGv_i32 r_asi, r_size;
1496 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1497 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1498 r_asi = gen_get_asi(insn, addr);
1499 r_size = tcg_const_i32(8);
1500 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1501 tcg_temp_free_i32(r_size);
1502 tcg_temp_free_i32(r_asi);
1505 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1506 int rd)
1508 TCGv r_val1;
1509 TCGv_i32 r_asi;
1511 r_val1 = tcg_temp_new();
1512 gen_movl_reg_TN(rd, r_val1);
1513 r_asi = gen_get_asi(insn, addr);
1514 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1515 tcg_temp_free_i32(r_asi);
1516 tcg_temp_free(r_val1);
1519 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1520 int rd)
1522 TCGv_i32 r_asi;
1524 gen_movl_reg_TN(rd, cpu_tmp64);
1525 r_asi = gen_get_asi(insn, addr);
1526 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1527 tcg_temp_free_i32(r_asi);
1530 #elif !defined(CONFIG_USER_ONLY)
1532 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1533 int sign)
1535 TCGv_i32 r_asi, r_size, r_sign;
1537 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1538 r_size = tcg_const_i32(size);
1539 r_sign = tcg_const_i32(sign);
1540 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1541 tcg_temp_free(r_sign);
1542 tcg_temp_free(r_size);
1543 tcg_temp_free(r_asi);
1544 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1547 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1549 TCGv_i32 r_asi, r_size;
1551 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1552 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1553 r_size = tcg_const_i32(size);
1554 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1555 tcg_temp_free(r_size);
1556 tcg_temp_free(r_asi);
1559 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1561 TCGv_i32 r_asi, r_size, r_sign;
1562 TCGv_i64 r_val;
1564 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1565 r_size = tcg_const_i32(4);
1566 r_sign = tcg_const_i32(0);
1567 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1568 tcg_temp_free(r_sign);
1569 r_val = tcg_temp_new_i64();
1570 tcg_gen_extu_tl_i64(r_val, dst);
1571 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1572 tcg_temp_free_i64(r_val);
1573 tcg_temp_free(r_size);
1574 tcg_temp_free(r_asi);
1575 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1578 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1580 TCGv_i32 r_asi, r_size, r_sign;
1582 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1583 r_size = tcg_const_i32(8);
1584 r_sign = tcg_const_i32(0);
1585 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1586 tcg_temp_free(r_sign);
1587 tcg_temp_free(r_size);
1588 tcg_temp_free(r_asi);
1589 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1590 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1591 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1592 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1593 gen_movl_TN_reg(rd, hi);
1596 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1598 TCGv_i32 r_asi, r_size;
1600 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1601 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1602 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1603 r_size = tcg_const_i32(8);
1604 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1605 tcg_temp_free(r_size);
1606 tcg_temp_free(r_asi);
1608 #endif
1610 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1611 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1613 TCGv_i64 r_val;
1614 TCGv_i32 r_asi, r_size;
1616 gen_ld_asi(dst, addr, insn, 1, 0);
1618 r_val = tcg_const_i64(0xffULL);
1619 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1620 r_size = tcg_const_i32(1);
1621 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1622 tcg_temp_free_i32(r_size);
1623 tcg_temp_free_i32(r_asi);
1624 tcg_temp_free_i64(r_val);
1626 #endif
1628 static inline TCGv get_src1(unsigned int insn, TCGv def)
1630 TCGv r_rs1 = def;
1631 unsigned int rs1;
1633 rs1 = GET_FIELD(insn, 13, 17);
1634 if (rs1 == 0) {
1635 tcg_gen_movi_tl(def, 0);
1636 } else if (rs1 < 8) {
1637 r_rs1 = cpu_gregs[rs1];
1638 } else {
1639 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1641 return r_rs1;
1644 static inline TCGv get_src2(unsigned int insn, TCGv def)
1646 TCGv r_rs2 = def;
1648 if (IS_IMM) { /* immediate */
1649 target_long simm = GET_FIELDs(insn, 19, 31);
1650 tcg_gen_movi_tl(def, simm);
1651 } else { /* register */
1652 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1653 if (rs2 == 0) {
1654 tcg_gen_movi_tl(def, 0);
1655 } else if (rs2 < 8) {
1656 r_rs2 = cpu_gregs[rs2];
1657 } else {
1658 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1661 return r_rs2;
1664 #ifdef TARGET_SPARC64
1665 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
1667 TCGv_i32 r_tl = tcg_temp_new_i32();
1669 /* load env->tl into r_tl */
1670 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
1672 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1673 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
1675 /* calculate offset to current trap state from env->ts, reuse r_tl */
1676 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
1677 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));
1679 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1681 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
1682 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
1683 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
1684 tcg_temp_free_ptr(r_tl_tmp);
1687 tcg_temp_free_i32(r_tl);
1689 #endif
/* Abort decoding via the illegal_insn label when the CPU model lacks
   the named integer-unit feature.  Only usable inside
   disas_sparc_insn, which defines the label. */
#define CHECK_IU_FEATURE(dc, FEATURE) \
if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
goto illegal_insn;
/* Likewise, jump to nfpu_insn when an FPU feature is missing. */
#define CHECK_FPU_FEATURE(dc, FEATURE) \
if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
goto nfpu_insn;
1698 /* before an instruction, dc->pc must be static */
1699 static void disas_sparc_insn(DisasContext * dc)
1701 unsigned int insn, opc, rs1, rs2, rd;
1702 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1703 target_long simm;
1705 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1706 tcg_gen_debug_insn_start(dc->pc);
1707 insn = ldl_code(dc->pc);
1708 opc = GET_FIELD(insn, 0, 1);
1710 rd = GET_FIELD(insn, 2, 6);
1712 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1713 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1715 switch (opc) {
1716 case 0: /* branches/sethi */
1718 unsigned int xop = GET_FIELD(insn, 7, 9);
1719 int32_t target;
1720 switch (xop) {
1721 #ifdef TARGET_SPARC64
1722 case 0x1: /* V9 BPcc */
1724 int cc;
1726 target = GET_FIELD_SP(insn, 0, 18);
1727 target = sign_extend(target, 18);
1728 target <<= 2;
1729 cc = GET_FIELD_SP(insn, 20, 21);
1730 if (cc == 0)
1731 do_branch(dc, target, insn, 0, cpu_cond);
1732 else if (cc == 2)
1733 do_branch(dc, target, insn, 1, cpu_cond);
1734 else
1735 goto illegal_insn;
1736 goto jmp_insn;
1738 case 0x3: /* V9 BPr */
1740 target = GET_FIELD_SP(insn, 0, 13) |
1741 (GET_FIELD_SP(insn, 20, 21) << 14);
1742 target = sign_extend(target, 16);
1743 target <<= 2;
1744 cpu_src1 = get_src1(insn, cpu_src1);
1745 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1746 goto jmp_insn;
1748 case 0x5: /* V9 FBPcc */
1750 int cc = GET_FIELD_SP(insn, 20, 21);
1751 if (gen_trap_ifnofpu(dc, cpu_cond))
1752 goto jmp_insn;
1753 target = GET_FIELD_SP(insn, 0, 18);
1754 target = sign_extend(target, 19);
1755 target <<= 2;
1756 do_fbranch(dc, target, insn, cc, cpu_cond);
1757 goto jmp_insn;
1759 #else
1760 case 0x7: /* CBN+x */
1762 goto ncp_insn;
1764 #endif
1765 case 0x2: /* BN+x */
1767 target = GET_FIELD(insn, 10, 31);
1768 target = sign_extend(target, 22);
1769 target <<= 2;
1770 do_branch(dc, target, insn, 0, cpu_cond);
1771 goto jmp_insn;
1773 case 0x6: /* FBN+x */
1775 if (gen_trap_ifnofpu(dc, cpu_cond))
1776 goto jmp_insn;
1777 target = GET_FIELD(insn, 10, 31);
1778 target = sign_extend(target, 22);
1779 target <<= 2;
1780 do_fbranch(dc, target, insn, 0, cpu_cond);
1781 goto jmp_insn;
1783 case 0x4: /* SETHI */
1784 if (rd) { // nop
1785 uint32_t value = GET_FIELD(insn, 10, 31);
1786 TCGv r_const;
1788 r_const = tcg_const_tl(value << 10);
1789 gen_movl_TN_reg(rd, r_const);
1790 tcg_temp_free(r_const);
1792 break;
1793 case 0x0: /* UNIMPL */
1794 default:
1795 goto illegal_insn;
1797 break;
1799 break;
1800 case 1: /*CALL*/
1802 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1803 TCGv r_const;
1805 r_const = tcg_const_tl(dc->pc);
1806 gen_movl_TN_reg(15, r_const);
1807 tcg_temp_free(r_const);
1808 target += dc->pc;
1809 gen_mov_pc_npc(dc, cpu_cond);
1810 dc->npc = target;
1812 goto jmp_insn;
1813 case 2: /* FPU & Logical Operations */
1815 unsigned int xop = GET_FIELD(insn, 7, 12);
1816 if (xop == 0x3a) { /* generate trap */
1817 int cond;
1819 cpu_src1 = get_src1(insn, cpu_src1);
1820 if (IS_IMM) {
1821 rs2 = GET_FIELD(insn, 25, 31);
1822 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1823 } else {
1824 rs2 = GET_FIELD(insn, 27, 31);
1825 if (rs2 != 0) {
1826 gen_movl_reg_TN(rs2, cpu_src2);
1827 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1828 } else
1829 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1831 cond = GET_FIELD(insn, 3, 6);
1832 if (cond == 0x8) {
1833 save_state(dc, cpu_cond);
1834 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1835 supervisor(dc))
1836 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1837 else
1838 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1839 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1840 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1841 gen_helper_raise_exception(cpu_tmp32);
1842 } else if (cond != 0) {
1843 TCGv r_cond = tcg_temp_new();
1844 int l1;
1845 #ifdef TARGET_SPARC64
1846 /* V9 icc/xcc */
1847 int cc = GET_FIELD_SP(insn, 11, 12);
1849 save_state(dc, cpu_cond);
1850 if (cc == 0)
1851 gen_cond(r_cond, 0, cond, dc);
1852 else if (cc == 2)
1853 gen_cond(r_cond, 1, cond, dc);
1854 else
1855 goto illegal_insn;
1856 #else
1857 save_state(dc, cpu_cond);
1858 gen_cond(r_cond, 0, cond, dc);
1859 #endif
1860 l1 = gen_new_label();
1861 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1863 if ((dc->def->features & CPU_FEATURE_HYPV) &&
1864 supervisor(dc))
1865 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
1866 else
1867 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
1868 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
1869 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
1870 gen_helper_raise_exception(cpu_tmp32);
1872 gen_set_label(l1);
1873 tcg_temp_free(r_cond);
1875 gen_op_next_insn();
1876 tcg_gen_exit_tb(0);
1877 dc->is_br = 1;
1878 goto jmp_insn;
1879 } else if (xop == 0x28) {
1880 rs1 = GET_FIELD(insn, 13, 17);
1881 switch(rs1) {
1882 case 0: /* rdy */
1883 #ifndef TARGET_SPARC64
1884 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1885 manual, rdy on the microSPARC
1886 II */
1887 case 0x0f: /* stbar in the SPARCv8 manual,
1888 rdy on the microSPARC II */
1889 case 0x10 ... 0x1f: /* implementation-dependent in the
1890 SPARCv8 manual, rdy on the
1891 microSPARC II */
1892 #endif
1893 gen_movl_TN_reg(rd, cpu_y);
1894 break;
1895 #ifdef TARGET_SPARC64
1896 case 0x2: /* V9 rdccr */
1897 gen_helper_compute_psr();
1898 gen_helper_rdccr(cpu_dst);
1899 gen_movl_TN_reg(rd, cpu_dst);
1900 break;
1901 case 0x3: /* V9 rdasi */
1902 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
1903 gen_movl_TN_reg(rd, cpu_dst);
1904 break;
1905 case 0x4: /* V9 rdtick */
1907 TCGv_ptr r_tickptr;
1909 r_tickptr = tcg_temp_new_ptr();
1910 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1911 offsetof(CPUState, tick));
1912 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1913 tcg_temp_free_ptr(r_tickptr);
1914 gen_movl_TN_reg(rd, cpu_dst);
1916 break;
1917 case 0x5: /* V9 rdpc */
1919 TCGv r_const;
1921 r_const = tcg_const_tl(dc->pc);
1922 gen_movl_TN_reg(rd, r_const);
1923 tcg_temp_free(r_const);
1925 break;
1926 case 0x6: /* V9 rdfprs */
1927 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
1928 gen_movl_TN_reg(rd, cpu_dst);
1929 break;
1930 case 0xf: /* V9 membar */
1931 break; /* no effect */
1932 case 0x13: /* Graphics Status */
1933 if (gen_trap_ifnofpu(dc, cpu_cond))
1934 goto jmp_insn;
1935 gen_movl_TN_reg(rd, cpu_gsr);
1936 break;
1937 case 0x16: /* Softint */
1938 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
1939 gen_movl_TN_reg(rd, cpu_dst);
1940 break;
1941 case 0x17: /* Tick compare */
1942 gen_movl_TN_reg(rd, cpu_tick_cmpr);
1943 break;
1944 case 0x18: /* System tick */
1946 TCGv_ptr r_tickptr;
1948 r_tickptr = tcg_temp_new_ptr();
1949 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1950 offsetof(CPUState, stick));
1951 gen_helper_tick_get_count(cpu_dst, r_tickptr);
1952 tcg_temp_free_ptr(r_tickptr);
1953 gen_movl_TN_reg(rd, cpu_dst);
1955 break;
1956 case 0x19: /* System tick compare */
1957 gen_movl_TN_reg(rd, cpu_stick_cmpr);
1958 break;
1959 case 0x10: /* Performance Control */
1960 case 0x11: /* Performance Instrumentation Counter */
1961 case 0x12: /* Dispatch Control */
1962 case 0x14: /* Softint set, WO */
1963 case 0x15: /* Softint clear, WO */
1964 #endif
1965 default:
1966 goto illegal_insn;
1968 #if !defined(CONFIG_USER_ONLY)
1969 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
1970 #ifndef TARGET_SPARC64
1971 if (!supervisor(dc))
1972 goto priv_insn;
1973 gen_helper_compute_psr();
1974 dc->cc_op = CC_OP_FLAGS;
1975 gen_helper_rdpsr(cpu_dst);
1976 #else
1977 CHECK_IU_FEATURE(dc, HYPV);
1978 if (!hypervisor(dc))
1979 goto priv_insn;
1980 rs1 = GET_FIELD(insn, 13, 17);
1981 switch (rs1) {
1982 case 0: // hpstate
1983 // gen_op_rdhpstate();
1984 break;
1985 case 1: // htstate
1986 // gen_op_rdhtstate();
1987 break;
1988 case 3: // hintp
1989 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
1990 break;
1991 case 5: // htba
1992 tcg_gen_mov_tl(cpu_dst, cpu_htba);
1993 break;
1994 case 6: // hver
1995 tcg_gen_mov_tl(cpu_dst, cpu_hver);
1996 break;
1997 case 31: // hstick_cmpr
1998 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
1999 break;
2000 default:
2001 goto illegal_insn;
2003 #endif
2004 gen_movl_TN_reg(rd, cpu_dst);
2005 break;
2006 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2007 if (!supervisor(dc))
2008 goto priv_insn;
2009 #ifdef TARGET_SPARC64
2010 rs1 = GET_FIELD(insn, 13, 17);
2011 switch (rs1) {
2012 case 0: // tpc
2014 TCGv_ptr r_tsptr;
2016 r_tsptr = tcg_temp_new_ptr();
2017 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2018 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2019 offsetof(trap_state, tpc));
2020 tcg_temp_free_ptr(r_tsptr);
2022 break;
2023 case 1: // tnpc
2025 TCGv_ptr r_tsptr;
2027 r_tsptr = tcg_temp_new_ptr();
2028 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2029 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2030 offsetof(trap_state, tnpc));
2031 tcg_temp_free_ptr(r_tsptr);
2033 break;
2034 case 2: // tstate
2036 TCGv_ptr r_tsptr;
2038 r_tsptr = tcg_temp_new_ptr();
2039 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2040 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2041 offsetof(trap_state, tstate));
2042 tcg_temp_free_ptr(r_tsptr);
2044 break;
2045 case 3: // tt
2047 TCGv_ptr r_tsptr;
2049 r_tsptr = tcg_temp_new_ptr();
2050 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2051 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2052 offsetof(trap_state, tt));
2053 tcg_temp_free_ptr(r_tsptr);
2054 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2056 break;
2057 case 4: // tick
2059 TCGv_ptr r_tickptr;
2061 r_tickptr = tcg_temp_new_ptr();
2062 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2063 offsetof(CPUState, tick));
2064 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2065 gen_movl_TN_reg(rd, cpu_tmp0);
2066 tcg_temp_free_ptr(r_tickptr);
2068 break;
2069 case 5: // tba
2070 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2071 break;
2072 case 6: // pstate
2073 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2074 offsetof(CPUSPARCState, pstate));
2075 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2076 break;
2077 case 7: // tl
2078 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2079 offsetof(CPUSPARCState, tl));
2080 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2081 break;
2082 case 8: // pil
2083 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2084 offsetof(CPUSPARCState, psrpil));
2085 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2086 break;
2087 case 9: // cwp
2088 gen_helper_rdcwp(cpu_tmp0);
2089 break;
2090 case 10: // cansave
2091 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2092 offsetof(CPUSPARCState, cansave));
2093 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2094 break;
2095 case 11: // canrestore
2096 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2097 offsetof(CPUSPARCState, canrestore));
2098 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2099 break;
2100 case 12: // cleanwin
2101 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2102 offsetof(CPUSPARCState, cleanwin));
2103 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2104 break;
2105 case 13: // otherwin
2106 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2107 offsetof(CPUSPARCState, otherwin));
2108 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2109 break;
2110 case 14: // wstate
2111 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2112 offsetof(CPUSPARCState, wstate));
2113 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2114 break;
2115 case 16: // UA2005 gl
2116 CHECK_IU_FEATURE(dc, GL);
2117 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2118 offsetof(CPUSPARCState, gl));
2119 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2120 break;
2121 case 26: // UA2005 strand status
2122 CHECK_IU_FEATURE(dc, HYPV);
2123 if (!hypervisor(dc))
2124 goto priv_insn;
2125 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2126 break;
2127 case 31: // ver
2128 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2129 break;
2130 case 15: // fq
2131 default:
2132 goto illegal_insn;
2134 #else
2135 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2136 #endif
2137 gen_movl_TN_reg(rd, cpu_tmp0);
2138 break;
2139 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2140 #ifdef TARGET_SPARC64
2141 save_state(dc, cpu_cond);
2142 gen_helper_flushw();
2143 #else
2144 if (!supervisor(dc))
2145 goto priv_insn;
2146 gen_movl_TN_reg(rd, cpu_tbr);
2147 #endif
2148 break;
2149 #endif
2150 } else if (xop == 0x34) { /* FPU Operations */
2151 if (gen_trap_ifnofpu(dc, cpu_cond))
2152 goto jmp_insn;
2153 gen_op_clear_ieee_excp_and_FTT();
2154 rs1 = GET_FIELD(insn, 13, 17);
2155 rs2 = GET_FIELD(insn, 27, 31);
2156 xop = GET_FIELD(insn, 18, 26);
2157 save_state(dc, cpu_cond);
2158 switch (xop) {
2159 case 0x1: /* fmovs */
2160 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2161 break;
2162 case 0x5: /* fnegs */
2163 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2164 break;
2165 case 0x9: /* fabss */
2166 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2167 break;
2168 case 0x29: /* fsqrts */
2169 CHECK_FPU_FEATURE(dc, FSQRT);
2170 gen_clear_float_exceptions();
2171 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2172 gen_helper_check_ieee_exceptions();
2173 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2174 break;
2175 case 0x2a: /* fsqrtd */
2176 CHECK_FPU_FEATURE(dc, FSQRT);
2177 gen_op_load_fpr_DT1(DFPREG(rs2));
2178 gen_clear_float_exceptions();
2179 gen_helper_fsqrtd();
2180 gen_helper_check_ieee_exceptions();
2181 gen_op_store_DT0_fpr(DFPREG(rd));
2182 break;
2183 case 0x2b: /* fsqrtq */
2184 CHECK_FPU_FEATURE(dc, FLOAT128);
2185 gen_op_load_fpr_QT1(QFPREG(rs2));
2186 gen_clear_float_exceptions();
2187 gen_helper_fsqrtq();
2188 gen_helper_check_ieee_exceptions();
2189 gen_op_store_QT0_fpr(QFPREG(rd));
2190 break;
2191 case 0x41: /* fadds */
2192 gen_clear_float_exceptions();
2193 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2194 gen_helper_check_ieee_exceptions();
2195 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2196 break;
2197 case 0x42: /* faddd */
2198 gen_op_load_fpr_DT0(DFPREG(rs1));
2199 gen_op_load_fpr_DT1(DFPREG(rs2));
2200 gen_clear_float_exceptions();
2201 gen_helper_faddd();
2202 gen_helper_check_ieee_exceptions();
2203 gen_op_store_DT0_fpr(DFPREG(rd));
2204 break;
2205 case 0x43: /* faddq */
2206 CHECK_FPU_FEATURE(dc, FLOAT128);
2207 gen_op_load_fpr_QT0(QFPREG(rs1));
2208 gen_op_load_fpr_QT1(QFPREG(rs2));
2209 gen_clear_float_exceptions();
2210 gen_helper_faddq();
2211 gen_helper_check_ieee_exceptions();
2212 gen_op_store_QT0_fpr(QFPREG(rd));
2213 break;
2214 case 0x45: /* fsubs */
2215 gen_clear_float_exceptions();
2216 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2217 gen_helper_check_ieee_exceptions();
2218 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2219 break;
2220 case 0x46: /* fsubd */
2221 gen_op_load_fpr_DT0(DFPREG(rs1));
2222 gen_op_load_fpr_DT1(DFPREG(rs2));
2223 gen_clear_float_exceptions();
2224 gen_helper_fsubd();
2225 gen_helper_check_ieee_exceptions();
2226 gen_op_store_DT0_fpr(DFPREG(rd));
2227 break;
2228 case 0x47: /* fsubq */
2229 CHECK_FPU_FEATURE(dc, FLOAT128);
2230 gen_op_load_fpr_QT0(QFPREG(rs1));
2231 gen_op_load_fpr_QT1(QFPREG(rs2));
2232 gen_clear_float_exceptions();
2233 gen_helper_fsubq();
2234 gen_helper_check_ieee_exceptions();
2235 gen_op_store_QT0_fpr(QFPREG(rd));
2236 break;
2237 case 0x49: /* fmuls */
2238 CHECK_FPU_FEATURE(dc, FMUL);
2239 gen_clear_float_exceptions();
2240 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2241 gen_helper_check_ieee_exceptions();
2242 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2243 break;
2244 case 0x4a: /* fmuld */
2245 CHECK_FPU_FEATURE(dc, FMUL);
2246 gen_op_load_fpr_DT0(DFPREG(rs1));
2247 gen_op_load_fpr_DT1(DFPREG(rs2));
2248 gen_clear_float_exceptions();
2249 gen_helper_fmuld();
2250 gen_helper_check_ieee_exceptions();
2251 gen_op_store_DT0_fpr(DFPREG(rd));
2252 break;
2253 case 0x4b: /* fmulq */
2254 CHECK_FPU_FEATURE(dc, FLOAT128);
2255 CHECK_FPU_FEATURE(dc, FMUL);
2256 gen_op_load_fpr_QT0(QFPREG(rs1));
2257 gen_op_load_fpr_QT1(QFPREG(rs2));
2258 gen_clear_float_exceptions();
2259 gen_helper_fmulq();
2260 gen_helper_check_ieee_exceptions();
2261 gen_op_store_QT0_fpr(QFPREG(rd));
2262 break;
2263 case 0x4d: /* fdivs */
2264 gen_clear_float_exceptions();
2265 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2266 gen_helper_check_ieee_exceptions();
2267 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2268 break;
2269 case 0x4e: /* fdivd */
2270 gen_op_load_fpr_DT0(DFPREG(rs1));
2271 gen_op_load_fpr_DT1(DFPREG(rs2));
2272 gen_clear_float_exceptions();
2273 gen_helper_fdivd();
2274 gen_helper_check_ieee_exceptions();
2275 gen_op_store_DT0_fpr(DFPREG(rd));
2276 break;
2277 case 0x4f: /* fdivq */
2278 CHECK_FPU_FEATURE(dc, FLOAT128);
2279 gen_op_load_fpr_QT0(QFPREG(rs1));
2280 gen_op_load_fpr_QT1(QFPREG(rs2));
2281 gen_clear_float_exceptions();
2282 gen_helper_fdivq();
2283 gen_helper_check_ieee_exceptions();
2284 gen_op_store_QT0_fpr(QFPREG(rd));
2285 break;
2286 case 0x69: /* fsmuld */
2287 CHECK_FPU_FEATURE(dc, FSMULD);
2288 gen_clear_float_exceptions();
2289 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2290 gen_helper_check_ieee_exceptions();
2291 gen_op_store_DT0_fpr(DFPREG(rd));
2292 break;
2293 case 0x6e: /* fdmulq */
2294 CHECK_FPU_FEATURE(dc, FLOAT128);
2295 gen_op_load_fpr_DT0(DFPREG(rs1));
2296 gen_op_load_fpr_DT1(DFPREG(rs2));
2297 gen_clear_float_exceptions();
2298 gen_helper_fdmulq();
2299 gen_helper_check_ieee_exceptions();
2300 gen_op_store_QT0_fpr(QFPREG(rd));
2301 break;
2302 case 0xc4: /* fitos */
2303 gen_clear_float_exceptions();
2304 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2305 gen_helper_check_ieee_exceptions();
2306 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2307 break;
2308 case 0xc6: /* fdtos */
2309 gen_op_load_fpr_DT1(DFPREG(rs2));
2310 gen_clear_float_exceptions();
2311 gen_helper_fdtos(cpu_tmp32);
2312 gen_helper_check_ieee_exceptions();
2313 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2314 break;
2315 case 0xc7: /* fqtos */
2316 CHECK_FPU_FEATURE(dc, FLOAT128);
2317 gen_op_load_fpr_QT1(QFPREG(rs2));
2318 gen_clear_float_exceptions();
2319 gen_helper_fqtos(cpu_tmp32);
2320 gen_helper_check_ieee_exceptions();
2321 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2322 break;
2323 case 0xc8: /* fitod */
2324 gen_helper_fitod(cpu_fpr[rs2]);
2325 gen_op_store_DT0_fpr(DFPREG(rd));
2326 break;
2327 case 0xc9: /* fstod */
2328 gen_helper_fstod(cpu_fpr[rs2]);
2329 gen_op_store_DT0_fpr(DFPREG(rd));
2330 break;
2331 case 0xcb: /* fqtod */
2332 CHECK_FPU_FEATURE(dc, FLOAT128);
2333 gen_op_load_fpr_QT1(QFPREG(rs2));
2334 gen_clear_float_exceptions();
2335 gen_helper_fqtod();
2336 gen_helper_check_ieee_exceptions();
2337 gen_op_store_DT0_fpr(DFPREG(rd));
2338 break;
2339 case 0xcc: /* fitoq */
2340 CHECK_FPU_FEATURE(dc, FLOAT128);
2341 gen_helper_fitoq(cpu_fpr[rs2]);
2342 gen_op_store_QT0_fpr(QFPREG(rd));
2343 break;
2344 case 0xcd: /* fstoq */
2345 CHECK_FPU_FEATURE(dc, FLOAT128);
2346 gen_helper_fstoq(cpu_fpr[rs2]);
2347 gen_op_store_QT0_fpr(QFPREG(rd));
2348 break;
2349 case 0xce: /* fdtoq */
2350 CHECK_FPU_FEATURE(dc, FLOAT128);
2351 gen_op_load_fpr_DT1(DFPREG(rs2));
2352 gen_helper_fdtoq();
2353 gen_op_store_QT0_fpr(QFPREG(rd));
2354 break;
2355 case 0xd1: /* fstoi */
2356 gen_clear_float_exceptions();
2357 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2358 gen_helper_check_ieee_exceptions();
2359 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2360 break;
2361 case 0xd2: /* fdtoi */
2362 gen_op_load_fpr_DT1(DFPREG(rs2));
2363 gen_clear_float_exceptions();
2364 gen_helper_fdtoi(cpu_tmp32);
2365 gen_helper_check_ieee_exceptions();
2366 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2367 break;
2368 case 0xd3: /* fqtoi */
2369 CHECK_FPU_FEATURE(dc, FLOAT128);
2370 gen_op_load_fpr_QT1(QFPREG(rs2));
2371 gen_clear_float_exceptions();
2372 gen_helper_fqtoi(cpu_tmp32);
2373 gen_helper_check_ieee_exceptions();
2374 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2375 break;
2376 #ifdef TARGET_SPARC64
2377 case 0x2: /* V9 fmovd */
2378 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2379 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2380 cpu_fpr[DFPREG(rs2) + 1]);
2381 break;
2382 case 0x3: /* V9 fmovq */
2383 CHECK_FPU_FEATURE(dc, FLOAT128);
2384 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2385 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2386 cpu_fpr[QFPREG(rs2) + 1]);
2387 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2388 cpu_fpr[QFPREG(rs2) + 2]);
2389 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2390 cpu_fpr[QFPREG(rs2) + 3]);
2391 break;
2392 case 0x6: /* V9 fnegd */
2393 gen_op_load_fpr_DT1(DFPREG(rs2));
2394 gen_helper_fnegd();
2395 gen_op_store_DT0_fpr(DFPREG(rd));
2396 break;
2397 case 0x7: /* V9 fnegq */
2398 CHECK_FPU_FEATURE(dc, FLOAT128);
2399 gen_op_load_fpr_QT1(QFPREG(rs2));
2400 gen_helper_fnegq();
2401 gen_op_store_QT0_fpr(QFPREG(rd));
2402 break;
2403 case 0xa: /* V9 fabsd */
2404 gen_op_load_fpr_DT1(DFPREG(rs2));
2405 gen_helper_fabsd();
2406 gen_op_store_DT0_fpr(DFPREG(rd));
2407 break;
2408 case 0xb: /* V9 fabsq */
2409 CHECK_FPU_FEATURE(dc, FLOAT128);
2410 gen_op_load_fpr_QT1(QFPREG(rs2));
2411 gen_helper_fabsq();
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 case 0x81: /* V9 fstox */
2415 gen_clear_float_exceptions();
2416 gen_helper_fstox(cpu_fpr[rs2]);
2417 gen_helper_check_ieee_exceptions();
2418 gen_op_store_DT0_fpr(DFPREG(rd));
2419 break;
2420 case 0x82: /* V9 fdtox */
2421 gen_op_load_fpr_DT1(DFPREG(rs2));
2422 gen_clear_float_exceptions();
2423 gen_helper_fdtox();
2424 gen_helper_check_ieee_exceptions();
2425 gen_op_store_DT0_fpr(DFPREG(rd));
2426 break;
2427 case 0x83: /* V9 fqtox */
2428 CHECK_FPU_FEATURE(dc, FLOAT128);
2429 gen_op_load_fpr_QT1(QFPREG(rs2));
2430 gen_clear_float_exceptions();
2431 gen_helper_fqtox();
2432 gen_helper_check_ieee_exceptions();
2433 gen_op_store_DT0_fpr(DFPREG(rd));
2434 break;
2435 case 0x84: /* V9 fxtos */
2436 gen_op_load_fpr_DT1(DFPREG(rs2));
2437 gen_clear_float_exceptions();
2438 gen_helper_fxtos(cpu_tmp32);
2439 gen_helper_check_ieee_exceptions();
2440 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2441 break;
2442 case 0x88: /* V9 fxtod */
2443 gen_op_load_fpr_DT1(DFPREG(rs2));
2444 gen_clear_float_exceptions();
2445 gen_helper_fxtod();
2446 gen_helper_check_ieee_exceptions();
2447 gen_op_store_DT0_fpr(DFPREG(rd));
2448 break;
2449 case 0x8c: /* V9 fxtoq */
2450 CHECK_FPU_FEATURE(dc, FLOAT128);
2451 gen_op_load_fpr_DT1(DFPREG(rs2));
2452 gen_clear_float_exceptions();
2453 gen_helper_fxtoq();
2454 gen_helper_check_ieee_exceptions();
2455 gen_op_store_QT0_fpr(QFPREG(rd));
2456 break;
2457 #endif
2458 default:
2459 goto illegal_insn;
2461 } else if (xop == 0x35) { /* FPU Operations */
2462 #ifdef TARGET_SPARC64
2463 int cond;
2464 #endif
2465 if (gen_trap_ifnofpu(dc, cpu_cond))
2466 goto jmp_insn;
2467 gen_op_clear_ieee_excp_and_FTT();
2468 rs1 = GET_FIELD(insn, 13, 17);
2469 rs2 = GET_FIELD(insn, 27, 31);
2470 xop = GET_FIELD(insn, 18, 26);
2471 save_state(dc, cpu_cond);
2472 #ifdef TARGET_SPARC64
2473 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2474 int l1;
2476 l1 = gen_new_label();
2477 cond = GET_FIELD_SP(insn, 14, 17);
2478 cpu_src1 = get_src1(insn, cpu_src1);
2479 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2480 0, l1);
2481 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2482 gen_set_label(l1);
2483 break;
2484 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2485 int l1;
2487 l1 = gen_new_label();
2488 cond = GET_FIELD_SP(insn, 14, 17);
2489 cpu_src1 = get_src1(insn, cpu_src1);
2490 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2491 0, l1);
2492 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2493 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2494 gen_set_label(l1);
2495 break;
2496 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2497 int l1;
2499 CHECK_FPU_FEATURE(dc, FLOAT128);
2500 l1 = gen_new_label();
2501 cond = GET_FIELD_SP(insn, 14, 17);
2502 cpu_src1 = get_src1(insn, cpu_src1);
2503 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2504 0, l1);
2505 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2506 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2507 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2508 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2509 gen_set_label(l1);
2510 break;
2512 #endif
2513 switch (xop) {
2514 #ifdef TARGET_SPARC64
2515 #define FMOVSCC(fcc) \
2517 TCGv r_cond; \
2518 int l1; \
2520 l1 = gen_new_label(); \
2521 r_cond = tcg_temp_new(); \
2522 cond = GET_FIELD_SP(insn, 14, 17); \
2523 gen_fcond(r_cond, fcc, cond); \
2524 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2525 0, l1); \
2526 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2527 gen_set_label(l1); \
2528 tcg_temp_free(r_cond); \
2530 #define FMOVDCC(fcc) \
2532 TCGv r_cond; \
2533 int l1; \
2535 l1 = gen_new_label(); \
2536 r_cond = tcg_temp_new(); \
2537 cond = GET_FIELD_SP(insn, 14, 17); \
2538 gen_fcond(r_cond, fcc, cond); \
2539 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2540 0, l1); \
2541 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2542 cpu_fpr[DFPREG(rs2)]); \
2543 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2544 cpu_fpr[DFPREG(rs2) + 1]); \
2545 gen_set_label(l1); \
2546 tcg_temp_free(r_cond); \
2548 #define FMOVQCC(fcc) \
2550 TCGv r_cond; \
2551 int l1; \
2553 l1 = gen_new_label(); \
2554 r_cond = tcg_temp_new(); \
2555 cond = GET_FIELD_SP(insn, 14, 17); \
2556 gen_fcond(r_cond, fcc, cond); \
2557 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2558 0, l1); \
2559 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2560 cpu_fpr[QFPREG(rs2)]); \
2561 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2562 cpu_fpr[QFPREG(rs2) + 1]); \
2563 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2564 cpu_fpr[QFPREG(rs2) + 2]); \
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2566 cpu_fpr[QFPREG(rs2) + 3]); \
2567 gen_set_label(l1); \
2568 tcg_temp_free(r_cond); \
2570 case 0x001: /* V9 fmovscc %fcc0 */
2571 FMOVSCC(0);
2572 break;
2573 case 0x002: /* V9 fmovdcc %fcc0 */
2574 FMOVDCC(0);
2575 break;
2576 case 0x003: /* V9 fmovqcc %fcc0 */
2577 CHECK_FPU_FEATURE(dc, FLOAT128);
2578 FMOVQCC(0);
2579 break;
2580 case 0x041: /* V9 fmovscc %fcc1 */
2581 FMOVSCC(1);
2582 break;
2583 case 0x042: /* V9 fmovdcc %fcc1 */
2584 FMOVDCC(1);
2585 break;
2586 case 0x043: /* V9 fmovqcc %fcc1 */
2587 CHECK_FPU_FEATURE(dc, FLOAT128);
2588 FMOVQCC(1);
2589 break;
2590 case 0x081: /* V9 fmovscc %fcc2 */
2591 FMOVSCC(2);
2592 break;
2593 case 0x082: /* V9 fmovdcc %fcc2 */
2594 FMOVDCC(2);
2595 break;
2596 case 0x083: /* V9 fmovqcc %fcc2 */
2597 CHECK_FPU_FEATURE(dc, FLOAT128);
2598 FMOVQCC(2);
2599 break;
2600 case 0x0c1: /* V9 fmovscc %fcc3 */
2601 FMOVSCC(3);
2602 break;
2603 case 0x0c2: /* V9 fmovdcc %fcc3 */
2604 FMOVDCC(3);
2605 break;
2606 case 0x0c3: /* V9 fmovqcc %fcc3 */
2607 CHECK_FPU_FEATURE(dc, FLOAT128);
2608 FMOVQCC(3);
2609 break;
2610 #undef FMOVSCC
2611 #undef FMOVDCC
2612 #undef FMOVQCC
2613 #define FMOVSCC(icc) \
2615 TCGv r_cond; \
2616 int l1; \
2618 l1 = gen_new_label(); \
2619 r_cond = tcg_temp_new(); \
2620 cond = GET_FIELD_SP(insn, 14, 17); \
2621 gen_cond(r_cond, icc, cond, dc); \
2622 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2623 0, l1); \
2624 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2625 gen_set_label(l1); \
2626 tcg_temp_free(r_cond); \
2628 #define FMOVDCC(icc) \
2630 TCGv r_cond; \
2631 int l1; \
2633 l1 = gen_new_label(); \
2634 r_cond = tcg_temp_new(); \
2635 cond = GET_FIELD_SP(insn, 14, 17); \
2636 gen_cond(r_cond, icc, cond, dc); \
2637 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2638 0, l1); \
2639 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2640 cpu_fpr[DFPREG(rs2)]); \
2641 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2642 cpu_fpr[DFPREG(rs2) + 1]); \
2643 gen_set_label(l1); \
2644 tcg_temp_free(r_cond); \
2646 #define FMOVQCC(icc) \
2648 TCGv r_cond; \
2649 int l1; \
2651 l1 = gen_new_label(); \
2652 r_cond = tcg_temp_new(); \
2653 cond = GET_FIELD_SP(insn, 14, 17); \
2654 gen_cond(r_cond, icc, cond, dc); \
2655 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2656 0, l1); \
2657 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2658 cpu_fpr[QFPREG(rs2)]); \
2659 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2660 cpu_fpr[QFPREG(rs2) + 1]); \
2661 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2662 cpu_fpr[QFPREG(rs2) + 2]); \
2663 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2664 cpu_fpr[QFPREG(rs2) + 3]); \
2665 gen_set_label(l1); \
2666 tcg_temp_free(r_cond); \
2669 case 0x101: /* V9 fmovscc %icc */
2670 FMOVSCC(0);
2671 break;
2672 case 0x102: /* V9 fmovdcc %icc */
2673 FMOVDCC(0);
2674 case 0x103: /* V9 fmovqcc %icc */
2675 CHECK_FPU_FEATURE(dc, FLOAT128);
2676 FMOVQCC(0);
2677 break;
2678 case 0x181: /* V9 fmovscc %xcc */
2679 FMOVSCC(1);
2680 break;
2681 case 0x182: /* V9 fmovdcc %xcc */
2682 FMOVDCC(1);
2683 break;
2684 case 0x183: /* V9 fmovqcc %xcc */
2685 CHECK_FPU_FEATURE(dc, FLOAT128);
2686 FMOVQCC(1);
2687 break;
2688 #undef FMOVSCC
2689 #undef FMOVDCC
2690 #undef FMOVQCC
2691 #endif
2692 case 0x51: /* fcmps, V9 %fcc */
2693 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2694 break;
2695 case 0x52: /* fcmpd, V9 %fcc */
2696 gen_op_load_fpr_DT0(DFPREG(rs1));
2697 gen_op_load_fpr_DT1(DFPREG(rs2));
2698 gen_op_fcmpd(rd & 3);
2699 break;
2700 case 0x53: /* fcmpq, V9 %fcc */
2701 CHECK_FPU_FEATURE(dc, FLOAT128);
2702 gen_op_load_fpr_QT0(QFPREG(rs1));
2703 gen_op_load_fpr_QT1(QFPREG(rs2));
2704 gen_op_fcmpq(rd & 3);
2705 break;
2706 case 0x55: /* fcmpes, V9 %fcc */
2707 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2708 break;
2709 case 0x56: /* fcmped, V9 %fcc */
2710 gen_op_load_fpr_DT0(DFPREG(rs1));
2711 gen_op_load_fpr_DT1(DFPREG(rs2));
2712 gen_op_fcmped(rd & 3);
2713 break;
2714 case 0x57: /* fcmpeq, V9 %fcc */
2715 CHECK_FPU_FEATURE(dc, FLOAT128);
2716 gen_op_load_fpr_QT0(QFPREG(rs1));
2717 gen_op_load_fpr_QT1(QFPREG(rs2));
2718 gen_op_fcmpeq(rd & 3);
2719 break;
2720 default:
2721 goto illegal_insn;
2723 } else if (xop == 0x2) {
2724 // clr/mov shortcut
2726 rs1 = GET_FIELD(insn, 13, 17);
2727 if (rs1 == 0) {
2728 // or %g0, x, y -> mov T0, x; mov y, T0
2729 if (IS_IMM) { /* immediate */
2730 TCGv r_const;
2732 simm = GET_FIELDs(insn, 19, 31);
2733 r_const = tcg_const_tl(simm);
2734 gen_movl_TN_reg(rd, r_const);
2735 tcg_temp_free(r_const);
2736 } else { /* register */
2737 rs2 = GET_FIELD(insn, 27, 31);
2738 gen_movl_reg_TN(rs2, cpu_dst);
2739 gen_movl_TN_reg(rd, cpu_dst);
2741 } else {
2742 cpu_src1 = get_src1(insn, cpu_src1);
2743 if (IS_IMM) { /* immediate */
2744 simm = GET_FIELDs(insn, 19, 31);
2745 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2746 gen_movl_TN_reg(rd, cpu_dst);
2747 } else { /* register */
2748 // or x, %g0, y -> mov T1, x; mov y, T1
2749 rs2 = GET_FIELD(insn, 27, 31);
2750 if (rs2 != 0) {
2751 gen_movl_reg_TN(rs2, cpu_src2);
2752 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2753 gen_movl_TN_reg(rd, cpu_dst);
2754 } else
2755 gen_movl_TN_reg(rd, cpu_src1);
2758 #ifdef TARGET_SPARC64
2759 } else if (xop == 0x25) { /* sll, V9 sllx */
2760 cpu_src1 = get_src1(insn, cpu_src1);
2761 if (IS_IMM) { /* immediate */
2762 simm = GET_FIELDs(insn, 20, 31);
2763 if (insn & (1 << 12)) {
2764 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2765 } else {
2766 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2768 } else { /* register */
2769 rs2 = GET_FIELD(insn, 27, 31);
2770 gen_movl_reg_TN(rs2, cpu_src2);
2771 if (insn & (1 << 12)) {
2772 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2773 } else {
2774 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2776 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2778 gen_movl_TN_reg(rd, cpu_dst);
2779 } else if (xop == 0x26) { /* srl, V9 srlx */
2780 cpu_src1 = get_src1(insn, cpu_src1);
2781 if (IS_IMM) { /* immediate */
2782 simm = GET_FIELDs(insn, 20, 31);
2783 if (insn & (1 << 12)) {
2784 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2785 } else {
2786 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2787 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2789 } else { /* register */
2790 rs2 = GET_FIELD(insn, 27, 31);
2791 gen_movl_reg_TN(rs2, cpu_src2);
2792 if (insn & (1 << 12)) {
2793 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2794 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2795 } else {
2796 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2797 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2798 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2801 gen_movl_TN_reg(rd, cpu_dst);
2802 } else if (xop == 0x27) { /* sra, V9 srax */
2803 cpu_src1 = get_src1(insn, cpu_src1);
2804 if (IS_IMM) { /* immediate */
2805 simm = GET_FIELDs(insn, 20, 31);
2806 if (insn & (1 << 12)) {
2807 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
2808 } else {
2809 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2810 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2811 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
2813 } else { /* register */
2814 rs2 = GET_FIELD(insn, 27, 31);
2815 gen_movl_reg_TN(rs2, cpu_src2);
2816 if (insn & (1 << 12)) {
2817 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2818 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2819 } else {
2820 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2821 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2822 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
2823 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2826 gen_movl_TN_reg(rd, cpu_dst);
2827 #endif
2828 } else if (xop < 0x36) {
2829 if (xop < 0x20) {
2830 cpu_src1 = get_src1(insn, cpu_src1);
2831 cpu_src2 = get_src2(insn, cpu_src2);
2832 switch (xop & ~0x10) {
2833 case 0x0: /* add */
2834 if (IS_IMM) {
2835 simm = GET_FIELDs(insn, 19, 31);
2836 if (xop & 0x10) {
2837 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
2838 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2839 dc->cc_op = CC_OP_ADD;
2840 } else {
2841 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
2843 } else {
2844 if (xop & 0x10) {
2845 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2846 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
2847 dc->cc_op = CC_OP_ADD;
2848 } else {
2849 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2852 break;
2853 case 0x1: /* and */
2854 if (IS_IMM) {
2855 simm = GET_FIELDs(insn, 19, 31);
2856 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
2857 } else {
2858 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2860 if (xop & 0x10) {
2861 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2862 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2863 dc->cc_op = CC_OP_LOGIC;
2865 break;
2866 case 0x2: /* or */
2867 if (IS_IMM) {
2868 simm = GET_FIELDs(insn, 19, 31);
2869 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2870 } else {
2871 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2873 if (xop & 0x10) {
2874 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2875 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2876 dc->cc_op = CC_OP_LOGIC;
2878 break;
2879 case 0x3: /* xor */
2880 if (IS_IMM) {
2881 simm = GET_FIELDs(insn, 19, 31);
2882 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
2883 } else {
2884 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2886 if (xop & 0x10) {
2887 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2888 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2889 dc->cc_op = CC_OP_LOGIC;
2891 break;
2892 case 0x4: /* sub */
2893 if (IS_IMM) {
2894 simm = GET_FIELDs(insn, 19, 31);
2895 if (xop & 0x10) {
2896 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
2897 } else {
2898 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
2900 } else {
2901 if (xop & 0x10) {
2902 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2903 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2904 dc->cc_op = CC_OP_SUB;
2905 } else {
2906 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2909 break;
2910 case 0x5: /* andn */
2911 if (IS_IMM) {
2912 simm = GET_FIELDs(insn, 19, 31);
2913 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
2914 } else {
2915 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
2917 if (xop & 0x10) {
2918 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2919 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2920 dc->cc_op = CC_OP_LOGIC;
2922 break;
2923 case 0x6: /* orn */
2924 if (IS_IMM) {
2925 simm = GET_FIELDs(insn, 19, 31);
2926 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
2927 } else {
2928 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
2930 if (xop & 0x10) {
2931 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2932 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2933 dc->cc_op = CC_OP_LOGIC;
2935 break;
2936 case 0x7: /* xorn */
2937 if (IS_IMM) {
2938 simm = GET_FIELDs(insn, 19, 31);
2939 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
2940 } else {
2941 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
2942 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2944 if (xop & 0x10) {
2945 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2946 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2947 dc->cc_op = CC_OP_LOGIC;
2949 break;
2950 case 0x8: /* addx, V9 addc */
2951 if (IS_IMM) {
2952 simm = GET_FIELDs(insn, 19, 31);
2953 if (xop & 0x10) {
2954 gen_helper_compute_psr();
2955 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
2956 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2957 dc->cc_op = CC_OP_ADDX;
2958 } else {
2959 gen_helper_compute_psr();
2960 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2961 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
2962 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2964 } else {
2965 if (xop & 0x10) {
2966 gen_helper_compute_psr();
2967 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2968 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
2969 dc->cc_op = CC_OP_ADDX;
2970 } else {
2971 gen_helper_compute_psr();
2972 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2973 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2974 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2977 break;
2978 #ifdef TARGET_SPARC64
2979 case 0x9: /* V9 mulx */
2980 if (IS_IMM) {
2981 simm = GET_FIELDs(insn, 19, 31);
2982 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
2983 } else {
2984 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2986 break;
2987 #endif
2988 case 0xa: /* umul */
2989 CHECK_IU_FEATURE(dc, MUL);
2990 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2991 if (xop & 0x10) {
2992 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
2993 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
2994 dc->cc_op = CC_OP_LOGIC;
2996 break;
2997 case 0xb: /* smul */
2998 CHECK_IU_FEATURE(dc, MUL);
2999 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3000 if (xop & 0x10) {
3001 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3002 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3003 dc->cc_op = CC_OP_LOGIC;
3005 break;
3006 case 0xc: /* subx, V9 subc */
3007 if (IS_IMM) {
3008 simm = GET_FIELDs(insn, 19, 31);
3009 if (xop & 0x10) {
3010 gen_helper_compute_psr();
3011 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3012 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3013 dc->cc_op = CC_OP_SUBX;
3014 } else {
3015 gen_helper_compute_psr();
3016 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3017 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3018 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3020 } else {
3021 if (xop & 0x10) {
3022 gen_helper_compute_psr();
3023 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3024 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
3025 dc->cc_op = CC_OP_SUBX;
3026 } else {
3027 gen_helper_compute_psr();
3028 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3029 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3030 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3033 break;
3034 #ifdef TARGET_SPARC64
3035 case 0xd: /* V9 udivx */
3036 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3037 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3038 gen_trap_ifdivzero_tl(cpu_cc_src2);
3039 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3040 break;
3041 #endif
3042 case 0xe: /* udiv */
3043 CHECK_IU_FEATURE(dc, DIV);
3044 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3045 if (xop & 0x10) {
3046 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3047 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3048 dc->cc_op = CC_OP_DIV;
3050 break;
3051 case 0xf: /* sdiv */
3052 CHECK_IU_FEATURE(dc, DIV);
3053 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3054 if (xop & 0x10) {
3055 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3056 tcg_gen_movi_i32(cpu_cc_op, CC_OP_DIV);
3057 dc->cc_op = CC_OP_DIV;
3059 break;
3060 default:
3061 goto illegal_insn;
3063 gen_movl_TN_reg(rd, cpu_dst);
3064 } else {
3065 cpu_src1 = get_src1(insn, cpu_src1);
3066 cpu_src2 = get_src2(insn, cpu_src2);
3067 switch (xop) {
3068 case 0x20: /* taddcc */
3069 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3070 gen_movl_TN_reg(rd, cpu_dst);
3071 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3072 dc->cc_op = CC_OP_TADD;
3073 break;
3074 case 0x21: /* tsubcc */
3075 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3076 gen_movl_TN_reg(rd, cpu_dst);
3077 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3078 dc->cc_op = CC_OP_TSUB;
3079 break;
3080 case 0x22: /* taddcctv */
3081 save_state(dc, cpu_cond);
3082 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3083 gen_movl_TN_reg(rd, cpu_dst);
3084 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3085 dc->cc_op = CC_OP_TADDTV;
3086 break;
3087 case 0x23: /* tsubcctv */
3088 save_state(dc, cpu_cond);
3089 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3090 gen_movl_TN_reg(rd, cpu_dst);
3091 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3092 dc->cc_op = CC_OP_TSUBTV;
3093 break;
3094 case 0x24: /* mulscc */
3095 gen_helper_compute_psr();
3096 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3097 gen_movl_TN_reg(rd, cpu_dst);
3098 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3099 dc->cc_op = CC_OP_ADD;
3100 break;
3101 #ifndef TARGET_SPARC64
3102 case 0x25: /* sll */
3103 if (IS_IMM) { /* immediate */
3104 simm = GET_FIELDs(insn, 20, 31);
3105 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3106 } else { /* register */
3107 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3108 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3110 gen_movl_TN_reg(rd, cpu_dst);
3111 break;
3112 case 0x26: /* srl */
3113 if (IS_IMM) { /* immediate */
3114 simm = GET_FIELDs(insn, 20, 31);
3115 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3116 } else { /* register */
3117 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3118 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3120 gen_movl_TN_reg(rd, cpu_dst);
3121 break;
3122 case 0x27: /* sra */
3123 if (IS_IMM) { /* immediate */
3124 simm = GET_FIELDs(insn, 20, 31);
3125 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3126 } else { /* register */
3127 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3128 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3130 gen_movl_TN_reg(rd, cpu_dst);
3131 break;
3132 #endif
3133 case 0x30:
3135 switch(rd) {
3136 case 0: /* wry */
3137 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3138 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3139 break;
3140 #ifndef TARGET_SPARC64
3141 case 0x01 ... 0x0f: /* undefined in the
3142 SPARCv8 manual, nop
3143 on the microSPARC
3144 II */
3145 case 0x10 ... 0x1f: /* implementation-dependent
3146 in the SPARCv8
3147 manual, nop on the
3148 microSPARC II */
3149 break;
3150 #else
3151 case 0x2: /* V9 wrccr */
3152 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3153 gen_helper_wrccr(cpu_dst);
3154 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3155 dc->cc_op = CC_OP_FLAGS;
3156 break;
3157 case 0x3: /* V9 wrasi */
3158 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3159 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3160 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3161 break;
3162 case 0x6: /* V9 wrfprs */
3163 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3164 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3165 save_state(dc, cpu_cond);
3166 gen_op_next_insn();
3167 tcg_gen_exit_tb(0);
3168 dc->is_br = 1;
3169 break;
3170 case 0xf: /* V9 sir, nop if user */
3171 #if !defined(CONFIG_USER_ONLY)
3172 if (supervisor(dc))
3173 ; // XXX
3174 #endif
3175 break;
3176 case 0x13: /* Graphics Status */
3177 if (gen_trap_ifnofpu(dc, cpu_cond))
3178 goto jmp_insn;
3179 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3180 break;
3181 case 0x14: /* Softint set */
3182 if (!supervisor(dc))
3183 goto illegal_insn;
3184 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3185 gen_helper_set_softint(cpu_tmp64);
3186 break;
3187 case 0x15: /* Softint clear */
3188 if (!supervisor(dc))
3189 goto illegal_insn;
3190 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3191 gen_helper_clear_softint(cpu_tmp64);
3192 break;
3193 case 0x16: /* Softint write */
3194 if (!supervisor(dc))
3195 goto illegal_insn;
3196 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3197 gen_helper_write_softint(cpu_tmp64);
3198 break;
3199 case 0x17: /* Tick compare */
3200 #if !defined(CONFIG_USER_ONLY)
3201 if (!supervisor(dc))
3202 goto illegal_insn;
3203 #endif
3205 TCGv_ptr r_tickptr;
3207 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3208 cpu_src2);
3209 r_tickptr = tcg_temp_new_ptr();
3210 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3211 offsetof(CPUState, tick));
3212 gen_helper_tick_set_limit(r_tickptr,
3213 cpu_tick_cmpr);
3214 tcg_temp_free_ptr(r_tickptr);
3216 break;
3217 case 0x18: /* System tick */
3218 #if !defined(CONFIG_USER_ONLY)
3219 if (!supervisor(dc))
3220 goto illegal_insn;
3221 #endif
3223 TCGv_ptr r_tickptr;
3225 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3226 cpu_src2);
3227 r_tickptr = tcg_temp_new_ptr();
3228 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3229 offsetof(CPUState, stick));
3230 gen_helper_tick_set_count(r_tickptr,
3231 cpu_dst);
3232 tcg_temp_free_ptr(r_tickptr);
3234 break;
3235 case 0x19: /* System tick compare */
3236 #if !defined(CONFIG_USER_ONLY)
3237 if (!supervisor(dc))
3238 goto illegal_insn;
3239 #endif
3241 TCGv_ptr r_tickptr;
3243 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3244 cpu_src2);
3245 r_tickptr = tcg_temp_new_ptr();
3246 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3247 offsetof(CPUState, stick));
3248 gen_helper_tick_set_limit(r_tickptr,
3249 cpu_stick_cmpr);
3250 tcg_temp_free_ptr(r_tickptr);
3252 break;
3254 case 0x10: /* Performance Control */
3255 case 0x11: /* Performance Instrumentation
3256 Counter */
3257 case 0x12: /* Dispatch Control */
3258 #endif
3259 default:
3260 goto illegal_insn;
3263 break;
3264 #if !defined(CONFIG_USER_ONLY)
3265 case 0x31: /* wrpsr, V9 saved, restored */
3267 if (!supervisor(dc))
3268 goto priv_insn;
3269 #ifdef TARGET_SPARC64
3270 switch (rd) {
3271 case 0:
3272 gen_helper_saved();
3273 break;
3274 case 1:
3275 gen_helper_restored();
3276 break;
3277 case 2: /* UA2005 allclean */
3278 case 3: /* UA2005 otherw */
3279 case 4: /* UA2005 normalw */
3280 case 5: /* UA2005 invalw */
3281 // XXX
3282 default:
3283 goto illegal_insn;
3285 #else
3286 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3287 gen_helper_wrpsr(cpu_dst);
3288 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3289 dc->cc_op = CC_OP_FLAGS;
3290 save_state(dc, cpu_cond);
3291 gen_op_next_insn();
3292 tcg_gen_exit_tb(0);
3293 dc->is_br = 1;
3294 #endif
3296 break;
3297 case 0x32: /* wrwim, V9 wrpr */
3299 if (!supervisor(dc))
3300 goto priv_insn;
3301 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3302 #ifdef TARGET_SPARC64
3303 switch (rd) {
3304 case 0: // tpc
3306 TCGv_ptr r_tsptr;
3308 r_tsptr = tcg_temp_new_ptr();
3309 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3310 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3311 offsetof(trap_state, tpc));
3312 tcg_temp_free_ptr(r_tsptr);
3314 break;
3315 case 1: // tnpc
3317 TCGv_ptr r_tsptr;
3319 r_tsptr = tcg_temp_new_ptr();
3320 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3321 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3322 offsetof(trap_state, tnpc));
3323 tcg_temp_free_ptr(r_tsptr);
3325 break;
3326 case 2: // tstate
3328 TCGv_ptr r_tsptr;
3330 r_tsptr = tcg_temp_new_ptr();
3331 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3332 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3333 offsetof(trap_state,
3334 tstate));
3335 tcg_temp_free_ptr(r_tsptr);
3337 break;
3338 case 3: // tt
3340 TCGv_ptr r_tsptr;
3342 r_tsptr = tcg_temp_new_ptr();
3343 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3344 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3345 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3346 offsetof(trap_state, tt));
3347 tcg_temp_free_ptr(r_tsptr);
3349 break;
3350 case 4: // tick
3352 TCGv_ptr r_tickptr;
3354 r_tickptr = tcg_temp_new_ptr();
3355 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3356 offsetof(CPUState, tick));
3357 gen_helper_tick_set_count(r_tickptr,
3358 cpu_tmp0);
3359 tcg_temp_free_ptr(r_tickptr);
3361 break;
3362 case 5: // tba
3363 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3364 break;
3365 case 6: // pstate
3366 save_state(dc, cpu_cond);
3367 gen_helper_wrpstate(cpu_tmp0);
3368 gen_op_next_insn();
3369 tcg_gen_exit_tb(0);
3370 dc->is_br = 1;
3371 break;
3372 case 7: // tl
3373 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3374 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3375 offsetof(CPUSPARCState, tl));
3376 break;
3377 case 8: // pil
3378 gen_helper_wrpil(cpu_tmp0);
3379 break;
3380 case 9: // cwp
3381 gen_helper_wrcwp(cpu_tmp0);
3382 break;
3383 case 10: // cansave
3384 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3385 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3386 offsetof(CPUSPARCState,
3387 cansave));
3388 break;
3389 case 11: // canrestore
3390 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3391 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3392 offsetof(CPUSPARCState,
3393 canrestore));
3394 break;
3395 case 12: // cleanwin
3396 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3397 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3398 offsetof(CPUSPARCState,
3399 cleanwin));
3400 break;
3401 case 13: // otherwin
3402 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3403 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3404 offsetof(CPUSPARCState,
3405 otherwin));
3406 break;
3407 case 14: // wstate
3408 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3409 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3410 offsetof(CPUSPARCState,
3411 wstate));
3412 break;
3413 case 16: // UA2005 gl
3414 CHECK_IU_FEATURE(dc, GL);
3415 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3416 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3417 offsetof(CPUSPARCState, gl));
3418 break;
3419 case 26: // UA2005 strand status
3420 CHECK_IU_FEATURE(dc, HYPV);
3421 if (!hypervisor(dc))
3422 goto priv_insn;
3423 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3424 break;
3425 default:
3426 goto illegal_insn;
3428 #else
3429 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3430 if (dc->def->nwindows != 32)
3431 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3432 (1 << dc->def->nwindows) - 1);
3433 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3434 #endif
3436 break;
3437 case 0x33: /* wrtbr, UA2005 wrhpr */
3439 #ifndef TARGET_SPARC64
3440 if (!supervisor(dc))
3441 goto priv_insn;
3442 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3443 #else
3444 CHECK_IU_FEATURE(dc, HYPV);
3445 if (!hypervisor(dc))
3446 goto priv_insn;
3447 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3448 switch (rd) {
3449 case 0: // hpstate
3450 // XXX gen_op_wrhpstate();
3451 save_state(dc, cpu_cond);
3452 gen_op_next_insn();
3453 tcg_gen_exit_tb(0);
3454 dc->is_br = 1;
3455 break;
3456 case 1: // htstate
3457 // XXX gen_op_wrhtstate();
3458 break;
3459 case 3: // hintp
3460 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3461 break;
3462 case 5: // htba
3463 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3464 break;
3465 case 31: // hstick_cmpr
3467 TCGv_ptr r_tickptr;
3469 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3470 r_tickptr = tcg_temp_new_ptr();
3471 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3472 offsetof(CPUState, hstick));
3473 gen_helper_tick_set_limit(r_tickptr,
3474 cpu_hstick_cmpr);
3475 tcg_temp_free_ptr(r_tickptr);
3477 break;
3478 case 6: // hver readonly
3479 default:
3480 goto illegal_insn;
3482 #endif
3484 break;
3485 #endif
3486 #ifdef TARGET_SPARC64
3487 case 0x2c: /* V9 movcc */
3489 int cc = GET_FIELD_SP(insn, 11, 12);
3490 int cond = GET_FIELD_SP(insn, 14, 17);
3491 TCGv r_cond;
3492 int l1;
3494 r_cond = tcg_temp_new();
3495 if (insn & (1 << 18)) {
3496 if (cc == 0)
3497 gen_cond(r_cond, 0, cond, dc);
3498 else if (cc == 2)
3499 gen_cond(r_cond, 1, cond, dc);
3500 else
3501 goto illegal_insn;
3502 } else {
3503 gen_fcond(r_cond, cc, cond);
3506 l1 = gen_new_label();
3508 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3509 if (IS_IMM) { /* immediate */
3510 TCGv r_const;
3512 simm = GET_FIELD_SPs(insn, 0, 10);
3513 r_const = tcg_const_tl(simm);
3514 gen_movl_TN_reg(rd, r_const);
3515 tcg_temp_free(r_const);
3516 } else {
3517 rs2 = GET_FIELD_SP(insn, 0, 4);
3518 gen_movl_reg_TN(rs2, cpu_tmp0);
3519 gen_movl_TN_reg(rd, cpu_tmp0);
3521 gen_set_label(l1);
3522 tcg_temp_free(r_cond);
3523 break;
3525 case 0x2d: /* V9 sdivx */
3526 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3527 gen_movl_TN_reg(rd, cpu_dst);
3528 break;
3529 case 0x2e: /* V9 popc */
3531 cpu_src2 = get_src2(insn, cpu_src2);
3532 gen_helper_popc(cpu_dst, cpu_src2);
3533 gen_movl_TN_reg(rd, cpu_dst);
3535 case 0x2f: /* V9 movr */
3537 int cond = GET_FIELD_SP(insn, 10, 12);
3538 int l1;
3540 cpu_src1 = get_src1(insn, cpu_src1);
3542 l1 = gen_new_label();
3544 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3545 cpu_src1, 0, l1);
3546 if (IS_IMM) { /* immediate */
3547 TCGv r_const;
3549 simm = GET_FIELD_SPs(insn, 0, 9);
3550 r_const = tcg_const_tl(simm);
3551 gen_movl_TN_reg(rd, r_const);
3552 tcg_temp_free(r_const);
3553 } else {
3554 rs2 = GET_FIELD_SP(insn, 0, 4);
3555 gen_movl_reg_TN(rs2, cpu_tmp0);
3556 gen_movl_TN_reg(rd, cpu_tmp0);
3558 gen_set_label(l1);
3559 break;
3561 #endif
3562 default:
3563 goto illegal_insn;
3566 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3567 #ifdef TARGET_SPARC64
3568 int opf = GET_FIELD_SP(insn, 5, 13);
3569 rs1 = GET_FIELD(insn, 13, 17);
3570 rs2 = GET_FIELD(insn, 27, 31);
3571 if (gen_trap_ifnofpu(dc, cpu_cond))
3572 goto jmp_insn;
3574 switch (opf) {
3575 case 0x000: /* VIS I edge8cc */
3576 case 0x001: /* VIS II edge8n */
3577 case 0x002: /* VIS I edge8lcc */
3578 case 0x003: /* VIS II edge8ln */
3579 case 0x004: /* VIS I edge16cc */
3580 case 0x005: /* VIS II edge16n */
3581 case 0x006: /* VIS I edge16lcc */
3582 case 0x007: /* VIS II edge16ln */
3583 case 0x008: /* VIS I edge32cc */
3584 case 0x009: /* VIS II edge32n */
3585 case 0x00a: /* VIS I edge32lcc */
3586 case 0x00b: /* VIS II edge32ln */
3587 // XXX
3588 goto illegal_insn;
3589 case 0x010: /* VIS I array8 */
3590 CHECK_FPU_FEATURE(dc, VIS1);
3591 cpu_src1 = get_src1(insn, cpu_src1);
3592 gen_movl_reg_TN(rs2, cpu_src2);
3593 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3594 gen_movl_TN_reg(rd, cpu_dst);
3595 break;
3596 case 0x012: /* VIS I array16 */
3597 CHECK_FPU_FEATURE(dc, VIS1);
3598 cpu_src1 = get_src1(insn, cpu_src1);
3599 gen_movl_reg_TN(rs2, cpu_src2);
3600 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3601 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3602 gen_movl_TN_reg(rd, cpu_dst);
3603 break;
3604 case 0x014: /* VIS I array32 */
3605 CHECK_FPU_FEATURE(dc, VIS1);
3606 cpu_src1 = get_src1(insn, cpu_src1);
3607 gen_movl_reg_TN(rs2, cpu_src2);
3608 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3609 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3610 gen_movl_TN_reg(rd, cpu_dst);
3611 break;
3612 case 0x018: /* VIS I alignaddr */
3613 CHECK_FPU_FEATURE(dc, VIS1);
3614 cpu_src1 = get_src1(insn, cpu_src1);
3615 gen_movl_reg_TN(rs2, cpu_src2);
3616 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3617 gen_movl_TN_reg(rd, cpu_dst);
3618 break;
3619 case 0x019: /* VIS II bmask */
3620 case 0x01a: /* VIS I alignaddrl */
3621 // XXX
3622 goto illegal_insn;
3623 case 0x020: /* VIS I fcmple16 */
3624 CHECK_FPU_FEATURE(dc, VIS1);
3625 gen_op_load_fpr_DT0(DFPREG(rs1));
3626 gen_op_load_fpr_DT1(DFPREG(rs2));
3627 gen_helper_fcmple16();
3628 gen_op_store_DT0_fpr(DFPREG(rd));
3629 break;
3630 case 0x022: /* VIS I fcmpne16 */
3631 CHECK_FPU_FEATURE(dc, VIS1);
3632 gen_op_load_fpr_DT0(DFPREG(rs1));
3633 gen_op_load_fpr_DT1(DFPREG(rs2));
3634 gen_helper_fcmpne16();
3635 gen_op_store_DT0_fpr(DFPREG(rd));
3636 break;
3637 case 0x024: /* VIS I fcmple32 */
3638 CHECK_FPU_FEATURE(dc, VIS1);
3639 gen_op_load_fpr_DT0(DFPREG(rs1));
3640 gen_op_load_fpr_DT1(DFPREG(rs2));
3641 gen_helper_fcmple32();
3642 gen_op_store_DT0_fpr(DFPREG(rd));
3643 break;
3644 case 0x026: /* VIS I fcmpne32 */
3645 CHECK_FPU_FEATURE(dc, VIS1);
3646 gen_op_load_fpr_DT0(DFPREG(rs1));
3647 gen_op_load_fpr_DT1(DFPREG(rs2));
3648 gen_helper_fcmpne32();
3649 gen_op_store_DT0_fpr(DFPREG(rd));
3650 break;
3651 case 0x028: /* VIS I fcmpgt16 */
3652 CHECK_FPU_FEATURE(dc, VIS1);
3653 gen_op_load_fpr_DT0(DFPREG(rs1));
3654 gen_op_load_fpr_DT1(DFPREG(rs2));
3655 gen_helper_fcmpgt16();
3656 gen_op_store_DT0_fpr(DFPREG(rd));
3657 break;
3658 case 0x02a: /* VIS I fcmpeq16 */
3659 CHECK_FPU_FEATURE(dc, VIS1);
3660 gen_op_load_fpr_DT0(DFPREG(rs1));
3661 gen_op_load_fpr_DT1(DFPREG(rs2));
3662 gen_helper_fcmpeq16();
3663 gen_op_store_DT0_fpr(DFPREG(rd));
3664 break;
3665 case 0x02c: /* VIS I fcmpgt32 */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 gen_op_load_fpr_DT0(DFPREG(rs1));
3668 gen_op_load_fpr_DT1(DFPREG(rs2));
3669 gen_helper_fcmpgt32();
3670 gen_op_store_DT0_fpr(DFPREG(rd));
3671 break;
3672 case 0x02e: /* VIS I fcmpeq32 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 gen_helper_fcmpeq32();
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x031: /* VIS I fmul8x16 */
3680 CHECK_FPU_FEATURE(dc, VIS1);
3681 gen_op_load_fpr_DT0(DFPREG(rs1));
3682 gen_op_load_fpr_DT1(DFPREG(rs2));
3683 gen_helper_fmul8x16();
3684 gen_op_store_DT0_fpr(DFPREG(rd));
3685 break;
3686 case 0x033: /* VIS I fmul8x16au */
3687 CHECK_FPU_FEATURE(dc, VIS1);
3688 gen_op_load_fpr_DT0(DFPREG(rs1));
3689 gen_op_load_fpr_DT1(DFPREG(rs2));
3690 gen_helper_fmul8x16au();
3691 gen_op_store_DT0_fpr(DFPREG(rd));
3692 break;
3693 case 0x035: /* VIS I fmul8x16al */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 gen_helper_fmul8x16al();
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x036: /* VIS I fmul8sux16 */
3701 CHECK_FPU_FEATURE(dc, VIS1);
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3704 gen_helper_fmul8sux16();
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3706 break;
3707 case 0x037: /* VIS I fmul8ulx16 */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 gen_helper_fmul8ulx16();
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3713 break;
3714 case 0x038: /* VIS I fmuld8sux16 */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 gen_helper_fmuld8sux16();
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3720 break;
3721 case 0x039: /* VIS I fmuld8ulx16 */
3722 CHECK_FPU_FEATURE(dc, VIS1);
3723 gen_op_load_fpr_DT0(DFPREG(rs1));
3724 gen_op_load_fpr_DT1(DFPREG(rs2));
3725 gen_helper_fmuld8ulx16();
3726 gen_op_store_DT0_fpr(DFPREG(rd));
3727 break;
3728 case 0x03a: /* VIS I fpack32 */
3729 case 0x03b: /* VIS I fpack16 */
3730 case 0x03d: /* VIS I fpackfix */
3731 case 0x03e: /* VIS I pdist */
3732 // XXX
3733 goto illegal_insn;
3734 case 0x048: /* VIS I faligndata */
3735 CHECK_FPU_FEATURE(dc, VIS1);
3736 gen_op_load_fpr_DT0(DFPREG(rs1));
3737 gen_op_load_fpr_DT1(DFPREG(rs2));
3738 gen_helper_faligndata();
3739 gen_op_store_DT0_fpr(DFPREG(rd));
3740 break;
3741 case 0x04b: /* VIS I fpmerge */
3742 CHECK_FPU_FEATURE(dc, VIS1);
3743 gen_op_load_fpr_DT0(DFPREG(rs1));
3744 gen_op_load_fpr_DT1(DFPREG(rs2));
3745 gen_helper_fpmerge();
3746 gen_op_store_DT0_fpr(DFPREG(rd));
3747 break;
3748 case 0x04c: /* VIS II bshuffle */
3749 // XXX
3750 goto illegal_insn;
3751 case 0x04d: /* VIS I fexpand */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 gen_helper_fexpand();
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3757 break;
3758 case 0x050: /* VIS I fpadd16 */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 gen_helper_fpadd16();
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3764 break;
3765 case 0x051: /* VIS I fpadd16s */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_helper_fpadd16s(cpu_fpr[rd],
3768 cpu_fpr[rs1], cpu_fpr[rs2]);
3769 break;
3770 case 0x052: /* VIS I fpadd32 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 gen_op_load_fpr_DT0(DFPREG(rs1));
3773 gen_op_load_fpr_DT1(DFPREG(rs2));
3774 gen_helper_fpadd32();
3775 gen_op_store_DT0_fpr(DFPREG(rd));
3776 break;
3777 case 0x053: /* VIS I fpadd32s */
3778 CHECK_FPU_FEATURE(dc, VIS1);
3779 gen_helper_fpadd32s(cpu_fpr[rd],
3780 cpu_fpr[rs1], cpu_fpr[rs2]);
3781 break;
3782 case 0x054: /* VIS I fpsub16 */
3783 CHECK_FPU_FEATURE(dc, VIS1);
3784 gen_op_load_fpr_DT0(DFPREG(rs1));
3785 gen_op_load_fpr_DT1(DFPREG(rs2));
3786 gen_helper_fpsub16();
3787 gen_op_store_DT0_fpr(DFPREG(rd));
3788 break;
3789 case 0x055: /* VIS I fpsub16s */
3790 CHECK_FPU_FEATURE(dc, VIS1);
3791 gen_helper_fpsub16s(cpu_fpr[rd],
3792 cpu_fpr[rs1], cpu_fpr[rs2]);
3793 break;
3794 case 0x056: /* VIS I fpsub32 */
3795 CHECK_FPU_FEATURE(dc, VIS1);
3796 gen_op_load_fpr_DT0(DFPREG(rs1));
3797 gen_op_load_fpr_DT1(DFPREG(rs2));
3798 gen_helper_fpsub32();
3799 gen_op_store_DT0_fpr(DFPREG(rd));
3800 break;
3801 case 0x057: /* VIS I fpsub32s */
3802 CHECK_FPU_FEATURE(dc, VIS1);
3803 gen_helper_fpsub32s(cpu_fpr[rd],
3804 cpu_fpr[rs1], cpu_fpr[rs2]);
3805 break;
3806 case 0x060: /* VIS I fzero */
3807 CHECK_FPU_FEATURE(dc, VIS1);
3808 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3809 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3810 break;
3811 case 0x061: /* VIS I fzeros */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3814 break;
3815 case 0x062: /* VIS I fnor */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3818 cpu_fpr[DFPREG(rs2)]);
3819 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3820 cpu_fpr[DFPREG(rs2) + 1]);
3821 break;
3822 case 0x063: /* VIS I fnors */
3823 CHECK_FPU_FEATURE(dc, VIS1);
3824 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3825 break;
3826 case 0x064: /* VIS I fandnot2 */
3827 CHECK_FPU_FEATURE(dc, VIS1);
3828 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3829 cpu_fpr[DFPREG(rs2)]);
3830 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3831 cpu_fpr[DFPREG(rs1) + 1],
3832 cpu_fpr[DFPREG(rs2) + 1]);
3833 break;
3834 case 0x065: /* VIS I fandnot2s */
3835 CHECK_FPU_FEATURE(dc, VIS1);
3836 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3837 break;
3838 case 0x066: /* VIS I fnot2 */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3841 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3842 cpu_fpr[DFPREG(rs2) + 1]);
3843 break;
3844 case 0x067: /* VIS I fnot2s */
3845 CHECK_FPU_FEATURE(dc, VIS1);
3846 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3847 break;
3848 case 0x068: /* VIS I fandnot1 */
3849 CHECK_FPU_FEATURE(dc, VIS1);
3850 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3851 cpu_fpr[DFPREG(rs1)]);
3852 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3853 cpu_fpr[DFPREG(rs2) + 1],
3854 cpu_fpr[DFPREG(rs1) + 1]);
3855 break;
3856 case 0x069: /* VIS I fandnot1s */
3857 CHECK_FPU_FEATURE(dc, VIS1);
3858 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3859 break;
3860 case 0x06a: /* VIS I fnot1 */
3861 CHECK_FPU_FEATURE(dc, VIS1);
3862 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3863 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3864 cpu_fpr[DFPREG(rs1) + 1]);
3865 break;
3866 case 0x06b: /* VIS I fnot1s */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3869 break;
3870 case 0x06c: /* VIS I fxor */
3871 CHECK_FPU_FEATURE(dc, VIS1);
3872 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3873 cpu_fpr[DFPREG(rs2)]);
3874 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3875 cpu_fpr[DFPREG(rs1) + 1],
3876 cpu_fpr[DFPREG(rs2) + 1]);
3877 break;
3878 case 0x06d: /* VIS I fxors */
3879 CHECK_FPU_FEATURE(dc, VIS1);
3880 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3881 break;
3882 case 0x06e: /* VIS I fnand */
3883 CHECK_FPU_FEATURE(dc, VIS1);
3884 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3885 cpu_fpr[DFPREG(rs2)]);
3886 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3887 cpu_fpr[DFPREG(rs2) + 1]);
3888 break;
3889 case 0x06f: /* VIS I fnands */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3892 break;
3893 case 0x070: /* VIS I fand */
3894 CHECK_FPU_FEATURE(dc, VIS1);
3895 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3896 cpu_fpr[DFPREG(rs2)]);
3897 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3898 cpu_fpr[DFPREG(rs1) + 1],
3899 cpu_fpr[DFPREG(rs2) + 1]);
3900 break;
3901 case 0x071: /* VIS I fands */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3904 break;
3905 case 0x072: /* VIS I fxnor */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3908 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3909 cpu_fpr[DFPREG(rs1)]);
3910 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3911 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3912 cpu_fpr[DFPREG(rs1) + 1]);
3913 break;
3914 case 0x073: /* VIS I fxnors */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3917 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3918 break;
3919 case 0x074: /* VIS I fsrc1 */
3920 CHECK_FPU_FEATURE(dc, VIS1);
3921 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3922 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3923 cpu_fpr[DFPREG(rs1) + 1]);
3924 break;
3925 case 0x075: /* VIS I fsrc1s */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3928 break;
3929 case 0x076: /* VIS I fornot2 */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3932 cpu_fpr[DFPREG(rs2)]);
3933 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3934 cpu_fpr[DFPREG(rs1) + 1],
3935 cpu_fpr[DFPREG(rs2) + 1]);
3936 break;
3937 case 0x077: /* VIS I fornot2s */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3940 break;
3941 case 0x078: /* VIS I fsrc2 */
3942 CHECK_FPU_FEATURE(dc, VIS1);
3943 gen_op_load_fpr_DT0(DFPREG(rs2));
3944 gen_op_store_DT0_fpr(DFPREG(rd));
3945 break;
3946 case 0x079: /* VIS I fsrc2s */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3949 break;
3950 case 0x07a: /* VIS I fornot1 */
3951 CHECK_FPU_FEATURE(dc, VIS1);
3952 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3953 cpu_fpr[DFPREG(rs1)]);
3954 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
3955 cpu_fpr[DFPREG(rs2) + 1],
3956 cpu_fpr[DFPREG(rs1) + 1]);
3957 break;
3958 case 0x07b: /* VIS I fornot1s */
3959 CHECK_FPU_FEATURE(dc, VIS1);
3960 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3961 break;
3962 case 0x07c: /* VIS I for */
3963 CHECK_FPU_FEATURE(dc, VIS1);
3964 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3965 cpu_fpr[DFPREG(rs2)]);
3966 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
3967 cpu_fpr[DFPREG(rs1) + 1],
3968 cpu_fpr[DFPREG(rs2) + 1]);
3969 break;
3970 case 0x07d: /* VIS I fors */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3973 break;
3974 case 0x07e: /* VIS I fone */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
3977 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
3978 break;
3979 case 0x07f: /* VIS I fones */
3980 CHECK_FPU_FEATURE(dc, VIS1);
3981 tcg_gen_movi_i32(cpu_fpr[rd], -1);
3982 break;
3983 case 0x080: /* VIS I shutdown */
3984 case 0x081: /* VIS II siam */
3985 // XXX
3986 goto illegal_insn;
3987 default:
3988 goto illegal_insn;
3990 #else
3991 goto ncp_insn;
3992 #endif
3993 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3994 #ifdef TARGET_SPARC64
3995 goto illegal_insn;
3996 #else
3997 goto ncp_insn;
3998 #endif
3999 #ifdef TARGET_SPARC64
4000 } else if (xop == 0x39) { /* V9 return */
4001 TCGv_i32 r_const;
4003 save_state(dc, cpu_cond);
4004 cpu_src1 = get_src1(insn, cpu_src1);
4005 if (IS_IMM) { /* immediate */
4006 simm = GET_FIELDs(insn, 19, 31);
4007 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4008 } else { /* register */
4009 rs2 = GET_FIELD(insn, 27, 31);
4010 if (rs2) {
4011 gen_movl_reg_TN(rs2, cpu_src2);
4012 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4013 } else
4014 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4016 gen_helper_restore();
4017 gen_mov_pc_npc(dc, cpu_cond);
4018 r_const = tcg_const_i32(3);
4019 gen_helper_check_align(cpu_dst, r_const);
4020 tcg_temp_free_i32(r_const);
4021 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4022 dc->npc = DYNAMIC_PC;
4023 goto jmp_insn;
4024 #endif
4025 } else {
4026 cpu_src1 = get_src1(insn, cpu_src1);
4027 if (IS_IMM) { /* immediate */
4028 simm = GET_FIELDs(insn, 19, 31);
4029 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4030 } else { /* register */
4031 rs2 = GET_FIELD(insn, 27, 31);
4032 if (rs2) {
4033 gen_movl_reg_TN(rs2, cpu_src2);
4034 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4035 } else
4036 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4038 switch (xop) {
4039 case 0x38: /* jmpl */
4041 TCGv r_pc;
4042 TCGv_i32 r_const;
4044 r_pc = tcg_const_tl(dc->pc);
4045 gen_movl_TN_reg(rd, r_pc);
4046 tcg_temp_free(r_pc);
4047 gen_mov_pc_npc(dc, cpu_cond);
4048 r_const = tcg_const_i32(3);
4049 gen_helper_check_align(cpu_dst, r_const);
4050 tcg_temp_free_i32(r_const);
4051 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4052 dc->npc = DYNAMIC_PC;
4054 goto jmp_insn;
4055 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4056 case 0x39: /* rett, V9 return */
4058 TCGv_i32 r_const;
4060 if (!supervisor(dc))
4061 goto priv_insn;
4062 gen_mov_pc_npc(dc, cpu_cond);
4063 r_const = tcg_const_i32(3);
4064 gen_helper_check_align(cpu_dst, r_const);
4065 tcg_temp_free_i32(r_const);
4066 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4067 dc->npc = DYNAMIC_PC;
4068 gen_helper_rett();
4070 goto jmp_insn;
4071 #endif
4072 case 0x3b: /* flush */
4073 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4074 goto unimp_flush;
4075 gen_helper_flush(cpu_dst);
4076 break;
4077 case 0x3c: /* save */
4078 save_state(dc, cpu_cond);
4079 gen_helper_save();
4080 gen_movl_TN_reg(rd, cpu_dst);
4081 break;
4082 case 0x3d: /* restore */
4083 save_state(dc, cpu_cond);
4084 gen_helper_restore();
4085 gen_movl_TN_reg(rd, cpu_dst);
4086 break;
4087 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4088 case 0x3e: /* V9 done/retry */
4090 switch (rd) {
4091 case 0:
4092 if (!supervisor(dc))
4093 goto priv_insn;
4094 dc->npc = DYNAMIC_PC;
4095 dc->pc = DYNAMIC_PC;
4096 gen_helper_done();
4097 goto jmp_insn;
4098 case 1:
4099 if (!supervisor(dc))
4100 goto priv_insn;
4101 dc->npc = DYNAMIC_PC;
4102 dc->pc = DYNAMIC_PC;
4103 gen_helper_retry();
4104 goto jmp_insn;
4105 default:
4106 goto illegal_insn;
4109 break;
4110 #endif
4111 default:
4112 goto illegal_insn;
4115 break;
4117 break;
4118 case 3: /* load/store instructions */
4120 unsigned int xop = GET_FIELD(insn, 7, 12);
4122 /* flush pending conditional evaluations before exposing
4123 cpu state */
4124 if (dc->cc_op != CC_OP_FLAGS) {
4125 dc->cc_op = CC_OP_FLAGS;
4126 gen_helper_compute_psr();
4128 cpu_src1 = get_src1(insn, cpu_src1);
4129 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4130 rs2 = GET_FIELD(insn, 27, 31);
4131 gen_movl_reg_TN(rs2, cpu_src2);
4132 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4133 } else if (IS_IMM) { /* immediate */
4134 simm = GET_FIELDs(insn, 19, 31);
4135 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4136 } else { /* register */
4137 rs2 = GET_FIELD(insn, 27, 31);
4138 if (rs2 != 0) {
4139 gen_movl_reg_TN(rs2, cpu_src2);
4140 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4141 } else
4142 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4144 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4145 (xop > 0x17 && xop <= 0x1d ) ||
4146 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4147 switch (xop) {
4148 case 0x0: /* ld, V9 lduw, load unsigned word */
4149 gen_address_mask(dc, cpu_addr);
4150 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4151 break;
4152 case 0x1: /* ldub, load unsigned byte */
4153 gen_address_mask(dc, cpu_addr);
4154 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4155 break;
4156 case 0x2: /* lduh, load unsigned halfword */
4157 gen_address_mask(dc, cpu_addr);
4158 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4159 break;
4160 case 0x3: /* ldd, load double word */
4161 if (rd & 1)
4162 goto illegal_insn;
4163 else {
4164 TCGv_i32 r_const;
4166 save_state(dc, cpu_cond);
4167 r_const = tcg_const_i32(7);
4168 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4169 tcg_temp_free_i32(r_const);
4170 gen_address_mask(dc, cpu_addr);
4171 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4172 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4173 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4174 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4175 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4176 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4177 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4179 break;
4180 case 0x9: /* ldsb, load signed byte */
4181 gen_address_mask(dc, cpu_addr);
4182 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4183 break;
4184 case 0xa: /* ldsh, load signed halfword */
4185 gen_address_mask(dc, cpu_addr);
4186 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4187 break;
4188 case 0xd: /* ldstub -- XXX: should be atomically */
4190 TCGv r_const;
4192 gen_address_mask(dc, cpu_addr);
4193 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4194 r_const = tcg_const_tl(0xff);
4195 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4196 tcg_temp_free(r_const);
4198 break;
4199 case 0x0f: /* swap, swap register with memory. Also
4200 atomically */
4201 CHECK_IU_FEATURE(dc, SWAP);
4202 gen_movl_reg_TN(rd, cpu_val);
4203 gen_address_mask(dc, cpu_addr);
4204 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4205 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4206 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4207 break;
4208 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4209 case 0x10: /* lda, V9 lduwa, load word alternate */
4210 #ifndef TARGET_SPARC64
4211 if (IS_IMM)
4212 goto illegal_insn;
4213 if (!supervisor(dc))
4214 goto priv_insn;
4215 #endif
4216 save_state(dc, cpu_cond);
4217 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4218 break;
4219 case 0x11: /* lduba, load unsigned byte alternate */
4220 #ifndef TARGET_SPARC64
4221 if (IS_IMM)
4222 goto illegal_insn;
4223 if (!supervisor(dc))
4224 goto priv_insn;
4225 #endif
4226 save_state(dc, cpu_cond);
4227 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4228 break;
4229 case 0x12: /* lduha, load unsigned halfword alternate */
4230 #ifndef TARGET_SPARC64
4231 if (IS_IMM)
4232 goto illegal_insn;
4233 if (!supervisor(dc))
4234 goto priv_insn;
4235 #endif
4236 save_state(dc, cpu_cond);
4237 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4238 break;
4239 case 0x13: /* ldda, load double word alternate */
4240 #ifndef TARGET_SPARC64
4241 if (IS_IMM)
4242 goto illegal_insn;
4243 if (!supervisor(dc))
4244 goto priv_insn;
4245 #endif
4246 if (rd & 1)
4247 goto illegal_insn;
4248 save_state(dc, cpu_cond);
4249 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4250 goto skip_move;
4251 case 0x19: /* ldsba, load signed byte alternate */
4252 #ifndef TARGET_SPARC64
4253 if (IS_IMM)
4254 goto illegal_insn;
4255 if (!supervisor(dc))
4256 goto priv_insn;
4257 #endif
4258 save_state(dc, cpu_cond);
4259 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4260 break;
4261 case 0x1a: /* ldsha, load signed halfword alternate */
4262 #ifndef TARGET_SPARC64
4263 if (IS_IMM)
4264 goto illegal_insn;
4265 if (!supervisor(dc))
4266 goto priv_insn;
4267 #endif
4268 save_state(dc, cpu_cond);
4269 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4270 break;
4271 case 0x1d: /* ldstuba -- XXX: should be atomically */
4272 #ifndef TARGET_SPARC64
4273 if (IS_IMM)
4274 goto illegal_insn;
4275 if (!supervisor(dc))
4276 goto priv_insn;
4277 #endif
4278 save_state(dc, cpu_cond);
4279 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4280 break;
4281 case 0x1f: /* swapa, swap reg with alt. memory. Also
4282 atomically */
4283 CHECK_IU_FEATURE(dc, SWAP);
4284 #ifndef TARGET_SPARC64
4285 if (IS_IMM)
4286 goto illegal_insn;
4287 if (!supervisor(dc))
4288 goto priv_insn;
4289 #endif
4290 save_state(dc, cpu_cond);
4291 gen_movl_reg_TN(rd, cpu_val);
4292 gen_swap_asi(cpu_val, cpu_addr, insn);
4293 break;
4295 #ifndef TARGET_SPARC64
4296 case 0x30: /* ldc */
4297 case 0x31: /* ldcsr */
4298 case 0x33: /* lddc */
4299 goto ncp_insn;
4300 #endif
4301 #endif
4302 #ifdef TARGET_SPARC64
4303 case 0x08: /* V9 ldsw */
4304 gen_address_mask(dc, cpu_addr);
4305 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4306 break;
4307 case 0x0b: /* V9 ldx */
4308 gen_address_mask(dc, cpu_addr);
4309 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4310 break;
4311 case 0x18: /* V9 ldswa */
4312 save_state(dc, cpu_cond);
4313 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4314 break;
4315 case 0x1b: /* V9 ldxa */
4316 save_state(dc, cpu_cond);
4317 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4318 break;
4319 case 0x2d: /* V9 prefetch, no effect */
4320 goto skip_move;
4321 case 0x30: /* V9 ldfa */
4322 save_state(dc, cpu_cond);
4323 gen_ldf_asi(cpu_addr, insn, 4, rd);
4324 goto skip_move;
4325 case 0x33: /* V9 lddfa */
4326 save_state(dc, cpu_cond);
4327 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4328 goto skip_move;
4329 case 0x3d: /* V9 prefetcha, no effect */
4330 goto skip_move;
4331 case 0x32: /* V9 ldqfa */
4332 CHECK_FPU_FEATURE(dc, FLOAT128);
4333 save_state(dc, cpu_cond);
4334 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4335 goto skip_move;
4336 #endif
4337 default:
4338 goto illegal_insn;
4340 gen_movl_TN_reg(rd, cpu_val);
4341 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4342 skip_move: ;
4343 #endif
4344 } else if (xop >= 0x20 && xop < 0x24) {
4345 if (gen_trap_ifnofpu(dc, cpu_cond))
4346 goto jmp_insn;
4347 save_state(dc, cpu_cond);
4348 switch (xop) {
4349 case 0x20: /* ldf, load fpreg */
4350 gen_address_mask(dc, cpu_addr);
4351 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4352 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4353 break;
4354 case 0x21: /* ldfsr, V9 ldxfsr */
4355 #ifdef TARGET_SPARC64
4356 gen_address_mask(dc, cpu_addr);
4357 if (rd == 1) {
4358 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4359 gen_helper_ldxfsr(cpu_tmp64);
4360 } else
4361 #else
4363 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4364 gen_helper_ldfsr(cpu_tmp32);
4366 #endif
4367 break;
4368 case 0x22: /* ldqf, load quad fpreg */
4370 TCGv_i32 r_const;
4372 CHECK_FPU_FEATURE(dc, FLOAT128);
4373 r_const = tcg_const_i32(dc->mem_idx);
4374 gen_helper_ldqf(cpu_addr, r_const);
4375 tcg_temp_free_i32(r_const);
4376 gen_op_store_QT0_fpr(QFPREG(rd));
4378 break;
4379 case 0x23: /* lddf, load double fpreg */
4381 TCGv_i32 r_const;
4383 r_const = tcg_const_i32(dc->mem_idx);
4384 gen_helper_lddf(cpu_addr, r_const);
4385 tcg_temp_free_i32(r_const);
4386 gen_op_store_DT0_fpr(DFPREG(rd));
4388 break;
4389 default:
4390 goto illegal_insn;
4392 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4393 xop == 0xe || xop == 0x1e) {
4394 gen_movl_reg_TN(rd, cpu_val);
4395 switch (xop) {
4396 case 0x4: /* st, store word */
4397 gen_address_mask(dc, cpu_addr);
4398 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4399 break;
4400 case 0x5: /* stb, store byte */
4401 gen_address_mask(dc, cpu_addr);
4402 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4403 break;
4404 case 0x6: /* sth, store halfword */
4405 gen_address_mask(dc, cpu_addr);
4406 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4407 break;
4408 case 0x7: /* std, store double word */
4409 if (rd & 1)
4410 goto illegal_insn;
4411 else {
4412 TCGv_i32 r_const;
4414 save_state(dc, cpu_cond);
4415 gen_address_mask(dc, cpu_addr);
4416 r_const = tcg_const_i32(7);
4417 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4418 tcg_temp_free_i32(r_const);
4419 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4420 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4421 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4423 break;
4424 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4425 case 0x14: /* sta, V9 stwa, store word alternate */
4426 #ifndef TARGET_SPARC64
4427 if (IS_IMM)
4428 goto illegal_insn;
4429 if (!supervisor(dc))
4430 goto priv_insn;
4431 #endif
4432 save_state(dc, cpu_cond);
4433 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4434 break;
4435 case 0x15: /* stba, store byte alternate */
4436 #ifndef TARGET_SPARC64
4437 if (IS_IMM)
4438 goto illegal_insn;
4439 if (!supervisor(dc))
4440 goto priv_insn;
4441 #endif
4442 save_state(dc, cpu_cond);
4443 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4444 break;
4445 case 0x16: /* stha, store halfword alternate */
4446 #ifndef TARGET_SPARC64
4447 if (IS_IMM)
4448 goto illegal_insn;
4449 if (!supervisor(dc))
4450 goto priv_insn;
4451 #endif
4452 save_state(dc, cpu_cond);
4453 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4454 break;
4455 case 0x17: /* stda, store double word alternate */
4456 #ifndef TARGET_SPARC64
4457 if (IS_IMM)
4458 goto illegal_insn;
4459 if (!supervisor(dc))
4460 goto priv_insn;
4461 #endif
4462 if (rd & 1)
4463 goto illegal_insn;
4464 else {
4465 save_state(dc, cpu_cond);
4466 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4468 break;
4469 #endif
4470 #ifdef TARGET_SPARC64
4471 case 0x0e: /* V9 stx */
4472 gen_address_mask(dc, cpu_addr);
4473 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4474 break;
4475 case 0x1e: /* V9 stxa */
4476 save_state(dc, cpu_cond);
4477 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4478 break;
4479 #endif
4480 default:
4481 goto illegal_insn;
4483 } else if (xop > 0x23 && xop < 0x28) {
4484 if (gen_trap_ifnofpu(dc, cpu_cond))
4485 goto jmp_insn;
4486 save_state(dc, cpu_cond);
4487 switch (xop) {
4488 case 0x24: /* stf, store fpreg */
4489 gen_address_mask(dc, cpu_addr);
4490 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4491 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4492 break;
4493 case 0x25: /* stfsr, V9 stxfsr */
4494 #ifdef TARGET_SPARC64
4495 gen_address_mask(dc, cpu_addr);
4496 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4497 if (rd == 1)
4498 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4499 else
4500 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4501 #else
4502 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4503 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4504 #endif
4505 break;
4506 case 0x26:
4507 #ifdef TARGET_SPARC64
4508 /* V9 stqf, store quad fpreg */
4510 TCGv_i32 r_const;
4512 CHECK_FPU_FEATURE(dc, FLOAT128);
4513 gen_op_load_fpr_QT0(QFPREG(rd));
4514 r_const = tcg_const_i32(dc->mem_idx);
4515 gen_helper_stqf(cpu_addr, r_const);
4516 tcg_temp_free_i32(r_const);
4518 break;
4519 #else /* !TARGET_SPARC64 */
4520 /* stdfq, store floating point queue */
4521 #if defined(CONFIG_USER_ONLY)
4522 goto illegal_insn;
4523 #else
4524 if (!supervisor(dc))
4525 goto priv_insn;
4526 if (gen_trap_ifnofpu(dc, cpu_cond))
4527 goto jmp_insn;
4528 goto nfq_insn;
4529 #endif
4530 #endif
4531 case 0x27: /* stdf, store double fpreg */
4533 TCGv_i32 r_const;
4535 gen_op_load_fpr_DT0(DFPREG(rd));
4536 r_const = tcg_const_i32(dc->mem_idx);
4537 gen_helper_stdf(cpu_addr, r_const);
4538 tcg_temp_free_i32(r_const);
4540 break;
4541 default:
4542 goto illegal_insn;
4544 } else if (xop > 0x33 && xop < 0x3f) {
4545 save_state(dc, cpu_cond);
4546 switch (xop) {
4547 #ifdef TARGET_SPARC64
4548 case 0x34: /* V9 stfa */
4549 gen_stf_asi(cpu_addr, insn, 4, rd);
4550 break;
4551 case 0x36: /* V9 stqfa */
4553 TCGv_i32 r_const;
4555 CHECK_FPU_FEATURE(dc, FLOAT128);
4556 r_const = tcg_const_i32(7);
4557 gen_helper_check_align(cpu_addr, r_const);
4558 tcg_temp_free_i32(r_const);
4559 gen_op_load_fpr_QT0(QFPREG(rd));
4560 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4562 break;
4563 case 0x37: /* V9 stdfa */
4564 gen_op_load_fpr_DT0(DFPREG(rd));
4565 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4566 break;
4567 case 0x3c: /* V9 casa */
4568 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4569 gen_movl_TN_reg(rd, cpu_val);
4570 break;
4571 case 0x3e: /* V9 casxa */
4572 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4573 gen_movl_TN_reg(rd, cpu_val);
4574 break;
4575 #else
4576 case 0x34: /* stc */
4577 case 0x35: /* stcsr */
4578 case 0x36: /* stdcq */
4579 case 0x37: /* stdc */
4580 goto ncp_insn;
4581 #endif
4582 default:
4583 goto illegal_insn;
4585 } else
4586 goto illegal_insn;
4588 break;
4590 /* default case for non jump instructions */
4591 if (dc->npc == DYNAMIC_PC) {
4592 dc->pc = DYNAMIC_PC;
4593 gen_op_next_insn();
4594 } else if (dc->npc == JUMP_PC) {
4595 /* we can do a static jump */
4596 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4597 dc->is_br = 1;
4598 } else {
4599 dc->pc = dc->npc;
4600 dc->npc = dc->npc + 4;
4602 jmp_insn:
4603 goto egress;
4604 illegal_insn:
4606 TCGv_i32 r_const;
4608 save_state(dc, cpu_cond);
4609 r_const = tcg_const_i32(TT_ILL_INSN);
4610 gen_helper_raise_exception(r_const);
4611 tcg_temp_free_i32(r_const);
4612 dc->is_br = 1;
4614 goto egress;
4615 unimp_flush:
4617 TCGv_i32 r_const;
4619 save_state(dc, cpu_cond);
4620 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4621 gen_helper_raise_exception(r_const);
4622 tcg_temp_free_i32(r_const);
4623 dc->is_br = 1;
4625 goto egress;
4626 #if !defined(CONFIG_USER_ONLY)
4627 priv_insn:
4629 TCGv_i32 r_const;
4631 save_state(dc, cpu_cond);
4632 r_const = tcg_const_i32(TT_PRIV_INSN);
4633 gen_helper_raise_exception(r_const);
4634 tcg_temp_free_i32(r_const);
4635 dc->is_br = 1;
4637 goto egress;
4638 #endif
4639 nfpu_insn:
4640 save_state(dc, cpu_cond);
4641 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4642 dc->is_br = 1;
4643 goto egress;
4644 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4645 nfq_insn:
4646 save_state(dc, cpu_cond);
4647 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4648 dc->is_br = 1;
4649 goto egress;
4650 #endif
4651 #ifndef TARGET_SPARC64
4652 ncp_insn:
4654 TCGv r_const;
4656 save_state(dc, cpu_cond);
4657 r_const = tcg_const_i32(TT_NCP_INSN);
4658 gen_helper_raise_exception(r_const);
4659 tcg_temp_free(r_const);
4660 dc->is_br = 1;
4662 goto egress;
4663 #endif
4664 egress:
4665 tcg_temp_free(cpu_tmp1);
4666 tcg_temp_free(cpu_tmp2);
/*
 * Translate a block of guest SPARC code, starting at tb->pc, into TCG ops.
 *
 * tb:  the TranslationBlock being filled in (size/icount set on exit).
 * spc: non-zero selects "search PC" mode, which records per-instruction
 *      pc/npc/icount entries in the gen_opc_* side tables so that
 *      gen_pc_load() can later map an op index back to guest CPU state.
 * env: CPU whose configuration (MMU index, FPU enable, PSTATE.AM on V9)
 *      governs decoding.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* On SPARC the TB's cs_base carries the guest NPC (delay-slot state). */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    /* FP insns decode only when the CPU model has an FPU and it is enabled. */
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM forces 32-bit address masking on V9. */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    /* Per-TB instruction temporaries; all freed at exit_gen_loop below. */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();
    /* Local temps: unlike plain temps they keep their value across TCG
       branches/labels generated within a single instruction. */
    cpu_dst = tcg_temp_local_new();
    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();
    num_insns = 0;
    /* cflags count of 0 means "no instruction-count limit requested". */
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Stop at a debugger breakpoint: sync state and raise the debug
           exception instead of translating further. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
        /* Search-PC mode: record pc/npc/icount for each generated insn. */
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        /* Decode and translate one guest instruction. */
        disas_sparc_insn(dc);
        num_insns++;
        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
 exit_gen_loop:
    /* Release the per-TB temporaries allocated above. */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* If translation did not end on a branch, emit the TB epilogue. */
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the remainder of the opc->pc table and export the two
           possible conditional-jump targets for gen_pc_load(). */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
#endif
4812 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4814 gen_intermediate_code_internal(tb, 0, env);
4817 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4819 gen_intermediate_code_internal(tb, 1, env);
/*
 * One-time registration of the fixed TCG globals used by the translator.
 * Each tcg_global_mem_new* call binds a named TCG value to a CPUState
 * field so generated code can access it directly; the 'inited' guard
 * makes repeated calls (e.g. one per created CPU) a no-op after the first.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    /* init various static tables */
    if (!inited) {
        inited = 1;
        /* env lives in a fixed host register (TCG_AREG0). */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* V9-only architectural state. */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* V8-only: window invalid mask register. */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* No global for g0 (gregnames[0] is NULL): per the "g0 not used"
           convention above, the loop starts at 1. */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);
        /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
4930 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4931 unsigned long searched_pc, int pc_pos, void *puc)
4933 target_ulong npc;
4934 env->pc = gen_opc_pc[pc_pos];
4935 npc = gen_opc_npc[pc_pos];
4936 if (npc == 1) {
4937 /* dynamic NPC: already stored */
4938 } else if (npc == 2) {
4939 /* jump PC: use 'cond' and the jump targets of the translation */
4940 if (env->cond) {
4941 env->npc = gen_opc_jump_pc[0];
4942 } else {
4943 env->npc = gen_opc_jump_pc[1];
4945 } else {
4946 env->npc = npc;
4949 /* flush pending conditional evaluations before exposing cpu state */
4950 if (CC_OP != CC_OP_FLAGS) {
4951 helper_compute_psr();