virtio-blk: add SG_IO passthru support
[qemu-kvm/fedora.git] / target-sparc / translate.c
blob86319a704bf4ab6cfa2442b052ef0441ff0e60ac
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 struct TranslationBlock *tb;
80 sparc_def_t *def;
81 } DisasContext;
83 // This function uses non-native bit order
84 #define GET_FIELD(X, FROM, TO) \
85 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
87 // This function uses the order in the manuals, i.e. bit 0 is 2^0
88 #define GET_FIELD_SP(X, FROM, TO) \
89 GET_FIELD(X, 31 - (TO), 31 - (FROM))
91 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
92 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
94 #ifdef TARGET_SPARC64
95 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
96 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
97 #else
98 #define DFPREG(r) (r & 0x1e)
99 #define QFPREG(r) (r & 0x1c)
100 #endif
102 #define UA2005_HTRAP_MASK 0xff
103 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low `len` bits of x to a full 32-bit int.
 *
 * The shift is performed on the unsigned representation: left-shifting
 * a negative signed value is undefined behaviour in C (C99 6.5.7), so
 * `(x << len) >> len` on a plain int is not portable. The final right
 * shift of a signed value is implementation-defined but arithmetic on
 * all supported hosts, matching the original intent.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((unsigned int)x << len)) >> len;
}
111 #define IS_IMM (insn & (1<<13))
113 /* floating point registers moves */
114 static void gen_op_load_fpr_DT0(unsigned int src)
116 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
117 offsetof(CPU_DoubleU, l.upper));
118 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
119 offsetof(CPU_DoubleU, l.lower));
122 static void gen_op_load_fpr_DT1(unsigned int src)
124 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
125 offsetof(CPU_DoubleU, l.upper));
126 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
127 offsetof(CPU_DoubleU, l.lower));
130 static void gen_op_store_DT0_fpr(unsigned int dst)
132 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
133 offsetof(CPU_DoubleU, l.upper));
134 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
135 offsetof(CPU_DoubleU, l.lower));
138 static void gen_op_load_fpr_QT0(unsigned int src)
140 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
141 offsetof(CPU_QuadU, l.upmost));
142 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
143 offsetof(CPU_QuadU, l.upper));
144 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
145 offsetof(CPU_QuadU, l.lower));
146 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.lowest));
150 static void gen_op_load_fpr_QT1(unsigned int src)
152 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
153 offsetof(CPU_QuadU, l.upmost));
154 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
155 offsetof(CPU_QuadU, l.upper));
156 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
157 offsetof(CPU_QuadU, l.lower));
158 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.lowest));
162 static void gen_op_store_QT0_fpr(unsigned int dst)
164 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
165 offsetof(CPU_QuadU, l.upmost));
166 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
167 offsetof(CPU_QuadU, l.upper));
168 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
169 offsetof(CPU_QuadU, l.lower));
170 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.lowest));
174 /* moves */
175 #ifdef CONFIG_USER_ONLY
176 #define supervisor(dc) 0
177 #ifdef TARGET_SPARC64
178 #define hypervisor(dc) 0
179 #endif
180 #else
181 #define supervisor(dc) (dc->mem_idx >= 1)
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) (dc->mem_idx == 2)
184 #else
185 #endif
186 #endif
188 #ifdef TARGET_SPARC64
189 #ifndef TARGET_ABI32
190 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
191 #else
192 #define AM_CHECK(dc) (1)
193 #endif
194 #endif
196 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 #ifdef TARGET_SPARC64
199 if (AM_CHECK(dc))
200 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
201 #endif
204 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 if (reg == 0)
207 tcg_gen_movi_tl(tn, 0);
208 else if (reg < 8)
209 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
210 else {
211 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
215 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 if (reg == 0)
218 return;
219 else if (reg < 8)
220 tcg_gen_mov_tl(cpu_gregs[reg], tn);
221 else {
222 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
226 static inline void gen_goto_tb(DisasContext *s, int tb_num,
227 target_ulong pc, target_ulong npc)
229 TranslationBlock *tb;
231 tb = s->tb;
232 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
233 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
234 /* jump to same page: we can use a direct jump */
235 tcg_gen_goto_tb(tb_num);
236 tcg_gen_movi_tl(cpu_pc, pc);
237 tcg_gen_movi_tl(cpu_npc, npc);
238 tcg_gen_exit_tb((long)tb + tb_num);
239 } else {
240 /* jump to another page: currently not optimized */
241 tcg_gen_movi_tl(cpu_pc, pc);
242 tcg_gen_movi_tl(cpu_npc, npc);
243 tcg_gen_exit_tb(0);
247 // XXX suboptimal
248 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 tcg_gen_extu_i32_tl(reg, src);
251 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
252 tcg_gen_andi_tl(reg, reg, 0x1);
255 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 tcg_gen_extu_i32_tl(reg, src);
258 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
259 tcg_gen_andi_tl(reg, reg, 0x1);
262 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 tcg_gen_extu_i32_tl(reg, src);
265 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
266 tcg_gen_andi_tl(reg, reg, 0x1);
269 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 tcg_gen_extu_i32_tl(reg, src);
272 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
273 tcg_gen_andi_tl(reg, reg, 0x1);
276 static inline void gen_cc_clear_icc(void)
278 tcg_gen_movi_i32(cpu_psr, 0);
281 #ifdef TARGET_SPARC64
282 static inline void gen_cc_clear_xcc(void)
284 tcg_gen_movi_i32(cpu_xcc, 0);
286 #endif
288 /* old op:
289 if (!T0)
290 env->psr |= PSR_ZERO;
291 if ((int32_t) T0 < 0)
292 env->psr |= PSR_NEG;
294 static inline void gen_cc_NZ_icc(TCGv dst)
296 TCGv r_temp;
297 int l1, l2;
299 l1 = gen_new_label();
300 l2 = gen_new_label();
301 r_temp = tcg_temp_new();
302 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
303 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
304 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
305 gen_set_label(l1);
306 tcg_gen_ext32s_tl(r_temp, dst);
307 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
308 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
309 gen_set_label(l2);
310 tcg_temp_free(r_temp);
313 #ifdef TARGET_SPARC64
314 static inline void gen_cc_NZ_xcc(TCGv dst)
316 int l1, l2;
318 l1 = gen_new_label();
319 l2 = gen_new_label();
320 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
321 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
322 gen_set_label(l1);
323 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
324 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
325 gen_set_label(l2);
327 #endif
329 /* old op:
330 if (T0 < src1)
331 env->psr |= PSR_CARRY;
333 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 TCGv r_temp1, r_temp2;
336 int l1;
338 l1 = gen_new_label();
339 r_temp1 = tcg_temp_new();
340 r_temp2 = tcg_temp_new();
341 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
342 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
343 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
344 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
345 gen_set_label(l1);
346 tcg_temp_free(r_temp1);
347 tcg_temp_free(r_temp2);
350 #ifdef TARGET_SPARC64
351 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 int l1;
355 l1 = gen_new_label();
356 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
357 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
358 gen_set_label(l1);
360 #endif
362 /* old op:
363 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
364 env->psr |= PSR_OVF;
366 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 TCGv r_temp;
370 r_temp = tcg_temp_new();
371 tcg_gen_xor_tl(r_temp, src1, src2);
372 tcg_gen_not_tl(r_temp, r_temp);
373 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
374 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
375 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
376 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
377 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
378 tcg_temp_free(r_temp);
379 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
382 #ifdef TARGET_SPARC64
383 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 TCGv r_temp;
387 r_temp = tcg_temp_new();
388 tcg_gen_xor_tl(r_temp, src1, src2);
389 tcg_gen_not_tl(r_temp, r_temp);
390 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
391 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
392 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
393 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
394 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
395 tcg_temp_free(r_temp);
396 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 #endif
400 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 TCGv r_temp;
403 TCGv_i32 r_const;
404 int l1;
406 l1 = gen_new_label();
408 r_temp = tcg_temp_new();
409 tcg_gen_xor_tl(r_temp, src1, src2);
410 tcg_gen_not_tl(r_temp, r_temp);
411 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
412 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
413 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
414 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
415 r_const = tcg_const_i32(TT_TOVF);
416 gen_helper_raise_exception(r_const);
417 tcg_temp_free_i32(r_const);
418 gen_set_label(l1);
419 tcg_temp_free(r_temp);
422 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424 int l1;
426 l1 = gen_new_label();
427 tcg_gen_or_tl(cpu_tmp0, src1, src2);
428 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
429 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
430 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
431 gen_set_label(l1);
434 static inline void gen_tag_tv(TCGv src1, TCGv src2)
436 int l1;
437 TCGv_i32 r_const;
439 l1 = gen_new_label();
440 tcg_gen_or_tl(cpu_tmp0, src1, src2);
441 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
442 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
443 r_const = tcg_const_i32(TT_TOVF);
444 gen_helper_raise_exception(r_const);
445 tcg_temp_free_i32(r_const);
446 gen_set_label(l1);
449 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
451 tcg_gen_mov_tl(cpu_cc_src, src1);
452 tcg_gen_mov_tl(cpu_cc_src2, src2);
453 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
454 gen_cc_clear_icc();
455 gen_cc_NZ_icc(cpu_cc_dst);
456 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
457 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
458 #ifdef TARGET_SPARC64
459 gen_cc_clear_xcc();
460 gen_cc_NZ_xcc(cpu_cc_dst);
461 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
462 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
463 #endif
464 tcg_gen_mov_tl(dst, cpu_cc_dst);
467 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
469 tcg_gen_mov_tl(cpu_cc_src, src1);
470 tcg_gen_mov_tl(cpu_cc_src2, src2);
471 gen_mov_reg_C(cpu_tmp0, cpu_psr);
472 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
473 gen_cc_clear_icc();
474 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
475 #ifdef TARGET_SPARC64
476 gen_cc_clear_xcc();
477 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
478 #endif
479 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
480 gen_cc_NZ_icc(cpu_cc_dst);
481 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
482 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
483 #ifdef TARGET_SPARC64
484 gen_cc_NZ_xcc(cpu_cc_dst);
485 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
486 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
487 #endif
488 tcg_gen_mov_tl(dst, cpu_cc_dst);
491 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
493 tcg_gen_mov_tl(cpu_cc_src, src1);
494 tcg_gen_mov_tl(cpu_cc_src2, src2);
495 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
496 gen_cc_clear_icc();
497 gen_cc_NZ_icc(cpu_cc_dst);
498 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
499 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
500 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
501 #ifdef TARGET_SPARC64
502 gen_cc_clear_xcc();
503 gen_cc_NZ_xcc(cpu_cc_dst);
504 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
505 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
506 #endif
507 tcg_gen_mov_tl(dst, cpu_cc_dst);
510 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
512 tcg_gen_mov_tl(cpu_cc_src, src1);
513 tcg_gen_mov_tl(cpu_cc_src2, src2);
514 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
515 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
516 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
517 gen_cc_clear_icc();
518 gen_cc_NZ_icc(cpu_cc_dst);
519 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
520 #ifdef TARGET_SPARC64
521 gen_cc_clear_xcc();
522 gen_cc_NZ_xcc(cpu_cc_dst);
523 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
524 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
525 #endif
526 tcg_gen_mov_tl(dst, cpu_cc_dst);
529 /* old op:
530 if (src1 < T1)
531 env->psr |= PSR_CARRY;
533 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
535 TCGv r_temp1, r_temp2;
536 int l1;
538 l1 = gen_new_label();
539 r_temp1 = tcg_temp_new();
540 r_temp2 = tcg_temp_new();
541 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
542 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
543 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
544 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
545 gen_set_label(l1);
546 tcg_temp_free(r_temp1);
547 tcg_temp_free(r_temp2);
550 #ifdef TARGET_SPARC64
551 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
553 int l1;
555 l1 = gen_new_label();
556 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
557 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
558 gen_set_label(l1);
560 #endif
562 /* old op:
563 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
564 env->psr |= PSR_OVF;
566 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
568 TCGv r_temp;
570 r_temp = tcg_temp_new();
571 tcg_gen_xor_tl(r_temp, src1, src2);
572 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
573 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
574 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
575 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
576 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
577 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
578 tcg_temp_free(r_temp);
581 #ifdef TARGET_SPARC64
582 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
584 TCGv r_temp;
586 r_temp = tcg_temp_new();
587 tcg_gen_xor_tl(r_temp, src1, src2);
588 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
589 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
590 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
591 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
592 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
593 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
594 tcg_temp_free(r_temp);
596 #endif
598 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
600 TCGv r_temp;
601 TCGv_i32 r_const;
602 int l1;
604 l1 = gen_new_label();
606 r_temp = tcg_temp_new();
607 tcg_gen_xor_tl(r_temp, src1, src2);
608 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
609 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
610 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
611 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
612 r_const = tcg_const_i32(TT_TOVF);
613 gen_helper_raise_exception(r_const);
614 tcg_temp_free_i32(r_const);
615 gen_set_label(l1);
616 tcg_temp_free(r_temp);
619 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
621 tcg_gen_mov_tl(cpu_cc_src, src1);
622 tcg_gen_mov_tl(cpu_cc_src2, src2);
623 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
624 gen_cc_clear_icc();
625 gen_cc_NZ_icc(cpu_cc_dst);
626 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
627 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
628 #ifdef TARGET_SPARC64
629 gen_cc_clear_xcc();
630 gen_cc_NZ_xcc(cpu_cc_dst);
631 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
632 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
633 #endif
634 tcg_gen_mov_tl(dst, cpu_cc_dst);
637 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
639 tcg_gen_mov_tl(cpu_cc_src, src1);
640 tcg_gen_mov_tl(cpu_cc_src2, src2);
641 gen_mov_reg_C(cpu_tmp0, cpu_psr);
642 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
643 gen_cc_clear_icc();
644 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
645 #ifdef TARGET_SPARC64
646 gen_cc_clear_xcc();
647 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
648 #endif
649 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
650 gen_cc_NZ_icc(cpu_cc_dst);
651 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
652 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
653 #ifdef TARGET_SPARC64
654 gen_cc_NZ_xcc(cpu_cc_dst);
655 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
656 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 #endif
658 tcg_gen_mov_tl(dst, cpu_cc_dst);
661 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
663 tcg_gen_mov_tl(cpu_cc_src, src1);
664 tcg_gen_mov_tl(cpu_cc_src2, src2);
665 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666 gen_cc_clear_icc();
667 gen_cc_NZ_icc(cpu_cc_dst);
668 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
669 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
670 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
671 #ifdef TARGET_SPARC64
672 gen_cc_clear_xcc();
673 gen_cc_NZ_xcc(cpu_cc_dst);
674 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
675 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
676 #endif
677 tcg_gen_mov_tl(dst, cpu_cc_dst);
680 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
682 tcg_gen_mov_tl(cpu_cc_src, src1);
683 tcg_gen_mov_tl(cpu_cc_src2, src2);
684 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
685 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
686 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
687 gen_cc_clear_icc();
688 gen_cc_NZ_icc(cpu_cc_dst);
689 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
690 #ifdef TARGET_SPARC64
691 gen_cc_clear_xcc();
692 gen_cc_NZ_xcc(cpu_cc_dst);
693 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
694 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
695 #endif
696 tcg_gen_mov_tl(dst, cpu_cc_dst);
699 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
701 TCGv r_temp;
702 int l1;
704 l1 = gen_new_label();
705 r_temp = tcg_temp_new();
707 /* old op:
708 if (!(env->y & 1))
709 T1 = 0;
711 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
712 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
713 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
714 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
715 tcg_gen_movi_tl(cpu_cc_src2, 0);
716 gen_set_label(l1);
718 // b2 = T0 & 1;
719 // env->y = (b2 << 31) | (env->y >> 1);
720 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
721 tcg_gen_shli_tl(r_temp, r_temp, 31);
722 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
723 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
724 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
725 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
727 // b1 = N ^ V;
728 gen_mov_reg_N(cpu_tmp0, cpu_psr);
729 gen_mov_reg_V(r_temp, cpu_psr);
730 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
731 tcg_temp_free(r_temp);
733 // T0 = (b1 << 31) | (T0 >> 1);
734 // src1 = T0;
735 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
736 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
737 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
739 /* do addition and update flags */
740 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
742 gen_cc_clear_icc();
743 gen_cc_NZ_icc(cpu_cc_dst);
744 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
745 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
746 tcg_gen_mov_tl(dst, cpu_cc_dst);
749 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
751 TCGv_i64 r_temp, r_temp2;
753 r_temp = tcg_temp_new_i64();
754 r_temp2 = tcg_temp_new_i64();
756 tcg_gen_extu_tl_i64(r_temp, src2);
757 tcg_gen_extu_tl_i64(r_temp2, src1);
758 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
760 tcg_gen_shri_i64(r_temp, r_temp2, 32);
761 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
762 tcg_temp_free_i64(r_temp);
763 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
764 #ifdef TARGET_SPARC64
765 tcg_gen_mov_i64(dst, r_temp2);
766 #else
767 tcg_gen_trunc_i64_tl(dst, r_temp2);
768 #endif
769 tcg_temp_free_i64(r_temp2);
772 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
774 TCGv_i64 r_temp, r_temp2;
776 r_temp = tcg_temp_new_i64();
777 r_temp2 = tcg_temp_new_i64();
779 tcg_gen_ext_tl_i64(r_temp, src2);
780 tcg_gen_ext_tl_i64(r_temp2, src1);
781 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
783 tcg_gen_shri_i64(r_temp, r_temp2, 32);
784 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
785 tcg_temp_free_i64(r_temp);
786 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
787 #ifdef TARGET_SPARC64
788 tcg_gen_mov_i64(dst, r_temp2);
789 #else
790 tcg_gen_trunc_i64_tl(dst, r_temp2);
791 #endif
792 tcg_temp_free_i64(r_temp2);
795 #ifdef TARGET_SPARC64
796 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
798 TCGv_i32 r_const;
799 int l1;
801 l1 = gen_new_label();
802 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
803 r_const = tcg_const_i32(TT_DIV_ZERO);
804 gen_helper_raise_exception(r_const);
805 tcg_temp_free_i32(r_const);
806 gen_set_label(l1);
809 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
811 int l1, l2;
813 l1 = gen_new_label();
814 l2 = gen_new_label();
815 tcg_gen_mov_tl(cpu_cc_src, src1);
816 tcg_gen_mov_tl(cpu_cc_src2, src2);
817 gen_trap_ifdivzero_tl(cpu_cc_src2);
818 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
819 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
820 tcg_gen_movi_i64(dst, INT64_MIN);
821 tcg_gen_br(l2);
822 gen_set_label(l1);
823 tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
824 gen_set_label(l2);
826 #endif
828 static inline void gen_op_div_cc(TCGv dst)
830 int l1;
832 tcg_gen_mov_tl(cpu_cc_dst, dst);
833 gen_cc_clear_icc();
834 gen_cc_NZ_icc(cpu_cc_dst);
835 l1 = gen_new_label();
836 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
837 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
838 gen_set_label(l1);
841 static inline void gen_op_logic_cc(TCGv dst)
843 tcg_gen_mov_tl(cpu_cc_dst, dst);
845 gen_cc_clear_icc();
846 gen_cc_NZ_icc(cpu_cc_dst);
847 #ifdef TARGET_SPARC64
848 gen_cc_clear_xcc();
849 gen_cc_NZ_xcc(cpu_cc_dst);
850 #endif
853 // 1
854 static inline void gen_op_eval_ba(TCGv dst)
856 tcg_gen_movi_tl(dst, 1);
859 // Z
860 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
862 gen_mov_reg_Z(dst, src);
865 // Z | (N ^ V)
866 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
868 gen_mov_reg_N(cpu_tmp0, src);
869 gen_mov_reg_V(dst, src);
870 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
871 gen_mov_reg_Z(cpu_tmp0, src);
872 tcg_gen_or_tl(dst, dst, cpu_tmp0);
875 // N ^ V
876 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
878 gen_mov_reg_V(cpu_tmp0, src);
879 gen_mov_reg_N(dst, src);
880 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
883 // C | Z
884 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
886 gen_mov_reg_Z(cpu_tmp0, src);
887 gen_mov_reg_C(dst, src);
888 tcg_gen_or_tl(dst, dst, cpu_tmp0);
891 // C
892 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
894 gen_mov_reg_C(dst, src);
897 // V
898 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
900 gen_mov_reg_V(dst, src);
903 // 0
904 static inline void gen_op_eval_bn(TCGv dst)
906 tcg_gen_movi_tl(dst, 0);
909 // N
910 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
912 gen_mov_reg_N(dst, src);
915 // !Z
916 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
918 gen_mov_reg_Z(dst, src);
919 tcg_gen_xori_tl(dst, dst, 0x1);
922 // !(Z | (N ^ V))
923 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
925 gen_mov_reg_N(cpu_tmp0, src);
926 gen_mov_reg_V(dst, src);
927 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
928 gen_mov_reg_Z(cpu_tmp0, src);
929 tcg_gen_or_tl(dst, dst, cpu_tmp0);
930 tcg_gen_xori_tl(dst, dst, 0x1);
933 // !(N ^ V)
934 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
936 gen_mov_reg_V(cpu_tmp0, src);
937 gen_mov_reg_N(dst, src);
938 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
939 tcg_gen_xori_tl(dst, dst, 0x1);
942 // !(C | Z)
943 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
945 gen_mov_reg_Z(cpu_tmp0, src);
946 gen_mov_reg_C(dst, src);
947 tcg_gen_or_tl(dst, dst, cpu_tmp0);
948 tcg_gen_xori_tl(dst, dst, 0x1);
951 // !C
952 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
954 gen_mov_reg_C(dst, src);
955 tcg_gen_xori_tl(dst, dst, 0x1);
958 // !N
959 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
961 gen_mov_reg_N(dst, src);
962 tcg_gen_xori_tl(dst, dst, 0x1);
965 // !V
966 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
968 gen_mov_reg_V(dst, src);
969 tcg_gen_xori_tl(dst, dst, 0x1);
973 FPSR bit field FCC1 | FCC0:
977 3 unordered
979 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
980 unsigned int fcc_offset)
982 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
983 tcg_gen_andi_tl(reg, reg, 0x1);
986 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
987 unsigned int fcc_offset)
989 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
990 tcg_gen_andi_tl(reg, reg, 0x1);
993 // !0: FCC0 | FCC1
994 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
995 unsigned int fcc_offset)
997 gen_mov_reg_FCC0(dst, src, fcc_offset);
998 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
999 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1002 // 1 or 2: FCC0 ^ FCC1
1003 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1004 unsigned int fcc_offset)
1006 gen_mov_reg_FCC0(dst, src, fcc_offset);
1007 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1008 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1011 // 1 or 3: FCC0
1012 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1013 unsigned int fcc_offset)
1015 gen_mov_reg_FCC0(dst, src, fcc_offset);
1018 // 1: FCC0 & !FCC1
1019 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1020 unsigned int fcc_offset)
1022 gen_mov_reg_FCC0(dst, src, fcc_offset);
1023 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1024 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1025 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1028 // 2 or 3: FCC1
1029 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1030 unsigned int fcc_offset)
1032 gen_mov_reg_FCC1(dst, src, fcc_offset);
1035 // 2: !FCC0 & FCC1
1036 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1037 unsigned int fcc_offset)
1039 gen_mov_reg_FCC0(dst, src, fcc_offset);
1040 tcg_gen_xori_tl(dst, dst, 0x1);
1041 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1042 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1045 // 3: FCC0 & FCC1
1046 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1047 unsigned int fcc_offset)
1049 gen_mov_reg_FCC0(dst, src, fcc_offset);
1050 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1051 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1054 // 0: !(FCC0 | FCC1)
1055 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1056 unsigned int fcc_offset)
1058 gen_mov_reg_FCC0(dst, src, fcc_offset);
1059 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1060 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1061 tcg_gen_xori_tl(dst, dst, 0x1);
1064 // 0 or 3: !(FCC0 ^ FCC1)
1065 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1066 unsigned int fcc_offset)
1068 gen_mov_reg_FCC0(dst, src, fcc_offset);
1069 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1070 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1071 tcg_gen_xori_tl(dst, dst, 0x1);
1074 // 0 or 2: !FCC0
1075 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1076 unsigned int fcc_offset)
1078 gen_mov_reg_FCC0(dst, src, fcc_offset);
1079 tcg_gen_xori_tl(dst, dst, 0x1);
1082 // !1: !(FCC0 & !FCC1)
1083 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1084 unsigned int fcc_offset)
1086 gen_mov_reg_FCC0(dst, src, fcc_offset);
1087 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1088 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1089 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1090 tcg_gen_xori_tl(dst, dst, 0x1);
1093 // 0 or 1: !FCC1
1094 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1095 unsigned int fcc_offset)
1097 gen_mov_reg_FCC1(dst, src, fcc_offset);
1098 tcg_gen_xori_tl(dst, dst, 0x1);
1101 // !2: !(!FCC0 & FCC1)
1102 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1103 unsigned int fcc_offset)
1105 gen_mov_reg_FCC0(dst, src, fcc_offset);
1106 tcg_gen_xori_tl(dst, dst, 0x1);
1107 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1108 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1109 tcg_gen_xori_tl(dst, dst, 0x1);
1112 // !3: !(FCC0 & FCC1)
1113 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1114 unsigned int fcc_offset)
1116 gen_mov_reg_FCC0(dst, src, fcc_offset);
1117 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1118 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1119 tcg_gen_xori_tl(dst, dst, 0x1);
1122 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1123 target_ulong pc2, TCGv r_cond)
1125 int l1;
1127 l1 = gen_new_label();
1129 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1131 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1133 gen_set_label(l1);
1134 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1137 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1138 target_ulong pc2, TCGv r_cond)
1140 int l1;
1142 l1 = gen_new_label();
1144 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1146 gen_goto_tb(dc, 0, pc2, pc1);
1148 gen_set_label(l1);
1149 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
/* Materialize a still-pending conditional npc at runtime: cpu_npc gets
   npc1 if r_cond is non-zero, npc2 otherwise.  Used to collapse the
   translation-time JUMP_PC state down to DYNAMIC_PC. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        /* resolve the pending two-way npc before cond is clobbered */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
/* Flush the translation-time npc into cpu_npc (resolving a pending
   JUMP_PC through cond first) so exception paths see a valid npc. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        /* static npc: store the known constant */
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
/* Commit both pc and npc to the CPU state; call before emitting
   anything that can raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
/* pc <- npc, as every delay-slot transfer does.  Static npc just
   advances the translator; the dynamic cases copy at runtime. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
/* Fall through to the next instruction: pc <- npc, npc <- npc + 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
/* Evaluate a Bicc/BPcc condition field: set r_dst to the 0/1 truth of
   COND against %icc (cc == 0) or, on sparc64, %xcc (cc != 0). */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* all 16 encodings of the 4-bit cond field are covered, so no
       default case is needed */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
/* Evaluate an FBfcc/FBPfcc condition: set r_dst to the 0/1 truth of
   COND against fcc[cc].  The offset selects which FCC field of the
   FSR the gen_op_eval_fb* helpers extract (fcc1-3 live in the upper
   word on sparc64, hence the "N - 10" bit positions). */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* all 16 encodings of the 4-bit cond field are covered */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1352 #ifdef TARGET_SPARC64
1353 // Inverted logic
1354 static const int gen_tcg_cond_reg[8] = {
1356 TCG_COND_NE,
1357 TCG_COND_GT,
1358 TCG_COND_GE,
1360 TCG_COND_EQ,
1361 TCG_COND_LE,
1362 TCG_COND_LT,
/* r_dst <- truth of (r_src rcond 0) for BPr-style conditions.  The
   table holds the inverted condition: taking the brcond skips the
   movi 1, leaving the initial 0 when the real condition is false. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
/* Translate Bicc/BPcc.  cond 0 (bn) and 8 (ba) are resolved at
   translate time — the 'a' (annul) bit then decides whether the delay
   slot runs; every other cond emits a runtime test and parks the two
   candidate npcs in jump_pc[] under the JUMP_PC marker. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot is skipped as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: jump without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
/* XXX: potentially incorrect if dynamic npc */
/* Translate FBfcc/FBPfcc — identical structure to do_branch() but the
   condition comes from the FSR fcc fields via gen_fcond(). */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate v9 BPr (branch on register contents).  BPr has no
   unconditional encodings, so the runtime test is always emitted. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
/* FP compare dispatchers (sparc64): each of the four %fcc fields has a
   dedicated helper; fccno (a 2-bit field, so 0-3 is exhaustive) picks
   it.  Double/quad operands travel through the DT/QT staging registers,
   hence the argument-less helpers.  The fcmpE* variants are presumably
   the signaling compares (trap on unordered) — NOTE(review): confirm
   against the helper implementations. */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
#else

/* Pre-v9 targets have a single set of FP condition codes in the FSR,
   so fccno is ignored and every compare funnels to the one helper. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
#endif
/* Record fsr_flags in the FSR's FTT field and raise a TT_FP_EXCP trap. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
/* If the FPU is disabled (system emulation only — user mode always has
   it enabled), raise TT_NFPU_INSN, end the TB, and return 1 so the
   caller skips translating the instruction. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* commit pc/npc before raising the exception */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
/* Clear the current IEEE exception bits and the FTT field in the FSR. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Reset the accumulated softfloat exception flags before an FP op. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1657 /* asi moves */
1658 #ifdef TARGET_SPARC64
/* Return the ASI for a v9 alternate-space access: the %asi register
   for the immediate addressing form, otherwise the instruction's
   8-bit ASI field as a constant.  Caller frees the returned temp. */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
/* Load SIZE bytes (sign-extended when SIGN) from the alternate space
   into dst; the helper performs the actual access. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* Store SIZE bytes of src to the alternate space at addr. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* FP load from alternate space; the helper writes FP register rd
   directly, so only the register number is passed down. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* FP store to alternate space; the helper reads FP register rd itself. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* swapa: exchange dst with the 32-bit word at addr in the alternate
   space, emulated as a load followed by a store (not atomic here). */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    /* old memory value is parked in cpu_tmp64 until after the store */
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
/* ldda from alternate space: the helper loads the doubleword and
   writes the rd/rd+1 register pair itself (hi is unused here). */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
/* stda to alternate space: concatenate r(rd+1) (low half) with hi
   (r(rd), high half) into one 64-bit value and store it. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
/* casa: compare-and-swap word.  r(rd) is the compare value, val2 the
   swap value; the helper returns the old memory word in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
/* casxa: 64-bit compare-and-swap; compare value staged in cpu_tmp64. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1789 #elif !defined(CONFIG_USER_ONLY)
1791 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1792 int sign)
1794 TCGv_i32 r_asi, r_size, r_sign;
1796 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1797 r_size = tcg_const_i32(size);
1798 r_sign = tcg_const_i32(sign);
1799 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1800 tcg_temp_free(r_sign);
1801 tcg_temp_free(r_size);
1802 tcg_temp_free(r_asi);
1803 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1806 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1808 TCGv_i32 r_asi, r_size;
1810 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1811 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1812 r_size = tcg_const_i32(size);
1813 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1814 tcg_temp_free(r_size);
1815 tcg_temp_free(r_asi);
1818 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1820 TCGv_i32 r_asi, r_size, r_sign;
1821 TCGv_i64 r_val;
1823 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1824 r_size = tcg_const_i32(4);
1825 r_sign = tcg_const_i32(0);
1826 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1827 tcg_temp_free(r_sign);
1828 r_val = tcg_temp_new_i64();
1829 tcg_gen_extu_tl_i64(r_val, dst);
1830 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1831 tcg_temp_free_i64(r_val);
1832 tcg_temp_free(r_size);
1833 tcg_temp_free(r_asi);
1834 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1837 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1839 TCGv_i32 r_asi, r_size, r_sign;
1841 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1842 r_size = tcg_const_i32(8);
1843 r_sign = tcg_const_i32(0);
1844 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1845 tcg_temp_free(r_sign);
1846 tcg_temp_free(r_size);
1847 tcg_temp_free(r_asi);
1848 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1849 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1850 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1851 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1852 gen_movl_TN_reg(rd, hi);
1855 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1857 TCGv_i32 r_asi, r_size;
1859 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1860 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1861 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1862 r_size = tcg_const_i32(8);
1863 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1864 tcg_temp_free(r_size);
1865 tcg_temp_free(r_asi);
1867 #endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* ldstuba: load the byte at addr, then store 0xff to it (an atomic
   lock primitive on real hardware; emulated as load + store here).
   NOTE(review): the store always uses the instruction's immediate ASI
   field, while gen_ld_asi on sparc64 may resolve the register form via
   %asi — confirm the IS_IMM case cannot diverge between the two. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
/* Return a TCGv holding rs1 of insn: %g0 reads as a constant 0,
   %g1-%g7 come straight from the global-register TCGvs, and windowed
   registers are loaded through regwptr into the caller-supplied def. */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
/* Return a TCGv holding the second operand: the sign-extended simm13
   for the immediate form, otherwise rs2 with the same %g0 / global /
   windowed-register handling as get_src1(). */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
/* Abort translation of the current instruction when the CPU model
   lacks an integer-unit feature; relies on the "illegal_insn" label
   inside disas_sparc_insn(). */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Same for FPU features; jumps to the "nfpu_insn" label instead. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1929 /* before an instruction, dc->pc must be static */
1930 static void disas_sparc_insn(DisasContext * dc)
1932 unsigned int insn, opc, rs1, rs2, rd;
1934 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1935 tcg_gen_debug_insn_start(dc->pc);
1936 insn = ldl_code(dc->pc);
1937 opc = GET_FIELD(insn, 0, 1);
1939 rd = GET_FIELD(insn, 2, 6);
1941 cpu_src1 = tcg_temp_new(); // const
1942 cpu_src2 = tcg_temp_new(); // const
1944 switch (opc) {
1945 case 0: /* branches/sethi */
1947 unsigned int xop = GET_FIELD(insn, 7, 9);
1948 int32_t target;
1949 switch (xop) {
1950 #ifdef TARGET_SPARC64
1951 case 0x1: /* V9 BPcc */
1953 int cc;
1955 target = GET_FIELD_SP(insn, 0, 18);
1956 target = sign_extend(target, 18);
1957 target <<= 2;
1958 cc = GET_FIELD_SP(insn, 20, 21);
1959 if (cc == 0)
1960 do_branch(dc, target, insn, 0, cpu_cond);
1961 else if (cc == 2)
1962 do_branch(dc, target, insn, 1, cpu_cond);
1963 else
1964 goto illegal_insn;
1965 goto jmp_insn;
1967 case 0x3: /* V9 BPr */
1969 target = GET_FIELD_SP(insn, 0, 13) |
1970 (GET_FIELD_SP(insn, 20, 21) << 14);
1971 target = sign_extend(target, 16);
1972 target <<= 2;
1973 cpu_src1 = get_src1(insn, cpu_src1);
1974 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1975 goto jmp_insn;
1977 case 0x5: /* V9 FBPcc */
1979 int cc = GET_FIELD_SP(insn, 20, 21);
1980 if (gen_trap_ifnofpu(dc, cpu_cond))
1981 goto jmp_insn;
1982 target = GET_FIELD_SP(insn, 0, 18);
1983 target = sign_extend(target, 19);
1984 target <<= 2;
1985 do_fbranch(dc, target, insn, cc, cpu_cond);
1986 goto jmp_insn;
1988 #else
1989 case 0x7: /* CBN+x */
1991 goto ncp_insn;
1993 #endif
1994 case 0x2: /* BN+x */
1996 target = GET_FIELD(insn, 10, 31);
1997 target = sign_extend(target, 22);
1998 target <<= 2;
1999 do_branch(dc, target, insn, 0, cpu_cond);
2000 goto jmp_insn;
2002 case 0x6: /* FBN+x */
2004 if (gen_trap_ifnofpu(dc, cpu_cond))
2005 goto jmp_insn;
2006 target = GET_FIELD(insn, 10, 31);
2007 target = sign_extend(target, 22);
2008 target <<= 2;
2009 do_fbranch(dc, target, insn, 0, cpu_cond);
2010 goto jmp_insn;
2012 case 0x4: /* SETHI */
2013 if (rd) { // nop
2014 uint32_t value = GET_FIELD(insn, 10, 31);
2015 TCGv r_const;
2017 r_const = tcg_const_tl(value << 10);
2018 gen_movl_TN_reg(rd, r_const);
2019 tcg_temp_free(r_const);
2021 break;
2022 case 0x0: /* UNIMPL */
2023 default:
2024 goto illegal_insn;
2026 break;
2028 break;
2029 case 1:
2030 /*CALL*/ {
2031 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2032 TCGv r_const;
2034 r_const = tcg_const_tl(dc->pc);
2035 gen_movl_TN_reg(15, r_const);
2036 tcg_temp_free(r_const);
2037 target += dc->pc;
2038 gen_mov_pc_npc(dc, cpu_cond);
2039 dc->npc = target;
2041 goto jmp_insn;
2042 case 2: /* FPU & Logical Operations */
2044 unsigned int xop = GET_FIELD(insn, 7, 12);
2045 if (xop == 0x3a) { /* generate trap */
2046 int cond;
2048 cpu_src1 = get_src1(insn, cpu_src1);
2049 if (IS_IMM) {
2050 rs2 = GET_FIELD(insn, 25, 31);
2051 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2052 } else {
2053 rs2 = GET_FIELD(insn, 27, 31);
2054 if (rs2 != 0) {
2055 gen_movl_reg_TN(rs2, cpu_src2);
2056 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2057 } else
2058 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2060 cond = GET_FIELD(insn, 3, 6);
2061 if (cond == 0x8) {
2062 save_state(dc, cpu_cond);
2063 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2064 supervisor(dc))
2065 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2066 else
2067 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2068 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2069 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2070 gen_helper_raise_exception(cpu_tmp32);
2071 } else if (cond != 0) {
2072 TCGv r_cond = tcg_temp_new();
2073 int l1;
2074 #ifdef TARGET_SPARC64
2075 /* V9 icc/xcc */
2076 int cc = GET_FIELD_SP(insn, 11, 12);
2078 save_state(dc, cpu_cond);
2079 if (cc == 0)
2080 gen_cond(r_cond, 0, cond);
2081 else if (cc == 2)
2082 gen_cond(r_cond, 1, cond);
2083 else
2084 goto illegal_insn;
2085 #else
2086 save_state(dc, cpu_cond);
2087 gen_cond(r_cond, 0, cond);
2088 #endif
2089 l1 = gen_new_label();
2090 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2092 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2093 supervisor(dc))
2094 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2095 else
2096 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2097 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2098 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2099 gen_helper_raise_exception(cpu_tmp32);
2101 gen_set_label(l1);
2102 tcg_temp_free(r_cond);
2104 gen_op_next_insn();
2105 tcg_gen_exit_tb(0);
2106 dc->is_br = 1;
2107 goto jmp_insn;
2108 } else if (xop == 0x28) {
2109 rs1 = GET_FIELD(insn, 13, 17);
2110 switch(rs1) {
2111 case 0: /* rdy */
2112 #ifndef TARGET_SPARC64
2113 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2114 manual, rdy on the microSPARC
2115 II */
2116 case 0x0f: /* stbar in the SPARCv8 manual,
2117 rdy on the microSPARC II */
2118 case 0x10 ... 0x1f: /* implementation-dependent in the
2119 SPARCv8 manual, rdy on the
2120 microSPARC II */
2121 #endif
2122 gen_movl_TN_reg(rd, cpu_y);
2123 break;
2124 #ifdef TARGET_SPARC64
2125 case 0x2: /* V9 rdccr */
2126 gen_helper_rdccr(cpu_dst);
2127 gen_movl_TN_reg(rd, cpu_dst);
2128 break;
2129 case 0x3: /* V9 rdasi */
2130 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2131 gen_movl_TN_reg(rd, cpu_dst);
2132 break;
2133 case 0x4: /* V9 rdtick */
2135 TCGv_ptr r_tickptr;
2137 r_tickptr = tcg_temp_new_ptr();
2138 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2139 offsetof(CPUState, tick));
2140 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2141 tcg_temp_free_ptr(r_tickptr);
2142 gen_movl_TN_reg(rd, cpu_dst);
2144 break;
2145 case 0x5: /* V9 rdpc */
2147 TCGv r_const;
2149 r_const = tcg_const_tl(dc->pc);
2150 gen_movl_TN_reg(rd, r_const);
2151 tcg_temp_free(r_const);
2153 break;
2154 case 0x6: /* V9 rdfprs */
2155 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2156 gen_movl_TN_reg(rd, cpu_dst);
2157 break;
2158 case 0xf: /* V9 membar */
2159 break; /* no effect */
2160 case 0x13: /* Graphics Status */
2161 if (gen_trap_ifnofpu(dc, cpu_cond))
2162 goto jmp_insn;
2163 gen_movl_TN_reg(rd, cpu_gsr);
2164 break;
2165 case 0x16: /* Softint */
2166 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2167 gen_movl_TN_reg(rd, cpu_dst);
2168 break;
2169 case 0x17: /* Tick compare */
2170 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2171 break;
2172 case 0x18: /* System tick */
2174 TCGv_ptr r_tickptr;
2176 r_tickptr = tcg_temp_new_ptr();
2177 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2178 offsetof(CPUState, stick));
2179 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2180 tcg_temp_free_ptr(r_tickptr);
2181 gen_movl_TN_reg(rd, cpu_dst);
2183 break;
2184 case 0x19: /* System tick compare */
2185 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2186 break;
2187 case 0x10: /* Performance Control */
2188 case 0x11: /* Performance Instrumentation Counter */
2189 case 0x12: /* Dispatch Control */
2190 case 0x14: /* Softint set, WO */
2191 case 0x15: /* Softint clear, WO */
2192 #endif
2193 default:
2194 goto illegal_insn;
2196 #if !defined(CONFIG_USER_ONLY)
2197 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2198 #ifndef TARGET_SPARC64
2199 if (!supervisor(dc))
2200 goto priv_insn;
2201 gen_helper_rdpsr(cpu_dst);
2202 #else
2203 CHECK_IU_FEATURE(dc, HYPV);
2204 if (!hypervisor(dc))
2205 goto priv_insn;
2206 rs1 = GET_FIELD(insn, 13, 17);
2207 switch (rs1) {
2208 case 0: // hpstate
2209 // gen_op_rdhpstate();
2210 break;
2211 case 1: // htstate
2212 // gen_op_rdhtstate();
2213 break;
2214 case 3: // hintp
2215 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2216 break;
2217 case 5: // htba
2218 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2219 break;
2220 case 6: // hver
2221 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2222 break;
2223 case 31: // hstick_cmpr
2224 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2225 break;
2226 default:
2227 goto illegal_insn;
2229 #endif
2230 gen_movl_TN_reg(rd, cpu_dst);
2231 break;
2232 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2233 if (!supervisor(dc))
2234 goto priv_insn;
2235 #ifdef TARGET_SPARC64
2236 rs1 = GET_FIELD(insn, 13, 17);
2237 switch (rs1) {
2238 case 0: // tpc
2240 TCGv_ptr r_tsptr;
2242 r_tsptr = tcg_temp_new_ptr();
2243 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2244 offsetof(CPUState, tsptr));
2245 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2246 offsetof(trap_state, tpc));
2247 tcg_temp_free_ptr(r_tsptr);
2249 break;
2250 case 1: // tnpc
2252 TCGv_ptr r_tsptr;
2254 r_tsptr = tcg_temp_new_ptr();
2255 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2256 offsetof(CPUState, tsptr));
2257 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2258 offsetof(trap_state, tnpc));
2259 tcg_temp_free_ptr(r_tsptr);
2261 break;
2262 case 2: // tstate
2264 TCGv_ptr r_tsptr;
2266 r_tsptr = tcg_temp_new_ptr();
2267 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2268 offsetof(CPUState, tsptr));
2269 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2270 offsetof(trap_state, tstate));
2271 tcg_temp_free_ptr(r_tsptr);
2273 break;
2274 case 3: // tt
2276 TCGv_ptr r_tsptr;
2278 r_tsptr = tcg_temp_new_ptr();
2279 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2280 offsetof(CPUState, tsptr));
2281 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2282 offsetof(trap_state, tt));
2283 tcg_temp_free_ptr(r_tsptr);
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2286 break;
2287 case 4: // tick
2289 TCGv_ptr r_tickptr;
2291 r_tickptr = tcg_temp_new_ptr();
2292 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2293 offsetof(CPUState, tick));
2294 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2295 gen_movl_TN_reg(rd, cpu_tmp0);
2296 tcg_temp_free_ptr(r_tickptr);
2298 break;
2299 case 5: // tba
2300 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2301 break;
2302 case 6: // pstate
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, pstate));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2306 break;
2307 case 7: // tl
2308 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2309 offsetof(CPUSPARCState, tl));
2310 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2311 break;
2312 case 8: // pil
2313 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2314 offsetof(CPUSPARCState, psrpil));
2315 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2316 break;
2317 case 9: // cwp
2318 gen_helper_rdcwp(cpu_tmp0);
2319 break;
2320 case 10: // cansave
2321 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2322 offsetof(CPUSPARCState, cansave));
2323 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2324 break;
2325 case 11: // canrestore
2326 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2327 offsetof(CPUSPARCState, canrestore));
2328 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2329 break;
2330 case 12: // cleanwin
2331 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2332 offsetof(CPUSPARCState, cleanwin));
2333 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2334 break;
2335 case 13: // otherwin
2336 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2337 offsetof(CPUSPARCState, otherwin));
2338 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2339 break;
2340 case 14: // wstate
2341 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2342 offsetof(CPUSPARCState, wstate));
2343 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2344 break;
2345 case 16: // UA2005 gl
2346 CHECK_IU_FEATURE(dc, GL);
2347 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2348 offsetof(CPUSPARCState, gl));
2349 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2350 break;
2351 case 26: // UA2005 strand status
2352 CHECK_IU_FEATURE(dc, HYPV);
2353 if (!hypervisor(dc))
2354 goto priv_insn;
2355 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2356 break;
2357 case 31: // ver
2358 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2359 break;
2360 case 15: // fq
2361 default:
2362 goto illegal_insn;
2364 #else
2365 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2366 #endif
2367 gen_movl_TN_reg(rd, cpu_tmp0);
2368 break;
2369 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2370 #ifdef TARGET_SPARC64
2371 save_state(dc, cpu_cond);
2372 gen_helper_flushw();
2373 #else
2374 if (!supervisor(dc))
2375 goto priv_insn;
2376 gen_movl_TN_reg(rd, cpu_tbr);
2377 #endif
2378 break;
2379 #endif
2380 } else if (xop == 0x34) { /* FPU Operations */
2381 if (gen_trap_ifnofpu(dc, cpu_cond))
2382 goto jmp_insn;
2383 gen_op_clear_ieee_excp_and_FTT();
2384 rs1 = GET_FIELD(insn, 13, 17);
2385 rs2 = GET_FIELD(insn, 27, 31);
2386 xop = GET_FIELD(insn, 18, 26);
2387 switch (xop) {
2388 case 0x1: /* fmovs */
2389 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2390 break;
2391 case 0x5: /* fnegs */
2392 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2393 break;
2394 case 0x9: /* fabss */
2395 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2396 break;
2397 case 0x29: /* fsqrts */
2398 CHECK_FPU_FEATURE(dc, FSQRT);
2399 gen_clear_float_exceptions();
2400 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2401 gen_helper_check_ieee_exceptions();
2402 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2403 break;
2404 case 0x2a: /* fsqrtd */
2405 CHECK_FPU_FEATURE(dc, FSQRT);
2406 gen_op_load_fpr_DT1(DFPREG(rs2));
2407 gen_clear_float_exceptions();
2408 gen_helper_fsqrtd();
2409 gen_helper_check_ieee_exceptions();
2410 gen_op_store_DT0_fpr(DFPREG(rd));
2411 break;
2412 case 0x2b: /* fsqrtq */
2413 CHECK_FPU_FEATURE(dc, FLOAT128);
2414 gen_op_load_fpr_QT1(QFPREG(rs2));
2415 gen_clear_float_exceptions();
2416 gen_helper_fsqrtq();
2417 gen_helper_check_ieee_exceptions();
2418 gen_op_store_QT0_fpr(QFPREG(rd));
2419 break;
2420 case 0x41: /* fadds */
2421 gen_clear_float_exceptions();
2422 gen_helper_fadds(cpu_tmp32,
2423 cpu_fpr[rs1], cpu_fpr[rs2]);
2424 gen_helper_check_ieee_exceptions();
2425 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2426 break;
2427 case 0x42:
2428 gen_op_load_fpr_DT0(DFPREG(rs1));
2429 gen_op_load_fpr_DT1(DFPREG(rs2));
2430 gen_clear_float_exceptions();
2431 gen_helper_faddd();
2432 gen_helper_check_ieee_exceptions();
2433 gen_op_store_DT0_fpr(DFPREG(rd));
2434 break;
2435 case 0x43: /* faddq */
2436 CHECK_FPU_FEATURE(dc, FLOAT128);
2437 gen_op_load_fpr_QT0(QFPREG(rs1));
2438 gen_op_load_fpr_QT1(QFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 gen_helper_faddq();
2441 gen_helper_check_ieee_exceptions();
2442 gen_op_store_QT0_fpr(QFPREG(rd));
2443 break;
2444 case 0x45: /* fsubs */
2445 gen_clear_float_exceptions();
2446 gen_helper_fsubs(cpu_tmp32,
2447 cpu_fpr[rs1], cpu_fpr[rs2]);
2448 gen_helper_check_ieee_exceptions();
2449 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2450 break;
2451 case 0x46:
2452 gen_op_load_fpr_DT0(DFPREG(rs1));
2453 gen_op_load_fpr_DT1(DFPREG(rs2));
2454 gen_clear_float_exceptions();
2455 gen_helper_fsubd();
2456 gen_helper_check_ieee_exceptions();
2457 gen_op_store_DT0_fpr(DFPREG(rd));
2458 break;
2459 case 0x47: /* fsubq */
2460 CHECK_FPU_FEATURE(dc, FLOAT128);
2461 gen_op_load_fpr_QT0(QFPREG(rs1));
2462 gen_op_load_fpr_QT1(QFPREG(rs2));
2463 gen_clear_float_exceptions();
2464 gen_helper_fsubq();
2465 gen_helper_check_ieee_exceptions();
2466 gen_op_store_QT0_fpr(QFPREG(rd));
2467 break;
2468 case 0x49: /* fmuls */
2469 CHECK_FPU_FEATURE(dc, FMUL);
2470 gen_clear_float_exceptions();
2471 gen_helper_fmuls(cpu_tmp32,
2472 cpu_fpr[rs1], cpu_fpr[rs2]);
2473 gen_helper_check_ieee_exceptions();
2474 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2475 break;
2476 case 0x4a: /* fmuld */
2477 CHECK_FPU_FEATURE(dc, FMUL);
2478 gen_op_load_fpr_DT0(DFPREG(rs1));
2479 gen_op_load_fpr_DT1(DFPREG(rs2));
2480 gen_clear_float_exceptions();
2481 gen_helper_fmuld();
2482 gen_helper_check_ieee_exceptions();
2483 gen_op_store_DT0_fpr(DFPREG(rd));
2484 break;
2485 case 0x4b: /* fmulq */
2486 CHECK_FPU_FEATURE(dc, FLOAT128);
2487 CHECK_FPU_FEATURE(dc, FMUL);
2488 gen_op_load_fpr_QT0(QFPREG(rs1));
2489 gen_op_load_fpr_QT1(QFPREG(rs2));
2490 gen_clear_float_exceptions();
2491 gen_helper_fmulq();
2492 gen_helper_check_ieee_exceptions();
2493 gen_op_store_QT0_fpr(QFPREG(rd));
2494 break;
2495 case 0x4d: /* fdivs */
2496 gen_clear_float_exceptions();
2497 gen_helper_fdivs(cpu_tmp32,
2498 cpu_fpr[rs1], cpu_fpr[rs2]);
2499 gen_helper_check_ieee_exceptions();
2500 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2501 break;
2502 case 0x4e:
2503 gen_op_load_fpr_DT0(DFPREG(rs1));
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 gen_helper_fdivd();
2507 gen_helper_check_ieee_exceptions();
2508 gen_op_store_DT0_fpr(DFPREG(rd));
2509 break;
2510 case 0x4f: /* fdivq */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT0(QFPREG(rs1));
2513 gen_op_load_fpr_QT1(QFPREG(rs2));
2514 gen_clear_float_exceptions();
2515 gen_helper_fdivq();
2516 gen_helper_check_ieee_exceptions();
2517 gen_op_store_QT0_fpr(QFPREG(rd));
2518 break;
2519 case 0x69: /* fsmuld */
2520 CHECK_FPU_FEATURE(dc, FSMULD);
2521 gen_clear_float_exceptions();
2522 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2523 gen_helper_check_ieee_exceptions();
2524 gen_op_store_DT0_fpr(DFPREG(rd));
2525 break;
2526 case 0x6e: /* fdmulq */
2527 CHECK_FPU_FEATURE(dc, FLOAT128);
2528 gen_op_load_fpr_DT0(DFPREG(rs1));
2529 gen_op_load_fpr_DT1(DFPREG(rs2));
2530 gen_clear_float_exceptions();
2531 gen_helper_fdmulq();
2532 gen_helper_check_ieee_exceptions();
2533 gen_op_store_QT0_fpr(QFPREG(rd));
2534 break;
2535 case 0xc4: /* fitos */
2536 gen_clear_float_exceptions();
2537 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2538 gen_helper_check_ieee_exceptions();
2539 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2540 break;
2541 case 0xc6: /* fdtos */
2542 gen_op_load_fpr_DT1(DFPREG(rs2));
2543 gen_clear_float_exceptions();
2544 gen_helper_fdtos(cpu_tmp32);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2547 break;
2548 case 0xc7: /* fqtos */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_QT1(QFPREG(rs2));
2551 gen_clear_float_exceptions();
2552 gen_helper_fqtos(cpu_tmp32);
2553 gen_helper_check_ieee_exceptions();
2554 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2555 break;
2556 case 0xc8: /* fitod */
2557 gen_helper_fitod(cpu_fpr[rs2]);
2558 gen_op_store_DT0_fpr(DFPREG(rd));
2559 break;
2560 case 0xc9: /* fstod */
2561 gen_helper_fstod(cpu_fpr[rs2]);
2562 gen_op_store_DT0_fpr(DFPREG(rd));
2563 break;
2564 case 0xcb: /* fqtod */
2565 CHECK_FPU_FEATURE(dc, FLOAT128);
2566 gen_op_load_fpr_QT1(QFPREG(rs2));
2567 gen_clear_float_exceptions();
2568 gen_helper_fqtod();
2569 gen_helper_check_ieee_exceptions();
2570 gen_op_store_DT0_fpr(DFPREG(rd));
2571 break;
2572 case 0xcc: /* fitoq */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 gen_helper_fitoq(cpu_fpr[rs2]);
2575 gen_op_store_QT0_fpr(QFPREG(rd));
2576 break;
2577 case 0xcd: /* fstoq */
2578 CHECK_FPU_FEATURE(dc, FLOAT128);
2579 gen_helper_fstoq(cpu_fpr[rs2]);
2580 gen_op_store_QT0_fpr(QFPREG(rd));
2581 break;
2582 case 0xce: /* fdtoq */
2583 CHECK_FPU_FEATURE(dc, FLOAT128);
2584 gen_op_load_fpr_DT1(DFPREG(rs2));
2585 gen_helper_fdtoq();
2586 gen_op_store_QT0_fpr(QFPREG(rd));
2587 break;
2588 case 0xd1: /* fstoi */
2589 gen_clear_float_exceptions();
2590 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2591 gen_helper_check_ieee_exceptions();
2592 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2593 break;
2594 case 0xd2: /* fdtoi */
2595 gen_op_load_fpr_DT1(DFPREG(rs2));
2596 gen_clear_float_exceptions();
2597 gen_helper_fdtoi(cpu_tmp32);
2598 gen_helper_check_ieee_exceptions();
2599 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2600 break;
2601 case 0xd3: /* fqtoi */
2602 CHECK_FPU_FEATURE(dc, FLOAT128);
2603 gen_op_load_fpr_QT1(QFPREG(rs2));
2604 gen_clear_float_exceptions();
2605 gen_helper_fqtoi(cpu_tmp32);
2606 gen_helper_check_ieee_exceptions();
2607 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2608 break;
2609 #ifdef TARGET_SPARC64
2610 case 0x2: /* V9 fmovd */
2611 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2612 cpu_fpr[DFPREG(rs2)]);
2613 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2614 cpu_fpr[DFPREG(rs2) + 1]);
2615 break;
2616 case 0x3: /* V9 fmovq */
2617 CHECK_FPU_FEATURE(dc, FLOAT128);
2618 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2619 cpu_fpr[QFPREG(rs2)]);
2620 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2621 cpu_fpr[QFPREG(rs2) + 1]);
2622 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2623 cpu_fpr[QFPREG(rs2) + 2]);
2624 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2625 cpu_fpr[QFPREG(rs2) + 3]);
2626 break;
2627 case 0x6: /* V9 fnegd */
2628 gen_op_load_fpr_DT1(DFPREG(rs2));
2629 gen_helper_fnegd();
2630 gen_op_store_DT0_fpr(DFPREG(rd));
2631 break;
2632 case 0x7: /* V9 fnegq */
2633 CHECK_FPU_FEATURE(dc, FLOAT128);
2634 gen_op_load_fpr_QT1(QFPREG(rs2));
2635 gen_helper_fnegq();
2636 gen_op_store_QT0_fpr(QFPREG(rd));
2637 break;
2638 case 0xa: /* V9 fabsd */
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 gen_helper_fabsd();
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2642 break;
2643 case 0xb: /* V9 fabsq */
2644 CHECK_FPU_FEATURE(dc, FLOAT128);
2645 gen_op_load_fpr_QT1(QFPREG(rs2));
2646 gen_helper_fabsq();
2647 gen_op_store_QT0_fpr(QFPREG(rd));
2648 break;
2649 case 0x81: /* V9 fstox */
2650 gen_clear_float_exceptions();
2651 gen_helper_fstox(cpu_fpr[rs2]);
2652 gen_helper_check_ieee_exceptions();
2653 gen_op_store_DT0_fpr(DFPREG(rd));
2654 break;
2655 case 0x82: /* V9 fdtox */
2656 gen_op_load_fpr_DT1(DFPREG(rs2));
2657 gen_clear_float_exceptions();
2658 gen_helper_fdtox();
2659 gen_helper_check_ieee_exceptions();
2660 gen_op_store_DT0_fpr(DFPREG(rd));
2661 break;
2662 case 0x83: /* V9 fqtox */
2663 CHECK_FPU_FEATURE(dc, FLOAT128);
2664 gen_op_load_fpr_QT1(QFPREG(rs2));
2665 gen_clear_float_exceptions();
2666 gen_helper_fqtox();
2667 gen_helper_check_ieee_exceptions();
2668 gen_op_store_DT0_fpr(DFPREG(rd));
2669 break;
2670 case 0x84: /* V9 fxtos */
2671 gen_op_load_fpr_DT1(DFPREG(rs2));
2672 gen_clear_float_exceptions();
2673 gen_helper_fxtos(cpu_tmp32);
2674 gen_helper_check_ieee_exceptions();
2675 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2676 break;
2677 case 0x88: /* V9 fxtod */
2678 gen_op_load_fpr_DT1(DFPREG(rs2));
2679 gen_clear_float_exceptions();
2680 gen_helper_fxtod();
2681 gen_helper_check_ieee_exceptions();
2682 gen_op_store_DT0_fpr(DFPREG(rd));
2683 break;
2684 case 0x8c: /* V9 fxtoq */
2685 CHECK_FPU_FEATURE(dc, FLOAT128);
2686 gen_op_load_fpr_DT1(DFPREG(rs2));
2687 gen_clear_float_exceptions();
2688 gen_helper_fxtoq();
2689 gen_helper_check_ieee_exceptions();
2690 gen_op_store_QT0_fpr(QFPREG(rd));
2691 break;
2692 #endif
2693 default:
2694 goto illegal_insn;
2696 } else if (xop == 0x35) { /* FPU Operations */
2697 #ifdef TARGET_SPARC64
2698 int cond;
2699 #endif
2700 if (gen_trap_ifnofpu(dc, cpu_cond))
2701 goto jmp_insn;
2702 gen_op_clear_ieee_excp_and_FTT();
2703 rs1 = GET_FIELD(insn, 13, 17);
2704 rs2 = GET_FIELD(insn, 27, 31);
2705 xop = GET_FIELD(insn, 18, 26);
2706 #ifdef TARGET_SPARC64
2707 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2708 int l1;
2710 l1 = gen_new_label();
2711 cond = GET_FIELD_SP(insn, 14, 17);
2712 cpu_src1 = get_src1(insn, cpu_src1);
2713 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2714 0, l1);
2715 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2716 gen_set_label(l1);
2717 break;
2718 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2719 int l1;
2721 l1 = gen_new_label();
2722 cond = GET_FIELD_SP(insn, 14, 17);
2723 cpu_src1 = get_src1(insn, cpu_src1);
2724 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2725 0, l1);
2726 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2727 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2728 gen_set_label(l1);
2729 break;
2730 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2731 int l1;
2733 CHECK_FPU_FEATURE(dc, FLOAT128);
2734 l1 = gen_new_label();
2735 cond = GET_FIELD_SP(insn, 14, 17);
2736 cpu_src1 = get_src1(insn, cpu_src1);
2737 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2738 0, l1);
2739 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2740 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2741 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2743 gen_set_label(l1);
2744 break;
2746 #endif
2747 switch (xop) {
2748 #ifdef TARGET_SPARC64
2749 #define FMOVSCC(fcc) \
2751 TCGv r_cond; \
2752 int l1; \
2754 l1 = gen_new_label(); \
2755 r_cond = tcg_temp_new(); \
2756 cond = GET_FIELD_SP(insn, 14, 17); \
2757 gen_fcond(r_cond, fcc, cond); \
2758 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2759 0, l1); \
2760 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2761 gen_set_label(l1); \
2762 tcg_temp_free(r_cond); \
2764 #define FMOVDCC(fcc) \
2766 TCGv r_cond; \
2767 int l1; \
2769 l1 = gen_new_label(); \
2770 r_cond = tcg_temp_new(); \
2771 cond = GET_FIELD_SP(insn, 14, 17); \
2772 gen_fcond(r_cond, fcc, cond); \
2773 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2774 0, l1); \
2775 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2776 cpu_fpr[DFPREG(rs2)]); \
2777 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2778 cpu_fpr[DFPREG(rs2) + 1]); \
2779 gen_set_label(l1); \
2780 tcg_temp_free(r_cond); \
2782 #define FMOVQCC(fcc) \
2784 TCGv r_cond; \
2785 int l1; \
2787 l1 = gen_new_label(); \
2788 r_cond = tcg_temp_new(); \
2789 cond = GET_FIELD_SP(insn, 14, 17); \
2790 gen_fcond(r_cond, fcc, cond); \
2791 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2792 0, l1); \
2793 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2794 cpu_fpr[QFPREG(rs2)]); \
2795 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2796 cpu_fpr[QFPREG(rs2) + 1]); \
2797 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2798 cpu_fpr[QFPREG(rs2) + 2]); \
2799 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2800 cpu_fpr[QFPREG(rs2) + 3]); \
2801 gen_set_label(l1); \
2802 tcg_temp_free(r_cond); \
2804 case 0x001: /* V9 fmovscc %fcc0 */
2805 FMOVSCC(0);
2806 break;
2807 case 0x002: /* V9 fmovdcc %fcc0 */
2808 FMOVDCC(0);
2809 break;
2810 case 0x003: /* V9 fmovqcc %fcc0 */
2811 CHECK_FPU_FEATURE(dc, FLOAT128);
2812 FMOVQCC(0);
2813 break;
2814 case 0x041: /* V9 fmovscc %fcc1 */
2815 FMOVSCC(1);
2816 break;
2817 case 0x042: /* V9 fmovdcc %fcc1 */
2818 FMOVDCC(1);
2819 break;
2820 case 0x043: /* V9 fmovqcc %fcc1 */
2821 CHECK_FPU_FEATURE(dc, FLOAT128);
2822 FMOVQCC(1);
2823 break;
2824 case 0x081: /* V9 fmovscc %fcc2 */
2825 FMOVSCC(2);
2826 break;
2827 case 0x082: /* V9 fmovdcc %fcc2 */
2828 FMOVDCC(2);
2829 break;
2830 case 0x083: /* V9 fmovqcc %fcc2 */
2831 CHECK_FPU_FEATURE(dc, FLOAT128);
2832 FMOVQCC(2);
2833 break;
2834 case 0x0c1: /* V9 fmovscc %fcc3 */
2835 FMOVSCC(3);
2836 break;
2837 case 0x0c2: /* V9 fmovdcc %fcc3 */
2838 FMOVDCC(3);
2839 break;
2840 case 0x0c3: /* V9 fmovqcc %fcc3 */
2841 CHECK_FPU_FEATURE(dc, FLOAT128);
2842 FMOVQCC(3);
2843 break;
2844 #undef FMOVSCC
2845 #undef FMOVDCC
2846 #undef FMOVQCC
2847 #define FMOVSCC(icc) \
2849 TCGv r_cond; \
2850 int l1; \
2852 l1 = gen_new_label(); \
2853 r_cond = tcg_temp_new(); \
2854 cond = GET_FIELD_SP(insn, 14, 17); \
2855 gen_cond(r_cond, icc, cond); \
2856 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2857 0, l1); \
2858 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2859 gen_set_label(l1); \
2860 tcg_temp_free(r_cond); \
2862 #define FMOVDCC(icc) \
2864 TCGv r_cond; \
2865 int l1; \
2867 l1 = gen_new_label(); \
2868 r_cond = tcg_temp_new(); \
2869 cond = GET_FIELD_SP(insn, 14, 17); \
2870 gen_cond(r_cond, icc, cond); \
2871 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2872 0, l1); \
2873 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2874 cpu_fpr[DFPREG(rs2)]); \
2875 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2876 cpu_fpr[DFPREG(rs2) + 1]); \
2877 gen_set_label(l1); \
2878 tcg_temp_free(r_cond); \
2880 #define FMOVQCC(icc) \
2882 TCGv r_cond; \
2883 int l1; \
2885 l1 = gen_new_label(); \
2886 r_cond = tcg_temp_new(); \
2887 cond = GET_FIELD_SP(insn, 14, 17); \
2888 gen_cond(r_cond, icc, cond); \
2889 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2890 0, l1); \
2891 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2892 cpu_fpr[QFPREG(rs2)]); \
2893 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2894 cpu_fpr[QFPREG(rs2) + 1]); \
2895 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2896 cpu_fpr[QFPREG(rs2) + 2]); \
2897 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2898 cpu_fpr[QFPREG(rs2) + 3]); \
2899 gen_set_label(l1); \
2900 tcg_temp_free(r_cond); \
2903 case 0x101: /* V9 fmovscc %icc */
2904 FMOVSCC(0);
2905 break;
2906 case 0x102: /* V9 fmovdcc %icc */
2907 FMOVDCC(0);
2908 case 0x103: /* V9 fmovqcc %icc */
2909 CHECK_FPU_FEATURE(dc, FLOAT128);
2910 FMOVQCC(0);
2911 break;
2912 case 0x181: /* V9 fmovscc %xcc */
2913 FMOVSCC(1);
2914 break;
2915 case 0x182: /* V9 fmovdcc %xcc */
2916 FMOVDCC(1);
2917 break;
2918 case 0x183: /* V9 fmovqcc %xcc */
2919 CHECK_FPU_FEATURE(dc, FLOAT128);
2920 FMOVQCC(1);
2921 break;
2922 #undef FMOVSCC
2923 #undef FMOVDCC
2924 #undef FMOVQCC
2925 #endif
2926 case 0x51: /* fcmps, V9 %fcc */
2927 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2928 break;
2929 case 0x52: /* fcmpd, V9 %fcc */
2930 gen_op_load_fpr_DT0(DFPREG(rs1));
2931 gen_op_load_fpr_DT1(DFPREG(rs2));
2932 gen_op_fcmpd(rd & 3);
2933 break;
2934 case 0x53: /* fcmpq, V9 %fcc */
2935 CHECK_FPU_FEATURE(dc, FLOAT128);
2936 gen_op_load_fpr_QT0(QFPREG(rs1));
2937 gen_op_load_fpr_QT1(QFPREG(rs2));
2938 gen_op_fcmpq(rd & 3);
2939 break;
2940 case 0x55: /* fcmpes, V9 %fcc */
2941 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2942 break;
2943 case 0x56: /* fcmped, V9 %fcc */
2944 gen_op_load_fpr_DT0(DFPREG(rs1));
2945 gen_op_load_fpr_DT1(DFPREG(rs2));
2946 gen_op_fcmped(rd & 3);
2947 break;
2948 case 0x57: /* fcmpeq, V9 %fcc */
2949 CHECK_FPU_FEATURE(dc, FLOAT128);
2950 gen_op_load_fpr_QT0(QFPREG(rs1));
2951 gen_op_load_fpr_QT1(QFPREG(rs2));
2952 gen_op_fcmpeq(rd & 3);
2953 break;
2954 default:
2955 goto illegal_insn;
2957 } else if (xop == 0x2) {
2958 // clr/mov shortcut
2960 rs1 = GET_FIELD(insn, 13, 17);
2961 if (rs1 == 0) {
2962 // or %g0, x, y -> mov T0, x; mov y, T0
2963 if (IS_IMM) { /* immediate */
2964 TCGv r_const;
2966 rs2 = GET_FIELDs(insn, 19, 31);
2967 r_const = tcg_const_tl((int)rs2);
2968 gen_movl_TN_reg(rd, r_const);
2969 tcg_temp_free(r_const);
2970 } else { /* register */
2971 rs2 = GET_FIELD(insn, 27, 31);
2972 gen_movl_reg_TN(rs2, cpu_dst);
2973 gen_movl_TN_reg(rd, cpu_dst);
2975 } else {
2976 cpu_src1 = get_src1(insn, cpu_src1);
2977 if (IS_IMM) { /* immediate */
2978 rs2 = GET_FIELDs(insn, 19, 31);
2979 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2980 gen_movl_TN_reg(rd, cpu_dst);
2981 } else { /* register */
2982 // or x, %g0, y -> mov T1, x; mov y, T1
2983 rs2 = GET_FIELD(insn, 27, 31);
2984 if (rs2 != 0) {
2985 gen_movl_reg_TN(rs2, cpu_src2);
2986 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2987 gen_movl_TN_reg(rd, cpu_dst);
2988 } else
2989 gen_movl_TN_reg(rd, cpu_src1);
2992 #ifdef TARGET_SPARC64
2993 } else if (xop == 0x25) { /* sll, V9 sllx */
2994 cpu_src1 = get_src1(insn, cpu_src1);
2995 if (IS_IMM) { /* immediate */
2996 rs2 = GET_FIELDs(insn, 20, 31);
2997 if (insn & (1 << 12)) {
2998 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2999 } else {
3000 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
3002 } else { /* register */
3003 rs2 = GET_FIELD(insn, 27, 31);
3004 gen_movl_reg_TN(rs2, cpu_src2);
3005 if (insn & (1 << 12)) {
3006 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3007 } else {
3008 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3010 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3012 gen_movl_TN_reg(rd, cpu_dst);
3013 } else if (xop == 0x26) { /* srl, V9 srlx */
3014 cpu_src1 = get_src1(insn, cpu_src1);
3015 if (IS_IMM) { /* immediate */
3016 rs2 = GET_FIELDs(insn, 20, 31);
3017 if (insn & (1 << 12)) {
3018 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3019 } else {
3020 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3021 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3023 } else { /* register */
3024 rs2 = GET_FIELD(insn, 27, 31);
3025 gen_movl_reg_TN(rs2, cpu_src2);
3026 if (insn & (1 << 12)) {
3027 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3028 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3029 } else {
3030 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3031 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3032 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3035 gen_movl_TN_reg(rd, cpu_dst);
3036 } else if (xop == 0x27) { /* sra, V9 srax */
3037 cpu_src1 = get_src1(insn, cpu_src1);
3038 if (IS_IMM) { /* immediate */
3039 rs2 = GET_FIELDs(insn, 20, 31);
3040 if (insn & (1 << 12)) {
3041 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3042 } else {
3043 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3044 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3045 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3047 } else { /* register */
3048 rs2 = GET_FIELD(insn, 27, 31);
3049 gen_movl_reg_TN(rs2, cpu_src2);
3050 if (insn & (1 << 12)) {
3051 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3052 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3053 } else {
3054 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3055 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3056 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3057 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3060 gen_movl_TN_reg(rd, cpu_dst);
3061 #endif
3062 } else if (xop < 0x36) {
3063 cpu_src1 = get_src1(insn, cpu_src1);
3064 cpu_src2 = get_src2(insn, cpu_src2);
3065 if (xop < 0x20) {
3066 switch (xop & ~0x10) {
3067 case 0x0:
3068 if (xop & 0x10)
3069 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3070 else
3071 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3072 break;
3073 case 0x1:
3074 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3075 if (xop & 0x10)
3076 gen_op_logic_cc(cpu_dst);
3077 break;
3078 case 0x2:
3079 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3080 if (xop & 0x10)
3081 gen_op_logic_cc(cpu_dst);
3082 break;
3083 case 0x3:
3084 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3085 if (xop & 0x10)
3086 gen_op_logic_cc(cpu_dst);
3087 break;
3088 case 0x4:
3089 if (xop & 0x10)
3090 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3091 else
3092 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3093 break;
3094 case 0x5:
3095 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3096 if (xop & 0x10)
3097 gen_op_logic_cc(cpu_dst);
3098 break;
3099 case 0x6:
3100 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3101 if (xop & 0x10)
3102 gen_op_logic_cc(cpu_dst);
3103 break;
3104 case 0x7:
3105 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3106 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3107 if (xop & 0x10)
3108 gen_op_logic_cc(cpu_dst);
3109 break;
3110 case 0x8:
3111 if (xop & 0x10)
3112 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3113 else {
3114 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3115 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3116 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3118 break;
3119 #ifdef TARGET_SPARC64
3120 case 0x9: /* V9 mulx */
3121 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3122 break;
3123 #endif
3124 case 0xa:
3125 CHECK_IU_FEATURE(dc, MUL);
3126 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3127 if (xop & 0x10)
3128 gen_op_logic_cc(cpu_dst);
3129 break;
3130 case 0xb:
3131 CHECK_IU_FEATURE(dc, MUL);
3132 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3133 if (xop & 0x10)
3134 gen_op_logic_cc(cpu_dst);
3135 break;
3136 case 0xc:
3137 if (xop & 0x10)
3138 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3139 else {
3140 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3141 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3142 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3144 break;
3145 #ifdef TARGET_SPARC64
3146 case 0xd: /* V9 udivx */
3147 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3148 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3149 gen_trap_ifdivzero_tl(cpu_cc_src2);
3150 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3151 break;
3152 #endif
3153 case 0xe:
3154 CHECK_IU_FEATURE(dc, DIV);
3155 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3156 if (xop & 0x10)
3157 gen_op_div_cc(cpu_dst);
3158 break;
3159 case 0xf:
3160 CHECK_IU_FEATURE(dc, DIV);
3161 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3162 if (xop & 0x10)
3163 gen_op_div_cc(cpu_dst);
3164 break;
3165 default:
3166 goto illegal_insn;
3168 gen_movl_TN_reg(rd, cpu_dst);
3169 } else {
3170 switch (xop) {
3171 case 0x20: /* taddcc */
3172 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3173 gen_movl_TN_reg(rd, cpu_dst);
3174 break;
3175 case 0x21: /* tsubcc */
3176 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3177 gen_movl_TN_reg(rd, cpu_dst);
3178 break;
3179 case 0x22: /* taddcctv */
3180 save_state(dc, cpu_cond);
3181 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3182 gen_movl_TN_reg(rd, cpu_dst);
3183 break;
3184 case 0x23: /* tsubcctv */
3185 save_state(dc, cpu_cond);
3186 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3187 gen_movl_TN_reg(rd, cpu_dst);
3188 break;
3189 case 0x24: /* mulscc */
3190 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3191 gen_movl_TN_reg(rd, cpu_dst);
3192 break;
3193 #ifndef TARGET_SPARC64
3194 case 0x25: /* sll */
3195 if (IS_IMM) { /* immediate */
3196 rs2 = GET_FIELDs(insn, 20, 31);
3197 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3198 } else { /* register */
3199 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3200 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3202 gen_movl_TN_reg(rd, cpu_dst);
3203 break;
3204 case 0x26: /* srl */
3205 if (IS_IMM) { /* immediate */
3206 rs2 = GET_FIELDs(insn, 20, 31);
3207 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3208 } else { /* register */
3209 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3210 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3212 gen_movl_TN_reg(rd, cpu_dst);
3213 break;
3214 case 0x27: /* sra */
3215 if (IS_IMM) { /* immediate */
3216 rs2 = GET_FIELDs(insn, 20, 31);
3217 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3218 } else { /* register */
3219 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3220 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3222 gen_movl_TN_reg(rd, cpu_dst);
3223 break;
3224 #endif
3225 case 0x30:
3227 switch(rd) {
3228 case 0: /* wry */
3229 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3230 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3231 break;
3232 #ifndef TARGET_SPARC64
3233 case 0x01 ... 0x0f: /* undefined in the
3234 SPARCv8 manual, nop
3235 on the microSPARC
3236 II */
3237 case 0x10 ... 0x1f: /* implementation-dependent
3238 in the SPARCv8
3239 manual, nop on the
3240 microSPARC II */
3241 break;
3242 #else
3243 case 0x2: /* V9 wrccr */
3244 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3245 gen_helper_wrccr(cpu_dst);
3246 break;
3247 case 0x3: /* V9 wrasi */
3248 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3249 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3250 break;
3251 case 0x6: /* V9 wrfprs */
3252 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3253 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3254 save_state(dc, cpu_cond);
3255 gen_op_next_insn();
3256 tcg_gen_exit_tb(0);
3257 dc->is_br = 1;
3258 break;
3259 case 0xf: /* V9 sir, nop if user */
3260 #if !defined(CONFIG_USER_ONLY)
3261 if (supervisor(dc))
3262 ; // XXX
3263 #endif
3264 break;
3265 case 0x13: /* Graphics Status */
3266 if (gen_trap_ifnofpu(dc, cpu_cond))
3267 goto jmp_insn;
3268 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3269 break;
3270 case 0x14: /* Softint set */
3271 if (!supervisor(dc))
3272 goto illegal_insn;
3273 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3274 gen_helper_set_softint(cpu_tmp64);
3275 break;
3276 case 0x15: /* Softint clear */
3277 if (!supervisor(dc))
3278 goto illegal_insn;
3279 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3280 gen_helper_clear_softint(cpu_tmp64);
3281 break;
3282 case 0x16: /* Softint write */
3283 if (!supervisor(dc))
3284 goto illegal_insn;
3285 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3286 gen_helper_write_softint(cpu_tmp64);
3287 break;
3288 case 0x17: /* Tick compare */
3289 #if !defined(CONFIG_USER_ONLY)
3290 if (!supervisor(dc))
3291 goto illegal_insn;
3292 #endif
3294 TCGv_ptr r_tickptr;
3296 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3297 cpu_src2);
3298 r_tickptr = tcg_temp_new_ptr();
3299 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3300 offsetof(CPUState, tick));
3301 gen_helper_tick_set_limit(r_tickptr,
3302 cpu_tick_cmpr);
3303 tcg_temp_free_ptr(r_tickptr);
3305 break;
3306 case 0x18: /* System tick */
3307 #if !defined(CONFIG_USER_ONLY)
3308 if (!supervisor(dc))
3309 goto illegal_insn;
3310 #endif
3312 TCGv_ptr r_tickptr;
3314 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3315 cpu_src2);
3316 r_tickptr = tcg_temp_new_ptr();
3317 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3318 offsetof(CPUState, stick));
3319 gen_helper_tick_set_count(r_tickptr,
3320 cpu_dst);
3321 tcg_temp_free_ptr(r_tickptr);
3323 break;
3324 case 0x19: /* System tick compare */
3325 #if !defined(CONFIG_USER_ONLY)
3326 if (!supervisor(dc))
3327 goto illegal_insn;
3328 #endif
3330 TCGv_ptr r_tickptr;
3332 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3333 cpu_src2);
3334 r_tickptr = tcg_temp_new_ptr();
3335 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3336 offsetof(CPUState, stick));
3337 gen_helper_tick_set_limit(r_tickptr,
3338 cpu_stick_cmpr);
3339 tcg_temp_free_ptr(r_tickptr);
3341 break;
3343 case 0x10: /* Performance Control */
3344 case 0x11: /* Performance Instrumentation
3345 Counter */
3346 case 0x12: /* Dispatch Control */
3347 #endif
3348 default:
3349 goto illegal_insn;
3352 break;
3353 #if !defined(CONFIG_USER_ONLY)
3354 case 0x31: /* wrpsr, V9 saved, restored */
3356 if (!supervisor(dc))
3357 goto priv_insn;
3358 #ifdef TARGET_SPARC64
3359 switch (rd) {
3360 case 0:
3361 gen_helper_saved();
3362 break;
3363 case 1:
3364 gen_helper_restored();
3365 break;
3366 case 2: /* UA2005 allclean */
3367 case 3: /* UA2005 otherw */
3368 case 4: /* UA2005 normalw */
3369 case 5: /* UA2005 invalw */
3370 // XXX
3371 default:
3372 goto illegal_insn;
3374 #else
3375 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3376 gen_helper_wrpsr(cpu_dst);
3377 save_state(dc, cpu_cond);
3378 gen_op_next_insn();
3379 tcg_gen_exit_tb(0);
3380 dc->is_br = 1;
3381 #endif
3383 break;
3384 case 0x32: /* wrwim, V9 wrpr */
3386 if (!supervisor(dc))
3387 goto priv_insn;
3388 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3389 #ifdef TARGET_SPARC64
3390 switch (rd) {
3391 case 0: // tpc
3393 TCGv_ptr r_tsptr;
3395 r_tsptr = tcg_temp_new_ptr();
3396 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3397 offsetof(CPUState, tsptr));
3398 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3399 offsetof(trap_state, tpc));
3400 tcg_temp_free_ptr(r_tsptr);
3402 break;
3403 case 1: // tnpc
3405 TCGv_ptr r_tsptr;
3407 r_tsptr = tcg_temp_new_ptr();
3408 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3409 offsetof(CPUState, tsptr));
3410 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3411 offsetof(trap_state, tnpc));
3412 tcg_temp_free_ptr(r_tsptr);
3414 break;
3415 case 2: // tstate
3417 TCGv_ptr r_tsptr;
3419 r_tsptr = tcg_temp_new_ptr();
3420 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3421 offsetof(CPUState, tsptr));
3422 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3423 offsetof(trap_state,
3424 tstate));
3425 tcg_temp_free_ptr(r_tsptr);
3427 break;
3428 case 3: // tt
3430 TCGv_ptr r_tsptr;
3432 r_tsptr = tcg_temp_new_ptr();
3433 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3434 offsetof(CPUState, tsptr));
3435 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3436 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3437 offsetof(trap_state, tt));
3438 tcg_temp_free_ptr(r_tsptr);
3440 break;
3441 case 4: // tick
3443 TCGv_ptr r_tickptr;
3445 r_tickptr = tcg_temp_new_ptr();
3446 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3447 offsetof(CPUState, tick));
3448 gen_helper_tick_set_count(r_tickptr,
3449 cpu_tmp0);
3450 tcg_temp_free_ptr(r_tickptr);
3452 break;
3453 case 5: // tba
3454 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3455 break;
3456 case 6: // pstate
3457 save_state(dc, cpu_cond);
3458 gen_helper_wrpstate(cpu_tmp0);
3459 gen_op_next_insn();
3460 tcg_gen_exit_tb(0);
3461 dc->is_br = 1;
3462 break;
3463 case 7: // tl
3464 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3465 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3466 offsetof(CPUSPARCState, tl));
3467 break;
3468 case 8: // pil
3469 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3470 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3471 offsetof(CPUSPARCState,
3472 psrpil));
3473 break;
3474 case 9: // cwp
3475 gen_helper_wrcwp(cpu_tmp0);
3476 break;
3477 case 10: // cansave
3478 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3479 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3480 offsetof(CPUSPARCState,
3481 cansave));
3482 break;
3483 case 11: // canrestore
3484 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3485 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3486 offsetof(CPUSPARCState,
3487 canrestore));
3488 break;
3489 case 12: // cleanwin
3490 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3491 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3492 offsetof(CPUSPARCState,
3493 cleanwin));
3494 break;
3495 case 13: // otherwin
3496 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3497 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3498 offsetof(CPUSPARCState,
3499 otherwin));
3500 break;
3501 case 14: // wstate
3502 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3503 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3504 offsetof(CPUSPARCState,
3505 wstate));
3506 break;
3507 case 16: // UA2005 gl
3508 CHECK_IU_FEATURE(dc, GL);
3509 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3510 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3511 offsetof(CPUSPARCState, gl));
3512 break;
3513 case 26: // UA2005 strand status
3514 CHECK_IU_FEATURE(dc, HYPV);
3515 if (!hypervisor(dc))
3516 goto priv_insn;
3517 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3518 break;
3519 default:
3520 goto illegal_insn;
3522 #else
3523 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3524 if (dc->def->nwindows != 32)
3525 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3526 (1 << dc->def->nwindows) - 1);
3527 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3528 #endif
3530 break;
3531 case 0x33: /* wrtbr, UA2005 wrhpr */
3533 #ifndef TARGET_SPARC64
3534 if (!supervisor(dc))
3535 goto priv_insn;
3536 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3537 #else
3538 CHECK_IU_FEATURE(dc, HYPV);
3539 if (!hypervisor(dc))
3540 goto priv_insn;
3541 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3542 switch (rd) {
3543 case 0: // hpstate
3544 // XXX gen_op_wrhpstate();
3545 save_state(dc, cpu_cond);
3546 gen_op_next_insn();
3547 tcg_gen_exit_tb(0);
3548 dc->is_br = 1;
3549 break;
3550 case 1: // htstate
3551 // XXX gen_op_wrhtstate();
3552 break;
3553 case 3: // hintp
3554 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3555 break;
3556 case 5: // htba
3557 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3558 break;
3559 case 31: // hstick_cmpr
3561 TCGv_ptr r_tickptr;
3563 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3564 r_tickptr = tcg_temp_new_ptr();
3565 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3566 offsetof(CPUState, hstick));
3567 gen_helper_tick_set_limit(r_tickptr,
3568 cpu_hstick_cmpr);
3569 tcg_temp_free_ptr(r_tickptr);
3571 break;
3572 case 6: // hver readonly
3573 default:
3574 goto illegal_insn;
3576 #endif
3578 break;
3579 #endif
3580 #ifdef TARGET_SPARC64
3581 case 0x2c: /* V9 movcc */
3583 int cc = GET_FIELD_SP(insn, 11, 12);
3584 int cond = GET_FIELD_SP(insn, 14, 17);
3585 TCGv r_cond;
3586 int l1;
3588 r_cond = tcg_temp_new();
3589 if (insn & (1 << 18)) {
3590 if (cc == 0)
3591 gen_cond(r_cond, 0, cond);
3592 else if (cc == 2)
3593 gen_cond(r_cond, 1, cond);
3594 else
3595 goto illegal_insn;
3596 } else {
3597 gen_fcond(r_cond, cc, cond);
3600 l1 = gen_new_label();
3602 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3603 if (IS_IMM) { /* immediate */
3604 TCGv r_const;
3606 rs2 = GET_FIELD_SPs(insn, 0, 10);
3607 r_const = tcg_const_tl((int)rs2);
3608 gen_movl_TN_reg(rd, r_const);
3609 tcg_temp_free(r_const);
3610 } else {
3611 rs2 = GET_FIELD_SP(insn, 0, 4);
3612 gen_movl_reg_TN(rs2, cpu_tmp0);
3613 gen_movl_TN_reg(rd, cpu_tmp0);
3615 gen_set_label(l1);
3616 tcg_temp_free(r_cond);
3617 break;
3619 case 0x2d: /* V9 sdivx */
3620 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3621 gen_movl_TN_reg(rd, cpu_dst);
3622 break;
3623 case 0x2e: /* V9 popc */
3625 cpu_src2 = get_src2(insn, cpu_src2);
3626 gen_helper_popc(cpu_dst, cpu_src2);
3627 gen_movl_TN_reg(rd, cpu_dst);
3629 case 0x2f: /* V9 movr */
3631 int cond = GET_FIELD_SP(insn, 10, 12);
3632 int l1;
3634 cpu_src1 = get_src1(insn, cpu_src1);
3636 l1 = gen_new_label();
3638 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3639 cpu_src1, 0, l1);
3640 if (IS_IMM) { /* immediate */
3641 TCGv r_const;
3643 rs2 = GET_FIELD_SPs(insn, 0, 9);
3644 r_const = tcg_const_tl((int)rs2);
3645 gen_movl_TN_reg(rd, r_const);
3646 tcg_temp_free(r_const);
3647 } else {
3648 rs2 = GET_FIELD_SP(insn, 0, 4);
3649 gen_movl_reg_TN(rs2, cpu_tmp0);
3650 gen_movl_TN_reg(rd, cpu_tmp0);
3652 gen_set_label(l1);
3653 break;
3655 #endif
3656 default:
3657 goto illegal_insn;
3660 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3661 #ifdef TARGET_SPARC64
3662 int opf = GET_FIELD_SP(insn, 5, 13);
3663 rs1 = GET_FIELD(insn, 13, 17);
3664 rs2 = GET_FIELD(insn, 27, 31);
3665 if (gen_trap_ifnofpu(dc, cpu_cond))
3666 goto jmp_insn;
3668 switch (opf) {
3669 case 0x000: /* VIS I edge8cc */
3670 case 0x001: /* VIS II edge8n */
3671 case 0x002: /* VIS I edge8lcc */
3672 case 0x003: /* VIS II edge8ln */
3673 case 0x004: /* VIS I edge16cc */
3674 case 0x005: /* VIS II edge16n */
3675 case 0x006: /* VIS I edge16lcc */
3676 case 0x007: /* VIS II edge16ln */
3677 case 0x008: /* VIS I edge32cc */
3678 case 0x009: /* VIS II edge32n */
3679 case 0x00a: /* VIS I edge32lcc */
3680 case 0x00b: /* VIS II edge32ln */
3681 // XXX
3682 goto illegal_insn;
3683 case 0x010: /* VIS I array8 */
3684 CHECK_FPU_FEATURE(dc, VIS1);
3685 cpu_src1 = get_src1(insn, cpu_src1);
3686 gen_movl_reg_TN(rs2, cpu_src2);
3687 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3688 gen_movl_TN_reg(rd, cpu_dst);
3689 break;
3690 case 0x012: /* VIS I array16 */
3691 CHECK_FPU_FEATURE(dc, VIS1);
3692 cpu_src1 = get_src1(insn, cpu_src1);
3693 gen_movl_reg_TN(rs2, cpu_src2);
3694 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3695 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3696 gen_movl_TN_reg(rd, cpu_dst);
3697 break;
3698 case 0x014: /* VIS I array32 */
3699 CHECK_FPU_FEATURE(dc, VIS1);
3700 cpu_src1 = get_src1(insn, cpu_src1);
3701 gen_movl_reg_TN(rs2, cpu_src2);
3702 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3703 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3704 gen_movl_TN_reg(rd, cpu_dst);
3705 break;
3706 case 0x018: /* VIS I alignaddr */
3707 CHECK_FPU_FEATURE(dc, VIS1);
3708 cpu_src1 = get_src1(insn, cpu_src1);
3709 gen_movl_reg_TN(rs2, cpu_src2);
3710 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3711 gen_movl_TN_reg(rd, cpu_dst);
3712 break;
3713 case 0x019: /* VIS II bmask */
3714 case 0x01a: /* VIS I alignaddrl */
3715 // XXX
3716 goto illegal_insn;
3717 case 0x020: /* VIS I fcmple16 */
3718 CHECK_FPU_FEATURE(dc, VIS1);
3719 gen_op_load_fpr_DT0(DFPREG(rs1));
3720 gen_op_load_fpr_DT1(DFPREG(rs2));
3721 gen_helper_fcmple16();
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x022: /* VIS I fcmpne16 */
3725 CHECK_FPU_FEATURE(dc, VIS1);
3726 gen_op_load_fpr_DT0(DFPREG(rs1));
3727 gen_op_load_fpr_DT1(DFPREG(rs2));
3728 gen_helper_fcmpne16();
3729 gen_op_store_DT0_fpr(DFPREG(rd));
3730 break;
3731 case 0x024: /* VIS I fcmple32 */
3732 CHECK_FPU_FEATURE(dc, VIS1);
3733 gen_op_load_fpr_DT0(DFPREG(rs1));
3734 gen_op_load_fpr_DT1(DFPREG(rs2));
3735 gen_helper_fcmple32();
3736 gen_op_store_DT0_fpr(DFPREG(rd));
3737 break;
3738 case 0x026: /* VIS I fcmpne32 */
3739 CHECK_FPU_FEATURE(dc, VIS1);
3740 gen_op_load_fpr_DT0(DFPREG(rs1));
3741 gen_op_load_fpr_DT1(DFPREG(rs2));
3742 gen_helper_fcmpne32();
3743 gen_op_store_DT0_fpr(DFPREG(rd));
3744 break;
3745 case 0x028: /* VIS I fcmpgt16 */
3746 CHECK_FPU_FEATURE(dc, VIS1);
3747 gen_op_load_fpr_DT0(DFPREG(rs1));
3748 gen_op_load_fpr_DT1(DFPREG(rs2));
3749 gen_helper_fcmpgt16();
3750 gen_op_store_DT0_fpr(DFPREG(rd));
3751 break;
3752 case 0x02a: /* VIS I fcmpeq16 */
3753 CHECK_FPU_FEATURE(dc, VIS1);
3754 gen_op_load_fpr_DT0(DFPREG(rs1));
3755 gen_op_load_fpr_DT1(DFPREG(rs2));
3756 gen_helper_fcmpeq16();
3757 gen_op_store_DT0_fpr(DFPREG(rd));
3758 break;
3759 case 0x02c: /* VIS I fcmpgt32 */
3760 CHECK_FPU_FEATURE(dc, VIS1);
3761 gen_op_load_fpr_DT0(DFPREG(rs1));
3762 gen_op_load_fpr_DT1(DFPREG(rs2));
3763 gen_helper_fcmpgt32();
3764 gen_op_store_DT0_fpr(DFPREG(rd));
3765 break;
3766 case 0x02e: /* VIS I fcmpeq32 */
3767 CHECK_FPU_FEATURE(dc, VIS1);
3768 gen_op_load_fpr_DT0(DFPREG(rs1));
3769 gen_op_load_fpr_DT1(DFPREG(rs2));
3770 gen_helper_fcmpeq32();
3771 gen_op_store_DT0_fpr(DFPREG(rd));
3772 break;
3773 case 0x031: /* VIS I fmul8x16 */
3774 CHECK_FPU_FEATURE(dc, VIS1);
3775 gen_op_load_fpr_DT0(DFPREG(rs1));
3776 gen_op_load_fpr_DT1(DFPREG(rs2));
3777 gen_helper_fmul8x16();
3778 gen_op_store_DT0_fpr(DFPREG(rd));
3779 break;
3780 case 0x033: /* VIS I fmul8x16au */
3781 CHECK_FPU_FEATURE(dc, VIS1);
3782 gen_op_load_fpr_DT0(DFPREG(rs1));
3783 gen_op_load_fpr_DT1(DFPREG(rs2));
3784 gen_helper_fmul8x16au();
3785 gen_op_store_DT0_fpr(DFPREG(rd));
3786 break;
3787 case 0x035: /* VIS I fmul8x16al */
3788 CHECK_FPU_FEATURE(dc, VIS1);
3789 gen_op_load_fpr_DT0(DFPREG(rs1));
3790 gen_op_load_fpr_DT1(DFPREG(rs2));
3791 gen_helper_fmul8x16al();
3792 gen_op_store_DT0_fpr(DFPREG(rd));
3793 break;
3794 case 0x036: /* VIS I fmul8sux16 */
3795 CHECK_FPU_FEATURE(dc, VIS1);
3796 gen_op_load_fpr_DT0(DFPREG(rs1));
3797 gen_op_load_fpr_DT1(DFPREG(rs2));
3798 gen_helper_fmul8sux16();
3799 gen_op_store_DT0_fpr(DFPREG(rd));
3800 break;
3801 case 0x037: /* VIS I fmul8ulx16 */
3802 CHECK_FPU_FEATURE(dc, VIS1);
3803 gen_op_load_fpr_DT0(DFPREG(rs1));
3804 gen_op_load_fpr_DT1(DFPREG(rs2));
3805 gen_helper_fmul8ulx16();
3806 gen_op_store_DT0_fpr(DFPREG(rd));
3807 break;
3808 case 0x038: /* VIS I fmuld8sux16 */
3809 CHECK_FPU_FEATURE(dc, VIS1);
3810 gen_op_load_fpr_DT0(DFPREG(rs1));
3811 gen_op_load_fpr_DT1(DFPREG(rs2));
3812 gen_helper_fmuld8sux16();
3813 gen_op_store_DT0_fpr(DFPREG(rd));
3814 break;
3815 case 0x039: /* VIS I fmuld8ulx16 */
3816 CHECK_FPU_FEATURE(dc, VIS1);
3817 gen_op_load_fpr_DT0(DFPREG(rs1));
3818 gen_op_load_fpr_DT1(DFPREG(rs2));
3819 gen_helper_fmuld8ulx16();
3820 gen_op_store_DT0_fpr(DFPREG(rd));
3821 break;
3822 case 0x03a: /* VIS I fpack32 */
3823 case 0x03b: /* VIS I fpack16 */
3824 case 0x03d: /* VIS I fpackfix */
3825 case 0x03e: /* VIS I pdist */
3826 // XXX
3827 goto illegal_insn;
3828 case 0x048: /* VIS I faligndata */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_DT0(DFPREG(rs1));
3831 gen_op_load_fpr_DT1(DFPREG(rs2));
3832 gen_helper_faligndata();
3833 gen_op_store_DT0_fpr(DFPREG(rd));
3834 break;
3835 case 0x04b: /* VIS I fpmerge */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 gen_helper_fpmerge();
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3841 break;
3842 case 0x04c: /* VIS II bshuffle */
3843 // XXX
3844 goto illegal_insn;
3845 case 0x04d: /* VIS I fexpand */
3846 CHECK_FPU_FEATURE(dc, VIS1);
3847 gen_op_load_fpr_DT0(DFPREG(rs1));
3848 gen_op_load_fpr_DT1(DFPREG(rs2));
3849 gen_helper_fexpand();
3850 gen_op_store_DT0_fpr(DFPREG(rd));
3851 break;
3852 case 0x050: /* VIS I fpadd16 */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 gen_helper_fpadd16();
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x051: /* VIS I fpadd16s */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 gen_helper_fpadd16s(cpu_fpr[rd],
3862 cpu_fpr[rs1], cpu_fpr[rs2]);
3863 break;
3864 case 0x052: /* VIS I fpadd32 */
3865 CHECK_FPU_FEATURE(dc, VIS1);
3866 gen_op_load_fpr_DT0(DFPREG(rs1));
3867 gen_op_load_fpr_DT1(DFPREG(rs2));
3868 gen_helper_fpadd32();
3869 gen_op_store_DT0_fpr(DFPREG(rd));
3870 break;
3871 case 0x053: /* VIS I fpadd32s */
3872 CHECK_FPU_FEATURE(dc, VIS1);
3873 gen_helper_fpadd32s(cpu_fpr[rd],
3874 cpu_fpr[rs1], cpu_fpr[rs2]);
3875 break;
3876 case 0x054: /* VIS I fpsub16 */
3877 CHECK_FPU_FEATURE(dc, VIS1);
3878 gen_op_load_fpr_DT0(DFPREG(rs1));
3879 gen_op_load_fpr_DT1(DFPREG(rs2));
3880 gen_helper_fpsub16();
3881 gen_op_store_DT0_fpr(DFPREG(rd));
3882 break;
3883 case 0x055: /* VIS I fpsub16s */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 gen_helper_fpsub16s(cpu_fpr[rd],
3886 cpu_fpr[rs1], cpu_fpr[rs2]);
3887 break;
3888 case 0x056: /* VIS I fpsub32 */
3889 CHECK_FPU_FEATURE(dc, VIS1);
3890 gen_op_load_fpr_DT0(DFPREG(rs1));
3891 gen_op_load_fpr_DT1(DFPREG(rs2));
3892 gen_helper_fpsub32();
3893 gen_op_store_DT0_fpr(DFPREG(rd));
3894 break;
3895 case 0x057: /* VIS I fpsub32s */
3896 CHECK_FPU_FEATURE(dc, VIS1);
3897 gen_helper_fpsub32s(cpu_fpr[rd],
3898 cpu_fpr[rs1], cpu_fpr[rs2]);
3899 break;
3900 case 0x060: /* VIS I fzero */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3903 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3904 break;
3905 case 0x061: /* VIS I fzeros */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3908 break;
3909 case 0x062: /* VIS I fnor */
3910 CHECK_FPU_FEATURE(dc, VIS1);
3911 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3912 cpu_fpr[DFPREG(rs2)]);
3913 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3914 cpu_fpr[DFPREG(rs2) + 1]);
3915 break;
3916 case 0x063: /* VIS I fnors */
3917 CHECK_FPU_FEATURE(dc, VIS1);
3918 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3919 break;
3920 case 0x064: /* VIS I fandnot2 */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3923 cpu_fpr[DFPREG(rs2)]);
3924 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3925 cpu_fpr[DFPREG(rs1) + 1],
3926 cpu_fpr[DFPREG(rs2) + 1]);
3927 break;
3928 case 0x065: /* VIS I fandnot2s */
3929 CHECK_FPU_FEATURE(dc, VIS1);
3930 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3931 break;
3932 case 0x066: /* VIS I fnot2 */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
3935 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3936 cpu_fpr[DFPREG(rs2) + 1]);
3937 break;
3938 case 0x067: /* VIS I fnot2s */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
3941 break;
3942 case 0x068: /* VIS I fandnot1 */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3945 cpu_fpr[DFPREG(rs1)]);
3946 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3947 cpu_fpr[DFPREG(rs2) + 1],
3948 cpu_fpr[DFPREG(rs1) + 1]);
3949 break;
3950 case 0x069: /* VIS I fandnot1s */
3951 CHECK_FPU_FEATURE(dc, VIS1);
3952 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
3953 break;
3954 case 0x06a: /* VIS I fnot1 */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3957 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
3958 cpu_fpr[DFPREG(rs1) + 1]);
3959 break;
3960 case 0x06b: /* VIS I fnot1s */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3963 break;
3964 case 0x06c: /* VIS I fxor */
3965 CHECK_FPU_FEATURE(dc, VIS1);
3966 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3967 cpu_fpr[DFPREG(rs2)]);
3968 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3969 cpu_fpr[DFPREG(rs1) + 1],
3970 cpu_fpr[DFPREG(rs2) + 1]);
3971 break;
3972 case 0x06d: /* VIS I fxors */
3973 CHECK_FPU_FEATURE(dc, VIS1);
3974 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3975 break;
3976 case 0x06e: /* VIS I fnand */
3977 CHECK_FPU_FEATURE(dc, VIS1);
3978 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3979 cpu_fpr[DFPREG(rs2)]);
3980 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3981 cpu_fpr[DFPREG(rs2) + 1]);
3982 break;
3983 case 0x06f: /* VIS I fnands */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3986 break;
3987 case 0x070: /* VIS I fand */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3990 cpu_fpr[DFPREG(rs2)]);
3991 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3992 cpu_fpr[DFPREG(rs1) + 1],
3993 cpu_fpr[DFPREG(rs2) + 1]);
3994 break;
3995 case 0x071: /* VIS I fands */
3996 CHECK_FPU_FEATURE(dc, VIS1);
3997 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3998 break;
3999 case 0x072: /* VIS I fxnor */
4000 CHECK_FPU_FEATURE(dc, VIS1);
4001 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4002 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4003 cpu_fpr[DFPREG(rs1)]);
4004 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4005 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4006 cpu_fpr[DFPREG(rs1) + 1]);
4007 break;
4008 case 0x073: /* VIS I fxnors */
4009 CHECK_FPU_FEATURE(dc, VIS1);
4010 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4011 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4012 break;
4013 case 0x074: /* VIS I fsrc1 */
4014 CHECK_FPU_FEATURE(dc, VIS1);
4015 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4016 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4017 cpu_fpr[DFPREG(rs1) + 1]);
4018 break;
4019 case 0x075: /* VIS I fsrc1s */
4020 CHECK_FPU_FEATURE(dc, VIS1);
4021 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4022 break;
4023 case 0x076: /* VIS I fornot2 */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4026 cpu_fpr[DFPREG(rs2)]);
4027 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4028 cpu_fpr[DFPREG(rs1) + 1],
4029 cpu_fpr[DFPREG(rs2) + 1]);
4030 break;
4031 case 0x077: /* VIS I fornot2s */
4032 CHECK_FPU_FEATURE(dc, VIS1);
4033 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4034 break;
4035 case 0x078: /* VIS I fsrc2 */
4036 CHECK_FPU_FEATURE(dc, VIS1);
4037 gen_op_load_fpr_DT0(DFPREG(rs2));
4038 gen_op_store_DT0_fpr(DFPREG(rd));
4039 break;
4040 case 0x079: /* VIS I fsrc2s */
4041 CHECK_FPU_FEATURE(dc, VIS1);
4042 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4043 break;
4044 case 0x07a: /* VIS I fornot1 */
4045 CHECK_FPU_FEATURE(dc, VIS1);
4046 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4047 cpu_fpr[DFPREG(rs1)]);
4048 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4049 cpu_fpr[DFPREG(rs2) + 1],
4050 cpu_fpr[DFPREG(rs1) + 1]);
4051 break;
4052 case 0x07b: /* VIS I fornot1s */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4055 break;
4056 case 0x07c: /* VIS I for */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4059 cpu_fpr[DFPREG(rs2)]);
4060 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4061 cpu_fpr[DFPREG(rs1) + 1],
4062 cpu_fpr[DFPREG(rs2) + 1]);
4063 break;
4064 case 0x07d: /* VIS I fors */
4065 CHECK_FPU_FEATURE(dc, VIS1);
4066 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4067 break;
4068 case 0x07e: /* VIS I fone */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4071 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4072 break;
4073 case 0x07f: /* VIS I fones */
4074 CHECK_FPU_FEATURE(dc, VIS1);
4075 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4076 break;
4077 case 0x080: /* VIS I shutdown */
4078 case 0x081: /* VIS II siam */
4079 // XXX
4080 goto illegal_insn;
4081 default:
4082 goto illegal_insn;
4084 #else
4085 goto ncp_insn;
4086 #endif
4087 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4088 #ifdef TARGET_SPARC64
4089 goto illegal_insn;
4090 #else
4091 goto ncp_insn;
4092 #endif
4093 #ifdef TARGET_SPARC64
4094 } else if (xop == 0x39) { /* V9 return */
4095 TCGv_i32 r_const;
4097 save_state(dc, cpu_cond);
4098 cpu_src1 = get_src1(insn, cpu_src1);
4099 if (IS_IMM) { /* immediate */
4100 rs2 = GET_FIELDs(insn, 19, 31);
4101 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4102 } else { /* register */
4103 rs2 = GET_FIELD(insn, 27, 31);
4104 if (rs2) {
4105 gen_movl_reg_TN(rs2, cpu_src2);
4106 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4107 } else
4108 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4110 gen_helper_restore();
4111 gen_mov_pc_npc(dc, cpu_cond);
4112 r_const = tcg_const_i32(3);
4113 gen_helper_check_align(cpu_dst, r_const);
4114 tcg_temp_free_i32(r_const);
4115 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4116 dc->npc = DYNAMIC_PC;
4117 goto jmp_insn;
4118 #endif
4119 } else {
4120 cpu_src1 = get_src1(insn, cpu_src1);
4121 if (IS_IMM) { /* immediate */
4122 rs2 = GET_FIELDs(insn, 19, 31);
4123 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4124 } else { /* register */
4125 rs2 = GET_FIELD(insn, 27, 31);
4126 if (rs2) {
4127 gen_movl_reg_TN(rs2, cpu_src2);
4128 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4129 } else
4130 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4132 switch (xop) {
4133 case 0x38: /* jmpl */
4135 TCGv r_pc;
4136 TCGv_i32 r_const;
4138 r_pc = tcg_const_tl(dc->pc);
4139 gen_movl_TN_reg(rd, r_pc);
4140 tcg_temp_free(r_pc);
4141 gen_mov_pc_npc(dc, cpu_cond);
4142 r_const = tcg_const_i32(3);
4143 gen_helper_check_align(cpu_dst, r_const);
4144 tcg_temp_free_i32(r_const);
4145 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4146 dc->npc = DYNAMIC_PC;
4148 goto jmp_insn;
4149 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4150 case 0x39: /* rett, V9 return */
4152 TCGv_i32 r_const;
4154 if (!supervisor(dc))
4155 goto priv_insn;
4156 gen_mov_pc_npc(dc, cpu_cond);
4157 r_const = tcg_const_i32(3);
4158 gen_helper_check_align(cpu_dst, r_const);
4159 tcg_temp_free_i32(r_const);
4160 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4161 dc->npc = DYNAMIC_PC;
4162 gen_helper_rett();
4164 goto jmp_insn;
4165 #endif
4166 case 0x3b: /* flush */
4167 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4168 goto unimp_flush;
4169 gen_helper_flush(cpu_dst);
4170 break;
4171 case 0x3c: /* save */
4172 save_state(dc, cpu_cond);
4173 gen_helper_save();
4174 gen_movl_TN_reg(rd, cpu_dst);
4175 break;
4176 case 0x3d: /* restore */
4177 save_state(dc, cpu_cond);
4178 gen_helper_restore();
4179 gen_movl_TN_reg(rd, cpu_dst);
4180 break;
4181 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4182 case 0x3e: /* V9 done/retry */
4184 switch (rd) {
4185 case 0:
4186 if (!supervisor(dc))
4187 goto priv_insn;
4188 dc->npc = DYNAMIC_PC;
4189 dc->pc = DYNAMIC_PC;
4190 gen_helper_done();
4191 goto jmp_insn;
4192 case 1:
4193 if (!supervisor(dc))
4194 goto priv_insn;
4195 dc->npc = DYNAMIC_PC;
4196 dc->pc = DYNAMIC_PC;
4197 gen_helper_retry();
4198 goto jmp_insn;
4199 default:
4200 goto illegal_insn;
4203 break;
4204 #endif
4205 default:
4206 goto illegal_insn;
4209 break;
4211 break;
4212 case 3: /* load/store instructions */
4214 unsigned int xop = GET_FIELD(insn, 7, 12);
4216 cpu_src1 = get_src1(insn, cpu_src1);
4217 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4218 rs2 = GET_FIELD(insn, 27, 31);
4219 gen_movl_reg_TN(rs2, cpu_src2);
4220 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4221 } else if (IS_IMM) { /* immediate */
4222 rs2 = GET_FIELDs(insn, 19, 31);
4223 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4224 } else { /* register */
4225 rs2 = GET_FIELD(insn, 27, 31);
4226 if (rs2 != 0) {
4227 gen_movl_reg_TN(rs2, cpu_src2);
4228 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4229 } else
4230 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4232 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4233 (xop > 0x17 && xop <= 0x1d ) ||
4234 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4235 switch (xop) {
4236 case 0x0: /* load unsigned word */
4237 gen_address_mask(dc, cpu_addr);
4238 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4239 break;
4240 case 0x1: /* load unsigned byte */
4241 gen_address_mask(dc, cpu_addr);
4242 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4243 break;
4244 case 0x2: /* load unsigned halfword */
4245 gen_address_mask(dc, cpu_addr);
4246 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4247 break;
4248 case 0x3: /* load double word */
4249 if (rd & 1)
4250 goto illegal_insn;
4251 else {
4252 TCGv_i32 r_const;
4254 save_state(dc, cpu_cond);
4255 r_const = tcg_const_i32(7);
4256 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4257 tcg_temp_free_i32(r_const);
4258 gen_address_mask(dc, cpu_addr);
4259 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4260 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4261 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4262 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4263 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4264 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4265 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4267 break;
4268 case 0x9: /* load signed byte */
4269 gen_address_mask(dc, cpu_addr);
4270 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4271 break;
4272 case 0xa: /* load signed halfword */
4273 gen_address_mask(dc, cpu_addr);
4274 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4275 break;
4276 case 0xd: /* ldstub -- XXX: should be atomically */
4278 TCGv r_const;
4280 gen_address_mask(dc, cpu_addr);
4281 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4282 r_const = tcg_const_tl(0xff);
4283 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4284 tcg_temp_free(r_const);
4286 break;
4287 case 0x0f: /* swap register with memory. Also
4288 atomically */
4289 CHECK_IU_FEATURE(dc, SWAP);
4290 gen_movl_reg_TN(rd, cpu_val);
4291 gen_address_mask(dc, cpu_addr);
4292 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4293 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4294 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4295 break;
4296 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4297 case 0x10: /* load word alternate */
4298 #ifndef TARGET_SPARC64
4299 if (IS_IMM)
4300 goto illegal_insn;
4301 if (!supervisor(dc))
4302 goto priv_insn;
4303 #endif
4304 save_state(dc, cpu_cond);
4305 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4306 break;
4307 case 0x11: /* load unsigned byte alternate */
4308 #ifndef TARGET_SPARC64
4309 if (IS_IMM)
4310 goto illegal_insn;
4311 if (!supervisor(dc))
4312 goto priv_insn;
4313 #endif
4314 save_state(dc, cpu_cond);
4315 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4316 break;
4317 case 0x12: /* load unsigned halfword alternate */
4318 #ifndef TARGET_SPARC64
4319 if (IS_IMM)
4320 goto illegal_insn;
4321 if (!supervisor(dc))
4322 goto priv_insn;
4323 #endif
4324 save_state(dc, cpu_cond);
4325 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4326 break;
4327 case 0x13: /* load double word alternate */
4328 #ifndef TARGET_SPARC64
4329 if (IS_IMM)
4330 goto illegal_insn;
4331 if (!supervisor(dc))
4332 goto priv_insn;
4333 #endif
4334 if (rd & 1)
4335 goto illegal_insn;
4336 save_state(dc, cpu_cond);
4337 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4338 goto skip_move;
4339 case 0x19: /* load signed byte alternate */
4340 #ifndef TARGET_SPARC64
4341 if (IS_IMM)
4342 goto illegal_insn;
4343 if (!supervisor(dc))
4344 goto priv_insn;
4345 #endif
4346 save_state(dc, cpu_cond);
4347 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4348 break;
4349 case 0x1a: /* load signed halfword alternate */
4350 #ifndef TARGET_SPARC64
4351 if (IS_IMM)
4352 goto illegal_insn;
4353 if (!supervisor(dc))
4354 goto priv_insn;
4355 #endif
4356 save_state(dc, cpu_cond);
4357 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4358 break;
4359 case 0x1d: /* ldstuba -- XXX: should be atomically */
4360 #ifndef TARGET_SPARC64
4361 if (IS_IMM)
4362 goto illegal_insn;
4363 if (!supervisor(dc))
4364 goto priv_insn;
4365 #endif
4366 save_state(dc, cpu_cond);
4367 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4368 break;
4369 case 0x1f: /* swap reg with alt. memory. Also
4370 atomically */
4371 CHECK_IU_FEATURE(dc, SWAP);
4372 #ifndef TARGET_SPARC64
4373 if (IS_IMM)
4374 goto illegal_insn;
4375 if (!supervisor(dc))
4376 goto priv_insn;
4377 #endif
4378 save_state(dc, cpu_cond);
4379 gen_movl_reg_TN(rd, cpu_val);
4380 gen_swap_asi(cpu_val, cpu_addr, insn);
4381 break;
4383 #ifndef TARGET_SPARC64
4384 case 0x30: /* ldc */
4385 case 0x31: /* ldcsr */
4386 case 0x33: /* lddc */
4387 goto ncp_insn;
4388 #endif
4389 #endif
4390 #ifdef TARGET_SPARC64
4391 case 0x08: /* V9 ldsw */
4392 gen_address_mask(dc, cpu_addr);
4393 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4394 break;
4395 case 0x0b: /* V9 ldx */
4396 gen_address_mask(dc, cpu_addr);
4397 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4398 break;
4399 case 0x18: /* V9 ldswa */
4400 save_state(dc, cpu_cond);
4401 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4402 break;
4403 case 0x1b: /* V9 ldxa */
4404 save_state(dc, cpu_cond);
4405 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4406 break;
4407 case 0x2d: /* V9 prefetch, no effect */
4408 goto skip_move;
4409 case 0x30: /* V9 ldfa */
4410 save_state(dc, cpu_cond);
4411 gen_ldf_asi(cpu_addr, insn, 4, rd);
4412 goto skip_move;
4413 case 0x33: /* V9 lddfa */
4414 save_state(dc, cpu_cond);
4415 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4416 goto skip_move;
4417 case 0x3d: /* V9 prefetcha, no effect */
4418 goto skip_move;
4419 case 0x32: /* V9 ldqfa */
4420 CHECK_FPU_FEATURE(dc, FLOAT128);
4421 save_state(dc, cpu_cond);
4422 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4423 goto skip_move;
4424 #endif
4425 default:
4426 goto illegal_insn;
4428 gen_movl_TN_reg(rd, cpu_val);
4429 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4430 skip_move: ;
4431 #endif
4432 } else if (xop >= 0x20 && xop < 0x24) {
4433 if (gen_trap_ifnofpu(dc, cpu_cond))
4434 goto jmp_insn;
4435 save_state(dc, cpu_cond);
4436 switch (xop) {
4437 case 0x20: /* load fpreg */
4438 gen_address_mask(dc, cpu_addr);
4439 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4440 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4441 break;
4442 case 0x21: /* ldfsr, V9 ldxfsr */
4443 #ifdef TARGET_SPARC64
4444 gen_address_mask(dc, cpu_addr);
4445 if (rd == 1) {
4446 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4447 gen_helper_ldxfsr(cpu_tmp64);
4448 } else
4449 #else
4451 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4452 gen_helper_ldfsr(cpu_tmp32);
4454 #endif
4455 break;
4456 case 0x22: /* load quad fpreg */
4458 TCGv_i32 r_const;
4460 CHECK_FPU_FEATURE(dc, FLOAT128);
4461 r_const = tcg_const_i32(dc->mem_idx);
4462 gen_helper_ldqf(cpu_addr, r_const);
4463 tcg_temp_free_i32(r_const);
4464 gen_op_store_QT0_fpr(QFPREG(rd));
4466 break;
4467 case 0x23: /* load double fpreg */
4469 TCGv_i32 r_const;
4471 r_const = tcg_const_i32(dc->mem_idx);
4472 gen_helper_lddf(cpu_addr, r_const);
4473 tcg_temp_free_i32(r_const);
4474 gen_op_store_DT0_fpr(DFPREG(rd));
4476 break;
4477 default:
4478 goto illegal_insn;
4480 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4481 xop == 0xe || xop == 0x1e) {
4482 gen_movl_reg_TN(rd, cpu_val);
4483 switch (xop) {
4484 case 0x4: /* store word */
4485 gen_address_mask(dc, cpu_addr);
4486 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4487 break;
4488 case 0x5: /* store byte */
4489 gen_address_mask(dc, cpu_addr);
4490 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4491 break;
4492 case 0x6: /* store halfword */
4493 gen_address_mask(dc, cpu_addr);
4494 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4495 break;
4496 case 0x7: /* store double word */
4497 if (rd & 1)
4498 goto illegal_insn;
4499 else {
4500 TCGv_i32 r_const;
4502 save_state(dc, cpu_cond);
4503 gen_address_mask(dc, cpu_addr);
4504 r_const = tcg_const_i32(7);
4505 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4506 tcg_temp_free_i32(r_const);
4507 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4508 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4509 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4511 break;
4512 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4513 case 0x14: /* store word alternate */
4514 #ifndef TARGET_SPARC64
4515 if (IS_IMM)
4516 goto illegal_insn;
4517 if (!supervisor(dc))
4518 goto priv_insn;
4519 #endif
4520 save_state(dc, cpu_cond);
4521 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4522 break;
4523 case 0x15: /* store byte alternate */
4524 #ifndef TARGET_SPARC64
4525 if (IS_IMM)
4526 goto illegal_insn;
4527 if (!supervisor(dc))
4528 goto priv_insn;
4529 #endif
4530 save_state(dc, cpu_cond);
4531 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4532 break;
4533 case 0x16: /* store halfword alternate */
4534 #ifndef TARGET_SPARC64
4535 if (IS_IMM)
4536 goto illegal_insn;
4537 if (!supervisor(dc))
4538 goto priv_insn;
4539 #endif
4540 save_state(dc, cpu_cond);
4541 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4542 break;
4543 case 0x17: /* store double word alternate */
4544 #ifndef TARGET_SPARC64
4545 if (IS_IMM)
4546 goto illegal_insn;
4547 if (!supervisor(dc))
4548 goto priv_insn;
4549 #endif
4550 if (rd & 1)
4551 goto illegal_insn;
4552 else {
4553 save_state(dc, cpu_cond);
4554 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4556 break;
4557 #endif
4558 #ifdef TARGET_SPARC64
4559 case 0x0e: /* V9 stx */
4560 gen_address_mask(dc, cpu_addr);
4561 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4562 break;
4563 case 0x1e: /* V9 stxa */
4564 save_state(dc, cpu_cond);
4565 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4566 break;
4567 #endif
4568 default:
4569 goto illegal_insn;
4571 } else if (xop > 0x23 && xop < 0x28) {
4572 if (gen_trap_ifnofpu(dc, cpu_cond))
4573 goto jmp_insn;
4574 save_state(dc, cpu_cond);
4575 switch (xop) {
4576 case 0x24: /* store fpreg */
4577 gen_address_mask(dc, cpu_addr);
4578 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4579 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4580 break;
4581 case 0x25: /* stfsr, V9 stxfsr */
4582 #ifdef TARGET_SPARC64
4583 gen_address_mask(dc, cpu_addr);
4584 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4585 if (rd == 1)
4586 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4587 else
4588 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4589 #else
4590 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4591 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4592 #endif
4593 break;
4594 case 0x26:
4595 #ifdef TARGET_SPARC64
4596 /* V9 stqf, store quad fpreg */
4598 TCGv_i32 r_const;
4600 CHECK_FPU_FEATURE(dc, FLOAT128);
4601 gen_op_load_fpr_QT0(QFPREG(rd));
4602 r_const = tcg_const_i32(dc->mem_idx);
4603 gen_helper_stqf(cpu_addr, r_const);
4604 tcg_temp_free_i32(r_const);
4606 break;
4607 #else /* !TARGET_SPARC64 */
4608 /* stdfq, store floating point queue */
4609 #if defined(CONFIG_USER_ONLY)
4610 goto illegal_insn;
4611 #else
4612 if (!supervisor(dc))
4613 goto priv_insn;
4614 if (gen_trap_ifnofpu(dc, cpu_cond))
4615 goto jmp_insn;
4616 goto nfq_insn;
4617 #endif
4618 #endif
4619 case 0x27: /* store double fpreg */
4621 TCGv_i32 r_const;
4623 gen_op_load_fpr_DT0(DFPREG(rd));
4624 r_const = tcg_const_i32(dc->mem_idx);
4625 gen_helper_stdf(cpu_addr, r_const);
4626 tcg_temp_free_i32(r_const);
4628 break;
4629 default:
4630 goto illegal_insn;
4632 } else if (xop > 0x33 && xop < 0x3f) {
4633 save_state(dc, cpu_cond);
4634 switch (xop) {
4635 #ifdef TARGET_SPARC64
4636 case 0x34: /* V9 stfa */
4637 gen_stf_asi(cpu_addr, insn, 4, rd);
4638 break;
4639 case 0x36: /* V9 stqfa */
4641 TCGv_i32 r_const;
4643 CHECK_FPU_FEATURE(dc, FLOAT128);
4644 r_const = tcg_const_i32(7);
4645 gen_helper_check_align(cpu_addr, r_const);
4646 tcg_temp_free_i32(r_const);
4647 gen_op_load_fpr_QT0(QFPREG(rd));
4648 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4650 break;
4651 case 0x37: /* V9 stdfa */
4652 gen_op_load_fpr_DT0(DFPREG(rd));
4653 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4654 break;
4655 case 0x3c: /* V9 casa */
4656 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4657 gen_movl_TN_reg(rd, cpu_val);
4658 break;
4659 case 0x3e: /* V9 casxa */
4660 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4661 gen_movl_TN_reg(rd, cpu_val);
4662 break;
4663 #else
4664 case 0x34: /* stc */
4665 case 0x35: /* stcsr */
4666 case 0x36: /* stdcq */
4667 case 0x37: /* stdc */
4668 goto ncp_insn;
4669 #endif
4670 default:
4671 goto illegal_insn;
4674 else
4675 goto illegal_insn;
4677 break;
4679 /* default case for non jump instructions */
4680 if (dc->npc == DYNAMIC_PC) {
4681 dc->pc = DYNAMIC_PC;
4682 gen_op_next_insn();
4683 } else if (dc->npc == JUMP_PC) {
4684 /* we can do a static jump */
4685 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4686 dc->is_br = 1;
4687 } else {
4688 dc->pc = dc->npc;
4689 dc->npc = dc->npc + 4;
4691 jmp_insn:
4692 return;
4693 illegal_insn:
4695 TCGv_i32 r_const;
4697 save_state(dc, cpu_cond);
4698 r_const = tcg_const_i32(TT_ILL_INSN);
4699 gen_helper_raise_exception(r_const);
4700 tcg_temp_free_i32(r_const);
4701 dc->is_br = 1;
4703 return;
4704 unimp_flush:
4706 TCGv_i32 r_const;
4708 save_state(dc, cpu_cond);
4709 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4710 gen_helper_raise_exception(r_const);
4711 tcg_temp_free_i32(r_const);
4712 dc->is_br = 1;
4714 return;
4715 #if !defined(CONFIG_USER_ONLY)
4716 priv_insn:
4718 TCGv_i32 r_const;
4720 save_state(dc, cpu_cond);
4721 r_const = tcg_const_i32(TT_PRIV_INSN);
4722 gen_helper_raise_exception(r_const);
4723 tcg_temp_free_i32(r_const);
4724 dc->is_br = 1;
4726 return;
4727 #endif
4728 nfpu_insn:
4729 save_state(dc, cpu_cond);
4730 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4731 dc->is_br = 1;
4732 return;
4733 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4734 nfq_insn:
4735 save_state(dc, cpu_cond);
4736 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4737 dc->is_br = 1;
4738 return;
4739 #endif
4740 #ifndef TARGET_SPARC64
4741 ncp_insn:
4743 TCGv r_const;
4745 save_state(dc, cpu_cond);
4746 r_const = tcg_const_i32(TT_NCP_INSN);
4747 gen_helper_raise_exception(r_const);
4748 tcg_temp_free(r_const);
4749 dc->is_br = 1;
4751 return;
4752 #endif
4755 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
4756 int spc, CPUSPARCState *env)
4758 target_ulong pc_start, last_pc;
4759 uint16_t *gen_opc_end;
4760 DisasContext dc1, *dc = &dc1;
4761 CPUBreakpoint *bp;
4762 int j, lj = -1;
4763 int num_insns;
4764 int max_insns;
4766 memset(dc, 0, sizeof(DisasContext));
4767 dc->tb = tb;
4768 pc_start = tb->pc;
4769 dc->pc = pc_start;
4770 last_pc = dc->pc;
4771 dc->npc = (target_ulong) tb->cs_base;
4772 dc->mem_idx = cpu_mmu_index(env);
4773 dc->def = env->def;
4774 if ((dc->def->features & CPU_FEATURE_FLOAT))
4775 dc->fpu_enabled = cpu_fpu_enabled(env);
4776 else
4777 dc->fpu_enabled = 0;
4778 #ifdef TARGET_SPARC64
4779 dc->address_mask_32bit = env->pstate & PS_AM;
4780 #endif
4781 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
4783 cpu_tmp0 = tcg_temp_new();
4784 cpu_tmp32 = tcg_temp_new_i32();
4785 cpu_tmp64 = tcg_temp_new_i64();
4787 cpu_dst = tcg_temp_local_new();
4789 // loads and stores
4790 cpu_val = tcg_temp_local_new();
4791 cpu_addr = tcg_temp_local_new();
4793 num_insns = 0;
4794 max_insns = tb->cflags & CF_COUNT_MASK;
4795 if (max_insns == 0)
4796 max_insns = CF_COUNT_MASK;
4797 gen_icount_start();
4798 do {
4799 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
4800 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
4801 if (bp->pc == dc->pc) {
4802 if (dc->pc != pc_start)
4803 save_state(dc, cpu_cond);
4804 gen_helper_debug();
4805 tcg_gen_exit_tb(0);
4806 dc->is_br = 1;
4807 goto exit_gen_loop;
4811 if (spc) {
4812 qemu_log("Search PC...\n");
4813 j = gen_opc_ptr - gen_opc_buf;
4814 if (lj < j) {
4815 lj++;
4816 while (lj < j)
4817 gen_opc_instr_start[lj++] = 0;
4818 gen_opc_pc[lj] = dc->pc;
4819 gen_opc_npc[lj] = dc->npc;
4820 gen_opc_instr_start[lj] = 1;
4821 gen_opc_icount[lj] = num_insns;
4824 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
4825 gen_io_start();
4826 last_pc = dc->pc;
4827 disas_sparc_insn(dc);
4828 num_insns++;
4830 if (dc->is_br)
4831 break;
4832 /* if the next PC is different, we abort now */
4833 if (dc->pc != (last_pc + 4))
4834 break;
4835 /* if we reach a page boundary, we stop generation so that the
4836 PC of a TT_TFAULT exception is always in the right page */
4837 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
4838 break;
4839 /* if single step mode, we generate only one instruction and
4840 generate an exception */
4841 if (env->singlestep_enabled || singlestep) {
4842 tcg_gen_movi_tl(cpu_pc, dc->pc);
4843 tcg_gen_exit_tb(0);
4844 break;
4846 } while ((gen_opc_ptr < gen_opc_end) &&
4847 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
4848 num_insns < max_insns);
4850 exit_gen_loop:
4851 tcg_temp_free(cpu_addr);
4852 tcg_temp_free(cpu_val);
4853 tcg_temp_free(cpu_dst);
4854 tcg_temp_free_i64(cpu_tmp64);
4855 tcg_temp_free_i32(cpu_tmp32);
4856 tcg_temp_free(cpu_tmp0);
4857 if (tb->cflags & CF_LAST_IO)
4858 gen_io_end();
4859 if (!dc->is_br) {
4860 if (dc->pc != DYNAMIC_PC &&
4861 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
4862 /* static PC and NPC: we can use direct chaining */
4863 gen_goto_tb(dc, 0, dc->pc, dc->npc);
4864 } else {
4865 if (dc->pc != DYNAMIC_PC)
4866 tcg_gen_movi_tl(cpu_pc, dc->pc);
4867 save_npc(dc, cpu_cond);
4868 tcg_gen_exit_tb(0);
4871 gen_icount_end(tb, num_insns);
4872 *gen_opc_ptr = INDEX_op_end;
4873 if (spc) {
4874 j = gen_opc_ptr - gen_opc_buf;
4875 lj++;
4876 while (lj <= j)
4877 gen_opc_instr_start[lj++] = 0;
4878 #if 0
4879 log_page_dump();
4880 #endif
4881 gen_opc_jump_pc[0] = dc->jump_pc[0];
4882 gen_opc_jump_pc[1] = dc->jump_pc[1];
4883 } else {
4884 tb->size = last_pc + 4 - pc_start;
4885 tb->icount = num_insns;
4887 #ifdef DEBUG_DISAS
4888 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
4889 qemu_log("--------------\n");
4890 qemu_log("IN: %s\n", lookup_symbol(pc_start));
4891 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
4892 qemu_log("\n");
4894 #endif
4897 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4899 gen_intermediate_code_internal(tb, 0, env);
4902 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4904 gen_intermediate_code_internal(tb, 1, env);
4907 void gen_intermediate_code_init(CPUSPARCState *env)
4909 unsigned int i;
4910 static int inited;
4911 static const char * const gregnames[8] = {
4912 NULL, // g0 not used
4913 "g1",
4914 "g2",
4915 "g3",
4916 "g4",
4917 "g5",
4918 "g6",
4919 "g7",
4921 static const char * const fregnames[64] = {
4922 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4923 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4924 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4925 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4926 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4927 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4928 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4929 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4932 /* init various static tables */
4933 if (!inited) {
4934 inited = 1;
4936 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
4937 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
4938 offsetof(CPUState, regwptr),
4939 "regwptr");
4940 #ifdef TARGET_SPARC64
4941 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
4942 "xcc");
4943 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
4944 "asi");
4945 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
4946 "fprs");
4947 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
4948 "gsr");
4949 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
4950 offsetof(CPUState, tick_cmpr),
4951 "tick_cmpr");
4952 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
4953 offsetof(CPUState, stick_cmpr),
4954 "stick_cmpr");
4955 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
4956 offsetof(CPUState, hstick_cmpr),
4957 "hstick_cmpr");
4958 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
4959 "hintp");
4960 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
4961 "htba");
4962 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
4963 "hver");
4964 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
4965 offsetof(CPUState, ssr), "ssr");
4966 cpu_ver = tcg_global_mem_new(TCG_AREG0,
4967 offsetof(CPUState, version), "ver");
4968 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
4969 offsetof(CPUState, softint),
4970 "softint");
4971 #else
4972 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
4973 "wim");
4974 #endif
4975 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
4976 "cond");
4977 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
4978 "cc_src");
4979 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
4980 offsetof(CPUState, cc_src2),
4981 "cc_src2");
4982 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
4983 "cc_dst");
4984 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
4985 "psr");
4986 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
4987 "fsr");
4988 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
4989 "pc");
4990 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
4991 "npc");
4992 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
4993 #ifndef CONFIG_USER_ONLY
4994 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
4995 "tbr");
4996 #endif
4997 for (i = 1; i < 8; i++)
4998 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
4999 offsetof(CPUState, gregs[i]),
5000 gregnames[i]);
5001 for (i = 0; i < TARGET_FPREGS; i++)
5002 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5003 offsetof(CPUState, fpr[i]),
5004 fregnames[i]);
5006 /* register helpers */
5008 #define GEN_HELPER 2
5009 #include "helper.h"
5013 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5014 unsigned long searched_pc, int pc_pos, void *puc)
5016 target_ulong npc;
5017 env->pc = gen_opc_pc[pc_pos];
5018 npc = gen_opc_npc[pc_pos];
5019 if (npc == 1) {
5020 /* dynamic NPC: already stored */
5021 } else if (npc == 2) {
5022 target_ulong t2 = (target_ulong)(unsigned long)puc;
5023 /* jump PC: use T2 and the jump targets of the translation */
5024 if (t2)
5025 env->npc = gen_opc_jump_pc[0];
5026 else
5027 env->npc = gen_opc_jump_pc[1];
5028 } else {
5029 env->npc = npc;