Reindent
[qemu/hppa.git] / target-sparc / translate.c
blobf095b9bc5437d868e8831730afbdc539f42da9f7
1 /*
2 SPARC translation
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
34 #define GEN_HELPER 1
35 #include "helper.h"
37 #define DEBUG_DISAS
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
69 #include "gen-icount.h"
71 typedef struct DisasContext {
72 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
75 int is_br;
76 int mem_idx;
77 int fpu_enabled;
78 int address_mask_32bit;
79 struct TranslationBlock *tb;
80 sparc_def_t *def;
81 } DisasContext;
// This macro uses non-native bit order: bit 0 is the MSB, as drawn
// in the instruction-format diagrams.
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x, a, b) sign_extend(GET_FIELD(x, a, b), (b) - (a) + 1)
#define GET_FIELD_SPs(x, a, b) sign_extend(GET_FIELD_SP(x, a, b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map double/quad FP register numbers onto cpu_fpr[] indexes (the odd
   bit selects the upper bank of 32 registers). */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low 'len' bits of x to a full signed int,
 * e.g. sign_extend(0x1fff, 13) == -1.
 *
 * Shift left through unsigned: left-shifting a negative signed value is
 * undefined behavior in C.  The arithmetic right shift of a negative int
 * is implementation-defined but is what every QEMU host provides.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}

/* Bit 13 of the instruction selects the immediate form. */
#define IS_IMM (insn & (1 << 13))
113 /* floating point registers moves */
114 static void gen_op_load_fpr_DT0(unsigned int src)
116 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
117 offsetof(CPU_DoubleU, l.upper));
118 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
119 offsetof(CPU_DoubleU, l.lower));
122 static void gen_op_load_fpr_DT1(unsigned int src)
124 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
125 offsetof(CPU_DoubleU, l.upper));
126 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
127 offsetof(CPU_DoubleU, l.lower));
130 static void gen_op_store_DT0_fpr(unsigned int dst)
132 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
133 offsetof(CPU_DoubleU, l.upper));
134 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
135 offsetof(CPU_DoubleU, l.lower));
138 static void gen_op_load_fpr_QT0(unsigned int src)
140 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
141 offsetof(CPU_QuadU, l.upmost));
142 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
143 offsetof(CPU_QuadU, l.upper));
144 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
145 offsetof(CPU_QuadU, l.lower));
146 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.lowest));
150 static void gen_op_load_fpr_QT1(unsigned int src)
152 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
153 offsetof(CPU_QuadU, l.upmost));
154 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
155 offsetof(CPU_QuadU, l.upper));
156 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
157 offsetof(CPU_QuadU, l.lower));
158 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.lowest));
162 static void gen_op_store_QT0_fpr(unsigned int dst)
164 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
165 offsetof(CPU_QuadU, l.upmost));
166 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
167 offsetof(CPU_QuadU, l.upper));
168 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
169 offsetof(CPU_QuadU, l.lower));
170 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.lowest));
/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
/* mem_idx encodes the privilege level used for memory accesses. */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* PSTATE.AM set: effective addresses are truncated to 32 bits. */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
196 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
198 #ifdef TARGET_SPARC64
199 if (AM_CHECK(dc))
200 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
201 #endif
204 static inline void gen_movl_reg_TN(int reg, TCGv tn)
206 if (reg == 0)
207 tcg_gen_movi_tl(tn, 0);
208 else if (reg < 8)
209 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
210 else {
211 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
215 static inline void gen_movl_TN_reg(int reg, TCGv tn)
217 if (reg == 0)
218 return;
219 else if (reg < 8)
220 tcg_gen_mov_tl(cpu_gregs[reg], tn);
221 else {
222 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
226 static inline void gen_goto_tb(DisasContext *s, int tb_num,
227 target_ulong pc, target_ulong npc)
229 TranslationBlock *tb;
231 tb = s->tb;
232 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
233 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
234 /* jump to same page: we can use a direct jump */
235 tcg_gen_goto_tb(tb_num);
236 tcg_gen_movi_tl(cpu_pc, pc);
237 tcg_gen_movi_tl(cpu_npc, npc);
238 tcg_gen_exit_tb((long)tb + tb_num);
239 } else {
240 /* jump to another page: currently not optimized */
241 tcg_gen_movi_tl(cpu_pc, pc);
242 tcg_gen_movi_tl(cpu_npc, npc);
243 tcg_gen_exit_tb(0);
247 // XXX suboptimal
248 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
250 tcg_gen_extu_i32_tl(reg, src);
251 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
252 tcg_gen_andi_tl(reg, reg, 0x1);
255 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
257 tcg_gen_extu_i32_tl(reg, src);
258 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
259 tcg_gen_andi_tl(reg, reg, 0x1);
262 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
264 tcg_gen_extu_i32_tl(reg, src);
265 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
266 tcg_gen_andi_tl(reg, reg, 0x1);
269 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
271 tcg_gen_extu_i32_tl(reg, src);
272 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
273 tcg_gen_andi_tl(reg, reg, 0x1);
276 static inline void gen_cc_clear_icc(void)
278 tcg_gen_movi_i32(cpu_psr, 0);
281 #ifdef TARGET_SPARC64
282 static inline void gen_cc_clear_xcc(void)
284 tcg_gen_movi_i32(cpu_xcc, 0);
286 #endif
288 /* old op:
289 if (!T0)
290 env->psr |= PSR_ZERO;
291 if ((int32_t) T0 < 0)
292 env->psr |= PSR_NEG;
294 static inline void gen_cc_NZ_icc(TCGv dst)
296 TCGv r_temp;
297 int l1, l2;
299 l1 = gen_new_label();
300 l2 = gen_new_label();
301 r_temp = tcg_temp_new();
302 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
303 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
304 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
305 gen_set_label(l1);
306 tcg_gen_ext32s_tl(r_temp, dst);
307 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
308 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
309 gen_set_label(l2);
310 tcg_temp_free(r_temp);
313 #ifdef TARGET_SPARC64
314 static inline void gen_cc_NZ_xcc(TCGv dst)
316 int l1, l2;
318 l1 = gen_new_label();
319 l2 = gen_new_label();
320 tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
321 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
322 gen_set_label(l1);
323 tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
324 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
325 gen_set_label(l2);
327 #endif
329 /* old op:
330 if (T0 < src1)
331 env->psr |= PSR_CARRY;
333 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
335 TCGv r_temp1, r_temp2;
336 int l1;
338 l1 = gen_new_label();
339 r_temp1 = tcg_temp_new();
340 r_temp2 = tcg_temp_new();
341 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
342 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
343 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
344 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
345 gen_set_label(l1);
346 tcg_temp_free(r_temp1);
347 tcg_temp_free(r_temp2);
350 #ifdef TARGET_SPARC64
351 static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
353 int l1;
355 l1 = gen_new_label();
356 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
357 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
358 gen_set_label(l1);
360 #endif
362 /* old op:
363 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
364 env->psr |= PSR_OVF;
366 static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
368 TCGv r_temp;
370 r_temp = tcg_temp_new();
371 tcg_gen_xor_tl(r_temp, src1, src2);
372 tcg_gen_not_tl(r_temp, r_temp);
373 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
374 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
375 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
376 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
377 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
378 tcg_temp_free(r_temp);
379 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
382 #ifdef TARGET_SPARC64
383 static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
385 TCGv r_temp;
387 r_temp = tcg_temp_new();
388 tcg_gen_xor_tl(r_temp, src1, src2);
389 tcg_gen_not_tl(r_temp, r_temp);
390 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
391 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
392 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
393 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
394 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
395 tcg_temp_free(r_temp);
396 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
398 #endif
400 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
402 TCGv r_temp;
403 TCGv_i32 r_const;
404 int l1;
406 l1 = gen_new_label();
408 r_temp = tcg_temp_new();
409 tcg_gen_xor_tl(r_temp, src1, src2);
410 tcg_gen_not_tl(r_temp, r_temp);
411 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
412 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
413 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
414 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
415 r_const = tcg_const_i32(TT_TOVF);
416 gen_helper_raise_exception(r_const);
417 tcg_temp_free_i32(r_const);
418 gen_set_label(l1);
419 tcg_temp_free(r_temp);
422 static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
424 int l1;
426 l1 = gen_new_label();
427 tcg_gen_or_tl(cpu_tmp0, src1, src2);
428 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
429 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
430 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
431 gen_set_label(l1);
434 static inline void gen_tag_tv(TCGv src1, TCGv src2)
436 int l1;
437 TCGv_i32 r_const;
439 l1 = gen_new_label();
440 tcg_gen_or_tl(cpu_tmp0, src1, src2);
441 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
442 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
443 r_const = tcg_const_i32(TT_TOVF);
444 gen_helper_raise_exception(r_const);
445 tcg_temp_free_i32(r_const);
446 gen_set_label(l1);
449 static inline void gen_op_add_cc2(TCGv dst)
451 gen_cc_clear_icc();
452 gen_cc_NZ_icc(cpu_cc_dst);
453 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
454 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
455 #ifdef TARGET_SPARC64
456 gen_cc_clear_xcc();
457 gen_cc_NZ_xcc(cpu_cc_dst);
458 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
459 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
460 #endif
461 tcg_gen_mov_tl(dst, cpu_cc_dst);
464 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
466 tcg_gen_mov_tl(cpu_cc_src, src1);
467 tcg_gen_movi_tl(cpu_cc_src2, src2);
468 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
469 gen_op_add_cc2(dst);
472 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
474 tcg_gen_mov_tl(cpu_cc_src, src1);
475 tcg_gen_mov_tl(cpu_cc_src2, src2);
476 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 gen_op_add_cc2(dst);
480 static inline void gen_op_addx_cc2(TCGv dst)
482 gen_cc_NZ_icc(cpu_cc_dst);
483 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
484 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
485 #ifdef TARGET_SPARC64
486 gen_cc_NZ_xcc(cpu_cc_dst);
487 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
488 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
489 #endif
490 tcg_gen_mov_tl(dst, cpu_cc_dst);
493 static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
495 tcg_gen_mov_tl(cpu_cc_src, src1);
496 tcg_gen_movi_tl(cpu_cc_src2, src2);
497 gen_mov_reg_C(cpu_tmp0, cpu_psr);
498 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
499 gen_cc_clear_icc();
500 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
501 #ifdef TARGET_SPARC64
502 gen_cc_clear_xcc();
503 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
504 #endif
505 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
506 gen_op_addx_cc2(dst);
509 static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
511 tcg_gen_mov_tl(cpu_cc_src, src1);
512 tcg_gen_mov_tl(cpu_cc_src2, src2);
513 gen_mov_reg_C(cpu_tmp0, cpu_psr);
514 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
515 gen_cc_clear_icc();
516 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
517 #ifdef TARGET_SPARC64
518 gen_cc_clear_xcc();
519 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
520 #endif
521 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
522 gen_op_addx_cc2(dst);
525 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
527 tcg_gen_mov_tl(cpu_cc_src, src1);
528 tcg_gen_mov_tl(cpu_cc_src2, src2);
529 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
530 gen_cc_clear_icc();
531 gen_cc_NZ_icc(cpu_cc_dst);
532 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
533 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
534 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
535 #ifdef TARGET_SPARC64
536 gen_cc_clear_xcc();
537 gen_cc_NZ_xcc(cpu_cc_dst);
538 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
539 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
540 #endif
541 tcg_gen_mov_tl(dst, cpu_cc_dst);
544 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
546 tcg_gen_mov_tl(cpu_cc_src, src1);
547 tcg_gen_mov_tl(cpu_cc_src2, src2);
548 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
549 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
550 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
551 gen_cc_clear_icc();
552 gen_cc_NZ_icc(cpu_cc_dst);
553 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
554 #ifdef TARGET_SPARC64
555 gen_cc_clear_xcc();
556 gen_cc_NZ_xcc(cpu_cc_dst);
557 gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
558 gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
559 #endif
560 tcg_gen_mov_tl(dst, cpu_cc_dst);
563 /* old op:
564 if (src1 < T1)
565 env->psr |= PSR_CARRY;
567 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
569 TCGv r_temp1, r_temp2;
570 int l1;
572 l1 = gen_new_label();
573 r_temp1 = tcg_temp_new();
574 r_temp2 = tcg_temp_new();
575 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
576 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
577 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
578 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
579 gen_set_label(l1);
580 tcg_temp_free(r_temp1);
581 tcg_temp_free(r_temp2);
584 #ifdef TARGET_SPARC64
585 static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
587 int l1;
589 l1 = gen_new_label();
590 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
591 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
592 gen_set_label(l1);
594 #endif
596 /* old op:
597 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
598 env->psr |= PSR_OVF;
600 static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
602 TCGv r_temp;
604 r_temp = tcg_temp_new();
605 tcg_gen_xor_tl(r_temp, src1, src2);
606 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
607 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
608 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
609 tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
610 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
611 tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
612 tcg_temp_free(r_temp);
615 #ifdef TARGET_SPARC64
616 static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
618 TCGv r_temp;
620 r_temp = tcg_temp_new();
621 tcg_gen_xor_tl(r_temp, src1, src2);
622 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
623 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
624 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
625 tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
626 tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
627 tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
628 tcg_temp_free(r_temp);
630 #endif
632 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
634 TCGv r_temp;
635 TCGv_i32 r_const;
636 int l1;
638 l1 = gen_new_label();
640 r_temp = tcg_temp_new();
641 tcg_gen_xor_tl(r_temp, src1, src2);
642 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
643 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
644 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
645 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
646 r_const = tcg_const_i32(TT_TOVF);
647 gen_helper_raise_exception(r_const);
648 tcg_temp_free_i32(r_const);
649 gen_set_label(l1);
650 tcg_temp_free(r_temp);
653 static inline void gen_op_sub_cc2(TCGv dst)
655 gen_cc_clear_icc();
656 gen_cc_NZ_icc(cpu_cc_dst);
657 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
658 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
659 #ifdef TARGET_SPARC64
660 gen_cc_clear_xcc();
661 gen_cc_NZ_xcc(cpu_cc_dst);
662 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
663 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
664 #endif
665 tcg_gen_mov_tl(dst, cpu_cc_dst);
668 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
670 tcg_gen_mov_tl(cpu_cc_src, src1);
671 tcg_gen_movi_tl(cpu_cc_src2, src2);
672 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
673 gen_op_sub_cc2(dst);
676 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
678 tcg_gen_mov_tl(cpu_cc_src, src1);
679 tcg_gen_mov_tl(cpu_cc_src2, src2);
680 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
681 gen_op_sub_cc2(dst);
684 static inline void gen_op_subx_cc2(TCGv dst)
686 gen_cc_NZ_icc(cpu_cc_dst);
687 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
688 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
689 #ifdef TARGET_SPARC64
690 gen_cc_NZ_xcc(cpu_cc_dst);
691 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
692 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
693 #endif
694 tcg_gen_mov_tl(dst, cpu_cc_dst);
697 static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
699 tcg_gen_mov_tl(cpu_cc_src, src1);
700 tcg_gen_movi_tl(cpu_cc_src2, src2);
701 gen_mov_reg_C(cpu_tmp0, cpu_psr);
702 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
703 gen_cc_clear_icc();
704 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
705 #ifdef TARGET_SPARC64
706 gen_cc_clear_xcc();
707 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
708 #endif
709 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
710 gen_op_subx_cc2(dst);
713 static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
715 tcg_gen_mov_tl(cpu_cc_src, src1);
716 tcg_gen_mov_tl(cpu_cc_src2, src2);
717 gen_mov_reg_C(cpu_tmp0, cpu_psr);
718 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
719 gen_cc_clear_icc();
720 gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
721 #ifdef TARGET_SPARC64
722 gen_cc_clear_xcc();
723 gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
724 #endif
725 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
726 gen_op_subx_cc2(dst);
729 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
731 tcg_gen_mov_tl(cpu_cc_src, src1);
732 tcg_gen_mov_tl(cpu_cc_src2, src2);
733 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
734 gen_cc_clear_icc();
735 gen_cc_NZ_icc(cpu_cc_dst);
736 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
737 gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
738 gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
739 #ifdef TARGET_SPARC64
740 gen_cc_clear_xcc();
741 gen_cc_NZ_xcc(cpu_cc_dst);
742 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
743 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
744 #endif
745 tcg_gen_mov_tl(dst, cpu_cc_dst);
748 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
750 tcg_gen_mov_tl(cpu_cc_src, src1);
751 tcg_gen_mov_tl(cpu_cc_src2, src2);
752 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
753 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
754 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
755 gen_cc_clear_icc();
756 gen_cc_NZ_icc(cpu_cc_dst);
757 gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
758 #ifdef TARGET_SPARC64
759 gen_cc_clear_xcc();
760 gen_cc_NZ_xcc(cpu_cc_dst);
761 gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
762 gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
763 #endif
764 tcg_gen_mov_tl(dst, cpu_cc_dst);
767 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
769 TCGv r_temp;
770 int l1;
772 l1 = gen_new_label();
773 r_temp = tcg_temp_new();
775 /* old op:
776 if (!(env->y & 1))
777 T1 = 0;
779 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
780 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
781 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
782 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
783 tcg_gen_movi_tl(cpu_cc_src2, 0);
784 gen_set_label(l1);
786 // b2 = T0 & 1;
787 // env->y = (b2 << 31) | (env->y >> 1);
788 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
789 tcg_gen_shli_tl(r_temp, r_temp, 31);
790 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
791 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
792 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
793 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
795 // b1 = N ^ V;
796 gen_mov_reg_N(cpu_tmp0, cpu_psr);
797 gen_mov_reg_V(r_temp, cpu_psr);
798 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
799 tcg_temp_free(r_temp);
801 // T0 = (b1 << 31) | (T0 >> 1);
802 // src1 = T0;
803 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
804 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
805 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
807 /* do addition and update flags */
808 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
810 gen_cc_clear_icc();
811 gen_cc_NZ_icc(cpu_cc_dst);
812 gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
813 gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
814 tcg_gen_mov_tl(dst, cpu_cc_dst);
817 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
819 TCGv_i64 r_temp, r_temp2;
821 r_temp = tcg_temp_new_i64();
822 r_temp2 = tcg_temp_new_i64();
824 tcg_gen_extu_tl_i64(r_temp, src2);
825 tcg_gen_extu_tl_i64(r_temp2, src1);
826 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
828 tcg_gen_shri_i64(r_temp, r_temp2, 32);
829 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
830 tcg_temp_free_i64(r_temp);
831 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
832 #ifdef TARGET_SPARC64
833 tcg_gen_mov_i64(dst, r_temp2);
834 #else
835 tcg_gen_trunc_i64_tl(dst, r_temp2);
836 #endif
837 tcg_temp_free_i64(r_temp2);
840 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
842 TCGv_i64 r_temp, r_temp2;
844 r_temp = tcg_temp_new_i64();
845 r_temp2 = tcg_temp_new_i64();
847 tcg_gen_ext_tl_i64(r_temp, src2);
848 tcg_gen_ext_tl_i64(r_temp2, src1);
849 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
851 tcg_gen_shri_i64(r_temp, r_temp2, 32);
852 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
853 tcg_temp_free_i64(r_temp);
854 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
855 #ifdef TARGET_SPARC64
856 tcg_gen_mov_i64(dst, r_temp2);
857 #else
858 tcg_gen_trunc_i64_tl(dst, r_temp2);
859 #endif
860 tcg_temp_free_i64(r_temp2);
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO when divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l_nonzero;

    l_nonzero = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l_nonzero);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l_nonzero);
}

/* SDIVX: 64-bit signed division.  Division by zero traps; the one
   overflowing case INT64_MIN / -1 yields INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l_divide, l_done;

    l_divide = gen_new_label();
    l_done = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l_divide);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l_divide);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l_done);
    gen_set_label(l_divide);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l_done);
}
#endif
896 static inline void gen_op_div_cc(TCGv dst)
898 int l1;
900 tcg_gen_mov_tl(cpu_cc_dst, dst);
901 gen_cc_clear_icc();
902 gen_cc_NZ_icc(cpu_cc_dst);
903 l1 = gen_new_label();
904 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
905 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
906 gen_set_label(l1);
909 static inline void gen_op_logic_cc(TCGv dst)
911 tcg_gen_mov_tl(cpu_cc_dst, dst);
913 gen_cc_clear_icc();
914 gen_cc_NZ_icc(cpu_cc_dst);
915 #ifdef TARGET_SPARC64
916 gen_cc_clear_xcc();
917 gen_cc_NZ_xcc(cpu_cc_dst);
918 #endif
921 // 1
922 static inline void gen_op_eval_ba(TCGv dst)
924 tcg_gen_movi_tl(dst, 1);
927 // Z
928 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
930 gen_mov_reg_Z(dst, src);
933 // Z | (N ^ V)
934 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
936 gen_mov_reg_N(cpu_tmp0, src);
937 gen_mov_reg_V(dst, src);
938 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
939 gen_mov_reg_Z(cpu_tmp0, src);
940 tcg_gen_or_tl(dst, dst, cpu_tmp0);
943 // N ^ V
944 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
946 gen_mov_reg_V(cpu_tmp0, src);
947 gen_mov_reg_N(dst, src);
948 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
951 // C | Z
952 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
954 gen_mov_reg_Z(cpu_tmp0, src);
955 gen_mov_reg_C(dst, src);
956 tcg_gen_or_tl(dst, dst, cpu_tmp0);
959 // C
960 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
962 gen_mov_reg_C(dst, src);
965 // V
966 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
968 gen_mov_reg_V(dst, src);
971 // 0
972 static inline void gen_op_eval_bn(TCGv dst)
974 tcg_gen_movi_tl(dst, 0);
977 // N
978 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
980 gen_mov_reg_N(dst, src);
983 // !Z
984 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
986 gen_mov_reg_Z(dst, src);
987 tcg_gen_xori_tl(dst, dst, 0x1);
990 // !(Z | (N ^ V))
991 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
993 gen_mov_reg_N(cpu_tmp0, src);
994 gen_mov_reg_V(dst, src);
995 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
996 gen_mov_reg_Z(cpu_tmp0, src);
997 tcg_gen_or_tl(dst, dst, cpu_tmp0);
998 tcg_gen_xori_tl(dst, dst, 0x1);
1001 // !(N ^ V)
1002 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
1004 gen_mov_reg_V(cpu_tmp0, src);
1005 gen_mov_reg_N(dst, src);
1006 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1007 tcg_gen_xori_tl(dst, dst, 0x1);
1010 // !(C | Z)
1011 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
1013 gen_mov_reg_Z(cpu_tmp0, src);
1014 gen_mov_reg_C(dst, src);
1015 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1016 tcg_gen_xori_tl(dst, dst, 0x1);
1019 // !C
1020 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
1022 gen_mov_reg_C(dst, src);
1023 tcg_gen_xori_tl(dst, dst, 0x1);
1026 // !N
1027 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
1029 gen_mov_reg_N(dst, src);
1030 tcg_gen_xori_tl(dst, dst, 0x1);
1033 // !V
1034 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
1036 gen_mov_reg_V(dst, src);
1037 tcg_gen_xori_tl(dst, dst, 0x1);
1041 FPSR bit field FCC1 | FCC0:
1045 3 unordered
1047 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1048 unsigned int fcc_offset)
1050 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
1051 tcg_gen_andi_tl(reg, reg, 0x1);
1054 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1055 unsigned int fcc_offset)
1057 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
1058 tcg_gen_andi_tl(reg, reg, 0x1);
1061 // !0: FCC0 | FCC1
1062 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1063 unsigned int fcc_offset)
1065 gen_mov_reg_FCC0(dst, src, fcc_offset);
1066 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1070 // 1 or 2: FCC0 ^ FCC1
1071 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1072 unsigned int fcc_offset)
1074 gen_mov_reg_FCC0(dst, src, fcc_offset);
1075 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1076 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1079 // 1 or 3: FCC0
1080 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1081 unsigned int fcc_offset)
1083 gen_mov_reg_FCC0(dst, src, fcc_offset);
1086 // 1: FCC0 & !FCC1
1087 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1088 unsigned int fcc_offset)
1090 gen_mov_reg_FCC0(dst, src, fcc_offset);
1091 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1092 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1093 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1096 // 2 or 3: FCC1
1097 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1098 unsigned int fcc_offset)
1100 gen_mov_reg_FCC1(dst, src, fcc_offset);
1103 // 2: !FCC0 & FCC1
1104 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1105 unsigned int fcc_offset)
1107 gen_mov_reg_FCC0(dst, src, fcc_offset);
1108 tcg_gen_xori_tl(dst, dst, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1110 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1113 // 3: FCC0 & FCC1
1114 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1115 unsigned int fcc_offset)
1117 gen_mov_reg_FCC0(dst, src, fcc_offset);
1118 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1119 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1122 // 0: !(FCC0 | FCC1)
1123 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1124 unsigned int fcc_offset)
1126 gen_mov_reg_FCC0(dst, src, fcc_offset);
1127 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1128 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1129 tcg_gen_xori_tl(dst, dst, 0x1);
1132 // 0 or 3: !(FCC0 ^ FCC1)
1133 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1134 unsigned int fcc_offset)
1136 gen_mov_reg_FCC0(dst, src, fcc_offset);
1137 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1138 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1139 tcg_gen_xori_tl(dst, dst, 0x1);
1142 // 0 or 2: !FCC0
1143 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1144 unsigned int fcc_offset)
1146 gen_mov_reg_FCC0(dst, src, fcc_offset);
1147 tcg_gen_xori_tl(dst, dst, 0x1);
1150 // !1: !(FCC0 & !FCC1)
1151 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1152 unsigned int fcc_offset)
1154 gen_mov_reg_FCC0(dst, src, fcc_offset);
1155 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1156 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1157 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1158 tcg_gen_xori_tl(dst, dst, 0x1);
1161 // 0 or 1: !FCC1
1162 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1163 unsigned int fcc_offset)
1165 gen_mov_reg_FCC1(dst, src, fcc_offset);
1166 tcg_gen_xori_tl(dst, dst, 0x1);
1169 // !2: !(!FCC0 & FCC1)
1170 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1171 unsigned int fcc_offset)
1173 gen_mov_reg_FCC0(dst, src, fcc_offset);
1174 tcg_gen_xori_tl(dst, dst, 0x1);
1175 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1176 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1177 tcg_gen_xori_tl(dst, dst, 0x1);
1180 // !3: !(FCC0 & FCC1)
1181 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1182 unsigned int fcc_offset)
1184 gen_mov_reg_FCC0(dst, src, fcc_offset);
1185 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1186 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1187 tcg_gen_xori_tl(dst, dst, 0x1);
1190 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1191 target_ulong pc2, TCGv r_cond)
1193 int l1;
1195 l1 = gen_new_label();
1197 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1199 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1201 gen_set_label(l1);
1202 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1205 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1206 target_ulong pc2, TCGv r_cond)
1208 int l1;
1210 l1 = gen_new_label();
1212 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1214 gen_goto_tb(dc, 0, pc2, pc1);
1216 gen_set_label(l1);
1217 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1220 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1221 TCGv r_cond)
1223 int l1, l2;
1225 l1 = gen_new_label();
1226 l2 = gen_new_label();
1228 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1230 tcg_gen_movi_tl(cpu_npc, npc1);
1231 tcg_gen_br(l2);
1233 gen_set_label(l1);
1234 tcg_gen_movi_tl(cpu_npc, npc2);
1235 gen_set_label(l2);
1238 /* call this function before using the condition register as it may
1239 have been set for a jump */
1240 static inline void flush_cond(DisasContext *dc, TCGv cond)
1242 if (dc->npc == JUMP_PC) {
1243 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1244 dc->npc = DYNAMIC_PC;
1248 static inline void save_npc(DisasContext *dc, TCGv cond)
1250 if (dc->npc == JUMP_PC) {
1251 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1252 dc->npc = DYNAMIC_PC;
1253 } else if (dc->npc != DYNAMIC_PC) {
1254 tcg_gen_movi_tl(cpu_npc, dc->npc);
1258 static inline void save_state(DisasContext *dc, TCGv cond)
1260 tcg_gen_movi_tl(cpu_pc, dc->pc);
1261 save_npc(dc, cond);
1264 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1266 if (dc->npc == JUMP_PC) {
1267 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1268 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1269 dc->pc = DYNAMIC_PC;
1270 } else if (dc->npc == DYNAMIC_PC) {
1271 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1272 dc->pc = DYNAMIC_PC;
1273 } else {
1274 dc->pc = dc->npc;
1278 static inline void gen_op_next_insn(void)
1280 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1281 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1284 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
1286 TCGv_i32 r_src;
1288 #ifdef TARGET_SPARC64
1289 if (cc)
1290 r_src = cpu_xcc;
1291 else
1292 r_src = cpu_psr;
1293 #else
1294 r_src = cpu_psr;
1295 #endif
1296 switch (cond) {
1297 case 0x0:
1298 gen_op_eval_bn(r_dst);
1299 break;
1300 case 0x1:
1301 gen_op_eval_be(r_dst, r_src);
1302 break;
1303 case 0x2:
1304 gen_op_eval_ble(r_dst, r_src);
1305 break;
1306 case 0x3:
1307 gen_op_eval_bl(r_dst, r_src);
1308 break;
1309 case 0x4:
1310 gen_op_eval_bleu(r_dst, r_src);
1311 break;
1312 case 0x5:
1313 gen_op_eval_bcs(r_dst, r_src);
1314 break;
1315 case 0x6:
1316 gen_op_eval_bneg(r_dst, r_src);
1317 break;
1318 case 0x7:
1319 gen_op_eval_bvs(r_dst, r_src);
1320 break;
1321 case 0x8:
1322 gen_op_eval_ba(r_dst);
1323 break;
1324 case 0x9:
1325 gen_op_eval_bne(r_dst, r_src);
1326 break;
1327 case 0xa:
1328 gen_op_eval_bg(r_dst, r_src);
1329 break;
1330 case 0xb:
1331 gen_op_eval_bge(r_dst, r_src);
1332 break;
1333 case 0xc:
1334 gen_op_eval_bgu(r_dst, r_src);
1335 break;
1336 case 0xd:
1337 gen_op_eval_bcc(r_dst, r_src);
1338 break;
1339 case 0xe:
1340 gen_op_eval_bpos(r_dst, r_src);
1341 break;
1342 case 0xf:
1343 gen_op_eval_bvc(r_dst, r_src);
1344 break;
1348 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1350 unsigned int offset;
1352 switch (cc) {
1353 default:
1354 case 0x0:
1355 offset = 0;
1356 break;
1357 case 0x1:
1358 offset = 32 - 10;
1359 break;
1360 case 0x2:
1361 offset = 34 - 10;
1362 break;
1363 case 0x3:
1364 offset = 36 - 10;
1365 break;
1368 switch (cond) {
1369 case 0x0:
1370 gen_op_eval_bn(r_dst);
1371 break;
1372 case 0x1:
1373 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1374 break;
1375 case 0x2:
1376 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1377 break;
1378 case 0x3:
1379 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1380 break;
1381 case 0x4:
1382 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1383 break;
1384 case 0x5:
1385 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1386 break;
1387 case 0x6:
1388 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1389 break;
1390 case 0x7:
1391 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1392 break;
1393 case 0x8:
1394 gen_op_eval_ba(r_dst);
1395 break;
1396 case 0x9:
1397 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1398 break;
1399 case 0xa:
1400 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1401 break;
1402 case 0xb:
1403 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1404 break;
1405 case 0xc:
1406 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1407 break;
1408 case 0xd:
1409 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1410 break;
1411 case 0xe:
1412 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1413 break;
1414 case 0xf:
1415 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1416 break;
#ifdef TARGET_SPARC64
// Inverted logic: the table holds the complement of each BPr rcond so a
// single brcond can skip the "taken" store.  Entries 0 and 4 are reserved
// encodings and must not be reached.
static const int gen_tcg_cond_reg[8] = {
    -1,                 /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,                 /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* Evaluate a V9 register condition (BPr/MOVr rcond): set r_dst to 1 when
   r_src satisfies cond, otherwise 0. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    /* inverted condition: branch over the "taken" store */
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1445 /* XXX: potentially incorrect if dynamic npc */
1446 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1447 TCGv r_cond)
1449 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1450 target_ulong target = dc->pc + offset;
1452 if (cond == 0x0) {
1453 /* unconditional not taken */
1454 if (a) {
1455 dc->pc = dc->npc + 4;
1456 dc->npc = dc->pc + 4;
1457 } else {
1458 dc->pc = dc->npc;
1459 dc->npc = dc->pc + 4;
1461 } else if (cond == 0x8) {
1462 /* unconditional taken */
1463 if (a) {
1464 dc->pc = target;
1465 dc->npc = dc->pc + 4;
1466 } else {
1467 dc->pc = dc->npc;
1468 dc->npc = target;
1470 } else {
1471 flush_cond(dc, r_cond);
1472 gen_cond(r_cond, cc, cond);
1473 if (a) {
1474 gen_branch_a(dc, target, dc->npc, r_cond);
1475 dc->is_br = 1;
1476 } else {
1477 dc->pc = dc->npc;
1478 dc->jump_pc[0] = target;
1479 dc->jump_pc[1] = dc->npc + 4;
1480 dc->npc = JUMP_PC;
1485 /* XXX: potentially incorrect if dynamic npc */
1486 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1487 TCGv r_cond)
1489 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1490 target_ulong target = dc->pc + offset;
1492 if (cond == 0x0) {
1493 /* unconditional not taken */
1494 if (a) {
1495 dc->pc = dc->npc + 4;
1496 dc->npc = dc->pc + 4;
1497 } else {
1498 dc->pc = dc->npc;
1499 dc->npc = dc->pc + 4;
1501 } else if (cond == 0x8) {
1502 /* unconditional taken */
1503 if (a) {
1504 dc->pc = target;
1505 dc->npc = dc->pc + 4;
1506 } else {
1507 dc->pc = dc->npc;
1508 dc->npc = target;
1510 } else {
1511 flush_cond(dc, r_cond);
1512 gen_fcond(r_cond, cc, cond);
1513 if (a) {
1514 gen_branch_a(dc, target, dc->npc, r_cond);
1515 dc->is_br = 1;
1516 } else {
1517 dc->pc = dc->npc;
1518 dc->jump_pc[0] = target;
1519 dc->jump_pc[1] = dc->npc + 4;
1520 dc->npc = JUMP_PC;
1525 #ifdef TARGET_SPARC64
1526 /* XXX: potentially incorrect if dynamic npc */
1527 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1528 TCGv r_cond, TCGv r_reg)
1530 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1531 target_ulong target = dc->pc + offset;
1533 flush_cond(dc, r_cond);
1534 gen_cond_reg(r_cond, cond, r_reg);
1535 if (a) {
1536 gen_branch_a(dc, target, dc->npc, r_cond);
1537 dc->is_br = 1;
1538 } else {
1539 dc->pc = dc->npc;
1540 dc->jump_pc[0] = target;
1541 dc->jump_pc[1] = dc->npc + 4;
1542 dc->npc = JUMP_PC;
1546 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1548 switch (fccno) {
1549 case 0:
1550 gen_helper_fcmps(r_rs1, r_rs2);
1551 break;
1552 case 1:
1553 gen_helper_fcmps_fcc1(r_rs1, r_rs2);
1554 break;
1555 case 2:
1556 gen_helper_fcmps_fcc2(r_rs1, r_rs2);
1557 break;
1558 case 3:
1559 gen_helper_fcmps_fcc3(r_rs1, r_rs2);
1560 break;
1564 static inline void gen_op_fcmpd(int fccno)
1566 switch (fccno) {
1567 case 0:
1568 gen_helper_fcmpd();
1569 break;
1570 case 1:
1571 gen_helper_fcmpd_fcc1();
1572 break;
1573 case 2:
1574 gen_helper_fcmpd_fcc2();
1575 break;
1576 case 3:
1577 gen_helper_fcmpd_fcc3();
1578 break;
1582 static inline void gen_op_fcmpq(int fccno)
1584 switch (fccno) {
1585 case 0:
1586 gen_helper_fcmpq();
1587 break;
1588 case 1:
1589 gen_helper_fcmpq_fcc1();
1590 break;
1591 case 2:
1592 gen_helper_fcmpq_fcc2();
1593 break;
1594 case 3:
1595 gen_helper_fcmpq_fcc3();
1596 break;
1600 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1602 switch (fccno) {
1603 case 0:
1604 gen_helper_fcmpes(r_rs1, r_rs2);
1605 break;
1606 case 1:
1607 gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
1608 break;
1609 case 2:
1610 gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
1611 break;
1612 case 3:
1613 gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
1614 break;
1618 static inline void gen_op_fcmped(int fccno)
1620 switch (fccno) {
1621 case 0:
1622 gen_helper_fcmped();
1623 break;
1624 case 1:
1625 gen_helper_fcmped_fcc1();
1626 break;
1627 case 2:
1628 gen_helper_fcmped_fcc2();
1629 break;
1630 case 3:
1631 gen_helper_fcmped_fcc3();
1632 break;
1636 static inline void gen_op_fcmpeq(int fccno)
1638 switch (fccno) {
1639 case 0:
1640 gen_helper_fcmpeq();
1641 break;
1642 case 1:
1643 gen_helper_fcmpeq_fcc1();
1644 break;
1645 case 2:
1646 gen_helper_fcmpeq_fcc2();
1647 break;
1648 case 3:
1649 gen_helper_fcmpeq_fcc3();
1650 break;
1654 #else
1656 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1658 gen_helper_fcmps(r_rs1, r_rs2);
1661 static inline void gen_op_fcmpd(int fccno)
1663 gen_helper_fcmpd();
1666 static inline void gen_op_fcmpq(int fccno)
1668 gen_helper_fcmpq();
1671 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1673 gen_helper_fcmpes(r_rs1, r_rs2);
1676 static inline void gen_op_fcmped(int fccno)
1678 gen_helper_fcmped();
1681 static inline void gen_op_fcmpeq(int fccno)
1683 gen_helper_fcmpeq();
1685 #endif
1687 static inline void gen_op_fpexception_im(int fsr_flags)
1689 TCGv_i32 r_const;
1691 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1692 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1693 r_const = tcg_const_i32(TT_FP_EXCP);
1694 gen_helper_raise_exception(r_const);
1695 tcg_temp_free_i32(r_const);
1698 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1700 #if !defined(CONFIG_USER_ONLY)
1701 if (!dc->fpu_enabled) {
1702 TCGv_i32 r_const;
1704 save_state(dc, r_cond);
1705 r_const = tcg_const_i32(TT_NFPU_INSN);
1706 gen_helper_raise_exception(r_const);
1707 tcg_temp_free_i32(r_const);
1708 dc->is_br = 1;
1709 return 1;
1711 #endif
1712 return 0;
1715 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1717 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1720 static inline void gen_clear_float_exceptions(void)
1722 gen_helper_clear_float_exceptions();
1725 /* asi moves */
1726 #ifdef TARGET_SPARC64
1727 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1729 int asi;
1730 TCGv_i32 r_asi;
1732 if (IS_IMM) {
1733 r_asi = tcg_temp_new_i32();
1734 tcg_gen_mov_i32(r_asi, cpu_asi);
1735 } else {
1736 asi = GET_FIELD(insn, 19, 26);
1737 r_asi = tcg_const_i32(asi);
1739 return r_asi;
1742 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1743 int sign)
1745 TCGv_i32 r_asi, r_size, r_sign;
1747 r_asi = gen_get_asi(insn, addr);
1748 r_size = tcg_const_i32(size);
1749 r_sign = tcg_const_i32(sign);
1750 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1751 tcg_temp_free_i32(r_sign);
1752 tcg_temp_free_i32(r_size);
1753 tcg_temp_free_i32(r_asi);
1756 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1758 TCGv_i32 r_asi, r_size;
1760 r_asi = gen_get_asi(insn, addr);
1761 r_size = tcg_const_i32(size);
1762 gen_helper_st_asi(addr, src, r_asi, r_size);
1763 tcg_temp_free_i32(r_size);
1764 tcg_temp_free_i32(r_asi);
1767 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1769 TCGv_i32 r_asi, r_size, r_rd;
1771 r_asi = gen_get_asi(insn, addr);
1772 r_size = tcg_const_i32(size);
1773 r_rd = tcg_const_i32(rd);
1774 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1775 tcg_temp_free_i32(r_rd);
1776 tcg_temp_free_i32(r_size);
1777 tcg_temp_free_i32(r_asi);
1780 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1782 TCGv_i32 r_asi, r_size, r_rd;
1784 r_asi = gen_get_asi(insn, addr);
1785 r_size = tcg_const_i32(size);
1786 r_rd = tcg_const_i32(rd);
1787 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1788 tcg_temp_free_i32(r_rd);
1789 tcg_temp_free_i32(r_size);
1790 tcg_temp_free_i32(r_asi);
1793 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1795 TCGv_i32 r_asi, r_size, r_sign;
1797 r_asi = gen_get_asi(insn, addr);
1798 r_size = tcg_const_i32(4);
1799 r_sign = tcg_const_i32(0);
1800 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1801 tcg_temp_free_i32(r_sign);
1802 gen_helper_st_asi(addr, dst, r_asi, r_size);
1803 tcg_temp_free_i32(r_size);
1804 tcg_temp_free_i32(r_asi);
1805 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1808 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1810 TCGv_i32 r_asi, r_rd;
1812 r_asi = gen_get_asi(insn, addr);
1813 r_rd = tcg_const_i32(rd);
1814 gen_helper_ldda_asi(addr, r_asi, r_rd);
1815 tcg_temp_free_i32(r_rd);
1816 tcg_temp_free_i32(r_asi);
1819 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1821 TCGv_i32 r_asi, r_size;
1823 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1824 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1825 r_asi = gen_get_asi(insn, addr);
1826 r_size = tcg_const_i32(8);
1827 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1828 tcg_temp_free_i32(r_size);
1829 tcg_temp_free_i32(r_asi);
1832 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1833 int rd)
1835 TCGv r_val1;
1836 TCGv_i32 r_asi;
1838 r_val1 = tcg_temp_new();
1839 gen_movl_reg_TN(rd, r_val1);
1840 r_asi = gen_get_asi(insn, addr);
1841 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1842 tcg_temp_free_i32(r_asi);
1843 tcg_temp_free(r_val1);
1846 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1847 int rd)
1849 TCGv_i32 r_asi;
1851 gen_movl_reg_TN(rd, cpu_tmp64);
1852 r_asi = gen_get_asi(insn, addr);
1853 gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
1854 tcg_temp_free_i32(r_asi);
1857 #elif !defined(CONFIG_USER_ONLY)
1859 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1860 int sign)
1862 TCGv_i32 r_asi, r_size, r_sign;
1864 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1865 r_size = tcg_const_i32(size);
1866 r_sign = tcg_const_i32(sign);
1867 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1868 tcg_temp_free(r_sign);
1869 tcg_temp_free(r_size);
1870 tcg_temp_free(r_asi);
1871 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1874 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1876 TCGv_i32 r_asi, r_size;
1878 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1879 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1880 r_size = tcg_const_i32(size);
1881 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1882 tcg_temp_free(r_size);
1883 tcg_temp_free(r_asi);
1886 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1888 TCGv_i32 r_asi, r_size, r_sign;
1889 TCGv_i64 r_val;
1891 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1892 r_size = tcg_const_i32(4);
1893 r_sign = tcg_const_i32(0);
1894 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1895 tcg_temp_free(r_sign);
1896 r_val = tcg_temp_new_i64();
1897 tcg_gen_extu_tl_i64(r_val, dst);
1898 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1899 tcg_temp_free_i64(r_val);
1900 tcg_temp_free(r_size);
1901 tcg_temp_free(r_asi);
1902 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1905 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1907 TCGv_i32 r_asi, r_size, r_sign;
1909 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1910 r_size = tcg_const_i32(8);
1911 r_sign = tcg_const_i32(0);
1912 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1913 tcg_temp_free(r_sign);
1914 tcg_temp_free(r_size);
1915 tcg_temp_free(r_asi);
1916 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1917 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1918 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1919 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1920 gen_movl_TN_reg(rd, hi);
1923 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1925 TCGv_i32 r_asi, r_size;
1927 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1928 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1929 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1930 r_size = tcg_const_i32(8);
1931 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1932 tcg_temp_free(r_size);
1933 tcg_temp_free(r_asi);
1935 #endif
1937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1938 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1940 TCGv_i64 r_val;
1941 TCGv_i32 r_asi, r_size;
1943 gen_ld_asi(dst, addr, insn, 1, 0);
1945 r_val = tcg_const_i64(0xffULL);
1946 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1947 r_size = tcg_const_i32(1);
1948 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1949 tcg_temp_free_i32(r_size);
1950 tcg_temp_free_i32(r_asi);
1951 tcg_temp_free_i64(r_val);
1953 #endif
1955 static inline TCGv get_src1(unsigned int insn, TCGv def)
1957 TCGv r_rs1 = def;
1958 unsigned int rs1;
1960 rs1 = GET_FIELD(insn, 13, 17);
1961 if (rs1 == 0)
1962 r_rs1 = tcg_const_tl(0); // XXX how to free?
1963 else if (rs1 < 8)
1964 r_rs1 = cpu_gregs[rs1];
1965 else
1966 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1967 return r_rs1;
1970 static inline TCGv get_src2(unsigned int insn, TCGv def)
1972 TCGv r_rs2 = def;
1974 if (IS_IMM) { /* immediate */
1975 target_long simm;
1977 simm = GET_FIELDs(insn, 19, 31);
1978 r_rs2 = tcg_const_tl(simm); // XXX how to free?
1979 } else { /* register */
1980 unsigned int rs2;
1982 rs2 = GET_FIELD(insn, 27, 31);
1983 if (rs2 == 0)
1984 r_rs2 = tcg_const_tl(0); // XXX how to free?
1985 else if (rs2 < 8)
1986 r_rs2 = cpu_gregs[rs2];
1987 else
1988 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1990 return r_rs2;
/* Bail out of disas_sparc_insn when the CPU model lacks a feature; these
   rely on the illegal_insn / nfpu_insn labels in the enclosing function. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2000 /* before an instruction, dc->pc must be static */
2001 static void disas_sparc_insn(DisasContext * dc)
2003 unsigned int insn, opc, rs1, rs2, rd;
2004 target_long simm;
2006 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2007 tcg_gen_debug_insn_start(dc->pc);
2008 insn = ldl_code(dc->pc);
2009 opc = GET_FIELD(insn, 0, 1);
2011 rd = GET_FIELD(insn, 2, 6);
2013 cpu_src1 = tcg_temp_new(); // const
2014 cpu_src2 = tcg_temp_new(); // const
2016 switch (opc) {
2017 case 0: /* branches/sethi */
2019 unsigned int xop = GET_FIELD(insn, 7, 9);
2020 int32_t target;
2021 switch (xop) {
2022 #ifdef TARGET_SPARC64
2023 case 0x1: /* V9 BPcc */
2025 int cc;
2027 target = GET_FIELD_SP(insn, 0, 18);
2028 target = sign_extend(target, 18);
2029 target <<= 2;
2030 cc = GET_FIELD_SP(insn, 20, 21);
2031 if (cc == 0)
2032 do_branch(dc, target, insn, 0, cpu_cond);
2033 else if (cc == 2)
2034 do_branch(dc, target, insn, 1, cpu_cond);
2035 else
2036 goto illegal_insn;
2037 goto jmp_insn;
2039 case 0x3: /* V9 BPr */
2041 target = GET_FIELD_SP(insn, 0, 13) |
2042 (GET_FIELD_SP(insn, 20, 21) << 14);
2043 target = sign_extend(target, 16);
2044 target <<= 2;
2045 cpu_src1 = get_src1(insn, cpu_src1);
2046 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2047 goto jmp_insn;
2049 case 0x5: /* V9 FBPcc */
2051 int cc = GET_FIELD_SP(insn, 20, 21);
2052 if (gen_trap_ifnofpu(dc, cpu_cond))
2053 goto jmp_insn;
2054 target = GET_FIELD_SP(insn, 0, 18);
2055 target = sign_extend(target, 19);
2056 target <<= 2;
2057 do_fbranch(dc, target, insn, cc, cpu_cond);
2058 goto jmp_insn;
2060 #else
2061 case 0x7: /* CBN+x */
2063 goto ncp_insn;
2065 #endif
2066 case 0x2: /* BN+x */
2068 target = GET_FIELD(insn, 10, 31);
2069 target = sign_extend(target, 22);
2070 target <<= 2;
2071 do_branch(dc, target, insn, 0, cpu_cond);
2072 goto jmp_insn;
2074 case 0x6: /* FBN+x */
2076 if (gen_trap_ifnofpu(dc, cpu_cond))
2077 goto jmp_insn;
2078 target = GET_FIELD(insn, 10, 31);
2079 target = sign_extend(target, 22);
2080 target <<= 2;
2081 do_fbranch(dc, target, insn, 0, cpu_cond);
2082 goto jmp_insn;
2084 case 0x4: /* SETHI */
2085 if (rd) { // nop
2086 uint32_t value = GET_FIELD(insn, 10, 31);
2087 TCGv r_const;
2089 r_const = tcg_const_tl(value << 10);
2090 gen_movl_TN_reg(rd, r_const);
2091 tcg_temp_free(r_const);
2093 break;
2094 case 0x0: /* UNIMPL */
2095 default:
2096 goto illegal_insn;
2098 break;
2100 break;
2101 case 1: /*CALL*/
2103 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2104 TCGv r_const;
2106 r_const = tcg_const_tl(dc->pc);
2107 gen_movl_TN_reg(15, r_const);
2108 tcg_temp_free(r_const);
2109 target += dc->pc;
2110 gen_mov_pc_npc(dc, cpu_cond);
2111 dc->npc = target;
2113 goto jmp_insn;
2114 case 2: /* FPU & Logical Operations */
2116 unsigned int xop = GET_FIELD(insn, 7, 12);
2117 if (xop == 0x3a) { /* generate trap */
2118 int cond;
2120 cpu_src1 = get_src1(insn, cpu_src1);
2121 if (IS_IMM) {
2122 rs2 = GET_FIELD(insn, 25, 31);
2123 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2124 } else {
2125 rs2 = GET_FIELD(insn, 27, 31);
2126 if (rs2 != 0) {
2127 gen_movl_reg_TN(rs2, cpu_src2);
2128 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2129 } else
2130 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2132 cond = GET_FIELD(insn, 3, 6);
2133 if (cond == 0x8) {
2134 save_state(dc, cpu_cond);
2135 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2136 supervisor(dc))
2137 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2138 else
2139 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2140 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2141 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2142 gen_helper_raise_exception(cpu_tmp32);
2143 } else if (cond != 0) {
2144 TCGv r_cond = tcg_temp_new();
2145 int l1;
2146 #ifdef TARGET_SPARC64
2147 /* V9 icc/xcc */
2148 int cc = GET_FIELD_SP(insn, 11, 12);
2150 save_state(dc, cpu_cond);
2151 if (cc == 0)
2152 gen_cond(r_cond, 0, cond);
2153 else if (cc == 2)
2154 gen_cond(r_cond, 1, cond);
2155 else
2156 goto illegal_insn;
2157 #else
2158 save_state(dc, cpu_cond);
2159 gen_cond(r_cond, 0, cond);
2160 #endif
2161 l1 = gen_new_label();
2162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2164 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2165 supervisor(dc))
2166 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2167 else
2168 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2169 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2170 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2171 gen_helper_raise_exception(cpu_tmp32);
2173 gen_set_label(l1);
2174 tcg_temp_free(r_cond);
2176 gen_op_next_insn();
2177 tcg_gen_exit_tb(0);
2178 dc->is_br = 1;
2179 goto jmp_insn;
2180 } else if (xop == 0x28) {
2181 rs1 = GET_FIELD(insn, 13, 17);
2182 switch(rs1) {
2183 case 0: /* rdy */
2184 #ifndef TARGET_SPARC64
2185 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2186 manual, rdy on the microSPARC
2187 II */
2188 case 0x0f: /* stbar in the SPARCv8 manual,
2189 rdy on the microSPARC II */
2190 case 0x10 ... 0x1f: /* implementation-dependent in the
2191 SPARCv8 manual, rdy on the
2192 microSPARC II */
2193 #endif
2194 gen_movl_TN_reg(rd, cpu_y);
2195 break;
2196 #ifdef TARGET_SPARC64
2197 case 0x2: /* V9 rdccr */
2198 gen_helper_rdccr(cpu_dst);
2199 gen_movl_TN_reg(rd, cpu_dst);
2200 break;
2201 case 0x3: /* V9 rdasi */
2202 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2203 gen_movl_TN_reg(rd, cpu_dst);
2204 break;
2205 case 0x4: /* V9 rdtick */
2207 TCGv_ptr r_tickptr;
2209 r_tickptr = tcg_temp_new_ptr();
2210 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2211 offsetof(CPUState, tick));
2212 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2213 tcg_temp_free_ptr(r_tickptr);
2214 gen_movl_TN_reg(rd, cpu_dst);
2216 break;
2217 case 0x5: /* V9 rdpc */
2219 TCGv r_const;
2221 r_const = tcg_const_tl(dc->pc);
2222 gen_movl_TN_reg(rd, r_const);
2223 tcg_temp_free(r_const);
2225 break;
2226 case 0x6: /* V9 rdfprs */
2227 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2228 gen_movl_TN_reg(rd, cpu_dst);
2229 break;
2230 case 0xf: /* V9 membar */
2231 break; /* no effect */
2232 case 0x13: /* Graphics Status */
2233 if (gen_trap_ifnofpu(dc, cpu_cond))
2234 goto jmp_insn;
2235 gen_movl_TN_reg(rd, cpu_gsr);
2236 break;
2237 case 0x16: /* Softint */
2238 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2239 gen_movl_TN_reg(rd, cpu_dst);
2240 break;
2241 case 0x17: /* Tick compare */
2242 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2243 break;
2244 case 0x18: /* System tick */
2246 TCGv_ptr r_tickptr;
2248 r_tickptr = tcg_temp_new_ptr();
2249 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2250 offsetof(CPUState, stick));
2251 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2252 tcg_temp_free_ptr(r_tickptr);
2253 gen_movl_TN_reg(rd, cpu_dst);
2255 break;
2256 case 0x19: /* System tick compare */
2257 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2258 break;
2259 case 0x10: /* Performance Control */
2260 case 0x11: /* Performance Instrumentation Counter */
2261 case 0x12: /* Dispatch Control */
2262 case 0x14: /* Softint set, WO */
2263 case 0x15: /* Softint clear, WO */
2264 #endif
2265 default:
2266 goto illegal_insn;
2268 #if !defined(CONFIG_USER_ONLY)
2269 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2270 #ifndef TARGET_SPARC64
2271 if (!supervisor(dc))
2272 goto priv_insn;
2273 gen_helper_rdpsr(cpu_dst);
2274 #else
2275 CHECK_IU_FEATURE(dc, HYPV);
2276 if (!hypervisor(dc))
2277 goto priv_insn;
2278 rs1 = GET_FIELD(insn, 13, 17);
2279 switch (rs1) {
2280 case 0: // hpstate
2281 // gen_op_rdhpstate();
2282 break;
2283 case 1: // htstate
2284 // gen_op_rdhtstate();
2285 break;
2286 case 3: // hintp
2287 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2288 break;
2289 case 5: // htba
2290 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2291 break;
2292 case 6: // hver
2293 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2294 break;
2295 case 31: // hstick_cmpr
2296 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2297 break;
2298 default:
2299 goto illegal_insn;
2301 #endif
2302 gen_movl_TN_reg(rd, cpu_dst);
2303 break;
2304 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2305 if (!supervisor(dc))
2306 goto priv_insn;
2307 #ifdef TARGET_SPARC64
2308 rs1 = GET_FIELD(insn, 13, 17);
2309 switch (rs1) {
2310 case 0: // tpc
2312 TCGv_ptr r_tsptr;
2314 r_tsptr = tcg_temp_new_ptr();
2315 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2316 offsetof(CPUState, tsptr));
2317 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2318 offsetof(trap_state, tpc));
2319 tcg_temp_free_ptr(r_tsptr);
2321 break;
2322 case 1: // tnpc
2324 TCGv_ptr r_tsptr;
2326 r_tsptr = tcg_temp_new_ptr();
2327 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2328 offsetof(CPUState, tsptr));
2329 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2330 offsetof(trap_state, tnpc));
2331 tcg_temp_free_ptr(r_tsptr);
2333 break;
2334 case 2: // tstate
2336 TCGv_ptr r_tsptr;
2338 r_tsptr = tcg_temp_new_ptr();
2339 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2340 offsetof(CPUState, tsptr));
2341 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2342 offsetof(trap_state, tstate));
2343 tcg_temp_free_ptr(r_tsptr);
2345 break;
2346 case 3: // tt
2348 TCGv_ptr r_tsptr;
2350 r_tsptr = tcg_temp_new_ptr();
2351 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2352 offsetof(CPUState, tsptr));
2353 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2354 offsetof(trap_state, tt));
2355 tcg_temp_free_ptr(r_tsptr);
2356 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2358 break;
2359 case 4: // tick
2361 TCGv_ptr r_tickptr;
2363 r_tickptr = tcg_temp_new_ptr();
2364 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2365 offsetof(CPUState, tick));
2366 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2367 gen_movl_TN_reg(rd, cpu_tmp0);
2368 tcg_temp_free_ptr(r_tickptr);
2370 break;
2371 case 5: // tba
2372 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2373 break;
2374 case 6: // pstate
2375 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2376 offsetof(CPUSPARCState, pstate));
2377 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2378 break;
2379 case 7: // tl
2380 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2381 offsetof(CPUSPARCState, tl));
2382 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2383 break;
2384 case 8: // pil
2385 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2386 offsetof(CPUSPARCState, psrpil));
2387 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2388 break;
2389 case 9: // cwp
2390 gen_helper_rdcwp(cpu_tmp0);
2391 break;
2392 case 10: // cansave
2393 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2394 offsetof(CPUSPARCState, cansave));
2395 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2396 break;
2397 case 11: // canrestore
2398 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2399 offsetof(CPUSPARCState, canrestore));
2400 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2401 break;
2402 case 12: // cleanwin
2403 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2404 offsetof(CPUSPARCState, cleanwin));
2405 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2406 break;
2407 case 13: // otherwin
2408 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2409 offsetof(CPUSPARCState, otherwin));
2410 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2411 break;
2412 case 14: // wstate
2413 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2414 offsetof(CPUSPARCState, wstate));
2415 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2416 break;
2417 case 16: // UA2005 gl
2418 CHECK_IU_FEATURE(dc, GL);
2419 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2420 offsetof(CPUSPARCState, gl));
2421 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2422 break;
2423 case 26: // UA2005 strand status
2424 CHECK_IU_FEATURE(dc, HYPV);
2425 if (!hypervisor(dc))
2426 goto priv_insn;
2427 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2428 break;
2429 case 31: // ver
2430 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2431 break;
2432 case 15: // fq
2433 default:
2434 goto illegal_insn;
2436 #else
2437 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2438 #endif
2439 gen_movl_TN_reg(rd, cpu_tmp0);
2440 break;
2441 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2442 #ifdef TARGET_SPARC64
2443 save_state(dc, cpu_cond);
2444 gen_helper_flushw();
2445 #else
2446 if (!supervisor(dc))
2447 goto priv_insn;
2448 gen_movl_TN_reg(rd, cpu_tbr);
2449 #endif
2450 break;
2451 #endif
2452 } else if (xop == 0x34) { /* FPU Operations */
2453 if (gen_trap_ifnofpu(dc, cpu_cond))
2454 goto jmp_insn;
2455 gen_op_clear_ieee_excp_and_FTT();
2456 rs1 = GET_FIELD(insn, 13, 17);
2457 rs2 = GET_FIELD(insn, 27, 31);
2458 xop = GET_FIELD(insn, 18, 26);
2459 switch (xop) {
2460 case 0x1: /* fmovs */
2461 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2462 break;
2463 case 0x5: /* fnegs */
2464 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2465 break;
2466 case 0x9: /* fabss */
2467 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2468 break;
2469 case 0x29: /* fsqrts */
2470 CHECK_FPU_FEATURE(dc, FSQRT);
2471 gen_clear_float_exceptions();
2472 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2473 gen_helper_check_ieee_exceptions();
2474 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2475 break;
2476 case 0x2a: /* fsqrtd */
2477 CHECK_FPU_FEATURE(dc, FSQRT);
2478 gen_op_load_fpr_DT1(DFPREG(rs2));
2479 gen_clear_float_exceptions();
2480 gen_helper_fsqrtd();
2481 gen_helper_check_ieee_exceptions();
2482 gen_op_store_DT0_fpr(DFPREG(rd));
2483 break;
2484 case 0x2b: /* fsqrtq */
2485 CHECK_FPU_FEATURE(dc, FLOAT128);
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2488 gen_helper_fsqrtq();
2489 gen_helper_check_ieee_exceptions();
2490 gen_op_store_QT0_fpr(QFPREG(rd));
2491 break;
2492 case 0x41: /* fadds */
2493 gen_clear_float_exceptions();
2494 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2495 gen_helper_check_ieee_exceptions();
2496 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2497 break;
2498 case 0x42: /* faddd */
2499 gen_op_load_fpr_DT0(DFPREG(rs1));
2500 gen_op_load_fpr_DT1(DFPREG(rs2));
2501 gen_clear_float_exceptions();
2502 gen_helper_faddd();
2503 gen_helper_check_ieee_exceptions();
2504 gen_op_store_DT0_fpr(DFPREG(rd));
2505 break;
2506 case 0x43: /* faddq */
2507 CHECK_FPU_FEATURE(dc, FLOAT128);
2508 gen_op_load_fpr_QT0(QFPREG(rs1));
2509 gen_op_load_fpr_QT1(QFPREG(rs2));
2510 gen_clear_float_exceptions();
2511 gen_helper_faddq();
2512 gen_helper_check_ieee_exceptions();
2513 gen_op_store_QT0_fpr(QFPREG(rd));
2514 break;
2515 case 0x45: /* fsubs */
2516 gen_clear_float_exceptions();
2517 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2518 gen_helper_check_ieee_exceptions();
2519 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2520 break;
2521 case 0x46: /* fsubd */
2522 gen_op_load_fpr_DT0(DFPREG(rs1));
2523 gen_op_load_fpr_DT1(DFPREG(rs2));
2524 gen_clear_float_exceptions();
2525 gen_helper_fsubd();
2526 gen_helper_check_ieee_exceptions();
2527 gen_op_store_DT0_fpr(DFPREG(rd));
2528 break;
2529 case 0x47: /* fsubq */
2530 CHECK_FPU_FEATURE(dc, FLOAT128);
2531 gen_op_load_fpr_QT0(QFPREG(rs1));
2532 gen_op_load_fpr_QT1(QFPREG(rs2));
2533 gen_clear_float_exceptions();
2534 gen_helper_fsubq();
2535 gen_helper_check_ieee_exceptions();
2536 gen_op_store_QT0_fpr(QFPREG(rd));
2537 break;
2538 case 0x49: /* fmuls */
2539 CHECK_FPU_FEATURE(dc, FMUL);
2540 gen_clear_float_exceptions();
2541 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2542 gen_helper_check_ieee_exceptions();
2543 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2544 break;
2545 case 0x4a: /* fmuld */
2546 CHECK_FPU_FEATURE(dc, FMUL);
2547 gen_op_load_fpr_DT0(DFPREG(rs1));
2548 gen_op_load_fpr_DT1(DFPREG(rs2));
2549 gen_clear_float_exceptions();
2550 gen_helper_fmuld();
2551 gen_helper_check_ieee_exceptions();
2552 gen_op_store_DT0_fpr(DFPREG(rd));
2553 break;
2554 case 0x4b: /* fmulq */
2555 CHECK_FPU_FEATURE(dc, FLOAT128);
2556 CHECK_FPU_FEATURE(dc, FMUL);
2557 gen_op_load_fpr_QT0(QFPREG(rs1));
2558 gen_op_load_fpr_QT1(QFPREG(rs2));
2559 gen_clear_float_exceptions();
2560 gen_helper_fmulq();
2561 gen_helper_check_ieee_exceptions();
2562 gen_op_store_QT0_fpr(QFPREG(rd));
2563 break;
2564 case 0x4d: /* fdivs */
2565 gen_clear_float_exceptions();
2566 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2567 gen_helper_check_ieee_exceptions();
2568 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2569 break;
2570 case 0x4e: /* fdivd */
2571 gen_op_load_fpr_DT0(DFPREG(rs1));
2572 gen_op_load_fpr_DT1(DFPREG(rs2));
2573 gen_clear_float_exceptions();
2574 gen_helper_fdivd();
2575 gen_helper_check_ieee_exceptions();
2576 gen_op_store_DT0_fpr(DFPREG(rd));
2577 break;
2578 case 0x4f: /* fdivq */
2579 CHECK_FPU_FEATURE(dc, FLOAT128);
2580 gen_op_load_fpr_QT0(QFPREG(rs1));
2581 gen_op_load_fpr_QT1(QFPREG(rs2));
2582 gen_clear_float_exceptions();
2583 gen_helper_fdivq();
2584 gen_helper_check_ieee_exceptions();
2585 gen_op_store_QT0_fpr(QFPREG(rd));
2586 break;
2587 case 0x69: /* fsmuld */
2588 CHECK_FPU_FEATURE(dc, FSMULD);
2589 gen_clear_float_exceptions();
2590 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2591 gen_helper_check_ieee_exceptions();
2592 gen_op_store_DT0_fpr(DFPREG(rd));
2593 break;
2594 case 0x6e: /* fdmulq */
2595 CHECK_FPU_FEATURE(dc, FLOAT128);
2596 gen_op_load_fpr_DT0(DFPREG(rs1));
2597 gen_op_load_fpr_DT1(DFPREG(rs2));
2598 gen_clear_float_exceptions();
2599 gen_helper_fdmulq();
2600 gen_helper_check_ieee_exceptions();
2601 gen_op_store_QT0_fpr(QFPREG(rd));
2602 break;
2603 case 0xc4: /* fitos */
2604 gen_clear_float_exceptions();
2605 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2606 gen_helper_check_ieee_exceptions();
2607 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2608 break;
2609 case 0xc6: /* fdtos */
2610 gen_op_load_fpr_DT1(DFPREG(rs2));
2611 gen_clear_float_exceptions();
2612 gen_helper_fdtos(cpu_tmp32);
2613 gen_helper_check_ieee_exceptions();
2614 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2615 break;
2616 case 0xc7: /* fqtos */
2617 CHECK_FPU_FEATURE(dc, FLOAT128);
2618 gen_op_load_fpr_QT1(QFPREG(rs2));
2619 gen_clear_float_exceptions();
2620 gen_helper_fqtos(cpu_tmp32);
2621 gen_helper_check_ieee_exceptions();
2622 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2623 break;
2624 case 0xc8: /* fitod */
2625 gen_helper_fitod(cpu_fpr[rs2]);
2626 gen_op_store_DT0_fpr(DFPREG(rd));
2627 break;
2628 case 0xc9: /* fstod */
2629 gen_helper_fstod(cpu_fpr[rs2]);
2630 gen_op_store_DT0_fpr(DFPREG(rd));
2631 break;
2632 case 0xcb: /* fqtod */
2633 CHECK_FPU_FEATURE(dc, FLOAT128);
2634 gen_op_load_fpr_QT1(QFPREG(rs2));
2635 gen_clear_float_exceptions();
2636 gen_helper_fqtod();
2637 gen_helper_check_ieee_exceptions();
2638 gen_op_store_DT0_fpr(DFPREG(rd));
2639 break;
2640 case 0xcc: /* fitoq */
2641 CHECK_FPU_FEATURE(dc, FLOAT128);
2642 gen_helper_fitoq(cpu_fpr[rs2]);
2643 gen_op_store_QT0_fpr(QFPREG(rd));
2644 break;
2645 case 0xcd: /* fstoq */
2646 CHECK_FPU_FEATURE(dc, FLOAT128);
2647 gen_helper_fstoq(cpu_fpr[rs2]);
2648 gen_op_store_QT0_fpr(QFPREG(rd));
2649 break;
2650 case 0xce: /* fdtoq */
2651 CHECK_FPU_FEATURE(dc, FLOAT128);
2652 gen_op_load_fpr_DT1(DFPREG(rs2));
2653 gen_helper_fdtoq();
2654 gen_op_store_QT0_fpr(QFPREG(rd));
2655 break;
2656 case 0xd1: /* fstoi */
2657 gen_clear_float_exceptions();
2658 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2659 gen_helper_check_ieee_exceptions();
2660 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2661 break;
2662 case 0xd2: /* fdtoi */
2663 gen_op_load_fpr_DT1(DFPREG(rs2));
2664 gen_clear_float_exceptions();
2665 gen_helper_fdtoi(cpu_tmp32);
2666 gen_helper_check_ieee_exceptions();
2667 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2668 break;
2669 case 0xd3: /* fqtoi */
2670 CHECK_FPU_FEATURE(dc, FLOAT128);
2671 gen_op_load_fpr_QT1(QFPREG(rs2));
2672 gen_clear_float_exceptions();
2673 gen_helper_fqtoi(cpu_tmp32);
2674 gen_helper_check_ieee_exceptions();
2675 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2676 break;
2677 #ifdef TARGET_SPARC64
2678 case 0x2: /* V9 fmovd */
2679 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2680 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2681 cpu_fpr[DFPREG(rs2) + 1]);
2682 break;
2683 case 0x3: /* V9 fmovq */
2684 CHECK_FPU_FEATURE(dc, FLOAT128);
2685 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2686 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2687 cpu_fpr[QFPREG(rs2) + 1]);
2688 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2689 cpu_fpr[QFPREG(rs2) + 2]);
2690 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2691 cpu_fpr[QFPREG(rs2) + 3]);
2692 break;
2693 case 0x6: /* V9 fnegd */
2694 gen_op_load_fpr_DT1(DFPREG(rs2));
2695 gen_helper_fnegd();
2696 gen_op_store_DT0_fpr(DFPREG(rd));
2697 break;
2698 case 0x7: /* V9 fnegq */
2699 CHECK_FPU_FEATURE(dc, FLOAT128);
2700 gen_op_load_fpr_QT1(QFPREG(rs2));
2701 gen_helper_fnegq();
2702 gen_op_store_QT0_fpr(QFPREG(rd));
2703 break;
2704 case 0xa: /* V9 fabsd */
2705 gen_op_load_fpr_DT1(DFPREG(rs2));
2706 gen_helper_fabsd();
2707 gen_op_store_DT0_fpr(DFPREG(rd));
2708 break;
2709 case 0xb: /* V9 fabsq */
2710 CHECK_FPU_FEATURE(dc, FLOAT128);
2711 gen_op_load_fpr_QT1(QFPREG(rs2));
2712 gen_helper_fabsq();
2713 gen_op_store_QT0_fpr(QFPREG(rd));
2714 break;
2715 case 0x81: /* V9 fstox */
2716 gen_clear_float_exceptions();
2717 gen_helper_fstox(cpu_fpr[rs2]);
2718 gen_helper_check_ieee_exceptions();
2719 gen_op_store_DT0_fpr(DFPREG(rd));
2720 break;
2721 case 0x82: /* V9 fdtox */
2722 gen_op_load_fpr_DT1(DFPREG(rs2));
2723 gen_clear_float_exceptions();
2724 gen_helper_fdtox();
2725 gen_helper_check_ieee_exceptions();
2726 gen_op_store_DT0_fpr(DFPREG(rd));
2727 break;
2728 case 0x83: /* V9 fqtox */
2729 CHECK_FPU_FEATURE(dc, FLOAT128);
2730 gen_op_load_fpr_QT1(QFPREG(rs2));
2731 gen_clear_float_exceptions();
2732 gen_helper_fqtox();
2733 gen_helper_check_ieee_exceptions();
2734 gen_op_store_DT0_fpr(DFPREG(rd));
2735 break;
2736 case 0x84: /* V9 fxtos */
2737 gen_op_load_fpr_DT1(DFPREG(rs2));
2738 gen_clear_float_exceptions();
2739 gen_helper_fxtos(cpu_tmp32);
2740 gen_helper_check_ieee_exceptions();
2741 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2742 break;
2743 case 0x88: /* V9 fxtod */
2744 gen_op_load_fpr_DT1(DFPREG(rs2));
2745 gen_clear_float_exceptions();
2746 gen_helper_fxtod();
2747 gen_helper_check_ieee_exceptions();
2748 gen_op_store_DT0_fpr(DFPREG(rd));
2749 break;
2750 case 0x8c: /* V9 fxtoq */
2751 CHECK_FPU_FEATURE(dc, FLOAT128);
2752 gen_op_load_fpr_DT1(DFPREG(rs2));
2753 gen_clear_float_exceptions();
2754 gen_helper_fxtoq();
2755 gen_helper_check_ieee_exceptions();
2756 gen_op_store_QT0_fpr(QFPREG(rd));
2757 break;
2758 #endif
2759 default:
2760 goto illegal_insn;
2762 } else if (xop == 0x35) { /* FPU Operations */
2763 #ifdef TARGET_SPARC64
2764 int cond;
2765 #endif
2766 if (gen_trap_ifnofpu(dc, cpu_cond))
2767 goto jmp_insn;
2768 gen_op_clear_ieee_excp_and_FTT();
2769 rs1 = GET_FIELD(insn, 13, 17);
2770 rs2 = GET_FIELD(insn, 27, 31);
2771 xop = GET_FIELD(insn, 18, 26);
2772 #ifdef TARGET_SPARC64
2773 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2774 int l1;
2776 l1 = gen_new_label();
2777 cond = GET_FIELD_SP(insn, 14, 17);
2778 cpu_src1 = get_src1(insn, cpu_src1);
2779 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2780 0, l1);
2781 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2782 gen_set_label(l1);
2783 break;
2784 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2785 int l1;
2787 l1 = gen_new_label();
2788 cond = GET_FIELD_SP(insn, 14, 17);
2789 cpu_src1 = get_src1(insn, cpu_src1);
2790 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2791 0, l1);
2792 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2793 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2794 gen_set_label(l1);
2795 break;
2796 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2797 int l1;
2799 CHECK_FPU_FEATURE(dc, FLOAT128);
2800 l1 = gen_new_label();
2801 cond = GET_FIELD_SP(insn, 14, 17);
2802 cpu_src1 = get_src1(insn, cpu_src1);
2803 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2804 0, l1);
2805 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2806 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2807 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2808 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2809 gen_set_label(l1);
2810 break;
2812 #endif
2813 switch (xop) {
2814 #ifdef TARGET_SPARC64
2815 #define FMOVSCC(fcc) \
2817 TCGv r_cond; \
2818 int l1; \
2820 l1 = gen_new_label(); \
2821 r_cond = tcg_temp_new(); \
2822 cond = GET_FIELD_SP(insn, 14, 17); \
2823 gen_fcond(r_cond, fcc, cond); \
2824 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2825 0, l1); \
2826 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2827 gen_set_label(l1); \
2828 tcg_temp_free(r_cond); \
2830 #define FMOVDCC(fcc) \
2832 TCGv r_cond; \
2833 int l1; \
2835 l1 = gen_new_label(); \
2836 r_cond = tcg_temp_new(); \
2837 cond = GET_FIELD_SP(insn, 14, 17); \
2838 gen_fcond(r_cond, fcc, cond); \
2839 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2840 0, l1); \
2841 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2842 cpu_fpr[DFPREG(rs2)]); \
2843 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2844 cpu_fpr[DFPREG(rs2) + 1]); \
2845 gen_set_label(l1); \
2846 tcg_temp_free(r_cond); \
2848 #define FMOVQCC(fcc) \
2850 TCGv r_cond; \
2851 int l1; \
2853 l1 = gen_new_label(); \
2854 r_cond = tcg_temp_new(); \
2855 cond = GET_FIELD_SP(insn, 14, 17); \
2856 gen_fcond(r_cond, fcc, cond); \
2857 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2858 0, l1); \
2859 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2860 cpu_fpr[QFPREG(rs2)]); \
2861 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2862 cpu_fpr[QFPREG(rs2) + 1]); \
2863 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2864 cpu_fpr[QFPREG(rs2) + 2]); \
2865 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2866 cpu_fpr[QFPREG(rs2) + 3]); \
2867 gen_set_label(l1); \
2868 tcg_temp_free(r_cond); \
2870 case 0x001: /* V9 fmovscc %fcc0 */
2871 FMOVSCC(0);
2872 break;
2873 case 0x002: /* V9 fmovdcc %fcc0 */
2874 FMOVDCC(0);
2875 break;
2876 case 0x003: /* V9 fmovqcc %fcc0 */
2877 CHECK_FPU_FEATURE(dc, FLOAT128);
2878 FMOVQCC(0);
2879 break;
2880 case 0x041: /* V9 fmovscc %fcc1 */
2881 FMOVSCC(1);
2882 break;
2883 case 0x042: /* V9 fmovdcc %fcc1 */
2884 FMOVDCC(1);
2885 break;
2886 case 0x043: /* V9 fmovqcc %fcc1 */
2887 CHECK_FPU_FEATURE(dc, FLOAT128);
2888 FMOVQCC(1);
2889 break;
2890 case 0x081: /* V9 fmovscc %fcc2 */
2891 FMOVSCC(2);
2892 break;
2893 case 0x082: /* V9 fmovdcc %fcc2 */
2894 FMOVDCC(2);
2895 break;
2896 case 0x083: /* V9 fmovqcc %fcc2 */
2897 CHECK_FPU_FEATURE(dc, FLOAT128);
2898 FMOVQCC(2);
2899 break;
2900 case 0x0c1: /* V9 fmovscc %fcc3 */
2901 FMOVSCC(3);
2902 break;
2903 case 0x0c2: /* V9 fmovdcc %fcc3 */
2904 FMOVDCC(3);
2905 break;
2906 case 0x0c3: /* V9 fmovqcc %fcc3 */
2907 CHECK_FPU_FEATURE(dc, FLOAT128);
2908 FMOVQCC(3);
2909 break;
2910 #undef FMOVSCC
2911 #undef FMOVDCC
2912 #undef FMOVQCC
2913 #define FMOVSCC(icc) \
2915 TCGv r_cond; \
2916 int l1; \
2918 l1 = gen_new_label(); \
2919 r_cond = tcg_temp_new(); \
2920 cond = GET_FIELD_SP(insn, 14, 17); \
2921 gen_cond(r_cond, icc, cond); \
2922 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2923 0, l1); \
2924 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2925 gen_set_label(l1); \
2926 tcg_temp_free(r_cond); \
2928 #define FMOVDCC(icc) \
2930 TCGv r_cond; \
2931 int l1; \
2933 l1 = gen_new_label(); \
2934 r_cond = tcg_temp_new(); \
2935 cond = GET_FIELD_SP(insn, 14, 17); \
2936 gen_cond(r_cond, icc, cond); \
2937 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2938 0, l1); \
2939 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2940 cpu_fpr[DFPREG(rs2)]); \
2941 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2942 cpu_fpr[DFPREG(rs2) + 1]); \
2943 gen_set_label(l1); \
2944 tcg_temp_free(r_cond); \
2946 #define FMOVQCC(icc) \
2948 TCGv r_cond; \
2949 int l1; \
2951 l1 = gen_new_label(); \
2952 r_cond = tcg_temp_new(); \
2953 cond = GET_FIELD_SP(insn, 14, 17); \
2954 gen_cond(r_cond, icc, cond); \
2955 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2956 0, l1); \
2957 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2958 cpu_fpr[QFPREG(rs2)]); \
2959 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2960 cpu_fpr[QFPREG(rs2) + 1]); \
2961 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2962 cpu_fpr[QFPREG(rs2) + 2]); \
2963 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2964 cpu_fpr[QFPREG(rs2) + 3]); \
2965 gen_set_label(l1); \
2966 tcg_temp_free(r_cond); \
2969 case 0x101: /* V9 fmovscc %icc */
2970 FMOVSCC(0);
2971 break;
2972 case 0x102: /* V9 fmovdcc %icc */
2973 FMOVDCC(0);
2974 case 0x103: /* V9 fmovqcc %icc */
2975 CHECK_FPU_FEATURE(dc, FLOAT128);
2976 FMOVQCC(0);
2977 break;
2978 case 0x181: /* V9 fmovscc %xcc */
2979 FMOVSCC(1);
2980 break;
2981 case 0x182: /* V9 fmovdcc %xcc */
2982 FMOVDCC(1);
2983 break;
2984 case 0x183: /* V9 fmovqcc %xcc */
2985 CHECK_FPU_FEATURE(dc, FLOAT128);
2986 FMOVQCC(1);
2987 break;
2988 #undef FMOVSCC
2989 #undef FMOVDCC
2990 #undef FMOVQCC
2991 #endif
2992 case 0x51: /* fcmps, V9 %fcc */
2993 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2994 break;
2995 case 0x52: /* fcmpd, V9 %fcc */
2996 gen_op_load_fpr_DT0(DFPREG(rs1));
2997 gen_op_load_fpr_DT1(DFPREG(rs2));
2998 gen_op_fcmpd(rd & 3);
2999 break;
3000 case 0x53: /* fcmpq, V9 %fcc */
3001 CHECK_FPU_FEATURE(dc, FLOAT128);
3002 gen_op_load_fpr_QT0(QFPREG(rs1));
3003 gen_op_load_fpr_QT1(QFPREG(rs2));
3004 gen_op_fcmpq(rd & 3);
3005 break;
3006 case 0x55: /* fcmpes, V9 %fcc */
3007 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3008 break;
3009 case 0x56: /* fcmped, V9 %fcc */
3010 gen_op_load_fpr_DT0(DFPREG(rs1));
3011 gen_op_load_fpr_DT1(DFPREG(rs2));
3012 gen_op_fcmped(rd & 3);
3013 break;
3014 case 0x57: /* fcmpeq, V9 %fcc */
3015 CHECK_FPU_FEATURE(dc, FLOAT128);
3016 gen_op_load_fpr_QT0(QFPREG(rs1));
3017 gen_op_load_fpr_QT1(QFPREG(rs2));
3018 gen_op_fcmpeq(rd & 3);
3019 break;
3020 default:
3021 goto illegal_insn;
3023 } else if (xop == 0x2) {
3024 // clr/mov shortcut
3026 rs1 = GET_FIELD(insn, 13, 17);
3027 if (rs1 == 0) {
3028 // or %g0, x, y -> mov T0, x; mov y, T0
3029 if (IS_IMM) { /* immediate */
3030 TCGv r_const;
3032 simm = GET_FIELDs(insn, 19, 31);
3033 r_const = tcg_const_tl(simm);
3034 gen_movl_TN_reg(rd, r_const);
3035 tcg_temp_free(r_const);
3036 } else { /* register */
3037 rs2 = GET_FIELD(insn, 27, 31);
3038 gen_movl_reg_TN(rs2, cpu_dst);
3039 gen_movl_TN_reg(rd, cpu_dst);
3041 } else {
3042 cpu_src1 = get_src1(insn, cpu_src1);
3043 if (IS_IMM) { /* immediate */
3044 simm = GET_FIELDs(insn, 19, 31);
3045 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3046 gen_movl_TN_reg(rd, cpu_dst);
3047 } else { /* register */
3048 // or x, %g0, y -> mov T1, x; mov y, T1
3049 rs2 = GET_FIELD(insn, 27, 31);
3050 if (rs2 != 0) {
3051 gen_movl_reg_TN(rs2, cpu_src2);
3052 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3053 gen_movl_TN_reg(rd, cpu_dst);
3054 } else
3055 gen_movl_TN_reg(rd, cpu_src1);
3058 #ifdef TARGET_SPARC64
3059 } else if (xop == 0x25) { /* sll, V9 sllx */
3060 cpu_src1 = get_src1(insn, cpu_src1);
3061 if (IS_IMM) { /* immediate */
3062 simm = GET_FIELDs(insn, 20, 31);
3063 if (insn & (1 << 12)) {
3064 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3065 } else {
3066 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3068 } else { /* register */
3069 rs2 = GET_FIELD(insn, 27, 31);
3070 gen_movl_reg_TN(rs2, cpu_src2);
3071 if (insn & (1 << 12)) {
3072 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3073 } else {
3074 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3076 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3078 gen_movl_TN_reg(rd, cpu_dst);
3079 } else if (xop == 0x26) { /* srl, V9 srlx */
3080 cpu_src1 = get_src1(insn, cpu_src1);
3081 if (IS_IMM) { /* immediate */
3082 simm = GET_FIELDs(insn, 20, 31);
3083 if (insn & (1 << 12)) {
3084 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3085 } else {
3086 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3087 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3089 } else { /* register */
3090 rs2 = GET_FIELD(insn, 27, 31);
3091 gen_movl_reg_TN(rs2, cpu_src2);
3092 if (insn & (1 << 12)) {
3093 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3094 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3095 } else {
3096 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3097 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3098 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3101 gen_movl_TN_reg(rd, cpu_dst);
3102 } else if (xop == 0x27) { /* sra, V9 srax */
3103 cpu_src1 = get_src1(insn, cpu_src1);
3104 if (IS_IMM) { /* immediate */
3105 simm = GET_FIELDs(insn, 20, 31);
3106 if (insn & (1 << 12)) {
3107 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3108 } else {
3109 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3110 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3111 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3113 } else { /* register */
3114 rs2 = GET_FIELD(insn, 27, 31);
3115 gen_movl_reg_TN(rs2, cpu_src2);
3116 if (insn & (1 << 12)) {
3117 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3118 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3119 } else {
3120 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3121 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3122 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3123 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3126 gen_movl_TN_reg(rd, cpu_dst);
3127 #endif
3128 } else if (xop < 0x36) {
3129 if (xop < 0x20) {
3130 cpu_src1 = get_src1(insn, cpu_src1);
3131 cpu_src2 = get_src2(insn, cpu_src2);
3132 switch (xop & ~0x10) {
3133 case 0x0: /* add */
3134 if (IS_IMM) {
3135 simm = GET_FIELDs(insn, 19, 31);
3136 if (xop & 0x10) {
3137 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3138 } else {
3139 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3141 } else {
3142 if (xop & 0x10) {
3143 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3144 } else {
3145 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3148 break;
3149 case 0x1: /* and */
3150 if (IS_IMM) {
3151 simm = GET_FIELDs(insn, 19, 31);
3152 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3153 } else {
3154 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3156 if (xop & 0x10) {
3157 gen_op_logic_cc(cpu_dst);
3159 break;
3160 case 0x2: /* or */
3161 if (IS_IMM) {
3162 simm = GET_FIELDs(insn, 19, 31);
3163 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3164 } else {
3165 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3167 if (xop & 0x10)
3168 gen_op_logic_cc(cpu_dst);
3169 break;
3170 case 0x3: /* xor */
3171 if (IS_IMM) {
3172 simm = GET_FIELDs(insn, 19, 31);
3173 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3174 } else {
3175 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3177 if (xop & 0x10)
3178 gen_op_logic_cc(cpu_dst);
3179 break;
3180 case 0x4: /* sub */
3181 if (IS_IMM) {
3182 simm = GET_FIELDs(insn, 19, 31);
3183 if (xop & 0x10) {
3184 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3185 } else {
3186 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3188 } else {
3189 if (xop & 0x10) {
3190 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3191 } else {
3192 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3195 break;
3196 case 0x5: /* andn */
3197 if (IS_IMM) {
3198 simm = GET_FIELDs(insn, 19, 31);
3199 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3200 } else {
3201 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3203 if (xop & 0x10)
3204 gen_op_logic_cc(cpu_dst);
3205 break;
3206 case 0x6: /* orn */
3207 if (IS_IMM) {
3208 simm = GET_FIELDs(insn, 19, 31);
3209 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3210 } else {
3211 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3213 if (xop & 0x10)
3214 gen_op_logic_cc(cpu_dst);
3215 break;
3216 case 0x7: /* xorn */
3217 if (IS_IMM) {
3218 simm = GET_FIELDs(insn, 19, 31);
3219 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3220 } else {
3221 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3222 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3224 if (xop & 0x10)
3225 gen_op_logic_cc(cpu_dst);
3226 break;
3227 case 0x8: /* addx, V9 addc */
3228 if (IS_IMM) {
3229 simm = GET_FIELDs(insn, 19, 31);
3230 if (xop & 0x10)
3231 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3232 else {
3233 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3234 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3235 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3237 } else {
3238 if (xop & 0x10)
3239 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3240 else {
3241 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3242 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3243 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3246 break;
3247 #ifdef TARGET_SPARC64
3248 case 0x9: /* V9 mulx */
3249 if (IS_IMM) {
3250 simm = GET_FIELDs(insn, 19, 31);
3251 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3252 } else {
3253 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3255 break;
3256 #endif
3257 case 0xa: /* umul */
3258 CHECK_IU_FEATURE(dc, MUL);
3259 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3260 if (xop & 0x10)
3261 gen_op_logic_cc(cpu_dst);
3262 break;
3263 case 0xb: /* smul */
3264 CHECK_IU_FEATURE(dc, MUL);
3265 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3266 if (xop & 0x10)
3267 gen_op_logic_cc(cpu_dst);
3268 break;
3269 case 0xc: /* subx, V9 subc */
3270 if (IS_IMM) {
3271 simm = GET_FIELDs(insn, 19, 31);
3272 if (xop & 0x10) {
3273 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3274 } else {
3275 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3276 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3277 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3279 } else {
3280 if (xop & 0x10) {
3281 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3282 } else {
3283 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3284 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3285 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3288 break;
3289 #ifdef TARGET_SPARC64
3290 case 0xd: /* V9 udivx */
3291 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3292 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3293 gen_trap_ifdivzero_tl(cpu_cc_src2);
3294 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3295 break;
3296 #endif
3297 case 0xe: /* udiv */
3298 CHECK_IU_FEATURE(dc, DIV);
3299 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3300 if (xop & 0x10)
3301 gen_op_div_cc(cpu_dst);
3302 break;
3303 case 0xf: /* sdiv */
3304 CHECK_IU_FEATURE(dc, DIV);
3305 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3306 if (xop & 0x10)
3307 gen_op_div_cc(cpu_dst);
3308 break;
3309 default:
3310 goto illegal_insn;
3312 gen_movl_TN_reg(rd, cpu_dst);
3313 } else {
3314 cpu_src1 = get_src1(insn, cpu_src1);
3315 cpu_src2 = get_src2(insn, cpu_src2);
3316 switch (xop) {
3317 case 0x20: /* taddcc */
3318 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3319 gen_movl_TN_reg(rd, cpu_dst);
3320 break;
3321 case 0x21: /* tsubcc */
3322 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3323 gen_movl_TN_reg(rd, cpu_dst);
3324 break;
3325 case 0x22: /* taddcctv */
3326 save_state(dc, cpu_cond);
3327 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3328 gen_movl_TN_reg(rd, cpu_dst);
3329 break;
3330 case 0x23: /* tsubcctv */
3331 save_state(dc, cpu_cond);
3332 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3333 gen_movl_TN_reg(rd, cpu_dst);
3334 break;
3335 case 0x24: /* mulscc */
3336 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3337 gen_movl_TN_reg(rd, cpu_dst);
3338 break;
3339 #ifndef TARGET_SPARC64
3340 case 0x25: /* sll */
3341 if (IS_IMM) { /* immediate */
3342 simm = GET_FIELDs(insn, 20, 31);
3343 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3344 } else { /* register */
3345 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3346 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3348 gen_movl_TN_reg(rd, cpu_dst);
3349 break;
3350 case 0x26: /* srl */
3351 if (IS_IMM) { /* immediate */
3352 simm = GET_FIELDs(insn, 20, 31);
3353 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3354 } else { /* register */
3355 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3356 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3358 gen_movl_TN_reg(rd, cpu_dst);
3359 break;
3360 case 0x27: /* sra */
3361 if (IS_IMM) { /* immediate */
3362 simm = GET_FIELDs(insn, 20, 31);
3363 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3364 } else { /* register */
3365 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3366 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3368 gen_movl_TN_reg(rd, cpu_dst);
3369 break;
3370 #endif
3371 case 0x30:
3373 switch(rd) {
3374 case 0: /* wry */
3375 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3376 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3377 break;
3378 #ifndef TARGET_SPARC64
3379 case 0x01 ... 0x0f: /* undefined in the
3380 SPARCv8 manual, nop
3381 on the microSPARC
3382 II */
3383 case 0x10 ... 0x1f: /* implementation-dependent
3384 in the SPARCv8
3385 manual, nop on the
3386 microSPARC II */
3387 break;
3388 #else
3389 case 0x2: /* V9 wrccr */
3390 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3391 gen_helper_wrccr(cpu_dst);
3392 break;
3393 case 0x3: /* V9 wrasi */
3394 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3395 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3396 break;
3397 case 0x6: /* V9 wrfprs */
3398 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3399 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3400 save_state(dc, cpu_cond);
3401 gen_op_next_insn();
3402 tcg_gen_exit_tb(0);
3403 dc->is_br = 1;
3404 break;
3405 case 0xf: /* V9 sir, nop if user */
3406 #if !defined(CONFIG_USER_ONLY)
3407 if (supervisor(dc))
3408 ; // XXX
3409 #endif
3410 break;
3411 case 0x13: /* Graphics Status */
3412 if (gen_trap_ifnofpu(dc, cpu_cond))
3413 goto jmp_insn;
3414 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3415 break;
3416 case 0x14: /* Softint set */
3417 if (!supervisor(dc))
3418 goto illegal_insn;
3419 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3420 gen_helper_set_softint(cpu_tmp64);
3421 break;
3422 case 0x15: /* Softint clear */
3423 if (!supervisor(dc))
3424 goto illegal_insn;
3425 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3426 gen_helper_clear_softint(cpu_tmp64);
3427 break;
3428 case 0x16: /* Softint write */
3429 if (!supervisor(dc))
3430 goto illegal_insn;
3431 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3432 gen_helper_write_softint(cpu_tmp64);
3433 break;
3434 case 0x17: /* Tick compare */
3435 #if !defined(CONFIG_USER_ONLY)
3436 if (!supervisor(dc))
3437 goto illegal_insn;
3438 #endif
3440 TCGv_ptr r_tickptr;
3442 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3443 cpu_src2);
3444 r_tickptr = tcg_temp_new_ptr();
3445 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3446 offsetof(CPUState, tick));
3447 gen_helper_tick_set_limit(r_tickptr,
3448 cpu_tick_cmpr);
3449 tcg_temp_free_ptr(r_tickptr);
3451 break;
3452 case 0x18: /* System tick */
3453 #if !defined(CONFIG_USER_ONLY)
3454 if (!supervisor(dc))
3455 goto illegal_insn;
3456 #endif
3458 TCGv_ptr r_tickptr;
3460 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3461 cpu_src2);
3462 r_tickptr = tcg_temp_new_ptr();
3463 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3464 offsetof(CPUState, stick));
3465 gen_helper_tick_set_count(r_tickptr,
3466 cpu_dst);
3467 tcg_temp_free_ptr(r_tickptr);
3469 break;
3470 case 0x19: /* System tick compare */
3471 #if !defined(CONFIG_USER_ONLY)
3472 if (!supervisor(dc))
3473 goto illegal_insn;
3474 #endif
3476 TCGv_ptr r_tickptr;
3478 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3479 cpu_src2);
3480 r_tickptr = tcg_temp_new_ptr();
3481 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3482 offsetof(CPUState, stick));
3483 gen_helper_tick_set_limit(r_tickptr,
3484 cpu_stick_cmpr);
3485 tcg_temp_free_ptr(r_tickptr);
3487 break;
3489 case 0x10: /* Performance Control */
3490 case 0x11: /* Performance Instrumentation
3491 Counter */
3492 case 0x12: /* Dispatch Control */
3493 #endif
3494 default:
3495 goto illegal_insn;
3498 break;
3499 #if !defined(CONFIG_USER_ONLY)
3500 case 0x31: /* wrpsr, V9 saved, restored */
3502 if (!supervisor(dc))
3503 goto priv_insn;
3504 #ifdef TARGET_SPARC64
3505 switch (rd) {
3506 case 0:
3507 gen_helper_saved();
3508 break;
3509 case 1:
3510 gen_helper_restored();
3511 break;
3512 case 2: /* UA2005 allclean */
3513 case 3: /* UA2005 otherw */
3514 case 4: /* UA2005 normalw */
3515 case 5: /* UA2005 invalw */
3516 // XXX
3517 default:
3518 goto illegal_insn;
3520 #else
3521 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3522 gen_helper_wrpsr(cpu_dst);
3523 save_state(dc, cpu_cond);
3524 gen_op_next_insn();
3525 tcg_gen_exit_tb(0);
3526 dc->is_br = 1;
3527 #endif
3529 break;
3530 case 0x32: /* wrwim, V9 wrpr */
3532 if (!supervisor(dc))
3533 goto priv_insn;
3534 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3535 #ifdef TARGET_SPARC64
3536 switch (rd) {
3537 case 0: // tpc
3539 TCGv_ptr r_tsptr;
3541 r_tsptr = tcg_temp_new_ptr();
3542 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3543 offsetof(CPUState, tsptr));
3544 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3545 offsetof(trap_state, tpc));
3546 tcg_temp_free_ptr(r_tsptr);
3548 break;
3549 case 1: // tnpc
3551 TCGv_ptr r_tsptr;
3553 r_tsptr = tcg_temp_new_ptr();
3554 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3555 offsetof(CPUState, tsptr));
3556 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3557 offsetof(trap_state, tnpc));
3558 tcg_temp_free_ptr(r_tsptr);
3560 break;
3561 case 2: // tstate
3563 TCGv_ptr r_tsptr;
3565 r_tsptr = tcg_temp_new_ptr();
3566 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3567 offsetof(CPUState, tsptr));
3568 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3569 offsetof(trap_state,
3570 tstate));
3571 tcg_temp_free_ptr(r_tsptr);
3573 break;
3574 case 3: // tt
3576 TCGv_ptr r_tsptr;
3578 r_tsptr = tcg_temp_new_ptr();
3579 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3580 offsetof(CPUState, tsptr));
3581 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3582 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3583 offsetof(trap_state, tt));
3584 tcg_temp_free_ptr(r_tsptr);
3586 break;
3587 case 4: // tick
3589 TCGv_ptr r_tickptr;
3591 r_tickptr = tcg_temp_new_ptr();
3592 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3593 offsetof(CPUState, tick));
3594 gen_helper_tick_set_count(r_tickptr,
3595 cpu_tmp0);
3596 tcg_temp_free_ptr(r_tickptr);
3598 break;
3599 case 5: // tba
3600 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3601 break;
3602 case 6: // pstate
3603 save_state(dc, cpu_cond);
3604 gen_helper_wrpstate(cpu_tmp0);
3605 gen_op_next_insn();
3606 tcg_gen_exit_tb(0);
3607 dc->is_br = 1;
3608 break;
3609 case 7: // tl
3610 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3611 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3612 offsetof(CPUSPARCState, tl));
3613 break;
3614 case 8: // pil
3615 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3616 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3617 offsetof(CPUSPARCState,
3618 psrpil));
3619 break;
3620 case 9: // cwp
3621 gen_helper_wrcwp(cpu_tmp0);
3622 break;
3623 case 10: // cansave
3624 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3625 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3626 offsetof(CPUSPARCState,
3627 cansave));
3628 break;
3629 case 11: // canrestore
3630 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3631 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3632 offsetof(CPUSPARCState,
3633 canrestore));
3634 break;
3635 case 12: // cleanwin
3636 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3637 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3638 offsetof(CPUSPARCState,
3639 cleanwin));
3640 break;
3641 case 13: // otherwin
3642 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3643 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3644 offsetof(CPUSPARCState,
3645 otherwin));
3646 break;
3647 case 14: // wstate
3648 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3649 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3650 offsetof(CPUSPARCState,
3651 wstate));
3652 break;
3653 case 16: // UA2005 gl
3654 CHECK_IU_FEATURE(dc, GL);
3655 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3656 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3657 offsetof(CPUSPARCState, gl));
3658 break;
3659 case 26: // UA2005 strand status
3660 CHECK_IU_FEATURE(dc, HYPV);
3661 if (!hypervisor(dc))
3662 goto priv_insn;
3663 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3664 break;
3665 default:
3666 goto illegal_insn;
3668 #else
3669 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3670 if (dc->def->nwindows != 32)
3671 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3672 (1 << dc->def->nwindows) - 1);
3673 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3674 #endif
3676 break;
3677 case 0x33: /* wrtbr, UA2005 wrhpr */
3679 #ifndef TARGET_SPARC64
3680 if (!supervisor(dc))
3681 goto priv_insn;
3682 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3683 #else
3684 CHECK_IU_FEATURE(dc, HYPV);
3685 if (!hypervisor(dc))
3686 goto priv_insn;
3687 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3688 switch (rd) {
3689 case 0: // hpstate
3690 // XXX gen_op_wrhpstate();
3691 save_state(dc, cpu_cond);
3692 gen_op_next_insn();
3693 tcg_gen_exit_tb(0);
3694 dc->is_br = 1;
3695 break;
3696 case 1: // htstate
3697 // XXX gen_op_wrhtstate();
3698 break;
3699 case 3: // hintp
3700 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3701 break;
3702 case 5: // htba
3703 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3704 break;
3705 case 31: // hstick_cmpr
3707 TCGv_ptr r_tickptr;
3709 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3710 r_tickptr = tcg_temp_new_ptr();
3711 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3712 offsetof(CPUState, hstick));
3713 gen_helper_tick_set_limit(r_tickptr,
3714 cpu_hstick_cmpr);
3715 tcg_temp_free_ptr(r_tickptr);
3717 break;
3718 case 6: // hver readonly
3719 default:
3720 goto illegal_insn;
3722 #endif
3724 break;
3725 #endif
3726 #ifdef TARGET_SPARC64
3727 case 0x2c: /* V9 movcc */
3729 int cc = GET_FIELD_SP(insn, 11, 12);
3730 int cond = GET_FIELD_SP(insn, 14, 17);
3731 TCGv r_cond;
3732 int l1;
3734 r_cond = tcg_temp_new();
3735 if (insn & (1 << 18)) {
3736 if (cc == 0)
3737 gen_cond(r_cond, 0, cond);
3738 else if (cc == 2)
3739 gen_cond(r_cond, 1, cond);
3740 else
3741 goto illegal_insn;
3742 } else {
3743 gen_fcond(r_cond, cc, cond);
3746 l1 = gen_new_label();
3748 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3749 if (IS_IMM) { /* immediate */
3750 TCGv r_const;
3752 simm = GET_FIELD_SPs(insn, 0, 10);
3753 r_const = tcg_const_tl(simm);
3754 gen_movl_TN_reg(rd, r_const);
3755 tcg_temp_free(r_const);
3756 } else {
3757 rs2 = GET_FIELD_SP(insn, 0, 4);
3758 gen_movl_reg_TN(rs2, cpu_tmp0);
3759 gen_movl_TN_reg(rd, cpu_tmp0);
3761 gen_set_label(l1);
3762 tcg_temp_free(r_cond);
3763 break;
3765 case 0x2d: /* V9 sdivx */
3766 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3767 gen_movl_TN_reg(rd, cpu_dst);
3768 break;
3769 case 0x2e: /* V9 popc */
3771 cpu_src2 = get_src2(insn, cpu_src2);
3772 gen_helper_popc(cpu_dst, cpu_src2);
3773 gen_movl_TN_reg(rd, cpu_dst);
3775 case 0x2f: /* V9 movr */
3777 int cond = GET_FIELD_SP(insn, 10, 12);
3778 int l1;
3780 cpu_src1 = get_src1(insn, cpu_src1);
3782 l1 = gen_new_label();
3784 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3785 cpu_src1, 0, l1);
3786 if (IS_IMM) { /* immediate */
3787 TCGv r_const;
3789 simm = GET_FIELD_SPs(insn, 0, 9);
3790 r_const = tcg_const_tl(simm);
3791 gen_movl_TN_reg(rd, r_const);
3792 tcg_temp_free(r_const);
3793 } else {
3794 rs2 = GET_FIELD_SP(insn, 0, 4);
3795 gen_movl_reg_TN(rs2, cpu_tmp0);
3796 gen_movl_TN_reg(rd, cpu_tmp0);
3798 gen_set_label(l1);
3799 break;
3801 #endif
3802 default:
3803 goto illegal_insn;
3806 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3807 #ifdef TARGET_SPARC64
3808 int opf = GET_FIELD_SP(insn, 5, 13);
3809 rs1 = GET_FIELD(insn, 13, 17);
3810 rs2 = GET_FIELD(insn, 27, 31);
3811 if (gen_trap_ifnofpu(dc, cpu_cond))
3812 goto jmp_insn;
3814 switch (opf) {
3815 case 0x000: /* VIS I edge8cc */
3816 case 0x001: /* VIS II edge8n */
3817 case 0x002: /* VIS I edge8lcc */
3818 case 0x003: /* VIS II edge8ln */
3819 case 0x004: /* VIS I edge16cc */
3820 case 0x005: /* VIS II edge16n */
3821 case 0x006: /* VIS I edge16lcc */
3822 case 0x007: /* VIS II edge16ln */
3823 case 0x008: /* VIS I edge32cc */
3824 case 0x009: /* VIS II edge32n */
3825 case 0x00a: /* VIS I edge32lcc */
3826 case 0x00b: /* VIS II edge32ln */
3827 // XXX
3828 goto illegal_insn;
3829 case 0x010: /* VIS I array8 */
3830 CHECK_FPU_FEATURE(dc, VIS1);
3831 cpu_src1 = get_src1(insn, cpu_src1);
3832 gen_movl_reg_TN(rs2, cpu_src2);
3833 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3834 gen_movl_TN_reg(rd, cpu_dst);
3835 break;
3836 case 0x012: /* VIS I array16 */
3837 CHECK_FPU_FEATURE(dc, VIS1);
3838 cpu_src1 = get_src1(insn, cpu_src1);
3839 gen_movl_reg_TN(rs2, cpu_src2);
3840 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3841 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3842 gen_movl_TN_reg(rd, cpu_dst);
3843 break;
3844 case 0x014: /* VIS I array32 */
3845 CHECK_FPU_FEATURE(dc, VIS1);
3846 cpu_src1 = get_src1(insn, cpu_src1);
3847 gen_movl_reg_TN(rs2, cpu_src2);
3848 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3849 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3850 gen_movl_TN_reg(rd, cpu_dst);
3851 break;
3852 case 0x018: /* VIS I alignaddr */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 cpu_src1 = get_src1(insn, cpu_src1);
3855 gen_movl_reg_TN(rs2, cpu_src2);
3856 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3857 gen_movl_TN_reg(rd, cpu_dst);
3858 break;
3859 case 0x019: /* VIS II bmask */
3860 case 0x01a: /* VIS I alignaddrl */
3861 // XXX
3862 goto illegal_insn;
3863 case 0x020: /* VIS I fcmple16 */
3864 CHECK_FPU_FEATURE(dc, VIS1);
3865 gen_op_load_fpr_DT0(DFPREG(rs1));
3866 gen_op_load_fpr_DT1(DFPREG(rs2));
3867 gen_helper_fcmple16();
3868 gen_op_store_DT0_fpr(DFPREG(rd));
3869 break;
3870 case 0x022: /* VIS I fcmpne16 */
3871 CHECK_FPU_FEATURE(dc, VIS1);
3872 gen_op_load_fpr_DT0(DFPREG(rs1));
3873 gen_op_load_fpr_DT1(DFPREG(rs2));
3874 gen_helper_fcmpne16();
3875 gen_op_store_DT0_fpr(DFPREG(rd));
3876 break;
3877 case 0x024: /* VIS I fcmple32 */
3878 CHECK_FPU_FEATURE(dc, VIS1);
3879 gen_op_load_fpr_DT0(DFPREG(rs1));
3880 gen_op_load_fpr_DT1(DFPREG(rs2));
3881 gen_helper_fcmple32();
3882 gen_op_store_DT0_fpr(DFPREG(rd));
3883 break;
3884 case 0x026: /* VIS I fcmpne32 */
3885 CHECK_FPU_FEATURE(dc, VIS1);
3886 gen_op_load_fpr_DT0(DFPREG(rs1));
3887 gen_op_load_fpr_DT1(DFPREG(rs2));
3888 gen_helper_fcmpne32();
3889 gen_op_store_DT0_fpr(DFPREG(rd));
3890 break;
3891 case 0x028: /* VIS I fcmpgt16 */
3892 CHECK_FPU_FEATURE(dc, VIS1);
3893 gen_op_load_fpr_DT0(DFPREG(rs1));
3894 gen_op_load_fpr_DT1(DFPREG(rs2));
3895 gen_helper_fcmpgt16();
3896 gen_op_store_DT0_fpr(DFPREG(rd));
3897 break;
3898 case 0x02a: /* VIS I fcmpeq16 */
3899 CHECK_FPU_FEATURE(dc, VIS1);
3900 gen_op_load_fpr_DT0(DFPREG(rs1));
3901 gen_op_load_fpr_DT1(DFPREG(rs2));
3902 gen_helper_fcmpeq16();
3903 gen_op_store_DT0_fpr(DFPREG(rd));
3904 break;
3905 case 0x02c: /* VIS I fcmpgt32 */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 gen_op_load_fpr_DT0(DFPREG(rs1));
3908 gen_op_load_fpr_DT1(DFPREG(rs2));
3909 gen_helper_fcmpgt32();
3910 gen_op_store_DT0_fpr(DFPREG(rd));
3911 break;
3912 case 0x02e: /* VIS I fcmpeq32 */
3913 CHECK_FPU_FEATURE(dc, VIS1);
3914 gen_op_load_fpr_DT0(DFPREG(rs1));
3915 gen_op_load_fpr_DT1(DFPREG(rs2));
3916 gen_helper_fcmpeq32();
3917 gen_op_store_DT0_fpr(DFPREG(rd));
3918 break;
3919 case 0x031: /* VIS I fmul8x16 */
3920 CHECK_FPU_FEATURE(dc, VIS1);
3921 gen_op_load_fpr_DT0(DFPREG(rs1));
3922 gen_op_load_fpr_DT1(DFPREG(rs2));
3923 gen_helper_fmul8x16();
3924 gen_op_store_DT0_fpr(DFPREG(rd));
3925 break;
3926 case 0x033: /* VIS I fmul8x16au */
3927 CHECK_FPU_FEATURE(dc, VIS1);
3928 gen_op_load_fpr_DT0(DFPREG(rs1));
3929 gen_op_load_fpr_DT1(DFPREG(rs2));
3930 gen_helper_fmul8x16au();
3931 gen_op_store_DT0_fpr(DFPREG(rd));
3932 break;
3933 case 0x035: /* VIS I fmul8x16al */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 gen_op_load_fpr_DT0(DFPREG(rs1));
3936 gen_op_load_fpr_DT1(DFPREG(rs2));
3937 gen_helper_fmul8x16al();
3938 gen_op_store_DT0_fpr(DFPREG(rd));
3939 break;
3940 case 0x036: /* VIS I fmul8sux16 */
3941 CHECK_FPU_FEATURE(dc, VIS1);
3942 gen_op_load_fpr_DT0(DFPREG(rs1));
3943 gen_op_load_fpr_DT1(DFPREG(rs2));
3944 gen_helper_fmul8sux16();
3945 gen_op_store_DT0_fpr(DFPREG(rd));
3946 break;
3947 case 0x037: /* VIS I fmul8ulx16 */
3948 CHECK_FPU_FEATURE(dc, VIS1);
3949 gen_op_load_fpr_DT0(DFPREG(rs1));
3950 gen_op_load_fpr_DT1(DFPREG(rs2));
3951 gen_helper_fmul8ulx16();
3952 gen_op_store_DT0_fpr(DFPREG(rd));
3953 break;
3954 case 0x038: /* VIS I fmuld8sux16 */
3955 CHECK_FPU_FEATURE(dc, VIS1);
3956 gen_op_load_fpr_DT0(DFPREG(rs1));
3957 gen_op_load_fpr_DT1(DFPREG(rs2));
3958 gen_helper_fmuld8sux16();
3959 gen_op_store_DT0_fpr(DFPREG(rd));
3960 break;
3961 case 0x039: /* VIS I fmuld8ulx16 */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 gen_op_load_fpr_DT0(DFPREG(rs1));
3964 gen_op_load_fpr_DT1(DFPREG(rs2));
3965 gen_helper_fmuld8ulx16();
3966 gen_op_store_DT0_fpr(DFPREG(rd));
3967 break;
3968 case 0x03a: /* VIS I fpack32 */
3969 case 0x03b: /* VIS I fpack16 */
3970 case 0x03d: /* VIS I fpackfix */
3971 case 0x03e: /* VIS I pdist */
3972 // XXX
3973 goto illegal_insn;
3974 case 0x048: /* VIS I faligndata */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 gen_op_load_fpr_DT0(DFPREG(rs1));
3977 gen_op_load_fpr_DT1(DFPREG(rs2));
3978 gen_helper_faligndata();
3979 gen_op_store_DT0_fpr(DFPREG(rd));
3980 break;
3981 case 0x04b: /* VIS I fpmerge */
3982 CHECK_FPU_FEATURE(dc, VIS1);
3983 gen_op_load_fpr_DT0(DFPREG(rs1));
3984 gen_op_load_fpr_DT1(DFPREG(rs2));
3985 gen_helper_fpmerge();
3986 gen_op_store_DT0_fpr(DFPREG(rd));
3987 break;
3988 case 0x04c: /* VIS II bshuffle */
3989 // XXX
3990 goto illegal_insn;
3991 case 0x04d: /* VIS I fexpand */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 gen_op_load_fpr_DT0(DFPREG(rs1));
3994 gen_op_load_fpr_DT1(DFPREG(rs2));
3995 gen_helper_fexpand();
3996 gen_op_store_DT0_fpr(DFPREG(rd));
3997 break;
3998 case 0x050: /* VIS I fpadd16 */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 gen_op_load_fpr_DT0(DFPREG(rs1));
4001 gen_op_load_fpr_DT1(DFPREG(rs2));
4002 gen_helper_fpadd16();
4003 gen_op_store_DT0_fpr(DFPREG(rd));
4004 break;
4005 case 0x051: /* VIS I fpadd16s */
4006 CHECK_FPU_FEATURE(dc, VIS1);
4007 gen_helper_fpadd16s(cpu_fpr[rd],
4008 cpu_fpr[rs1], cpu_fpr[rs2]);
4009 break;
4010 case 0x052: /* VIS I fpadd32 */
4011 CHECK_FPU_FEATURE(dc, VIS1);
4012 gen_op_load_fpr_DT0(DFPREG(rs1));
4013 gen_op_load_fpr_DT1(DFPREG(rs2));
4014 gen_helper_fpadd32();
4015 gen_op_store_DT0_fpr(DFPREG(rd));
4016 break;
4017 case 0x053: /* VIS I fpadd32s */
4018 CHECK_FPU_FEATURE(dc, VIS1);
4019 gen_helper_fpadd32s(cpu_fpr[rd],
4020 cpu_fpr[rs1], cpu_fpr[rs2]);
4021 break;
4022 case 0x054: /* VIS I fpsub16 */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 gen_op_load_fpr_DT0(DFPREG(rs1));
4025 gen_op_load_fpr_DT1(DFPREG(rs2));
4026 gen_helper_fpsub16();
4027 gen_op_store_DT0_fpr(DFPREG(rd));
4028 break;
4029 case 0x055: /* VIS I fpsub16s */
4030 CHECK_FPU_FEATURE(dc, VIS1);
4031 gen_helper_fpsub16s(cpu_fpr[rd],
4032 cpu_fpr[rs1], cpu_fpr[rs2]);
4033 break;
4034 case 0x056: /* VIS I fpsub32 */
4035 CHECK_FPU_FEATURE(dc, VIS1);
4036 gen_op_load_fpr_DT0(DFPREG(rs1));
4037 gen_op_load_fpr_DT1(DFPREG(rs2));
4038 gen_helper_fpsub32();
4039 gen_op_store_DT0_fpr(DFPREG(rd));
4040 break;
4041 case 0x057: /* VIS I fpsub32s */
4042 CHECK_FPU_FEATURE(dc, VIS1);
4043 gen_helper_fpsub32s(cpu_fpr[rd],
4044 cpu_fpr[rs1], cpu_fpr[rs2]);
4045 break;
4046 case 0x060: /* VIS I fzero */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4049 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4050 break;
4051 case 0x061: /* VIS I fzeros */
4052 CHECK_FPU_FEATURE(dc, VIS1);
4053 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4054 break;
4055 case 0x062: /* VIS I fnor */
4056 CHECK_FPU_FEATURE(dc, VIS1);
4057 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4058 cpu_fpr[DFPREG(rs2)]);
4059 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4060 cpu_fpr[DFPREG(rs2) + 1]);
4061 break;
4062 case 0x063: /* VIS I fnors */
4063 CHECK_FPU_FEATURE(dc, VIS1);
4064 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4065 break;
4066 case 0x064: /* VIS I fandnot2 */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4069 cpu_fpr[DFPREG(rs2)]);
4070 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4071 cpu_fpr[DFPREG(rs1) + 1],
4072 cpu_fpr[DFPREG(rs2) + 1]);
4073 break;
4074 case 0x065: /* VIS I fandnot2s */
4075 CHECK_FPU_FEATURE(dc, VIS1);
4076 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4077 break;
4078 case 0x066: /* VIS I fnot2 */
4079 CHECK_FPU_FEATURE(dc, VIS1);
4080 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4081 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4082 cpu_fpr[DFPREG(rs2) + 1]);
4083 break;
4084 case 0x067: /* VIS I fnot2s */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4087 break;
4088 case 0x068: /* VIS I fandnot1 */
4089 CHECK_FPU_FEATURE(dc, VIS1);
4090 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4091 cpu_fpr[DFPREG(rs1)]);
4092 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4093 cpu_fpr[DFPREG(rs2) + 1],
4094 cpu_fpr[DFPREG(rs1) + 1]);
4095 break;
4096 case 0x069: /* VIS I fandnot1s */
4097 CHECK_FPU_FEATURE(dc, VIS1);
4098 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4099 break;
4100 case 0x06a: /* VIS I fnot1 */
4101 CHECK_FPU_FEATURE(dc, VIS1);
4102 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4103 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4104 cpu_fpr[DFPREG(rs1) + 1]);
4105 break;
4106 case 0x06b: /* VIS I fnot1s */
4107 CHECK_FPU_FEATURE(dc, VIS1);
4108 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4109 break;
4110 case 0x06c: /* VIS I fxor */
4111 CHECK_FPU_FEATURE(dc, VIS1);
4112 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4113 cpu_fpr[DFPREG(rs2)]);
4114 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4115 cpu_fpr[DFPREG(rs1) + 1],
4116 cpu_fpr[DFPREG(rs2) + 1]);
4117 break;
4118 case 0x06d: /* VIS I fxors */
4119 CHECK_FPU_FEATURE(dc, VIS1);
4120 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4121 break;
4122 case 0x06e: /* VIS I fnand */
4123 CHECK_FPU_FEATURE(dc, VIS1);
4124 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4125 cpu_fpr[DFPREG(rs2)]);
4126 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4127 cpu_fpr[DFPREG(rs2) + 1]);
4128 break;
4129 case 0x06f: /* VIS I fnands */
4130 CHECK_FPU_FEATURE(dc, VIS1);
4131 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4132 break;
4133 case 0x070: /* VIS I fand */
4134 CHECK_FPU_FEATURE(dc, VIS1);
4135 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4136 cpu_fpr[DFPREG(rs2)]);
4137 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4138 cpu_fpr[DFPREG(rs1) + 1],
4139 cpu_fpr[DFPREG(rs2) + 1]);
4140 break;
4141 case 0x071: /* VIS I fands */
4142 CHECK_FPU_FEATURE(dc, VIS1);
4143 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4144 break;
4145 case 0x072: /* VIS I fxnor */
4146 CHECK_FPU_FEATURE(dc, VIS1);
4147 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4148 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4149 cpu_fpr[DFPREG(rs1)]);
4150 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4151 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4152 cpu_fpr[DFPREG(rs1) + 1]);
4153 break;
4154 case 0x073: /* VIS I fxnors */
4155 CHECK_FPU_FEATURE(dc, VIS1);
4156 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4157 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4158 break;
4159 case 0x074: /* VIS I fsrc1 */
4160 CHECK_FPU_FEATURE(dc, VIS1);
4161 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4162 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4163 cpu_fpr[DFPREG(rs1) + 1]);
4164 break;
4165 case 0x075: /* VIS I fsrc1s */
4166 CHECK_FPU_FEATURE(dc, VIS1);
4167 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4168 break;
4169 case 0x076: /* VIS I fornot2 */
4170 CHECK_FPU_FEATURE(dc, VIS1);
4171 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4172 cpu_fpr[DFPREG(rs2)]);
4173 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4174 cpu_fpr[DFPREG(rs1) + 1],
4175 cpu_fpr[DFPREG(rs2) + 1]);
4176 break;
4177 case 0x077: /* VIS I fornot2s */
4178 CHECK_FPU_FEATURE(dc, VIS1);
4179 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4180 break;
4181 case 0x078: /* VIS I fsrc2 */
4182 CHECK_FPU_FEATURE(dc, VIS1);
4183 gen_op_load_fpr_DT0(DFPREG(rs2));
4184 gen_op_store_DT0_fpr(DFPREG(rd));
4185 break;
4186 case 0x079: /* VIS I fsrc2s */
4187 CHECK_FPU_FEATURE(dc, VIS1);
4188 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4189 break;
4190 case 0x07a: /* VIS I fornot1 */
4191 CHECK_FPU_FEATURE(dc, VIS1);
4192 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4193 cpu_fpr[DFPREG(rs1)]);
4194 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4195 cpu_fpr[DFPREG(rs2) + 1],
4196 cpu_fpr[DFPREG(rs1) + 1]);
4197 break;
4198 case 0x07b: /* VIS I fornot1s */
4199 CHECK_FPU_FEATURE(dc, VIS1);
4200 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4201 break;
4202 case 0x07c: /* VIS I for */
4203 CHECK_FPU_FEATURE(dc, VIS1);
4204 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4205 cpu_fpr[DFPREG(rs2)]);
4206 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4207 cpu_fpr[DFPREG(rs1) + 1],
4208 cpu_fpr[DFPREG(rs2) + 1]);
4209 break;
4210 case 0x07d: /* VIS I fors */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4213 break;
4214 case 0x07e: /* VIS I fone */
4215 CHECK_FPU_FEATURE(dc, VIS1);
4216 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4217 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4218 break;
4219 case 0x07f: /* VIS I fones */
4220 CHECK_FPU_FEATURE(dc, VIS1);
4221 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4222 break;
4223 case 0x080: /* VIS I shutdown */
4224 case 0x081: /* VIS II siam */
4225 // XXX
4226 goto illegal_insn;
4227 default:
4228 goto illegal_insn;
4230 #else
4231 goto ncp_insn;
4232 #endif
4233 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4234 #ifdef TARGET_SPARC64
4235 goto illegal_insn;
4236 #else
4237 goto ncp_insn;
4238 #endif
4239 #ifdef TARGET_SPARC64
4240 } else if (xop == 0x39) { /* V9 return */
4241 TCGv_i32 r_const;
4243 save_state(dc, cpu_cond);
4244 cpu_src1 = get_src1(insn, cpu_src1);
4245 if (IS_IMM) { /* immediate */
4246 simm = GET_FIELDs(insn, 19, 31);
4247 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4248 } else { /* register */
4249 rs2 = GET_FIELD(insn, 27, 31);
4250 if (rs2) {
4251 gen_movl_reg_TN(rs2, cpu_src2);
4252 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4253 } else
4254 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4256 gen_helper_restore();
4257 gen_mov_pc_npc(dc, cpu_cond);
4258 r_const = tcg_const_i32(3);
4259 gen_helper_check_align(cpu_dst, r_const);
4260 tcg_temp_free_i32(r_const);
4261 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4262 dc->npc = DYNAMIC_PC;
4263 goto jmp_insn;
4264 #endif
4265 } else {
4266 cpu_src1 = get_src1(insn, cpu_src1);
4267 if (IS_IMM) { /* immediate */
4268 simm = GET_FIELDs(insn, 19, 31);
4269 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4270 } else { /* register */
4271 rs2 = GET_FIELD(insn, 27, 31);
4272 if (rs2) {
4273 gen_movl_reg_TN(rs2, cpu_src2);
4274 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4275 } else
4276 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4278 switch (xop) {
4279 case 0x38: /* jmpl */
4281 TCGv r_pc;
4282 TCGv_i32 r_const;
4284 r_pc = tcg_const_tl(dc->pc);
4285 gen_movl_TN_reg(rd, r_pc);
4286 tcg_temp_free(r_pc);
4287 gen_mov_pc_npc(dc, cpu_cond);
4288 r_const = tcg_const_i32(3);
4289 gen_helper_check_align(cpu_dst, r_const);
4290 tcg_temp_free_i32(r_const);
4291 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4292 dc->npc = DYNAMIC_PC;
4294 goto jmp_insn;
4295 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4296 case 0x39: /* rett, V9 return */
4298 TCGv_i32 r_const;
4300 if (!supervisor(dc))
4301 goto priv_insn;
4302 gen_mov_pc_npc(dc, cpu_cond);
4303 r_const = tcg_const_i32(3);
4304 gen_helper_check_align(cpu_dst, r_const);
4305 tcg_temp_free_i32(r_const);
4306 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4307 dc->npc = DYNAMIC_PC;
4308 gen_helper_rett();
4310 goto jmp_insn;
4311 #endif
4312 case 0x3b: /* flush */
4313 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4314 goto unimp_flush;
4315 gen_helper_flush(cpu_dst);
4316 break;
4317 case 0x3c: /* save */
4318 save_state(dc, cpu_cond);
4319 gen_helper_save();
4320 gen_movl_TN_reg(rd, cpu_dst);
4321 break;
4322 case 0x3d: /* restore */
4323 save_state(dc, cpu_cond);
4324 gen_helper_restore();
4325 gen_movl_TN_reg(rd, cpu_dst);
4326 break;
4327 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4328 case 0x3e: /* V9 done/retry */
4330 switch (rd) {
4331 case 0:
4332 if (!supervisor(dc))
4333 goto priv_insn;
4334 dc->npc = DYNAMIC_PC;
4335 dc->pc = DYNAMIC_PC;
4336 gen_helper_done();
4337 goto jmp_insn;
4338 case 1:
4339 if (!supervisor(dc))
4340 goto priv_insn;
4341 dc->npc = DYNAMIC_PC;
4342 dc->pc = DYNAMIC_PC;
4343 gen_helper_retry();
4344 goto jmp_insn;
4345 default:
4346 goto illegal_insn;
4349 break;
4350 #endif
4351 default:
4352 goto illegal_insn;
4355 break;
4357 break;
4358 case 3: /* load/store instructions */
4360 unsigned int xop = GET_FIELD(insn, 7, 12);
4362 cpu_src1 = get_src1(insn, cpu_src1);
4363 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4364 rs2 = GET_FIELD(insn, 27, 31);
4365 gen_movl_reg_TN(rs2, cpu_src2);
4366 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4367 } else if (IS_IMM) { /* immediate */
4368 simm = GET_FIELDs(insn, 19, 31);
4369 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4370 } else { /* register */
4371 rs2 = GET_FIELD(insn, 27, 31);
4372 if (rs2 != 0) {
4373 gen_movl_reg_TN(rs2, cpu_src2);
4374 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4375 } else
4376 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4378 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4379 (xop > 0x17 && xop <= 0x1d ) ||
4380 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4381 switch (xop) {
4382 case 0x0: /* ld, V9 lduw, load unsigned word */
4383 gen_address_mask(dc, cpu_addr);
4384 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4385 break;
4386 case 0x1: /* ldub, load unsigned byte */
4387 gen_address_mask(dc, cpu_addr);
4388 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4389 break;
4390 case 0x2: /* lduh, load unsigned halfword */
4391 gen_address_mask(dc, cpu_addr);
4392 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4393 break;
4394 case 0x3: /* ldd, load double word */
4395 if (rd & 1)
4396 goto illegal_insn;
4397 else {
4398 TCGv_i32 r_const;
4400 save_state(dc, cpu_cond);
4401 r_const = tcg_const_i32(7);
4402 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4403 tcg_temp_free_i32(r_const);
4404 gen_address_mask(dc, cpu_addr);
4405 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4406 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4407 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4408 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4409 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4410 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4411 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4413 break;
4414 case 0x9: /* ldsb, load signed byte */
4415 gen_address_mask(dc, cpu_addr);
4416 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4417 break;
4418 case 0xa: /* ldsh, load signed halfword */
4419 gen_address_mask(dc, cpu_addr);
4420 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4421 break;
4422 case 0xd: /* ldstub -- XXX: should be atomically */
4424 TCGv r_const;
4426 gen_address_mask(dc, cpu_addr);
4427 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4428 r_const = tcg_const_tl(0xff);
4429 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4430 tcg_temp_free(r_const);
4432 break;
4433 case 0x0f: /* swap, swap register with memory. Also
4434 atomically */
4435 CHECK_IU_FEATURE(dc, SWAP);
4436 gen_movl_reg_TN(rd, cpu_val);
4437 gen_address_mask(dc, cpu_addr);
4438 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4439 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4440 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4441 break;
4442 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4443 case 0x10: /* lda, V9 lduwa, load word alternate */
4444 #ifndef TARGET_SPARC64
4445 if (IS_IMM)
4446 goto illegal_insn;
4447 if (!supervisor(dc))
4448 goto priv_insn;
4449 #endif
4450 save_state(dc, cpu_cond);
4451 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4452 break;
4453 case 0x11: /* lduba, load unsigned byte alternate */
4454 #ifndef TARGET_SPARC64
4455 if (IS_IMM)
4456 goto illegal_insn;
4457 if (!supervisor(dc))
4458 goto priv_insn;
4459 #endif
4460 save_state(dc, cpu_cond);
4461 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4462 break;
4463 case 0x12: /* lduha, load unsigned halfword alternate */
4464 #ifndef TARGET_SPARC64
4465 if (IS_IMM)
4466 goto illegal_insn;
4467 if (!supervisor(dc))
4468 goto priv_insn;
4469 #endif
4470 save_state(dc, cpu_cond);
4471 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4472 break;
4473 case 0x13: /* ldda, load double word alternate */
4474 #ifndef TARGET_SPARC64
4475 if (IS_IMM)
4476 goto illegal_insn;
4477 if (!supervisor(dc))
4478 goto priv_insn;
4479 #endif
4480 if (rd & 1)
4481 goto illegal_insn;
4482 save_state(dc, cpu_cond);
4483 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4484 goto skip_move;
4485 case 0x19: /* ldsba, load signed byte alternate */
4486 #ifndef TARGET_SPARC64
4487 if (IS_IMM)
4488 goto illegal_insn;
4489 if (!supervisor(dc))
4490 goto priv_insn;
4491 #endif
4492 save_state(dc, cpu_cond);
4493 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4494 break;
4495 case 0x1a: /* ldsha, load signed halfword alternate */
4496 #ifndef TARGET_SPARC64
4497 if (IS_IMM)
4498 goto illegal_insn;
4499 if (!supervisor(dc))
4500 goto priv_insn;
4501 #endif
4502 save_state(dc, cpu_cond);
4503 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4504 break;
4505 case 0x1d: /* ldstuba -- XXX: should be atomically */
4506 #ifndef TARGET_SPARC64
4507 if (IS_IMM)
4508 goto illegal_insn;
4509 if (!supervisor(dc))
4510 goto priv_insn;
4511 #endif
4512 save_state(dc, cpu_cond);
4513 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4514 break;
4515 case 0x1f: /* swapa, swap reg with alt. memory. Also
4516 atomically */
4517 CHECK_IU_FEATURE(dc, SWAP);
4518 #ifndef TARGET_SPARC64
4519 if (IS_IMM)
4520 goto illegal_insn;
4521 if (!supervisor(dc))
4522 goto priv_insn;
4523 #endif
4524 save_state(dc, cpu_cond);
4525 gen_movl_reg_TN(rd, cpu_val);
4526 gen_swap_asi(cpu_val, cpu_addr, insn);
4527 break;
4529 #ifndef TARGET_SPARC64
4530 case 0x30: /* ldc */
4531 case 0x31: /* ldcsr */
4532 case 0x33: /* lddc */
4533 goto ncp_insn;
4534 #endif
4535 #endif
4536 #ifdef TARGET_SPARC64
4537 case 0x08: /* V9 ldsw */
4538 gen_address_mask(dc, cpu_addr);
4539 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4540 break;
4541 case 0x0b: /* V9 ldx */
4542 gen_address_mask(dc, cpu_addr);
4543 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4544 break;
4545 case 0x18: /* V9 ldswa */
4546 save_state(dc, cpu_cond);
4547 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4548 break;
4549 case 0x1b: /* V9 ldxa */
4550 save_state(dc, cpu_cond);
4551 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4552 break;
4553 case 0x2d: /* V9 prefetch, no effect */
4554 goto skip_move;
4555 case 0x30: /* V9 ldfa */
4556 save_state(dc, cpu_cond);
4557 gen_ldf_asi(cpu_addr, insn, 4, rd);
4558 goto skip_move;
4559 case 0x33: /* V9 lddfa */
4560 save_state(dc, cpu_cond);
4561 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4562 goto skip_move;
4563 case 0x3d: /* V9 prefetcha, no effect */
4564 goto skip_move;
4565 case 0x32: /* V9 ldqfa */
4566 CHECK_FPU_FEATURE(dc, FLOAT128);
4567 save_state(dc, cpu_cond);
4568 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4569 goto skip_move;
4570 #endif
4571 default:
4572 goto illegal_insn;
4574 gen_movl_TN_reg(rd, cpu_val);
4575 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4576 skip_move: ;
4577 #endif
4578 } else if (xop >= 0x20 && xop < 0x24) {
4579 if (gen_trap_ifnofpu(dc, cpu_cond))
4580 goto jmp_insn;
4581 save_state(dc, cpu_cond);
4582 switch (xop) {
4583 case 0x20: /* ldf, load fpreg */
4584 gen_address_mask(dc, cpu_addr);
4585 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4586 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4587 break;
4588 case 0x21: /* ldfsr, V9 ldxfsr */
4589 #ifdef TARGET_SPARC64
4590 gen_address_mask(dc, cpu_addr);
4591 if (rd == 1) {
4592 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4593 gen_helper_ldxfsr(cpu_tmp64);
4594 } else
4595 #else
4597 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4598 gen_helper_ldfsr(cpu_tmp32);
4600 #endif
4601 break;
4602 case 0x22: /* ldqf, load quad fpreg */
4604 TCGv_i32 r_const;
4606 CHECK_FPU_FEATURE(dc, FLOAT128);
4607 r_const = tcg_const_i32(dc->mem_idx);
4608 gen_helper_ldqf(cpu_addr, r_const);
4609 tcg_temp_free_i32(r_const);
4610 gen_op_store_QT0_fpr(QFPREG(rd));
4612 break;
4613 case 0x23: /* lddf, load double fpreg */
4615 TCGv_i32 r_const;
4617 r_const = tcg_const_i32(dc->mem_idx);
4618 gen_helper_lddf(cpu_addr, r_const);
4619 tcg_temp_free_i32(r_const);
4620 gen_op_store_DT0_fpr(DFPREG(rd));
4622 break;
4623 default:
4624 goto illegal_insn;
4626 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4627 xop == 0xe || xop == 0x1e) {
4628 gen_movl_reg_TN(rd, cpu_val);
4629 switch (xop) {
4630 case 0x4: /* st, store word */
4631 gen_address_mask(dc, cpu_addr);
4632 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4633 break;
4634 case 0x5: /* stb, store byte */
4635 gen_address_mask(dc, cpu_addr);
4636 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4637 break;
4638 case 0x6: /* sth, store halfword */
4639 gen_address_mask(dc, cpu_addr);
4640 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4641 break;
4642 case 0x7: /* std, store double word */
4643 if (rd & 1)
4644 goto illegal_insn;
4645 else {
4646 TCGv_i32 r_const;
4648 save_state(dc, cpu_cond);
4649 gen_address_mask(dc, cpu_addr);
4650 r_const = tcg_const_i32(7);
4651 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4652 tcg_temp_free_i32(r_const);
4653 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4654 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4655 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4657 break;
4658 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4659 case 0x14: /* sta, V9 stwa, store word alternate */
4660 #ifndef TARGET_SPARC64
4661 if (IS_IMM)
4662 goto illegal_insn;
4663 if (!supervisor(dc))
4664 goto priv_insn;
4665 #endif
4666 save_state(dc, cpu_cond);
4667 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4668 break;
4669 case 0x15: /* stba, store byte alternate */
4670 #ifndef TARGET_SPARC64
4671 if (IS_IMM)
4672 goto illegal_insn;
4673 if (!supervisor(dc))
4674 goto priv_insn;
4675 #endif
4676 save_state(dc, cpu_cond);
4677 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4678 break;
4679 case 0x16: /* stha, store halfword alternate */
4680 #ifndef TARGET_SPARC64
4681 if (IS_IMM)
4682 goto illegal_insn;
4683 if (!supervisor(dc))
4684 goto priv_insn;
4685 #endif
4686 save_state(dc, cpu_cond);
4687 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4688 break;
4689 case 0x17: /* stda, store double word alternate */
4690 #ifndef TARGET_SPARC64
4691 if (IS_IMM)
4692 goto illegal_insn;
4693 if (!supervisor(dc))
4694 goto priv_insn;
4695 #endif
4696 if (rd & 1)
4697 goto illegal_insn;
4698 else {
4699 save_state(dc, cpu_cond);
4700 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4702 break;
4703 #endif
4704 #ifdef TARGET_SPARC64
4705 case 0x0e: /* V9 stx */
4706 gen_address_mask(dc, cpu_addr);
4707 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4708 break;
4709 case 0x1e: /* V9 stxa */
4710 save_state(dc, cpu_cond);
4711 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4712 break;
4713 #endif
4714 default:
4715 goto illegal_insn;
4717 } else if (xop > 0x23 && xop < 0x28) {
4718 if (gen_trap_ifnofpu(dc, cpu_cond))
4719 goto jmp_insn;
4720 save_state(dc, cpu_cond);
4721 switch (xop) {
4722 case 0x24: /* stf, store fpreg */
4723 gen_address_mask(dc, cpu_addr);
4724 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4725 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4726 break;
4727 case 0x25: /* stfsr, V9 stxfsr */
4728 #ifdef TARGET_SPARC64
4729 gen_address_mask(dc, cpu_addr);
4730 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4731 if (rd == 1)
4732 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4733 else
4734 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4735 #else
4736 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4737 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4738 #endif
4739 break;
4740 case 0x26:
4741 #ifdef TARGET_SPARC64
4742 /* V9 stqf, store quad fpreg */
4744 TCGv_i32 r_const;
4746 CHECK_FPU_FEATURE(dc, FLOAT128);
4747 gen_op_load_fpr_QT0(QFPREG(rd));
4748 r_const = tcg_const_i32(dc->mem_idx);
4749 gen_helper_stqf(cpu_addr, r_const);
4750 tcg_temp_free_i32(r_const);
4752 break;
4753 #else /* !TARGET_SPARC64 */
4754 /* stdfq, store floating point queue */
4755 #if defined(CONFIG_USER_ONLY)
4756 goto illegal_insn;
4757 #else
4758 if (!supervisor(dc))
4759 goto priv_insn;
4760 if (gen_trap_ifnofpu(dc, cpu_cond))
4761 goto jmp_insn;
4762 goto nfq_insn;
4763 #endif
4764 #endif
4765 case 0x27: /* stdf, store double fpreg */
4767 TCGv_i32 r_const;
4769 gen_op_load_fpr_DT0(DFPREG(rd));
4770 r_const = tcg_const_i32(dc->mem_idx);
4771 gen_helper_stdf(cpu_addr, r_const);
4772 tcg_temp_free_i32(r_const);
4774 break;
4775 default:
4776 goto illegal_insn;
4778 } else if (xop > 0x33 && xop < 0x3f) {
4779 save_state(dc, cpu_cond);
4780 switch (xop) {
4781 #ifdef TARGET_SPARC64
4782 case 0x34: /* V9 stfa */
4783 gen_stf_asi(cpu_addr, insn, 4, rd);
4784 break;
4785 case 0x36: /* V9 stqfa */
4787 TCGv_i32 r_const;
4789 CHECK_FPU_FEATURE(dc, FLOAT128);
4790 r_const = tcg_const_i32(7);
4791 gen_helper_check_align(cpu_addr, r_const);
4792 tcg_temp_free_i32(r_const);
4793 gen_op_load_fpr_QT0(QFPREG(rd));
4794 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4796 break;
4797 case 0x37: /* V9 stdfa */
4798 gen_op_load_fpr_DT0(DFPREG(rd));
4799 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4800 break;
4801 case 0x3c: /* V9 casa */
4802 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4803 gen_movl_TN_reg(rd, cpu_val);
4804 break;
4805 case 0x3e: /* V9 casxa */
4806 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4807 gen_movl_TN_reg(rd, cpu_val);
4808 break;
4809 #else
4810 case 0x34: /* stc */
4811 case 0x35: /* stcsr */
4812 case 0x36: /* stdcq */
4813 case 0x37: /* stdc */
4814 goto ncp_insn;
4815 #endif
4816 default:
4817 goto illegal_insn;
4819 } else
4820 goto illegal_insn;
4822 break;
4824 /* default case for non jump instructions */
4825 if (dc->npc == DYNAMIC_PC) {
4826 dc->pc = DYNAMIC_PC;
4827 gen_op_next_insn();
4828 } else if (dc->npc == JUMP_PC) {
4829 /* we can do a static jump */
4830 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4831 dc->is_br = 1;
4832 } else {
4833 dc->pc = dc->npc;
4834 dc->npc = dc->npc + 4;
4836 jmp_insn:
4837 return;
4838 illegal_insn:
4840 TCGv_i32 r_const;
4842 save_state(dc, cpu_cond);
4843 r_const = tcg_const_i32(TT_ILL_INSN);
4844 gen_helper_raise_exception(r_const);
4845 tcg_temp_free_i32(r_const);
4846 dc->is_br = 1;
4848 return;
4849 unimp_flush:
4851 TCGv_i32 r_const;
4853 save_state(dc, cpu_cond);
4854 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4855 gen_helper_raise_exception(r_const);
4856 tcg_temp_free_i32(r_const);
4857 dc->is_br = 1;
4859 return;
4860 #if !defined(CONFIG_USER_ONLY)
4861 priv_insn:
4863 TCGv_i32 r_const;
4865 save_state(dc, cpu_cond);
4866 r_const = tcg_const_i32(TT_PRIV_INSN);
4867 gen_helper_raise_exception(r_const);
4868 tcg_temp_free_i32(r_const);
4869 dc->is_br = 1;
4871 return;
4872 #endif
4873 nfpu_insn:
4874 save_state(dc, cpu_cond);
4875 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4876 dc->is_br = 1;
4877 return;
4878 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4879 nfq_insn:
4880 save_state(dc, cpu_cond);
4881 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4882 dc->is_br = 1;
4883 return;
4884 #endif
4885 #ifndef TARGET_SPARC64
4886 ncp_insn:
4888 TCGv r_const;
4890 save_state(dc, cpu_cond);
4891 r_const = tcg_const_i32(TT_NCP_INSN);
4892 gen_helper_raise_exception(r_const);
4893 tcg_temp_free(r_const);
4894 dc->is_br = 1;
4896 return;
4897 #endif
/*
 * Translate one TranslationBlock of guest SPARC code into TCG ops.
 *
 * tb:  the translation block to fill; tb->pc is the guest start PC and
 *      tb->cs_base carries the guest NPC of the first instruction.
 * spc: non-zero when called from gen_intermediate_code_pc().  In that mode
 *      the loop also fills the gen_opc_pc/gen_opc_npc/gen_opc_instr_start/
 *      gen_opc_icount side tables so that a position in the generated op
 *      stream can later be mapped back to a guest pc/npc pair, and the
 *      final jump_pc values are exported via gen_opc_jump_pc[].
 * env: CPU state; used here for the MMU index, FPU-enable state,
 *      breakpoint list and single-step flag.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* The NPC of the first insn is handed over through cs_base.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn translators.  */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    /* "local" temps so their values survive branches in the generated
       code (see tcg/README on local temporaries).  */
    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* If a breakpoint sits on the current PC, emit a debug exception
           and terminate the block here.  */
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                /* Zero-fill entries for ops emitted since the previous
                   instruction start, then mark this insn's start.  */
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the temporaries allocated above, in reverse order.  */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the instruction-start table out to the last generated op.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5042 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5044 gen_intermediate_code_internal(tb, 0, env);
5047 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5049 gen_intermediate_code_internal(tb, 1, env);
5052 void gen_intermediate_code_init(CPUSPARCState *env)
5054 unsigned int i;
5055 static int inited;
5056 static const char * const gregnames[8] = {
5057 NULL, // g0 not used
5058 "g1",
5059 "g2",
5060 "g3",
5061 "g4",
5062 "g5",
5063 "g6",
5064 "g7",
5066 static const char * const fregnames[64] = {
5067 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5068 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5069 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5070 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5071 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5072 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5073 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5074 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5077 /* init various static tables */
5078 if (!inited) {
5079 inited = 1;
5081 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5082 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5083 offsetof(CPUState, regwptr),
5084 "regwptr");
5085 #ifdef TARGET_SPARC64
5086 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5087 "xcc");
5088 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5089 "asi");
5090 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5091 "fprs");
5092 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5093 "gsr");
5094 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5095 offsetof(CPUState, tick_cmpr),
5096 "tick_cmpr");
5097 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5098 offsetof(CPUState, stick_cmpr),
5099 "stick_cmpr");
5100 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5101 offsetof(CPUState, hstick_cmpr),
5102 "hstick_cmpr");
5103 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5104 "hintp");
5105 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5106 "htba");
5107 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5108 "hver");
5109 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5110 offsetof(CPUState, ssr), "ssr");
5111 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5112 offsetof(CPUState, version), "ver");
5113 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5114 offsetof(CPUState, softint),
5115 "softint");
5116 #else
5117 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5118 "wim");
5119 #endif
5120 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5121 "cond");
5122 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5123 "cc_src");
5124 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5125 offsetof(CPUState, cc_src2),
5126 "cc_src2");
5127 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5128 "cc_dst");
5129 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5130 "psr");
5131 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5132 "fsr");
5133 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5134 "pc");
5135 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5136 "npc");
5137 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5138 #ifndef CONFIG_USER_ONLY
5139 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5140 "tbr");
5141 #endif
5142 for (i = 1; i < 8; i++)
5143 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5144 offsetof(CPUState, gregs[i]),
5145 gregnames[i]);
5146 for (i = 0; i < TARGET_FPREGS; i++)
5147 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5148 offsetof(CPUState, fpr[i]),
5149 fregnames[i]);
5151 /* register helpers */
5153 #define GEN_HELPER 2
5154 #include "helper.h"
5158 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5159 unsigned long searched_pc, int pc_pos, void *puc)
5161 target_ulong npc;
5162 env->pc = gen_opc_pc[pc_pos];
5163 npc = gen_opc_npc[pc_pos];
5164 if (npc == 1) {
5165 /* dynamic NPC: already stored */
5166 } else if (npc == 2) {
5167 target_ulong t2 = (target_ulong)(unsigned long)puc;
5168 /* jump PC: use T2 and the jump targets of the translation */
5169 if (t2)
5170 env->npc = gen_opc_jump_pc[0];
5171 else
5172 env->npc = gen_opc_jump_pc[1];
5173 } else {
5174 env->npc = npc;