Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
Copyright (C) 2003-2005 Fabrice Bellard

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Rest of V9 instructions, VIS instructions
NPC/PC static optimisations (use JUMP_TB when possible)
Optimize synthetic instructions
Optional alignment check
/* Special (non-address) encodings stored in DisasContext.pc / .npc. */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
49 typedef struct DisasContext
{
50 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
51 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
52 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
56 struct TranslationBlock
*tb
;
/* Write cursors into the generated micro-op opcode and parameter
   streams, advanced while translating a block. */
static uint16_t *gen_opc_ptr;
static uint32_t *gen_opparam_ptr;
65 #define DEF(s,n,copy_size) INDEX_op_ ## s,
/*
 * Instruction-word field extraction.
 *
 * GET_FIELD uses non-native (big-endian) bit numbering: bit 0 is the
 * MOST significant bit of the 32-bit word, so FROM/TO count from the
 * top.  The field width (TO - FROM + 1) must be < 32, or the
 * `1 << width` shift is undefined behavior.
 */
#define GET_FIELD(X, FROM, TO) \
    (((X) >> (31 - (TO))) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This variant uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants.  NOTE(review): the two forms pass different
   second arguments to sign_extend (width vs. 32 - width); whether that
   asymmetry is intentional depends on sign_extend's definition, which
   is not fully visible in this extract — confirm before changing. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), 32 - ((b) - (a) + 1))
85 #define DFPREG(r) (((r & 1) << 6) | (r & 0x1e))
90 #ifdef USE_DIRECT_JUMP
93 #define TBPARAM(x) (long)(x)
96 static int sign_extend(int x
, int len
)
99 return (x
<< len
) >> len
;
102 #define IS_IMM (insn & (1<<13))
104 static void disas_sparc_insn(DisasContext
* dc
);
106 static GenOpFunc
*gen_op_movl_TN_reg
[2][32] = {
177 static GenOpFunc
*gen_op_movl_reg_TN
[3][32] = {
282 static GenOpFunc1
*gen_op_movl_TN_im
[3] = {
288 // Sign extending version
289 static GenOpFunc1
* const gen_op_movl_TN_sim
[3] = {
295 #ifdef TARGET_SPARC64
296 #define GEN32(func, NAME) \
297 static GenOpFunc *NAME ## _table [64] = { \
298 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
299 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
300 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
301 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
302 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
303 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
304 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
305 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
306 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
307 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
308 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
309 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
311 static inline void func(int n) \
313 NAME ## _table[n](); \
316 #define GEN32(func, NAME) \
317 static GenOpFunc *NAME ## _table [32] = { \
318 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
319 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
320 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
321 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
322 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
323 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
324 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
325 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
327 static inline void func(int n) \
329 NAME ## _table[n](); \
/* floating point registers moves: instantiate the per-register
   dispatch tables and their accessor functions via GEN32. */
GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);

GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);
344 #ifdef TARGET_SPARC64
345 // 'a' versions allowed to user depending on asi
346 #if defined(CONFIG_USER_ONLY)
347 #define supervisor(dc) 0
348 #define gen_op_ldst(name) gen_op_##name##_raw()
349 #define OP_LD_TABLE(width) \
350 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
355 offset = GET_FIELD(insn, 25, 31); \
357 gen_op_ld_asi_reg(offset, size, sign); \
359 gen_op_st_asi_reg(offset, size, sign); \
362 asi = GET_FIELD(insn, 19, 26); \
364 case 0x80: /* Primary address space */ \
365 gen_op_##width##_raw(); \
367 case 0x82: /* Primary address space, non-faulting load */ \
368 gen_op_##width##_raw(); \
376 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
377 #define OP_LD_TABLE(width) \
378 static GenOpFunc *gen_op_##width[] = { \
379 &gen_op_##width##_user, \
380 &gen_op_##width##_kernel, \
383 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
388 offset = GET_FIELD(insn, 25, 31); \
390 gen_op_ld_asi_reg(offset, size, sign); \
392 gen_op_st_asi_reg(offset, size, sign); \
395 asi = GET_FIELD(insn, 19, 26); \
397 gen_op_ld_asi(asi, size, sign); \
399 gen_op_st_asi(asi, size, sign); \
402 #define supervisor(dc) (dc->mem_idx == 1)
405 #if defined(CONFIG_USER_ONLY)
406 #define gen_op_ldst(name) gen_op_##name##_raw()
407 #define OP_LD_TABLE(width)
408 #define supervisor(dc) 0
410 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
411 #define OP_LD_TABLE(width) \
412 static GenOpFunc *gen_op_##width[] = { \
413 &gen_op_##width##_user, \
414 &gen_op_##width##_kernel, \
417 static void gen_op_##width##a(int insn, int is_ld, int size, int sign) \
421 asi = GET_FIELD(insn, 19, 26); \
423 case 10: /* User data access */ \
424 gen_op_##width##_user(); \
426 case 11: /* Supervisor data access */ \
427 gen_op_##width##_kernel(); \
429 case 0x20 ... 0x2f: /* MMU passthrough */ \
431 gen_op_ld_asi(asi, size, sign); \
433 gen_op_st_asi(asi, size, sign); \
437 gen_op_ld_asi(asi, size, sign); \
439 gen_op_st_asi(asi, size, sign); \
444 #define supervisor(dc) (dc->mem_idx == 1)
465 #ifdef TARGET_SPARC64
473 static inline void gen_movl_imm_TN(int reg
, uint32_t imm
)
475 gen_op_movl_TN_im
[reg
](imm
);
/* Convenience wrapper: load immediate `val` into T1. */
static inline void gen_movl_imm_T1(uint32_t val)
{
    gen_movl_imm_TN(1, val);
}
/* Convenience wrapper: load immediate `val` into T0. */
static inline void gen_movl_imm_T0(uint32_t val)
{
    gen_movl_imm_TN(0, val);
}
488 static inline void gen_movl_simm_TN(int reg
, int32_t imm
)
490 gen_op_movl_TN_sim
[reg
](imm
);
/* Convenience wrapper: load signed immediate `val` into T1. */
static inline void gen_movl_simm_T1(int32_t val)
{
    gen_movl_simm_TN(1, val);
}
/* Convenience wrapper: load signed immediate `val` into T0. */
static inline void gen_movl_simm_T0(int32_t val)
{
    gen_movl_simm_TN(0, val);
}
503 static inline void gen_movl_reg_TN(int reg
, int t
)
506 gen_op_movl_reg_TN
[t
][reg
] ();
508 gen_movl_imm_TN(t
, 0);
/* Load register `reg` into T0. */
static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, 0);
}
/* Load register `reg` into T1. */
static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, 1);
}
/* Load register `reg` into T2. */
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, 2);
}
526 static inline void gen_movl_TN_reg(int reg
, int t
)
529 gen_op_movl_TN_reg
[t
][reg
] ();
/* Store T0 back into register `reg`. */
static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, 0);
}
/* Store T1 back into register `reg`. */
static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, 1);
}
542 static inline void gen_jmp_im(target_ulong pc
)
544 #ifdef TARGET_SPARC64
545 if (pc
== (uint32_t)pc
) {
548 gen_op_jmp_im64(pc
>> 32, pc
);
555 static inline void gen_movl_npc_im(target_ulong npc
)
557 #ifdef TARGET_SPARC64
558 if (npc
== (uint32_t)npc
) {
559 gen_op_movl_npc_im(npc
);
561 gen_op_movq_npc_im64(npc
>> 32, npc
);
564 gen_op_movl_npc_im(npc
);
568 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
569 target_ulong pc
, target_ulong npc
)
571 TranslationBlock
*tb
;
574 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
575 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
576 /* jump to same page: we can use a direct jump */
578 gen_op_goto_tb0(TBPARAM(tb
));
580 gen_op_goto_tb1(TBPARAM(tb
));
582 gen_movl_npc_im(npc
);
583 gen_op_movl_T0_im((long)tb
+ tb_num
);
586 /* jump to another page: currently not optimized */
588 gen_movl_npc_im(npc
);
594 static inline void gen_branch2(DisasContext
*dc
, long tb
, target_ulong pc1
, target_ulong pc2
)
598 l1
= gen_new_label();
600 gen_op_jz_T2_label(l1
);
602 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
605 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
608 static inline void gen_branch_a(DisasContext
*dc
, long tb
, target_ulong pc1
, target_ulong pc2
)
612 l1
= gen_new_label();
614 gen_op_jz_T2_label(l1
);
616 gen_goto_tb(dc
, 0, pc2
, pc1
);
619 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
622 static inline void gen_branch(DisasContext
*dc
, long tb
, target_ulong pc
, target_ulong npc
)
624 gen_goto_tb(dc
, 0, pc
, npc
);
627 static inline void gen_generic_branch(DisasContext
*dc
, target_ulong npc1
, target_ulong npc2
)
631 l1
= gen_new_label();
632 l2
= gen_new_label();
633 gen_op_jz_T2_label(l1
);
635 gen_movl_npc_im(npc1
);
636 gen_op_jmp_label(l2
);
639 gen_movl_npc_im(npc2
);
643 /* call this function before using T2 as it may have been set for a jump */
644 static inline void flush_T2(DisasContext
* dc
)
646 if (dc
->npc
== JUMP_PC
) {
647 gen_generic_branch(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1]);
648 dc
->npc
= DYNAMIC_PC
;
652 static inline void save_npc(DisasContext
* dc
)
654 if (dc
->npc
== JUMP_PC
) {
655 gen_generic_branch(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1]);
656 dc
->npc
= DYNAMIC_PC
;
657 } else if (dc
->npc
!= DYNAMIC_PC
) {
658 gen_movl_npc_im(dc
->npc
);
662 static inline void save_state(DisasContext
* dc
)
668 static inline void gen_mov_pc_npc(DisasContext
* dc
)
670 if (dc
->npc
== JUMP_PC
) {
671 gen_generic_branch(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1]);
674 } else if (dc
->npc
== DYNAMIC_PC
) {
682 static GenOpFunc
* const gen_cond
[2][16] = {
702 #ifdef TARGET_SPARC64
723 static GenOpFunc
* const gen_fcond
[4][16] = {
742 #ifdef TARGET_SPARC64
745 gen_op_eval_fbne_fcc1
,
746 gen_op_eval_fblg_fcc1
,
747 gen_op_eval_fbul_fcc1
,
748 gen_op_eval_fbl_fcc1
,
749 gen_op_eval_fbug_fcc1
,
750 gen_op_eval_fbg_fcc1
,
751 gen_op_eval_fbu_fcc1
,
753 gen_op_eval_fbe_fcc1
,
754 gen_op_eval_fbue_fcc1
,
755 gen_op_eval_fbge_fcc1
,
756 gen_op_eval_fbuge_fcc1
,
757 gen_op_eval_fble_fcc1
,
758 gen_op_eval_fbule_fcc1
,
759 gen_op_eval_fbo_fcc1
,
763 gen_op_eval_fbne_fcc2
,
764 gen_op_eval_fblg_fcc2
,
765 gen_op_eval_fbul_fcc2
,
766 gen_op_eval_fbl_fcc2
,
767 gen_op_eval_fbug_fcc2
,
768 gen_op_eval_fbg_fcc2
,
769 gen_op_eval_fbu_fcc2
,
771 gen_op_eval_fbe_fcc2
,
772 gen_op_eval_fbue_fcc2
,
773 gen_op_eval_fbge_fcc2
,
774 gen_op_eval_fbuge_fcc2
,
775 gen_op_eval_fble_fcc2
,
776 gen_op_eval_fbule_fcc2
,
777 gen_op_eval_fbo_fcc2
,
781 gen_op_eval_fbne_fcc3
,
782 gen_op_eval_fblg_fcc3
,
783 gen_op_eval_fbul_fcc3
,
784 gen_op_eval_fbl_fcc3
,
785 gen_op_eval_fbug_fcc3
,
786 gen_op_eval_fbg_fcc3
,
787 gen_op_eval_fbu_fcc3
,
789 gen_op_eval_fbe_fcc3
,
790 gen_op_eval_fbue_fcc3
,
791 gen_op_eval_fbge_fcc3
,
792 gen_op_eval_fbuge_fcc3
,
793 gen_op_eval_fble_fcc3
,
794 gen_op_eval_fbule_fcc3
,
795 gen_op_eval_fbo_fcc3
,
802 #ifdef TARGET_SPARC64
803 static void gen_cond_reg(int cond
)
829 /* XXX: potentially incorrect if dynamic npc */
830 static void do_branch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
832 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
833 target_ulong target
= dc
->pc
+ offset
;
836 /* unconditional not taken */
838 dc
->pc
= dc
->npc
+ 4;
839 dc
->npc
= dc
->pc
+ 4;
842 dc
->npc
= dc
->pc
+ 4;
844 } else if (cond
== 0x8) {
845 /* unconditional taken */
848 dc
->npc
= dc
->pc
+ 4;
855 gen_cond
[cc
][cond
]();
857 gen_branch_a(dc
, (long)dc
->tb
, target
, dc
->npc
);
861 dc
->jump_pc
[0] = target
;
862 dc
->jump_pc
[1] = dc
->npc
+ 4;
868 /* XXX: potentially incorrect if dynamic npc */
869 static void do_fbranch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
871 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
872 target_ulong target
= dc
->pc
+ offset
;
875 /* unconditional not taken */
877 dc
->pc
= dc
->npc
+ 4;
878 dc
->npc
= dc
->pc
+ 4;
881 dc
->npc
= dc
->pc
+ 4;
883 } else if (cond
== 0x8) {
884 /* unconditional taken */
887 dc
->npc
= dc
->pc
+ 4;
894 gen_fcond
[cc
][cond
]();
896 gen_branch_a(dc
, (long)dc
->tb
, target
, dc
->npc
);
900 dc
->jump_pc
[0] = target
;
901 dc
->jump_pc
[1] = dc
->npc
+ 4;
907 #ifdef TARGET_SPARC64
908 /* XXX: potentially incorrect if dynamic npc */
909 static void do_branch_reg(DisasContext
* dc
, int32_t offset
, uint32_t insn
)
911 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
912 target_ulong target
= dc
->pc
+ offset
;
917 gen_branch_a(dc
, (long)dc
->tb
, target
, dc
->npc
);
921 dc
->jump_pc
[0] = target
;
922 dc
->jump_pc
[1] = dc
->npc
+ 4;
927 static GenOpFunc
* const gen_fcmps
[4] = {
934 static GenOpFunc
* const gen_fcmpd
[4] = {
942 static int gen_trap_ifnofpu(DisasContext
* dc
)
944 #if !defined(CONFIG_USER_ONLY)
945 if (!dc
->fpu_enabled
) {
947 gen_op_exception(TT_NFPU_INSN
);
955 /* before an instruction, dc->pc must be static */
956 static void disas_sparc_insn(DisasContext
* dc
)
958 unsigned int insn
, opc
, rs1
, rs2
, rd
;
960 insn
= ldl_code(dc
->pc
);
961 opc
= GET_FIELD(insn
, 0, 1);
963 rd
= GET_FIELD(insn
, 2, 6);
965 case 0: /* branches/sethi */
967 unsigned int xop
= GET_FIELD(insn
, 7, 9);
970 #ifdef TARGET_SPARC64
971 case 0x1: /* V9 BPcc */
975 target
= GET_FIELD_SP(insn
, 0, 18);
976 target
= sign_extend(target
, 18);
978 cc
= GET_FIELD_SP(insn
, 20, 21);
980 do_branch(dc
, target
, insn
, 0);
982 do_branch(dc
, target
, insn
, 1);
987 case 0x3: /* V9 BPr */
989 target
= GET_FIELD_SP(insn
, 0, 13) |
990 (GET_FIELD_SP(insn
, 20, 21) << 14);
991 target
= sign_extend(target
, 16);
993 rs1
= GET_FIELD(insn
, 13, 17);
994 gen_movl_reg_T0(rs1
);
995 do_branch_reg(dc
, target
, insn
);
998 case 0x5: /* V9 FBPcc */
1000 int cc
= GET_FIELD_SP(insn
, 20, 21);
1001 if (gen_trap_ifnofpu(dc
))
1003 target
= GET_FIELD_SP(insn
, 0, 18);
1004 target
= sign_extend(target
, 19);
1006 do_fbranch(dc
, target
, insn
, cc
);
1010 case 0x2: /* BN+x */
1012 target
= GET_FIELD(insn
, 10, 31);
1013 target
= sign_extend(target
, 22);
1015 do_branch(dc
, target
, insn
, 0);
1018 case 0x6: /* FBN+x */
1020 if (gen_trap_ifnofpu(dc
))
1022 target
= GET_FIELD(insn
, 10, 31);
1023 target
= sign_extend(target
, 22);
1025 do_fbranch(dc
, target
, insn
, 0);
1028 case 0x4: /* SETHI */
1033 uint32_t value
= GET_FIELD(insn
, 10, 31);
1034 gen_movl_imm_T0(value
<< 10);
1035 gen_movl_T0_reg(rd
);
1040 case 0x0: /* UNIMPL */
1049 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1051 #ifdef TARGET_SPARC64
1052 if (dc
->pc
== (uint32_t)dc
->pc
) {
1053 gen_op_movl_T0_im(dc
->pc
);
1055 gen_op_movq_T0_im64(dc
->pc
>> 32, dc
->pc
);
1058 gen_op_movl_T0_im(dc
->pc
);
1060 gen_movl_T0_reg(15);
1066 case 2: /* FPU & Logical Operations */
1068 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1069 if (xop
== 0x3a) { /* generate trap */
1072 rs1
= GET_FIELD(insn
, 13, 17);
1073 gen_movl_reg_T0(rs1
);
1075 rs2
= GET_FIELD(insn
, 25, 31);
1079 gen_movl_simm_T1(rs2
);
1085 rs2
= GET_FIELD(insn
, 27, 31);
1089 gen_movl_reg_T1(rs2
);
1095 cond
= GET_FIELD(insn
, 3, 6);
1099 } else if (cond
!= 0) {
1100 #ifdef TARGET_SPARC64
1102 int cc
= GET_FIELD_SP(insn
, 11, 12);
1106 gen_cond
[0][cond
]();
1108 gen_cond
[1][cond
]();
1114 gen_cond
[0][cond
]();
1123 } else if (xop
== 0x28) {
1124 rs1
= GET_FIELD(insn
, 13, 17);
1127 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, y
));
1128 gen_movl_T0_reg(rd
);
1130 case 15: /* stbar / V9 membar */
1131 break; /* no effect? */
1132 #ifdef TARGET_SPARC64
1133 case 0x2: /* V9 rdccr */
1135 gen_movl_T0_reg(rd
);
1137 case 0x3: /* V9 rdasi */
1138 gen_op_movl_T0_env(offsetof(CPUSPARCState
, asi
));
1139 gen_movl_T0_reg(rd
);
1141 case 0x4: /* V9 rdtick */
1143 gen_movl_T0_reg(rd
);
1145 case 0x5: /* V9 rdpc */
1146 if (dc
->pc
== (uint32_t)dc
->pc
) {
1147 gen_op_movl_T0_im(dc
->pc
);
1149 gen_op_movq_T0_im64(dc
->pc
>> 32, dc
->pc
);
1151 gen_movl_T0_reg(rd
);
1153 case 0x6: /* V9 rdfprs */
1154 gen_op_movl_T0_env(offsetof(CPUSPARCState
, fprs
));
1155 gen_movl_T0_reg(rd
);
1157 case 0x13: /* Graphics Status */
1158 if (gen_trap_ifnofpu(dc
))
1160 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, gsr
));
1161 gen_movl_T0_reg(rd
);
1163 case 0x17: /* Tick compare */
1164 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tick_cmpr
));
1165 gen_movl_T0_reg(rd
);
1167 case 0x18: /* System tick */
1168 gen_op_rdtick(); // XXX
1169 gen_movl_T0_reg(rd
);
1171 case 0x19: /* System tick compare */
1172 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, stick_cmpr
));
1173 gen_movl_T0_reg(rd
);
1175 case 0x10: /* Performance Control */
1176 case 0x11: /* Performance Instrumentation Counter */
1177 case 0x12: /* Dispatch Control */
1178 case 0x14: /* Softint set, WO */
1179 case 0x15: /* Softint clear, WO */
1180 case 0x16: /* Softint write */
1185 #if !defined(CONFIG_USER_ONLY)
1186 #ifndef TARGET_SPARC64
1187 } else if (xop
== 0x29) { /* rdpsr / V9 unimp */
1188 if (!supervisor(dc
))
1191 gen_movl_T0_reg(rd
);
1194 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
1195 if (!supervisor(dc
))
1197 #ifdef TARGET_SPARC64
1198 rs1
= GET_FIELD(insn
, 13, 17);
1216 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
1222 gen_op_movl_T0_env(offsetof(CPUSPARCState
, tl
));
1225 gen_op_movl_T0_env(offsetof(CPUSPARCState
, psrpil
));
1231 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cansave
));
1233 case 11: // canrestore
1234 gen_op_movl_T0_env(offsetof(CPUSPARCState
, canrestore
));
1236 case 12: // cleanwin
1237 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cleanwin
));
1239 case 13: // otherwin
1240 gen_op_movl_T0_env(offsetof(CPUSPARCState
, otherwin
));
1243 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wstate
));
1246 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, version
));
1253 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wim
));
1255 gen_movl_T0_reg(rd
);
1257 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
1258 #ifdef TARGET_SPARC64
1261 if (!supervisor(dc
))
1263 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
1264 gen_movl_T0_reg(rd
);
1268 } else if (xop
== 0x34) { /* FPU Operations */
1269 if (gen_trap_ifnofpu(dc
))
1271 rs1
= GET_FIELD(insn
, 13, 17);
1272 rs2
= GET_FIELD(insn
, 27, 31);
1273 xop
= GET_FIELD(insn
, 18, 26);
1275 case 0x1: /* fmovs */
1276 gen_op_load_fpr_FT0(rs2
);
1277 gen_op_store_FT0_fpr(rd
);
1279 case 0x5: /* fnegs */
1280 gen_op_load_fpr_FT1(rs2
);
1282 gen_op_store_FT0_fpr(rd
);
1284 case 0x9: /* fabss */
1285 gen_op_load_fpr_FT1(rs2
);
1287 gen_op_store_FT0_fpr(rd
);
1289 case 0x29: /* fsqrts */
1290 gen_op_load_fpr_FT1(rs2
);
1292 gen_op_store_FT0_fpr(rd
);
1294 case 0x2a: /* fsqrtd */
1295 gen_op_load_fpr_DT1(DFPREG(rs2
));
1297 gen_op_store_DT0_fpr(DFPREG(rd
));
1299 case 0x2b: /* fsqrtq */
1302 gen_op_load_fpr_FT0(rs1
);
1303 gen_op_load_fpr_FT1(rs2
);
1305 gen_op_store_FT0_fpr(rd
);
1308 gen_op_load_fpr_DT0(DFPREG(rs1
));
1309 gen_op_load_fpr_DT1(DFPREG(rs2
));
1311 gen_op_store_DT0_fpr(DFPREG(rd
));
1313 case 0x43: /* faddq */
1316 gen_op_load_fpr_FT0(rs1
);
1317 gen_op_load_fpr_FT1(rs2
);
1319 gen_op_store_FT0_fpr(rd
);
1322 gen_op_load_fpr_DT0(DFPREG(rs1
));
1323 gen_op_load_fpr_DT1(DFPREG(rs2
));
1325 gen_op_store_DT0_fpr(DFPREG(rd
));
1327 case 0x47: /* fsubq */
1330 gen_op_load_fpr_FT0(rs1
);
1331 gen_op_load_fpr_FT1(rs2
);
1333 gen_op_store_FT0_fpr(rd
);
1336 gen_op_load_fpr_DT0(DFPREG(rs1
));
1337 gen_op_load_fpr_DT1(DFPREG(rs2
));
1339 gen_op_store_DT0_fpr(rd
);
1341 case 0x4b: /* fmulq */
1344 gen_op_load_fpr_FT0(rs1
);
1345 gen_op_load_fpr_FT1(rs2
);
1347 gen_op_store_FT0_fpr(rd
);
1350 gen_op_load_fpr_DT0(DFPREG(rs1
));
1351 gen_op_load_fpr_DT1(DFPREG(rs2
));
1353 gen_op_store_DT0_fpr(DFPREG(rd
));
1355 case 0x4f: /* fdivq */
1358 gen_op_load_fpr_FT0(rs1
);
1359 gen_op_load_fpr_FT1(rs2
);
1361 gen_op_store_DT0_fpr(DFPREG(rd
));
1363 case 0x6e: /* fdmulq */
1366 gen_op_load_fpr_FT1(rs2
);
1368 gen_op_store_FT0_fpr(rd
);
1371 gen_op_load_fpr_DT1(DFPREG(rs2
));
1373 gen_op_store_FT0_fpr(rd
);
1375 case 0xc7: /* fqtos */
1378 gen_op_load_fpr_FT1(rs2
);
1380 gen_op_store_DT0_fpr(DFPREG(rd
));
1383 gen_op_load_fpr_FT1(rs2
);
1385 gen_op_store_DT0_fpr(DFPREG(rd
));
1387 case 0xcb: /* fqtod */
1389 case 0xcc: /* fitoq */
1391 case 0xcd: /* fstoq */
1393 case 0xce: /* fdtoq */
1396 gen_op_load_fpr_FT1(rs2
);
1398 gen_op_store_FT0_fpr(rd
);
1401 gen_op_load_fpr_DT1(rs2
);
1403 gen_op_store_FT0_fpr(rd
);
1405 case 0xd3: /* fqtoi */
1407 #ifdef TARGET_SPARC64
1408 case 0x2: /* V9 fmovd */
1409 gen_op_load_fpr_DT0(DFPREG(rs2
));
1410 gen_op_store_DT0_fpr(DFPREG(rd
));
1412 case 0x6: /* V9 fnegd */
1413 gen_op_load_fpr_DT1(DFPREG(rs2
));
1415 gen_op_store_DT0_fpr(DFPREG(rd
));
1417 case 0xa: /* V9 fabsd */
1418 gen_op_load_fpr_DT1(DFPREG(rs2
));
1420 gen_op_store_DT0_fpr(DFPREG(rd
));
1422 case 0x81: /* V9 fstox */
1423 gen_op_load_fpr_FT1(rs2
);
1425 gen_op_store_DT0_fpr(DFPREG(rd
));
1427 case 0x82: /* V9 fdtox */
1428 gen_op_load_fpr_DT1(DFPREG(rs2
));
1430 gen_op_store_DT0_fpr(DFPREG(rd
));
1432 case 0x84: /* V9 fxtos */
1433 gen_op_load_fpr_DT1(DFPREG(rs2
));
1435 gen_op_store_FT0_fpr(rd
);
1437 case 0x88: /* V9 fxtod */
1438 gen_op_load_fpr_DT1(DFPREG(rs2
));
1440 gen_op_store_DT0_fpr(DFPREG(rd
));
1442 case 0x3: /* V9 fmovq */
1443 case 0x7: /* V9 fnegq */
1444 case 0xb: /* V9 fabsq */
1445 case 0x83: /* V9 fqtox */
1446 case 0x8c: /* V9 fxtoq */
1452 } else if (xop
== 0x35) { /* FPU Operations */
1453 #ifdef TARGET_SPARC64
1456 if (gen_trap_ifnofpu(dc
))
1458 rs1
= GET_FIELD(insn
, 13, 17);
1459 rs2
= GET_FIELD(insn
, 27, 31);
1460 xop
= GET_FIELD(insn
, 18, 26);
1461 #ifdef TARGET_SPARC64
1462 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
1463 cond
= GET_FIELD_SP(insn
, 14, 17);
1464 gen_op_load_fpr_FT0(rd
);
1465 gen_op_load_fpr_FT1(rs2
);
1466 rs1
= GET_FIELD(insn
, 13, 17);
1467 gen_movl_reg_T0(rs1
);
1471 gen_op_store_FT0_fpr(rd
);
1473 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
1474 cond
= GET_FIELD_SP(insn
, 14, 17);
1475 gen_op_load_fpr_DT0(rd
);
1476 gen_op_load_fpr_DT1(rs2
);
1478 rs1
= GET_FIELD(insn
, 13, 17);
1479 gen_movl_reg_T0(rs1
);
1482 gen_op_store_DT0_fpr(rd
);
1484 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
1489 #ifdef TARGET_SPARC64
1490 case 0x001: /* V9 fmovscc %fcc0 */
1491 cond
= GET_FIELD_SP(insn
, 14, 17);
1492 gen_op_load_fpr_FT0(rd
);
1493 gen_op_load_fpr_FT1(rs2
);
1495 gen_fcond
[0][cond
]();
1497 gen_op_store_FT0_fpr(rd
);
1499 case 0x002: /* V9 fmovdcc %fcc0 */
1500 cond
= GET_FIELD_SP(insn
, 14, 17);
1501 gen_op_load_fpr_DT0(rd
);
1502 gen_op_load_fpr_DT1(rs2
);
1504 gen_fcond
[0][cond
]();
1506 gen_op_store_DT0_fpr(rd
);
1508 case 0x003: /* V9 fmovqcc %fcc0 */
1510 case 0x041: /* V9 fmovscc %fcc1 */
1511 cond
= GET_FIELD_SP(insn
, 14, 17);
1512 gen_op_load_fpr_FT0(rd
);
1513 gen_op_load_fpr_FT1(rs2
);
1515 gen_fcond
[1][cond
]();
1517 gen_op_store_FT0_fpr(rd
);
1519 case 0x042: /* V9 fmovdcc %fcc1 */
1520 cond
= GET_FIELD_SP(insn
, 14, 17);
1521 gen_op_load_fpr_DT0(rd
);
1522 gen_op_load_fpr_DT1(rs2
);
1524 gen_fcond
[1][cond
]();
1526 gen_op_store_DT0_fpr(rd
);
1528 case 0x043: /* V9 fmovqcc %fcc1 */
1530 case 0x081: /* V9 fmovscc %fcc2 */
1531 cond
= GET_FIELD_SP(insn
, 14, 17);
1532 gen_op_load_fpr_FT0(rd
);
1533 gen_op_load_fpr_FT1(rs2
);
1535 gen_fcond
[2][cond
]();
1537 gen_op_store_FT0_fpr(rd
);
1539 case 0x082: /* V9 fmovdcc %fcc2 */
1540 cond
= GET_FIELD_SP(insn
, 14, 17);
1541 gen_op_load_fpr_DT0(rd
);
1542 gen_op_load_fpr_DT1(rs2
);
1544 gen_fcond
[2][cond
]();
1546 gen_op_store_DT0_fpr(rd
);
1548 case 0x083: /* V9 fmovqcc %fcc2 */
1550 case 0x0c1: /* V9 fmovscc %fcc3 */
1551 cond
= GET_FIELD_SP(insn
, 14, 17);
1552 gen_op_load_fpr_FT0(rd
);
1553 gen_op_load_fpr_FT1(rs2
);
1555 gen_fcond
[3][cond
]();
1557 gen_op_store_FT0_fpr(rd
);
1559 case 0x0c2: /* V9 fmovdcc %fcc3 */
1560 cond
= GET_FIELD_SP(insn
, 14, 17);
1561 gen_op_load_fpr_DT0(rd
);
1562 gen_op_load_fpr_DT1(rs2
);
1564 gen_fcond
[3][cond
]();
1566 gen_op_store_DT0_fpr(rd
);
1568 case 0x0c3: /* V9 fmovqcc %fcc3 */
1570 case 0x101: /* V9 fmovscc %icc */
1571 cond
= GET_FIELD_SP(insn
, 14, 17);
1572 gen_op_load_fpr_FT0(rd
);
1573 gen_op_load_fpr_FT1(rs2
);
1575 gen_cond
[0][cond
]();
1577 gen_op_store_FT0_fpr(rd
);
1579 case 0x102: /* V9 fmovdcc %icc */
1580 cond
= GET_FIELD_SP(insn
, 14, 17);
1581 gen_op_load_fpr_DT0(rd
);
1582 gen_op_load_fpr_DT1(rs2
);
1584 gen_cond
[0][cond
]();
1586 gen_op_store_DT0_fpr(rd
);
1588 case 0x103: /* V9 fmovqcc %icc */
1590 case 0x181: /* V9 fmovscc %xcc */
1591 cond
= GET_FIELD_SP(insn
, 14, 17);
1592 gen_op_load_fpr_FT0(rd
);
1593 gen_op_load_fpr_FT1(rs2
);
1595 gen_cond
[1][cond
]();
1597 gen_op_store_FT0_fpr(rd
);
1599 case 0x182: /* V9 fmovdcc %xcc */
1600 cond
= GET_FIELD_SP(insn
, 14, 17);
1601 gen_op_load_fpr_DT0(rd
);
1602 gen_op_load_fpr_DT1(rs2
);
1604 gen_cond
[1][cond
]();
1606 gen_op_store_DT0_fpr(rd
);
1608 case 0x183: /* V9 fmovqcc %xcc */
1611 case 0x51: /* V9 %fcc */
1612 gen_op_load_fpr_FT0(rs1
);
1613 gen_op_load_fpr_FT1(rs2
);
1614 #ifdef TARGET_SPARC64
1615 gen_fcmps
[rd
& 3]();
1620 case 0x52: /* V9 %fcc */
1621 gen_op_load_fpr_DT0(DFPREG(rs1
));
1622 gen_op_load_fpr_DT1(DFPREG(rs2
));
1623 #ifdef TARGET_SPARC64
1624 gen_fcmpd
[rd
& 3]();
1629 case 0x53: /* fcmpq */
1631 case 0x55: /* fcmpes, V9 %fcc */
1632 gen_op_load_fpr_FT0(rs1
);
1633 gen_op_load_fpr_FT1(rs2
);
1634 #ifdef TARGET_SPARC64
1635 gen_fcmps
[rd
& 3]();
1637 gen_op_fcmps(); /* XXX should trap if qNaN or sNaN */
1640 case 0x56: /* fcmped, V9 %fcc */
1641 gen_op_load_fpr_DT0(DFPREG(rs1
));
1642 gen_op_load_fpr_DT1(DFPREG(rs2
));
1643 #ifdef TARGET_SPARC64
1644 gen_fcmpd
[rd
& 3]();
1646 gen_op_fcmpd(); /* XXX should trap if qNaN or sNaN */
1649 case 0x57: /* fcmpeq */
1655 } else if (xop
== 0x2) {
1658 rs1
= GET_FIELD(insn
, 13, 17);
1660 // or %g0, x, y -> mov T1, x; mov y, T1
1661 if (IS_IMM
) { /* immediate */
1662 rs2
= GET_FIELDs(insn
, 19, 31);
1663 gen_movl_simm_T1(rs2
);
1664 } else { /* register */
1665 rs2
= GET_FIELD(insn
, 27, 31);
1666 gen_movl_reg_T1(rs2
);
1668 gen_movl_T1_reg(rd
);
1670 gen_movl_reg_T0(rs1
);
1671 if (IS_IMM
) { /* immediate */
1672 // or x, #0, y -> mov T1, x; mov y, T1
1673 rs2
= GET_FIELDs(insn
, 19, 31);
1675 gen_movl_simm_T1(rs2
);
1678 } else { /* register */
1679 // or x, %g0, y -> mov T1, x; mov y, T1
1680 rs2
= GET_FIELD(insn
, 27, 31);
1682 gen_movl_reg_T1(rs2
);
1686 gen_movl_T0_reg(rd
);
1689 #ifdef TARGET_SPARC64
1690 } else if (xop
== 0x25) { /* sll, V9 sllx ( == sll) */
1691 rs1
= GET_FIELD(insn
, 13, 17);
1692 gen_movl_reg_T0(rs1
);
1693 if (IS_IMM
) { /* immediate */
1694 rs2
= GET_FIELDs(insn
, 20, 31);
1695 gen_movl_simm_T1(rs2
);
1696 } else { /* register */
1697 rs2
= GET_FIELD(insn
, 27, 31);
1698 gen_movl_reg_T1(rs2
);
1701 gen_movl_T0_reg(rd
);
1702 } else if (xop
== 0x26) { /* srl, V9 srlx */
1703 rs1
= GET_FIELD(insn
, 13, 17);
1704 gen_movl_reg_T0(rs1
);
1705 if (IS_IMM
) { /* immediate */
1706 rs2
= GET_FIELDs(insn
, 20, 31);
1707 gen_movl_simm_T1(rs2
);
1708 } else { /* register */
1709 rs2
= GET_FIELD(insn
, 27, 31);
1710 gen_movl_reg_T1(rs2
);
1712 if (insn
& (1 << 12))
1716 gen_movl_T0_reg(rd
);
1717 } else if (xop
== 0x27) { /* sra, V9 srax */
1718 rs1
= GET_FIELD(insn
, 13, 17);
1719 gen_movl_reg_T0(rs1
);
1720 if (IS_IMM
) { /* immediate */
1721 rs2
= GET_FIELDs(insn
, 20, 31);
1722 gen_movl_simm_T1(rs2
);
1723 } else { /* register */
1724 rs2
= GET_FIELD(insn
, 27, 31);
1725 gen_movl_reg_T1(rs2
);
1727 if (insn
& (1 << 12))
1731 gen_movl_T0_reg(rd
);
1733 } else if (xop
< 0x38) {
1734 rs1
= GET_FIELD(insn
, 13, 17);
1735 gen_movl_reg_T0(rs1
);
1736 if (IS_IMM
) { /* immediate */
1737 rs2
= GET_FIELDs(insn
, 19, 31);
1738 gen_movl_simm_T1(rs2
);
1739 } else { /* register */
1740 rs2
= GET_FIELD(insn
, 27, 31);
1741 gen_movl_reg_T1(rs2
);
1744 switch (xop
& ~0x10) {
1747 gen_op_add_T1_T0_cc();
1754 gen_op_logic_T0_cc();
1759 gen_op_logic_T0_cc();
1764 gen_op_logic_T0_cc();
1768 gen_op_sub_T1_T0_cc();
1773 gen_op_andn_T1_T0();
1775 gen_op_logic_T0_cc();
1780 gen_op_logic_T0_cc();
1783 gen_op_xnor_T1_T0();
1785 gen_op_logic_T0_cc();
1789 gen_op_addx_T1_T0_cc();
1791 gen_op_addx_T1_T0();
1793 #ifdef TARGET_SPARC64
1794 case 0x9: /* V9 mulx */
1795 gen_op_mulx_T1_T0();
1799 gen_op_umul_T1_T0();
1801 gen_op_logic_T0_cc();
1804 gen_op_smul_T1_T0();
1806 gen_op_logic_T0_cc();
1810 gen_op_subx_T1_T0_cc();
1812 gen_op_subx_T1_T0();
1814 #ifdef TARGET_SPARC64
1815 case 0xd: /* V9 udivx */
1816 gen_op_udivx_T1_T0();
1820 gen_op_udiv_T1_T0();
1825 gen_op_sdiv_T1_T0();
1832 gen_movl_T0_reg(rd
);
1835 case 0x20: /* taddcc */
1836 case 0x21: /* tsubcc */
1837 case 0x22: /* taddcctv */
1838 case 0x23: /* tsubcctv */
1840 case 0x24: /* mulscc */
1841 gen_op_mulscc_T1_T0();
1842 gen_movl_T0_reg(rd
);
1844 #ifndef TARGET_SPARC64
1845 case 0x25: /* sll */
1847 gen_movl_T0_reg(rd
);
1849 case 0x26: /* srl */
1851 gen_movl_T0_reg(rd
);
1853 case 0x27: /* sra */
1855 gen_movl_T0_reg(rd
);
1863 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, y
));
1865 #ifdef TARGET_SPARC64
1866 case 0x2: /* V9 wrccr */
1869 case 0x3: /* V9 wrasi */
1870 gen_op_movl_env_T0(offsetof(CPUSPARCState
, asi
));
1872 case 0x6: /* V9 wrfprs */
1873 gen_op_movl_env_T0(offsetof(CPUSPARCState
, fprs
));
1875 case 0xf: /* V9 sir, nop if user */
1876 #if !defined(CONFIG_USER_ONLY)
1881 case 0x13: /* Graphics Status */
1882 if (gen_trap_ifnofpu(dc
))
1884 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, gsr
));
1886 case 0x17: /* Tick compare */
1887 #if !defined(CONFIG_USER_ONLY)
1888 if (!supervisor(dc
))
1891 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tick_cmpr
));
1893 case 0x18: /* System tick */
1894 #if !defined(CONFIG_USER_ONLY)
1895 if (!supervisor(dc
))
1898 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, stick_cmpr
));
1900 case 0x19: /* System tick compare */
1901 #if !defined(CONFIG_USER_ONLY)
1902 if (!supervisor(dc
))
1905 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, stick_cmpr
));
1908 case 0x10: /* Performance Control */
1909 case 0x11: /* Performance Instrumentation Counter */
1910 case 0x12: /* Dispatch Control */
1911 case 0x14: /* Softint set */
1912 case 0x15: /* Softint clear */
1913 case 0x16: /* Softint write */
1920 #if !defined(CONFIG_USER_ONLY)
1921 case 0x31: /* wrpsr, V9 saved, restored */
1923 if (!supervisor(dc
))
1925 #ifdef TARGET_SPARC64
1947 case 0x32: /* wrwim, V9 wrpr */
1949 if (!supervisor(dc
))
1952 #ifdef TARGET_SPARC64
1970 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
1981 gen_op_movl_env_T0(offsetof(CPUSPARCState
, tl
));
1984 gen_op_movl_env_T0(offsetof(CPUSPARCState
, psrpil
));
1990 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cansave
));
1992 case 11: // canrestore
1993 gen_op_movl_env_T0(offsetof(CPUSPARCState
, canrestore
));
1995 case 12: // cleanwin
1996 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cleanwin
));
1998 case 13: // otherwin
1999 gen_op_movl_env_T0(offsetof(CPUSPARCState
, otherwin
));
2002 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wstate
));
2012 #ifndef TARGET_SPARC64
2013 case 0x33: /* wrtbr, V9 unimp */
2015 if (!supervisor(dc
))
2018 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
2023 #ifdef TARGET_SPARC64
2024 case 0x2c: /* V9 movcc */
2026 int cc
= GET_FIELD_SP(insn
, 11, 12);
2027 int cond
= GET_FIELD_SP(insn
, 14, 17);
2028 if (IS_IMM
) { /* immediate */
2029 rs2
= GET_FIELD_SPs(insn
, 0, 10);
2030 gen_movl_simm_T1(rs2
);
2033 rs2
= GET_FIELD_SP(insn
, 0, 4);
2034 gen_movl_reg_T1(rs2
);
2036 gen_movl_reg_T0(rd
);
2038 if (insn
& (1 << 18)) {
2040 gen_cond
[0][cond
]();
2042 gen_cond
[1][cond
]();
2046 gen_fcond
[cc
][cond
]();
2049 gen_movl_T0_reg(rd
);
2052 case 0x2d: /* V9 sdivx */
2053 gen_op_sdivx_T1_T0();
2054 gen_movl_T0_reg(rd
);
2056 case 0x2e: /* V9 popc */
2058 if (IS_IMM
) { /* immediate */
2059 rs2
= GET_FIELD_SPs(insn
, 0, 12);
2060 gen_movl_simm_T1(rs2
);
2061 // XXX optimize: popc(constant)
2064 rs2
= GET_FIELD_SP(insn
, 0, 4);
2065 gen_movl_reg_T1(rs2
);
2068 gen_movl_T0_reg(rd
);
2070 case 0x2f: /* V9 movr */
2072 int cond
= GET_FIELD_SP(insn
, 10, 12);
2073 rs1
= GET_FIELD(insn
, 13, 17);
2075 gen_movl_reg_T0(rs1
);
2077 if (IS_IMM
) { /* immediate */
2078 rs2
= GET_FIELD_SPs(insn
, 0, 10);
2079 gen_movl_simm_T1(rs2
);
2082 rs2
= GET_FIELD_SP(insn
, 0, 4);
2083 gen_movl_reg_T1(rs2
);
2085 gen_movl_reg_T0(rd
);
2087 gen_movl_T0_reg(rd
);
2090 case 0x36: /* UltraSparc shutdown, VIS */
2092 int opf
= GET_FIELD_SP(insn
, 5, 13);
2093 rs1
= GET_FIELD(insn
, 13, 17);
2094 rs2
= GET_FIELD(insn
, 27, 31);
2097 case 0x018: /* VIS I alignaddr */
2098 if (gen_trap_ifnofpu(dc
))
2100 gen_movl_reg_T0(rs1
);
2101 gen_movl_reg_T1(rs2
);
2103 gen_movl_T0_reg(rd
);
2105 case 0x01a: /* VIS I alignaddrl */
2106 if (gen_trap_ifnofpu(dc
))
2110 case 0x048: /* VIS I faligndata */
2111 if (gen_trap_ifnofpu(dc
))
2113 gen_op_load_fpr_DT0(rs1
);
2114 gen_op_load_fpr_DT1(rs2
);
2115 gen_op_faligndata();
2116 gen_op_store_DT0_fpr(rd
);
2128 #ifdef TARGET_SPARC64
2129 } else if (xop
== 0x39) { /* V9 return */
2130 rs1
= GET_FIELD(insn
, 13, 17);
2131 gen_movl_reg_T0(rs1
);
2132 if (IS_IMM
) { /* immediate */
2133 rs2
= GET_FIELDs(insn
, 19, 31);
2137 gen_movl_simm_T1(rs2
);
2142 } else { /* register */
2143 rs2
= GET_FIELD(insn
, 27, 31);
2147 gen_movl_reg_T1(rs2
);
2155 gen_op_movl_npc_T0();
2156 dc
->npc
= DYNAMIC_PC
;
2160 rs1
= GET_FIELD(insn
, 13, 17);
2161 gen_movl_reg_T0(rs1
);
2162 if (IS_IMM
) { /* immediate */
2163 rs2
= GET_FIELDs(insn
, 19, 31);
2167 gen_movl_simm_T1(rs2
);
2172 } else { /* register */
2173 rs2
= GET_FIELD(insn
, 27, 31);
2177 gen_movl_reg_T1(rs2
);
2184 case 0x38: /* jmpl */
2187 #ifdef TARGET_SPARC64
2188 if (dc
->pc
== (uint32_t)dc
->pc
) {
2189 gen_op_movl_T1_im(dc
->pc
);
2191 gen_op_movq_T1_im64(dc
->pc
>> 32, dc
->pc
);
2194 gen_op_movl_T1_im(dc
->pc
);
2196 gen_movl_T1_reg(rd
);
2199 gen_op_movl_npc_T0();
2200 dc
->npc
= DYNAMIC_PC
;
2203 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
2204 case 0x39: /* rett, V9 return */
2206 if (!supervisor(dc
))
2209 gen_op_movl_npc_T0();
2210 dc
->npc
= DYNAMIC_PC
;
2215 case 0x3b: /* flush */
2218 case 0x3c: /* save */
2221 gen_movl_T0_reg(rd
);
2223 case 0x3d: /* restore */
2226 gen_movl_T0_reg(rd
);
2228 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
2229 case 0x3e: /* V9 done/retry */
2233 if (!supervisor(dc
))
2235 dc
->npc
= DYNAMIC_PC
;
2236 dc
->pc
= DYNAMIC_PC
;
2240 if (!supervisor(dc
))
2242 dc
->npc
= DYNAMIC_PC
;
2243 dc
->pc
= DYNAMIC_PC
;
2259 case 3: /* load/store instructions */
2261 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2262 rs1
= GET_FIELD(insn
, 13, 17);
2263 gen_movl_reg_T0(rs1
);
2264 if (IS_IMM
) { /* immediate */
2265 rs2
= GET_FIELDs(insn
, 19, 31);
2269 gen_movl_simm_T1(rs2
);
2274 } else { /* register */
2275 rs2
= GET_FIELD(insn
, 27, 31);
2279 gen_movl_reg_T1(rs2
);
2285 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) || \
2286 (xop
> 0x17 && xop
< 0x1d ) || \
2287 (xop
> 0x2c && xop
< 0x33) || xop
== 0x1f) {
2289 case 0x0: /* load word */
2292 case 0x1: /* load unsigned byte */
2295 case 0x2: /* load unsigned halfword */
2298 case 0x3: /* load double word */
2300 gen_movl_T0_reg(rd
+ 1);
2302 case 0x9: /* load signed byte */
2305 case 0xa: /* load signed halfword */
2308 case 0xd: /* ldstub -- XXX: should be atomically */
2309 gen_op_ldst(ldstub
);
2311 case 0x0f: /* swap register with memory. Also atomically */
2312 gen_movl_reg_T1(rd
);
2315 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2316 case 0x10: /* load word alternate */
2317 #ifndef TARGET_SPARC64
2318 if (!supervisor(dc
))
2321 gen_op_lda(insn
, 1, 4, 0);
2323 case 0x11: /* load unsigned byte alternate */
2324 #ifndef TARGET_SPARC64
2325 if (!supervisor(dc
))
2328 gen_op_lduba(insn
, 1, 1, 0);
2330 case 0x12: /* load unsigned halfword alternate */
2331 #ifndef TARGET_SPARC64
2332 if (!supervisor(dc
))
2335 gen_op_lduha(insn
, 1, 2, 0);
2337 case 0x13: /* load double word alternate */
2338 #ifndef TARGET_SPARC64
2339 if (!supervisor(dc
))
2342 gen_op_ldda(insn
, 1, 8, 0);
2343 gen_movl_T0_reg(rd
+ 1);
2345 case 0x19: /* load signed byte alternate */
2346 #ifndef TARGET_SPARC64
2347 if (!supervisor(dc
))
2350 gen_op_ldsba(insn
, 1, 1, 1);
2352 case 0x1a: /* load signed halfword alternate */
2353 #ifndef TARGET_SPARC64
2354 if (!supervisor(dc
))
2357 gen_op_ldsha(insn
, 1, 2 ,1);
2359 case 0x1d: /* ldstuba -- XXX: should be atomically */
2360 #ifndef TARGET_SPARC64
2361 if (!supervisor(dc
))
2364 gen_op_ldstuba(insn
, 1, 1, 0);
2366 case 0x1f: /* swap reg with alt. memory. Also atomically */
2367 #ifndef TARGET_SPARC64
2368 if (!supervisor(dc
))
2371 gen_movl_reg_T1(rd
);
2372 gen_op_swapa(insn
, 1, 4, 0);
2375 #ifndef TARGET_SPARC64
2376 /* avoid warnings */
2377 (void) &gen_op_stfa
;
2378 (void) &gen_op_stdfa
;
2379 (void) &gen_op_ldfa
;
2380 (void) &gen_op_lddfa
;
2382 #if !defined(CONFIG_USER_ONLY)
2384 (void) &gen_op_casx
;
2388 #ifdef TARGET_SPARC64
2389 case 0x08: /* V9 ldsw */
2392 case 0x0b: /* V9 ldx */
2395 case 0x18: /* V9 ldswa */
2396 gen_op_ldswa(insn
, 1, 4, 1);
2398 case 0x1b: /* V9 ldxa */
2399 gen_op_ldxa(insn
, 1, 8, 0);
2401 case 0x2d: /* V9 prefetch, no effect */
2403 case 0x30: /* V9 ldfa */
2404 gen_op_ldfa(insn
, 1, 8, 0); // XXX
2406 case 0x33: /* V9 lddfa */
2407 gen_op_lddfa(insn
, 1, 8, 0); // XXX
2410 case 0x3d: /* V9 prefetcha, no effect */
2412 case 0x32: /* V9 ldqfa */
2418 gen_movl_T1_reg(rd
);
2419 #ifdef TARGET_SPARC64
2422 } else if (xop
>= 0x20 && xop
< 0x24) {
2423 if (gen_trap_ifnofpu(dc
))
2426 case 0x20: /* load fpreg */
2428 gen_op_store_FT0_fpr(rd
);
2430 case 0x21: /* load fsr */
2434 case 0x22: /* load quad fpreg */
2436 case 0x23: /* load double fpreg */
2438 gen_op_store_DT0_fpr(DFPREG(rd
));
2443 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
2444 xop
== 0xe || xop
== 0x1e) {
2445 gen_movl_reg_T1(rd
);
2458 gen_movl_reg_T2(rd
+ 1);
2461 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2463 #ifndef TARGET_SPARC64
2464 if (!supervisor(dc
))
2467 gen_op_sta(insn
, 0, 4, 0);
2470 #ifndef TARGET_SPARC64
2471 if (!supervisor(dc
))
2474 gen_op_stba(insn
, 0, 1, 0);
2477 #ifndef TARGET_SPARC64
2478 if (!supervisor(dc
))
2481 gen_op_stha(insn
, 0, 2, 0);
2484 #ifndef TARGET_SPARC64
2485 if (!supervisor(dc
))
2489 gen_movl_reg_T2(rd
+ 1);
2490 gen_op_stda(insn
, 0, 8, 0);
2493 #ifdef TARGET_SPARC64
2494 case 0x0e: /* V9 stx */
2497 case 0x1e: /* V9 stxa */
2498 gen_op_stxa(insn
, 0, 8, 0); // XXX
2504 } else if (xop
> 0x23 && xop
< 0x28) {
2505 if (gen_trap_ifnofpu(dc
))
2509 gen_op_load_fpr_FT0(rd
);
2512 case 0x25: /* stfsr, V9 stxfsr */
2516 case 0x26: /* stdfq */
2519 gen_op_load_fpr_DT0(DFPREG(rd
));
2525 } else if (xop
> 0x33 && xop
< 0x3f) {
2526 #ifdef TARGET_SPARC64
2528 case 0x34: /* V9 stfa */
2529 gen_op_stfa(insn
, 0, 0, 0); // XXX
2531 case 0x37: /* V9 stdfa */
2532 gen_op_stdfa(insn
, 0, 0, 0); // XXX
2534 case 0x3c: /* V9 casa */
2535 gen_op_casa(insn
, 0, 4, 0); // XXX
2537 case 0x3e: /* V9 casxa */
2538 gen_op_casxa(insn
, 0, 8, 0); // XXX
2540 case 0x36: /* V9 stqfa */
2554 /* default case for non jump instructions */
2555 if (dc
->npc
== DYNAMIC_PC
) {
2556 dc
->pc
= DYNAMIC_PC
;
2558 } else if (dc
->npc
== JUMP_PC
) {
2559 /* we can do a static jump */
2560 gen_branch2(dc
, (long)dc
->tb
, dc
->jump_pc
[0], dc
->jump_pc
[1]);
2564 dc
->npc
= dc
->npc
+ 4;
2570 gen_op_exception(TT_ILL_INSN
);
2573 #if !defined(CONFIG_USER_ONLY)
2576 gen_op_exception(TT_PRIV_INSN
);
2582 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
2586 static inline int gen_intermediate_code_internal(TranslationBlock
* tb
,
2587 int spc
, CPUSPARCState
*env
)
2589 target_ulong pc_start
, last_pc
;
2590 uint16_t *gen_opc_end
;
2591 DisasContext dc1
, *dc
= &dc1
;
2594 memset(dc
, 0, sizeof(DisasContext
));
2599 dc
->npc
= (target_ulong
) tb
->cs_base
;
2600 #if defined(CONFIG_USER_ONLY)
2602 dc
->fpu_enabled
= 1;
2604 dc
->mem_idx
= ((env
->psrs
) != 0);
2605 #ifdef TARGET_SPARC64
2606 dc
->fpu_enabled
= (((env
->pstate
& PS_PEF
) != 0) && ((env
->fprs
& FPRS_FEF
) != 0));
2608 dc
->fpu_enabled
= ((env
->psref
) != 0);
2611 gen_opc_ptr
= gen_opc_buf
;
2612 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
2613 gen_opparam_ptr
= gen_opparam_buf
;
2617 if (env
->nb_breakpoints
> 0) {
2618 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
2619 if (env
->breakpoints
[j
] == dc
->pc
) {
2620 if (dc
->pc
!= pc_start
)
2632 fprintf(logfile
, "Search PC...\n");
2633 j
= gen_opc_ptr
- gen_opc_buf
;
2637 gen_opc_instr_start
[lj
++] = 0;
2638 gen_opc_pc
[lj
] = dc
->pc
;
2639 gen_opc_npc
[lj
] = dc
->npc
;
2640 gen_opc_instr_start
[lj
] = 1;
2644 disas_sparc_insn(dc
);
2648 /* if the next PC is different, we abort now */
2649 if (dc
->pc
!= (last_pc
+ 4))
2651 /* if we reach a page boundary, we stop generation so that the
2652 PC of a TT_TFAULT exception is always in the right page */
2653 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
2655 /* if single step mode, we generate only one instruction and
2656 generate an exception */
2657 if (env
->singlestep_enabled
) {
2663 } while ((gen_opc_ptr
< gen_opc_end
) &&
2664 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32));
2668 if (dc
->pc
!= DYNAMIC_PC
&&
2669 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
2670 /* static PC and NPC: we can use direct chaining */
2671 gen_branch(dc
, (long)tb
, dc
->pc
, dc
->npc
);
2673 if (dc
->pc
!= DYNAMIC_PC
)
2680 *gen_opc_ptr
= INDEX_op_end
;
2682 j
= gen_opc_ptr
- gen_opc_buf
;
2685 gen_opc_instr_start
[lj
++] = 0;
2692 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
2693 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
2695 tb
->size
= last_pc
+ 4 - pc_start
;
2698 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
2699 fprintf(logfile
, "--------------\n");
2700 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
2701 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
2702 fprintf(logfile
, "\n");
2703 if (loglevel
& CPU_LOG_TB_OP
) {
2704 fprintf(logfile
, "OP:\n");
2705 dump_ops(gen_opc_buf
, gen_opparam_buf
);
2706 fprintf(logfile
, "\n");
2713 int gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
2715 return gen_intermediate_code_internal(tb
, 0, env
);
2718 int gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
2720 return gen_intermediate_code_internal(tb
, 1, env
);
2723 extern int ram_size
;
2725 void cpu_reset(CPUSPARCState
*env
)
2727 memset(env
, 0, sizeof(*env
));
2731 env
->regwptr
= env
->regbase
+ (env
->cwp
* 16);
2732 #if defined(CONFIG_USER_ONLY)
2733 env
->user_mode_only
= 1;
2734 #ifdef TARGET_SPARC64
2735 env
->cleanwin
= NWINDOWS
- 1;
2736 env
->cansave
= NWINDOWS
- 1;
2741 env
->gregs
[1] = ram_size
;
2742 #ifdef TARGET_SPARC64
2743 env
->pstate
= PS_PRIV
;
2744 env
->version
= GET_VER(env
);
2745 env
->pc
= 0x1fff0000000ULL
;
2747 env
->mmuregs
[0] = (0x04 << 24); /* Impl 0, ver 4, MMU disabled */
2748 env
->pc
= 0xffd00000;
2750 env
->npc
= env
->pc
+ 4;
2754 CPUSPARCState
*cpu_sparc_init(void)
2758 env
= qemu_mallocz(sizeof(CPUSPARCState
));
2766 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
2768 void cpu_dump_state(CPUState
*env
, FILE *f
,
2769 int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...),
2774 cpu_fprintf(f
, "pc: " TARGET_FMT_lx
" npc: " TARGET_FMT_lx
"\n", env
->pc
, env
->npc
);
2775 cpu_fprintf(f
, "General Registers:\n");
2776 for (i
= 0; i
< 4; i
++)
2777 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
2778 cpu_fprintf(f
, "\n");
2780 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
2781 cpu_fprintf(f
, "\nCurrent Register Window:\n");
2782 for (x
= 0; x
< 3; x
++) {
2783 for (i
= 0; i
< 4; i
++)
2784 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
2785 (x
== 0 ? 'o' : (x
== 1 ? 'l' : 'i')), i
,
2786 env
->regwptr
[i
+ x
* 8]);
2787 cpu_fprintf(f
, "\n");
2789 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
2790 (x
== 0 ? 'o' : x
== 1 ? 'l' : 'i'), i
,
2791 env
->regwptr
[i
+ x
* 8]);
2792 cpu_fprintf(f
, "\n");
2794 cpu_fprintf(f
, "\nFloating Point Registers:\n");
2795 for (i
= 0; i
< 32; i
++) {
2797 cpu_fprintf(f
, "%%f%02d:", i
);
2798 cpu_fprintf(f
, " %016lf", env
->fpr
[i
]);
2800 cpu_fprintf(f
, "\n");
2802 #ifdef TARGET_SPARC64
2803 cpu_fprintf(f
, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d\n",
2804 env
->pstate
, GET_CCR(env
), env
->asi
, env
->tl
);
2805 cpu_fprintf(f
, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
2806 env
->cansave
, env
->canrestore
, env
->otherwin
, env
->wstate
,
2807 env
->cleanwin
, NWINDOWS
- 1 - env
->cwp
);
2809 cpu_fprintf(f
, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env
),
2810 GET_FLAG(PSR_ZERO
, 'Z'), GET_FLAG(PSR_OVF
, 'V'),
2811 GET_FLAG(PSR_NEG
, 'N'), GET_FLAG(PSR_CARRY
, 'C'),
2812 env
->psrs
?'S':'-', env
->psrps
?'P':'-',
2813 env
->psret
?'E':'-', env
->wim
);
2815 cpu_fprintf(f
, "fsr: 0x%08x\n", GET_FSR32(env
));
2818 #if defined(CONFIG_USER_ONLY)
2819 target_ulong
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
2825 extern int get_physical_address (CPUState
*env
, target_phys_addr_t
*physical
, int *prot
,
2826 int *access_index
, target_ulong address
, int rw
,
2829 target_ulong
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
2831 target_phys_addr_t phys_addr
;
2832 int prot
, access_index
;
2834 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
, 2, 0) != 0)
2835 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
, 0, 0) != 0)
2841 void helper_flush(target_ulong addr
)
2844 tb_invalidate_page_range(addr
, addr
+ 8);