/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "qemu-common.h"

//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS
//#define MIPS_SINGLE_STEP
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};

/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};

/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};

/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};

/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,
    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,
    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};

/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,
};

#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};

#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};

/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};

#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};

/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};

/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
    OPC_D_FMT    = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
    OPC_E_FMT    = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
    OPC_Q_FMT    = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
    OPC_W_FMT    = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
    OPC_L_FMT    = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
    OPC_PS_FMT   = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
};

#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};

#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
};

#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};

/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget;
static TCGv_i32 bcond;
static TCGv_i32 fpu_fpr32[32], fpu_fpr32h[32];
static TCGv_i64 fpu_fpr64[32];
static TCGv_i32 fpu_fcr0, fpu_fcr31;

#include "gen-icount.h"

#define gen_helper_0i(name, arg) do {                             \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(helper_tmp);                                \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1i(name, arg1, arg2) do {                      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(arg1, helper_tmp);                          \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_2i(name, arg1, arg2, arg3) do {                \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(arg1, arg2, helper_tmp);                    \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do {          \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(arg1, arg2, arg3, helper_tmp);              \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

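/* The gen_helper_*i wrappers above box a compile-time immediate into a
   TCGv_i32 constant, pass it as the last argument of the named helper and
   free the temporary again, so call sites can hand plain integers to
   helpers that expect TCG values. */
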
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};

static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };

static const char *fregnames_64[] =
    { "F0",  "F1",  "F2",  "F3",  "F4",  "F5",  "F6",  "F7",
      "F8",  "F9",  "F10", "F11", "F12", "F13", "F14", "F15",
      "F16", "F17", "F18", "F19", "F20", "F21", "F22", "F23",
      "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31", };

static const char *fregnames_h[] =
    { "h0",  "h1",  "h2",  "h3",  "h4",  "h5",  "h6",  "h7",
      "h8",  "h9",  "h10", "h11", "h12", "h13", "h14", "h15",
      "h16", "h17", "h18", "h19", "h20", "h21", "h22", "h23",
      "h24", "h25", "h26", "h27", "h28", "h29", "h30", "h31", };

#ifdef MIPS_DEBUG_DISAS
#define MIPS_DEBUG(fmt, args...)                                  \
        qemu_log_mask(CPU_LOG_TB_IN_ASM,                          \
                      TARGET_FMT_lx ": %08x " fmt "\n",           \
                      ctx->pc, ctx->opcode , ##args)
#define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#define MIPS_DEBUG(fmt, args...) do { } while(0)
#define LOG_DISAS(...) do { } while (0)
#endif

#define MIPS_INVAL(op)                                                        \
do {                                                                          \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,            \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));             \
} while (0)

/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}

/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}

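/* GPR 0 is hardwired to zero on MIPS, so loads from register 0 materialise
   the constant 0 and stores to register 0 are simply dropped. */
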
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv r_tmp1 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(r_tmp1, 0);
    else {
        TCGv_i32 r_tmp2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
        tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, r_tmp2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(r_tmp1, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(r_tmp2);
    }
    gen_store_gpr(r_tmp1, to);
    tcg_temp_free(r_tmp1);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv r_tmp1 = tcg_temp_new();
        TCGv_i32 r_tmp2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(r_tmp1, from);
        tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
        tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, r_tmp2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(r_tmp1, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(r_tmp2);
        tcg_temp_free(r_tmp1);
    }
}

/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(t, fpu_fpr32[reg]);
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(fpu_fpr32[reg], t);
}

static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64)
        tcg_gen_mov_i64(t, fpu_fpr64[reg]);
    else
        tcg_gen_concat_i32_i64(t, fpu_fpr32[reg & ~1], fpu_fpr32[reg | 1]);
}

static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64)
        tcg_gen_mov_i64(fpu_fpr64[reg], t);
    else {
        tcg_gen_trunc_i64_i32(fpu_fpr32[reg & ~1], t);
        tcg_gen_shri_i64(t, t, 32);
        tcg_gen_trunc_i64_i32(fpu_fpr32[reg | 1], t);
    }
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(t, fpu_fpr32h[reg]);
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(fpu_fpr32h[reg], t);
}

static inline void get_fp_cond (TCGv_i32 t)
{
    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
    TCGv_i32 r_tmp2 = tcg_temp_new_i32();

    tcg_gen_shri_i32(r_tmp2, fpu_fcr31, 24);
    tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xfe);
    tcg_gen_shri_i32(r_tmp1, fpu_fcr31, 23);
    tcg_gen_andi_i32(r_tmp1, r_tmp1, 0x1);
    tcg_gen_or_i32(t, r_tmp1, r_tmp2);
    tcg_temp_free_i32(r_tmp1);
    tcg_temp_free_i32(r_tmp2);
}

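/* get_fp_cond() gathers the eight FPU condition codes from FCR31 into the
   low byte of t: FCC0 sits at bit 23 and FCC1-FCC7 at bits 25-31, so the
   two shifted-and-masked halves are simply OR'ed together. */
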
#define FOP_CONDS(type, fmt, bits)                                            \
static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a,        \
                                               TCGv_i##bits b, int cc)       \
{                                                                             \
    switch (n) {                                                              \
    case  0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc);    break;\
    case  1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc);   break;\
    case  2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc);   break;\
    case  3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc);  break;\
    case  4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc);  break;\
    case  5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc);  break;\
    case  6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc);  break;\
    case  7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc);  break;\
    case  8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc);   break;\
    case  9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
    case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc);  break;\
    case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc);  break;\
    case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc);   break;\
    case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc);  break;\
    case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc);   break;\
    case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc);  break;\
    default: abort();                                                         \
    }                                                                         \
}

FOP_CONDS(, d, 64)
FOP_CONDS(abs, d, 64)
FOP_CONDS(, s, 32)
FOP_CONDS(abs, s, 32)
FOP_CONDS(, ps, 64)
FOP_CONDS(abs, ps, 64)

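/* FOP_CONDS expands to one comparison dispatcher per operand format (and
   per "abs" compare variant): the 4-bit cond field of C.cond.fmt selects
   which of the sixteen cmp* helpers updates condition code cc. */
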
#define OP_COND(name, cond)                                   \
static inline void glue(gen_op_, name) (TCGv t0, TCGv t1)    \
{                                                             \
    int l1 = gen_new_label();                                 \
    int l2 = gen_new_label();                                 \
                                                              \
    tcg_gen_brcond_tl(cond, t0, t1, l1);                      \
    tcg_gen_movi_tl(t0, 0);                                   \
    tcg_gen_br(l2);                                           \
    gen_set_label(l1);                                        \
    tcg_gen_movi_tl(t0, 1);                                   \
    gen_set_label(l2);                                        \
}
OP_COND(eq, TCG_COND_EQ);
OP_COND(ne, TCG_COND_NE);
OP_COND(ge, TCG_COND_GE);
OP_COND(geu, TCG_COND_GEU);
OP_COND(lt, TCG_COND_LT);
OP_COND(ltu, TCG_COND_LTU);

#define OP_CONDI(name, cond)                                          \
static inline void glue(gen_op_, name) (TCGv t, target_ulong val)    \
{                                                                     \
    int l1 = gen_new_label();                                         \
    int l2 = gen_new_label();                                         \
                                                                      \
    tcg_gen_brcondi_tl(cond, t, val, l1);                             \
    tcg_gen_movi_tl(t, 0);                                            \
    tcg_gen_br(l2);                                                   \
    gen_set_label(l1);                                                \
    tcg_gen_movi_tl(t, 1);                                            \
    gen_set_label(l2);                                                \
}
OP_CONDI(lti, TCG_COND_LT);
OP_CONDI(ltiu, TCG_COND_LTU);

#define OP_CONDZ(name, cond)                                  \
static inline void glue(gen_op_, name) (TCGv t)               \
{                                                             \
    int l1 = gen_new_label();                                 \
    int l2 = gen_new_label();                                 \
                                                              \
    tcg_gen_brcondi_tl(cond, t, 0, l1);                       \
    tcg_gen_movi_tl(t, 0);                                    \
    tcg_gen_br(l2);                                           \
    gen_set_label(l1);                                        \
    tcg_gen_movi_tl(t, 1);                                    \
    gen_set_label(l2);                                        \
}
OP_CONDZ(gez, TCG_COND_GE);
OP_CONDZ(gtz, TCG_COND_GT);
OP_CONDZ(lez, TCG_COND_LE);
OP_CONDZ(ltz, TCG_COND_LT);

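/* The gen_op_* helpers built above overwrite their first operand with 1
   when the TCG condition holds and with 0 otherwise, using a small
   branch-over-label pattern; they back the set-on-condition, trap and
   branch comparisons used later in this file. */
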
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        TCGv_i32 r_tmp = tcg_temp_new_i32();

        tcg_gen_movi_i32(r_tmp, ctx->hflags);
        tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
        tcg_temp_free_i32(r_tmp);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}

static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    gen_helper_interrupt_restart();
    tcg_gen_exit_tb(0);
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0i(raise_exception, excp);
    gen_helper_interrupt_restart();
    tcg_gen_exit_tb(0);
}

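/* Both exception generators flush the translator's view of PC and hflags
   back to CPUState (save_cpu_state) before raising the exception, so the
   helper and the guest exception handler see consistent architectural
   state, and then terminate the current translation block. */
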
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv t0, TCGv t1)
{
    tcg_gen_add_tl(t0, t0, t1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, data reference in user mode
       with Status_UX = 0 should be cast to 32-bit and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(t0, t0);
    }
#endif
}

static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}

/* load/store instructions. */
#define OP_LD(insn,fname)                                                 \
static inline void op_ldst_##insn(TCGv t0, DisasContext *ctx)            \
{                                                                         \
    tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx);                           \
}
OP_LD(lb,ld8s);
OP_LD(lbu,ld8u);
OP_LD(lh,ld16s);
OP_LD(lhu,ld16u);
OP_LD(lw,ld32s);
#if defined(TARGET_MIPS64)
OP_LD(lwu,ld32u);
OP_LD(ld,ld64);
#endif
#undef OP_LD

#define OP_ST(insn,fname)                                                  \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)    \
{                                                                          \
    tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx);                            \
}
OP_ST(sb,st8);
OP_ST(sh,st16);
OP_ST(sw,st32);
#if defined(TARGET_MIPS64)
OP_ST(sd,st64);
#endif
#undef OP_ST

#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)    \
{                                                                          \
    tcg_gen_mov_tl(t1, t0);                                                \
    tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx);                            \
    tcg_gen_st_tl(t1, cpu_env, offsetof(CPUState, CP0_LLAddr));            \
}
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#define OP_ST_ATOMIC(insn,fname,almask)                                    \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)    \
{                                                                          \
    TCGv r_tmp = tcg_temp_local_new();                                     \
    int l1 = gen_new_label();                                              \
    int l2 = gen_new_label();                                              \
    int l3 = gen_new_label();                                              \
                                                                           \
    tcg_gen_andi_tl(r_tmp, t0, almask);                                    \
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_tmp, 0, l1);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr));          \
    generate_exception(ctx, EXCP_AdES);                                    \
    gen_set_label(l1);                                                     \
    tcg_gen_ld_tl(r_tmp, cpu_env, offsetof(CPUState, CP0_LLAddr));         \
    tcg_gen_brcond_tl(TCG_COND_NE, t0, r_tmp, l2);                         \
    tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx);                            \
    tcg_gen_movi_tl(t0, 1);                                                \
    tcg_gen_br(l3);                                                        \
    gen_set_label(l2);                                                     \
    tcg_gen_movi_tl(t0, 0);                                                \
    gen_set_label(l3);                                                     \
    tcg_temp_free(r_tmp);                                                  \
}
OP_ST_ATOMIC(sc,st32,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,0x7);
#endif

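/* The *_ATOMIC macros above emulate LL/SC: the load side records the
   accessed address in CP0_LLAddr, and the store side first checks the
   address for alignment (raising AdES otherwise), then performs the store
   and yields 1 only while the address still matches CP0_LLAddr; on a
   mismatch it yields 0 without touching memory. */
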
973 static void gen_ldst (DisasContext
*ctx
, uint32_t opc
, int rt
,
974 int base
, int16_t offset
)
976 const char *opn
= "ldst";
977 TCGv t0
= tcg_temp_local_new();
978 TCGv t1
= tcg_temp_local_new();
981 tcg_gen_movi_tl(t0
, offset
);
982 } else if (offset
== 0) {
983 gen_load_gpr(t0
, base
);
985 tcg_gen_movi_tl(t0
, offset
);
986 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
]);
988 /* Don't do NOP if destination is zero: we must perform the actual
991 #if defined(TARGET_MIPS64)
993 op_ldst_lwu(t0
, ctx
);
994 gen_store_gpr(t0
, rt
);
999 gen_store_gpr(t0
, rt
);
1003 op_ldst_lld(t0
, t1
, ctx
);
1004 gen_store_gpr(t0
, rt
);
1008 gen_load_gpr(t1
, rt
);
1009 op_ldst_sd(t0
, t1
, ctx
);
1013 save_cpu_state(ctx
, 1);
1014 gen_load_gpr(t1
, rt
);
1015 op_ldst_scd(t0
, t1
, ctx
);
1016 gen_store_gpr(t0
, rt
);
1020 save_cpu_state(ctx
, 1);
1021 gen_load_gpr(t1
, rt
);
1022 gen_helper_3i(ldl
, t1
, t0
, t1
, ctx
->mem_idx
);
1023 gen_store_gpr(t1
, rt
);
1027 save_cpu_state(ctx
, 1);
1028 gen_load_gpr(t1
, rt
);
1029 gen_helper_2i(sdl
, t0
, t1
, ctx
->mem_idx
);
1033 save_cpu_state(ctx
, 1);
1034 gen_load_gpr(t1
, rt
);
1035 gen_helper_3i(ldr
, t1
, t0
, t1
, ctx
->mem_idx
);
1036 gen_store_gpr(t1
, rt
);
1040 save_cpu_state(ctx
, 1);
1041 gen_load_gpr(t1
, rt
);
1042 gen_helper_2i(sdr
, t0
, t1
, ctx
->mem_idx
);
1047 op_ldst_lw(t0
, ctx
);
1048 gen_store_gpr(t0
, rt
);
1052 gen_load_gpr(t1
, rt
);
1053 op_ldst_sw(t0
, t1
, ctx
);
1057 op_ldst_lh(t0
, ctx
);
1058 gen_store_gpr(t0
, rt
);
1062 gen_load_gpr(t1
, rt
);
1063 op_ldst_sh(t0
, t1
, ctx
);
1067 op_ldst_lhu(t0
, ctx
);
1068 gen_store_gpr(t0
, rt
);
1072 op_ldst_lb(t0
, ctx
);
1073 gen_store_gpr(t0
, rt
);
1077 gen_load_gpr(t1
, rt
);
1078 op_ldst_sb(t0
, t1
, ctx
);
1082 op_ldst_lbu(t0
, ctx
);
1083 gen_store_gpr(t0
, rt
);
1087 save_cpu_state(ctx
, 1);
1088 gen_load_gpr(t1
, rt
);
1089 gen_helper_3i(lwl
, t1
, t0
, t1
, ctx
->mem_idx
);
1090 gen_store_gpr(t1
, rt
);
1094 save_cpu_state(ctx
, 1);
1095 gen_load_gpr(t1
, rt
);
1096 gen_helper_2i(swl
, t0
, t1
, ctx
->mem_idx
);
1100 save_cpu_state(ctx
, 1);
1101 gen_load_gpr(t1
, rt
);
1102 gen_helper_3i(lwr
, t1
, t0
, t1
, ctx
->mem_idx
);
1103 gen_store_gpr(t1
, rt
);
1107 save_cpu_state(ctx
, 1);
1108 gen_load_gpr(t1
, rt
);
1109 gen_helper_2i(swr
, t0
, t1
, ctx
->mem_idx
);
1113 op_ldst_ll(t0
, t1
, ctx
);
1114 gen_store_gpr(t0
, rt
);
1118 save_cpu_state(ctx
, 1);
1119 gen_load_gpr(t1
, rt
);
1120 op_ldst_sc(t0
, t1
, ctx
);
1121 gen_store_gpr(t0
, rt
);
1126 generate_exception(ctx
, EXCP_RI
);
1129 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1135 /* Load and store */
1136 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
1137 int base
, int16_t offset
)
1139 const char *opn
= "flt_ldst";
1140 TCGv t0
= tcg_temp_local_new();
1143 tcg_gen_movi_tl(t0
, offset
);
1144 } else if (offset
== 0) {
1145 gen_load_gpr(t0
, base
);
1147 tcg_gen_movi_tl(t0
, offset
);
1148 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
]);
1150 /* Don't do NOP if destination is zero: we must perform the actual
1155 TCGv_i32 fp0
= tcg_temp_new_i32();
1156 TCGv t1
= tcg_temp_new();
1158 tcg_gen_qemu_ld32s(t1
, t0
, ctx
->mem_idx
);
1159 tcg_gen_trunc_tl_i32(fp0
, t1
);
1160 gen_store_fpr32(fp0
, ft
);
1162 tcg_temp_free_i32(fp0
);
1168 TCGv_i32 fp0
= tcg_temp_new_i32();
1169 TCGv t1
= tcg_temp_new();
1171 gen_load_fpr32(fp0
, ft
);
1172 tcg_gen_extu_i32_tl(t1
, fp0
);
1173 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
1175 tcg_temp_free_i32(fp0
);
1181 TCGv_i64 fp0
= tcg_temp_new_i64();
1183 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
1184 gen_store_fpr64(ctx
, fp0
, ft
);
1185 tcg_temp_free_i64(fp0
);
1191 TCGv_i64 fp0
= tcg_temp_new_i64();
1193 gen_load_fpr64(ctx
, fp0
, ft
);
1194 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
1195 tcg_temp_free_i64(fp0
);
1201 generate_exception(ctx
, EXCP_RI
);
1204 MIPS_DEBUG("%s %s, %d(%s)", opn
, fregnames
[ft
], offset
, regnames
[base
]);
1209 /* Arithmetic with immediate operand */
1210 static void gen_arith_imm (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1211 int rt
, int rs
, int16_t imm
)
1214 const char *opn
= "imm arith";
1215 TCGv t0
= tcg_temp_local_new();
1217 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
1218 /* If no destination, treat it as a NOP.
1219 For addi, we must generate the overflow exception when needed. */
1223 uimm
= (uint16_t)imm
;
1227 #if defined(TARGET_MIPS64)
1233 uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1238 gen_load_gpr(t0
, rs
);
1241 tcg_gen_movi_tl(t0
, imm
<< 16);
1246 #if defined(TARGET_MIPS64)
1255 gen_load_gpr(t0
, rs
);
1261 TCGv r_tmp1
= tcg_temp_new();
1262 TCGv r_tmp2
= tcg_temp_new();
1263 int l1
= gen_new_label();
1265 save_cpu_state(ctx
, 1);
1266 tcg_gen_ext32s_tl(r_tmp1
, t0
);
1267 tcg_gen_addi_tl(t0
, r_tmp1
, uimm
);
1269 tcg_gen_xori_tl(r_tmp1
, r_tmp1
, ~uimm
);
1270 tcg_gen_xori_tl(r_tmp2
, t0
, uimm
);
1271 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1272 tcg_temp_free(r_tmp2
);
1273 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1274 /* operands of same sign, result different sign */
1275 generate_exception(ctx
, EXCP_OVERFLOW
);
1277 tcg_temp_free(r_tmp1
);
1279 tcg_gen_ext32s_tl(t0
, t0
);
1284 tcg_gen_addi_tl(t0
, t0
, uimm
);
1285 tcg_gen_ext32s_tl(t0
, t0
);
1288 #if defined(TARGET_MIPS64)
1291 TCGv r_tmp1
= tcg_temp_new();
1292 TCGv r_tmp2
= tcg_temp_new();
1293 int l1
= gen_new_label();
1295 save_cpu_state(ctx
, 1);
1296 tcg_gen_mov_tl(r_tmp1
, t0
);
1297 tcg_gen_addi_tl(t0
, t0
, uimm
);
1299 tcg_gen_xori_tl(r_tmp1
, r_tmp1
, ~uimm
);
1300 tcg_gen_xori_tl(r_tmp2
, t0
, uimm
);
1301 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1302 tcg_temp_free(r_tmp2
);
1303 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1304 /* operands of same sign, result different sign */
1305 generate_exception(ctx
, EXCP_OVERFLOW
);
1307 tcg_temp_free(r_tmp1
);
1312 tcg_gen_addi_tl(t0
, t0
, uimm
);
1317 gen_op_lti(t0
, uimm
);
1321 gen_op_ltiu(t0
, uimm
);
1325 tcg_gen_andi_tl(t0
, t0
, uimm
);
1329 tcg_gen_ori_tl(t0
, t0
, uimm
);
1333 tcg_gen_xori_tl(t0
, t0
, uimm
);
1340 tcg_gen_shli_tl(t0
, t0
, uimm
);
1341 tcg_gen_ext32s_tl(t0
, t0
);
1345 tcg_gen_ext32s_tl(t0
, t0
);
1346 tcg_gen_sari_tl(t0
, t0
, uimm
);
1350 switch ((ctx
->opcode
>> 21) & 0x1f) {
1353 tcg_gen_ext32u_tl(t0
, t0
);
1354 tcg_gen_shri_tl(t0
, t0
, uimm
);
1356 tcg_gen_ext32s_tl(t0
, t0
);
1361 /* rotr is decoded as srl on non-R2 CPUs */
1362 if (env
->insn_flags
& ISA_MIPS32R2
) {
1364 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
1366 tcg_gen_trunc_tl_i32(r_tmp1
, t0
);
1367 tcg_gen_rotri_i32(r_tmp1
, r_tmp1
, uimm
);
1368 tcg_gen_ext_i32_tl(t0
, r_tmp1
);
1369 tcg_temp_free_i32(r_tmp1
);
1374 tcg_gen_ext32u_tl(t0
, t0
);
1375 tcg_gen_shri_tl(t0
, t0
, uimm
);
1377 tcg_gen_ext32s_tl(t0
, t0
);
1383 MIPS_INVAL("invalid srl flag");
1384 generate_exception(ctx
, EXCP_RI
);
1388 #if defined(TARGET_MIPS64)
1390 tcg_gen_shli_tl(t0
, t0
, uimm
);
1394 tcg_gen_sari_tl(t0
, t0
, uimm
);
1398 switch ((ctx
->opcode
>> 21) & 0x1f) {
1400 tcg_gen_shri_tl(t0
, t0
, uimm
);
1404 /* drotr is decoded as dsrl on non-R2 CPUs */
1405 if (env
->insn_flags
& ISA_MIPS32R2
) {
1407 tcg_gen_rotri_tl(t0
, t0
, uimm
);
1411 tcg_gen_shri_tl(t0
, t0
, uimm
);
1416 MIPS_INVAL("invalid dsrl flag");
1417 generate_exception(ctx
, EXCP_RI
);
1422 tcg_gen_shli_tl(t0
, t0
, uimm
+ 32);
1426 tcg_gen_sari_tl(t0
, t0
, uimm
+ 32);
1430 switch ((ctx
->opcode
>> 21) & 0x1f) {
1432 tcg_gen_shri_tl(t0
, t0
, uimm
+ 32);
1436 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
1437 if (env
->insn_flags
& ISA_MIPS32R2
) {
1438 tcg_gen_rotri_tl(t0
, t0
, uimm
+ 32);
1441 tcg_gen_shri_tl(t0
, t0
, uimm
+ 32);
1446 MIPS_INVAL("invalid dsrl32 flag");
1447 generate_exception(ctx
, EXCP_RI
);
1454 generate_exception(ctx
, EXCP_RI
);
1457 gen_store_gpr(t0
, rt
);
1458 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1464 static void gen_arith (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1465 int rd
, int rs
, int rt
)
1467 const char *opn
= "arith";
1468 TCGv t0
= tcg_temp_local_new();
1469 TCGv t1
= tcg_temp_local_new();
1471 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
1472 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
1473 /* If no destination, treat it as a NOP.
1474 For add & sub, we must generate the overflow exception when needed. */
1478 gen_load_gpr(t0
, rs
);
1479 /* Specialcase the conventional move operation. */
1480 if (rt
== 0 && (opc
== OPC_ADDU
|| opc
== OPC_DADDU
1481 || opc
== OPC_SUBU
|| opc
== OPC_DSUBU
)) {
1482 gen_store_gpr(t0
, rd
);
1485 gen_load_gpr(t1
, rt
);
1489 TCGv r_tmp1
= tcg_temp_new();
1490 TCGv r_tmp2
= tcg_temp_new();
1491 int l1
= gen_new_label();
1493 save_cpu_state(ctx
, 1);
1494 tcg_gen_ext32s_tl(r_tmp1
, t0
);
1495 tcg_gen_ext32s_tl(r_tmp2
, t1
);
1496 tcg_gen_add_tl(t0
, r_tmp1
, r_tmp2
);
1498 tcg_gen_xor_tl(r_tmp1
, r_tmp1
, t1
);
1499 tcg_gen_xori_tl(r_tmp1
, r_tmp1
, -1);
1500 tcg_gen_xor_tl(r_tmp2
, t0
, t1
);
1501 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1502 tcg_temp_free(r_tmp2
);
1503 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1504 /* operands of same sign, result different sign */
1505 generate_exception(ctx
, EXCP_OVERFLOW
);
1507 tcg_temp_free(r_tmp1
);
1509 tcg_gen_ext32s_tl(t0
, t0
);
1514 tcg_gen_add_tl(t0
, t0
, t1
);
1515 tcg_gen_ext32s_tl(t0
, t0
);
1520 TCGv r_tmp1
= tcg_temp_new();
1521 TCGv r_tmp2
= tcg_temp_new();
1522 int l1
= gen_new_label();
1524 save_cpu_state(ctx
, 1);
1525 tcg_gen_ext32s_tl(r_tmp1
, t0
);
1526 tcg_gen_ext32s_tl(r_tmp2
, t1
);
1527 tcg_gen_sub_tl(t0
, r_tmp1
, r_tmp2
);
1529 tcg_gen_xor_tl(r_tmp2
, r_tmp1
, t1
);
1530 tcg_gen_xor_tl(r_tmp1
, r_tmp1
, t0
);
1531 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1532 tcg_temp_free(r_tmp2
);
1533 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1534 /* operands of different sign, first operand and result different sign */
1535 generate_exception(ctx
, EXCP_OVERFLOW
);
1537 tcg_temp_free(r_tmp1
);
1539 tcg_gen_ext32s_tl(t0
, t0
);
1544 tcg_gen_sub_tl(t0
, t0
, t1
);
1545 tcg_gen_ext32s_tl(t0
, t0
);
1548 #if defined(TARGET_MIPS64)
1551 TCGv r_tmp1
= tcg_temp_new();
1552 TCGv r_tmp2
= tcg_temp_new();
1553 int l1
= gen_new_label();
1555 save_cpu_state(ctx
, 1);
1556 tcg_gen_mov_tl(r_tmp1
, t0
);
1557 tcg_gen_add_tl(t0
, t0
, t1
);
1559 tcg_gen_xor_tl(r_tmp1
, r_tmp1
, t1
);
1560 tcg_gen_xori_tl(r_tmp1
, r_tmp1
, -1);
1561 tcg_gen_xor_tl(r_tmp2
, t0
, t1
);
1562 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1563 tcg_temp_free(r_tmp2
);
1564 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1565 /* operands of same sign, result different sign */
1566 generate_exception(ctx
, EXCP_OVERFLOW
);
1568 tcg_temp_free(r_tmp1
);
1573 tcg_gen_add_tl(t0
, t0
, t1
);
1578 TCGv r_tmp1
= tcg_temp_new();
1579 TCGv r_tmp2
= tcg_temp_new();
1580 int l1
= gen_new_label();
1582 save_cpu_state(ctx
, 1);
1583 tcg_gen_mov_tl(r_tmp1
, t0
);
1584 tcg_gen_sub_tl(t0
, t0
, t1
);
1586 tcg_gen_xor_tl(r_tmp2
, r_tmp1
, t1
);
1587 tcg_gen_xor_tl(r_tmp1
, r_tmp1
, t0
);
1588 tcg_gen_and_tl(r_tmp1
, r_tmp1
, r_tmp2
);
1589 tcg_temp_free(r_tmp2
);
1590 tcg_gen_brcondi_tl(TCG_COND_GE
, r_tmp1
, 0, l1
);
1591 /* operands of different sign, first operand and result different sign */
1592 generate_exception(ctx
, EXCP_OVERFLOW
);
1594 tcg_temp_free(r_tmp1
);
1599 tcg_gen_sub_tl(t0
, t0
, t1
);
1612 tcg_gen_and_tl(t0
, t0
, t1
);
1616 tcg_gen_or_tl(t0
, t0
, t1
);
1617 tcg_gen_not_tl(t0
, t0
);
1621 tcg_gen_or_tl(t0
, t0
, t1
);
1625 tcg_gen_xor_tl(t0
, t0
, t1
);
1629 tcg_gen_mul_tl(t0
, t0
, t1
);
1630 tcg_gen_ext32s_tl(t0
, t0
);
1635 int l1
= gen_new_label();
1637 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1638 gen_store_gpr(t0
, rd
);
1645 int l1
= gen_new_label();
1647 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
1648 gen_store_gpr(t0
, rd
);
1654 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1655 tcg_gen_shl_tl(t0
, t1
, t0
);
1656 tcg_gen_ext32s_tl(t0
, t0
);
1660 tcg_gen_ext32s_tl(t1
, t1
);
1661 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1662 tcg_gen_sar_tl(t0
, t1
, t0
);
1666 switch ((ctx
->opcode
>> 6) & 0x1f) {
1668 tcg_gen_ext32u_tl(t1
, t1
);
1669 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1670 tcg_gen_shr_tl(t0
, t1
, t0
);
1671 tcg_gen_ext32s_tl(t0
, t0
);
1675 /* rotrv is decoded as srlv on non-R2 CPUs */
1676 if (env
->insn_flags
& ISA_MIPS32R2
) {
1677 int l1
= gen_new_label();
1678 int l2
= gen_new_label();
1680 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1681 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
1683 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
1684 TCGv_i32 r_tmp2
= tcg_temp_new_i32();
1686 tcg_gen_trunc_tl_i32(r_tmp1
, t0
);
1687 tcg_gen_trunc_tl_i32(r_tmp2
, t1
);
1688 tcg_gen_rotr_i32(r_tmp1
, r_tmp1
, r_tmp2
);
1689 tcg_temp_free_i32(r_tmp1
);
1690 tcg_temp_free_i32(r_tmp2
);
1694 tcg_gen_mov_tl(t0
, t1
);
1698 tcg_gen_ext32u_tl(t1
, t1
);
1699 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1700 tcg_gen_shr_tl(t0
, t1
, t0
);
1701 tcg_gen_ext32s_tl(t0
, t0
);
1706 MIPS_INVAL("invalid srlv flag");
1707 generate_exception(ctx
, EXCP_RI
);
1711 #if defined(TARGET_MIPS64)
1713 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1714 tcg_gen_shl_tl(t0
, t1
, t0
);
1718 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1719 tcg_gen_sar_tl(t0
, t1
, t0
);
1723 switch ((ctx
->opcode
>> 6) & 0x1f) {
1725 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1726 tcg_gen_shr_tl(t0
, t1
, t0
);
1730 /* drotrv is decoded as dsrlv on non-R2 CPUs */
1731 if (env
->insn_flags
& ISA_MIPS32R2
) {
1732 int l1
= gen_new_label();
1733 int l2
= gen_new_label();
1735 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1736 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
1738 tcg_gen_rotr_tl(t0
, t1
, t0
);
1742 tcg_gen_mov_tl(t0
, t1
);
1746 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1747 tcg_gen_shr_tl(t0
, t1
, t0
);
1752 MIPS_INVAL("invalid dsrlv flag");
1753 generate_exception(ctx
, EXCP_RI
);
1760 generate_exception(ctx
, EXCP_RI
);
1763 gen_store_gpr(t0
, rd
);
1765 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1771 /* Arithmetic on HI/LO registers */
1772 static void gen_HILO (DisasContext
*ctx
, uint32_t opc
, int reg
)
1774 const char *opn
= "hilo";
1775 TCGv t0
= tcg_temp_local_new();
1777 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
1784 tcg_gen_mov_tl(t0
, cpu_HI
[0]);
1785 gen_store_gpr(t0
, reg
);
1789 tcg_gen_mov_tl(t0
, cpu_LO
[0]);
1790 gen_store_gpr(t0
, reg
);
1794 gen_load_gpr(t0
, reg
);
1795 tcg_gen_mov_tl(cpu_HI
[0], t0
);
1799 gen_load_gpr(t0
, reg
);
1800 tcg_gen_mov_tl(cpu_LO
[0], t0
);
1805 generate_exception(ctx
, EXCP_RI
);
1808 MIPS_DEBUG("%s %s", opn
, regnames
[reg
]);
1813 static void gen_muldiv (DisasContext
*ctx
, uint32_t opc
,
1816 const char *opn
= "mul/div";
1817 TCGv t0
= tcg_temp_local_new();
1818 TCGv t1
= tcg_temp_local_new();
1820 gen_load_gpr(t0
, rs
);
1821 gen_load_gpr(t1
, rt
);
1825 int l1
= gen_new_label();
1827 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1829 int l2
= gen_new_label();
1830 TCGv_i32 r_tmp1
= tcg_temp_local_new_i32();
1831 TCGv_i32 r_tmp2
= tcg_temp_local_new_i32();
1832 TCGv_i32 r_tmp3
= tcg_temp_local_new_i32();
1834 tcg_gen_trunc_tl_i32(r_tmp1
, t0
);
1835 tcg_gen_trunc_tl_i32(r_tmp2
, t1
);
1836 tcg_gen_brcondi_i32(TCG_COND_NE
, r_tmp1
, -1 << 31, l2
);
1837 tcg_gen_brcondi_i32(TCG_COND_NE
, r_tmp2
, -1, l2
);
1838 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
1839 tcg_gen_movi_tl(cpu_HI
[0], 0);
1842 tcg_gen_div_i32(r_tmp3
, r_tmp1
, r_tmp2
);
1843 tcg_gen_rem_i32(r_tmp2
, r_tmp1
, r_tmp2
);
1844 tcg_gen_ext_i32_tl(cpu_LO
[0], r_tmp3
);
1845 tcg_gen_ext_i32_tl(cpu_HI
[0], r_tmp2
);
1846 tcg_temp_free_i32(r_tmp1
);
1847 tcg_temp_free_i32(r_tmp2
);
1848 tcg_temp_free_i32(r_tmp3
);
1856 int l1
= gen_new_label();
1858 tcg_gen_ext32s_tl(t1
, t1
);
1859 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1861 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
1862 TCGv_i32 r_tmp2
= tcg_temp_new_i32();
1863 TCGv_i32 r_tmp3
= tcg_temp_new_i32();
1865 tcg_gen_trunc_tl_i32(r_tmp1
, t0
);
1866 tcg_gen_trunc_tl_i32(r_tmp2
, t1
);
1867 tcg_gen_divu_i32(r_tmp3
, r_tmp1
, r_tmp2
);
1868 tcg_gen_remu_i32(r_tmp1
, r_tmp1
, r_tmp2
);
1869 tcg_gen_ext_i32_tl(cpu_LO
[0], r_tmp3
);
1870 tcg_gen_ext_i32_tl(cpu_HI
[0], r_tmp1
);
1871 tcg_temp_free_i32(r_tmp1
);
1872 tcg_temp_free_i32(r_tmp2
);
1873 tcg_temp_free_i32(r_tmp3
);
1881 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
1882 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
1884 tcg_gen_ext_tl_i64(r_tmp1
, t0
);
1885 tcg_gen_ext_tl_i64(r_tmp2
, t1
);
1886 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1887 tcg_temp_free_i64(r_tmp2
);
1888 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
1889 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
1890 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
1891 tcg_temp_free_i64(r_tmp1
);
1892 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
1893 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
1899 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
1900 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
1902 tcg_gen_ext32u_tl(t0
, t0
);
1903 tcg_gen_ext32u_tl(t1
, t1
);
1904 tcg_gen_extu_tl_i64(r_tmp1
, t0
);
1905 tcg_gen_extu_tl_i64(r_tmp2
, t1
);
1906 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1907 tcg_temp_free_i64(r_tmp2
);
1908 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
1909 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
1910 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
1911 tcg_temp_free_i64(r_tmp1
);
1912 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
1913 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
1917 #if defined(TARGET_MIPS64)
1920 int l1
= gen_new_label();
1922 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1924 int l2
= gen_new_label();
1926 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
1927 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
1928 tcg_gen_mov_tl(cpu_LO
[0], t0
);
1929 tcg_gen_movi_tl(cpu_HI
[0], 0);
1932 tcg_gen_div_i64(cpu_LO
[0], t0
, t1
);
1933 tcg_gen_rem_i64(cpu_HI
[0], t0
, t1
);
1941 int l1
= gen_new_label();
1943 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1944 tcg_gen_divu_i64(cpu_LO
[0], t0
, t1
);
1945 tcg_gen_remu_i64(cpu_HI
[0], t0
, t1
);
1951 gen_helper_dmult(t0
, t1
);
1955 gen_helper_dmultu(t0
, t1
);
1961 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
1962 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
1964 tcg_gen_ext_tl_i64(r_tmp1
, t0
);
1965 tcg_gen_ext_tl_i64(r_tmp2
, t1
);
1966 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1967 tcg_gen_concat_tl_i64(r_tmp2
, cpu_LO
[0], cpu_HI
[0]);
1968 tcg_gen_add_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1969 tcg_temp_free_i64(r_tmp2
);
1970 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
1971 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
1972 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
1973 tcg_temp_free_i64(r_tmp1
);
1974 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
1975 tcg_gen_ext32s_tl(cpu_LO
[1], t1
);
1981 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
1982 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
1984 tcg_gen_ext32u_tl(t0
, t0
);
1985 tcg_gen_ext32u_tl(t1
, t1
);
1986 tcg_gen_extu_tl_i64(r_tmp1
, t0
);
1987 tcg_gen_extu_tl_i64(r_tmp2
, t1
);
1988 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1989 tcg_gen_concat_tl_i64(r_tmp2
, cpu_LO
[0], cpu_HI
[0]);
1990 tcg_gen_add_i64(r_tmp1
, r_tmp1
, r_tmp2
);
1991 tcg_temp_free_i64(r_tmp2
);
1992 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
1993 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
1994 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
1995 tcg_temp_free_i64(r_tmp1
);
1996 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
1997 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2003 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
2004 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
2006 tcg_gen_ext_tl_i64(r_tmp1
, t0
);
2007 tcg_gen_ext_tl_i64(r_tmp2
, t1
);
2008 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
2009 tcg_gen_concat_tl_i64(r_tmp2
, cpu_LO
[0], cpu_HI
[0]);
2010 tcg_gen_sub_i64(r_tmp1
, r_tmp1
, r_tmp2
);
2011 tcg_temp_free_i64(r_tmp2
);
2012 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
2013 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
2014 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
2015 tcg_temp_free_i64(r_tmp1
);
2016 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2017 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2023 TCGv_i64 r_tmp1
= tcg_temp_new_i64();
2024 TCGv_i64 r_tmp2
= tcg_temp_new_i64();
2026 tcg_gen_ext32u_tl(t0
, t0
);
2027 tcg_gen_ext32u_tl(t1
, t1
);
2028 tcg_gen_extu_tl_i64(r_tmp1
, t0
);
2029 tcg_gen_extu_tl_i64(r_tmp2
, t1
);
2030 tcg_gen_mul_i64(r_tmp1
, r_tmp1
, r_tmp2
);
2031 tcg_gen_concat_tl_i64(r_tmp2
, cpu_LO
[0], cpu_HI
[0]);
2032 tcg_gen_sub_i64(r_tmp1
, r_tmp1
, r_tmp2
);
2033 tcg_temp_free_i64(r_tmp2
);
2034 tcg_gen_trunc_i64_tl(t0
, r_tmp1
);
2035 tcg_gen_shri_i64(r_tmp1
, r_tmp1
, 32);
2036 tcg_gen_trunc_i64_tl(t1
, r_tmp1
);
2037 tcg_temp_free_i64(r_tmp1
);
2038 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2039 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2045 generate_exception(ctx
, EXCP_RI
);
2048 MIPS_DEBUG("%s %s %s", opn
, regnames
[rs
], regnames
[rt
]);
2054 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2055 int rd
, int rs
, int rt
)
2057 const char *opn
= "mul vr54xx";
2058 TCGv t0
= tcg_temp_local_new();
2059 TCGv t1
= tcg_temp_local_new();
2061 gen_load_gpr(t0
, rs
);
2062 gen_load_gpr(t1
, rt
);
2065 case OPC_VR54XX_MULS
:
2066 gen_helper_muls(t0
, t0
, t1
);
2069 case OPC_VR54XX_MULSU
:
2070 gen_helper_mulsu(t0
, t0
, t1
);
2073 case OPC_VR54XX_MACC
:
2074 gen_helper_macc(t0
, t0
, t1
);
2077 case OPC_VR54XX_MACCU
:
2078 gen_helper_maccu(t0
, t0
, t1
);
2081 case OPC_VR54XX_MSAC
:
2082 gen_helper_msac(t0
, t0
, t1
);
2085 case OPC_VR54XX_MSACU
:
2086 gen_helper_msacu(t0
, t0
, t1
);
2089 case OPC_VR54XX_MULHI
:
2090 gen_helper_mulhi(t0
, t0
, t1
);
2093 case OPC_VR54XX_MULHIU
:
2094 gen_helper_mulhiu(t0
, t0
, t1
);
2097 case OPC_VR54XX_MULSHI
:
2098 gen_helper_mulshi(t0
, t0
, t1
);
2101 case OPC_VR54XX_MULSHIU
:
2102 gen_helper_mulshiu(t0
, t0
, t1
);
2105 case OPC_VR54XX_MACCHI
:
2106 gen_helper_macchi(t0
, t0
, t1
);
2109 case OPC_VR54XX_MACCHIU
:
2110 gen_helper_macchiu(t0
, t0
, t1
);
2113 case OPC_VR54XX_MSACHI
:
2114 gen_helper_msachi(t0
, t0
, t1
);
2117 case OPC_VR54XX_MSACHIU
:
2118 gen_helper_msachiu(t0
, t0
, t1
);
2122 MIPS_INVAL("mul vr54xx");
2123 generate_exception(ctx
, EXCP_RI
);
2126 gen_store_gpr(t0
, rd
);
2127 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2134 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
2137 const char *opn
= "CLx";
2138 TCGv t0
= tcg_temp_local_new();
2145 gen_load_gpr(t0
, rs
);
2148 gen_helper_clo(t0
, t0
);
2152 gen_helper_clz(t0
, t0
);
2155 #if defined(TARGET_MIPS64)
2157 gen_helper_dclo(t0
, t0
);
2161 gen_helper_dclz(t0
, t0
);
2167 generate_exception(ctx
, EXCP_RI
);
2170 gen_store_gpr(t0
, rd
);
2171 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2178 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
2179 int rs
, int rt
, int16_t imm
)
2182 TCGv t0
= tcg_temp_local_new();
2183 TCGv t1
= tcg_temp_local_new();
2186 /* Load needed operands */
2194 /* Compare two registers */
2196 gen_load_gpr(t0
, rs
);
2197 gen_load_gpr(t1
, rt
);
2207 /* Compare register to immediate */
2208 if (rs
!= 0 || imm
!= 0) {
2209 gen_load_gpr(t0
, rs
);
2210 tcg_gen_movi_tl(t1
, (int32_t)imm
);
2217 case OPC_TEQ
: /* rs == rs */
2218 case OPC_TEQI
: /* r0 == 0 */
2219 case OPC_TGE
: /* rs >= rs */
2220 case OPC_TGEI
: /* r0 >= 0 */
2221 case OPC_TGEU
: /* rs >= rs unsigned */
2222 case OPC_TGEIU
: /* r0 >= 0 unsigned */
2224 tcg_gen_movi_tl(t0
, 1);
2226 case OPC_TLT
: /* rs < rs */
2227 case OPC_TLTI
: /* r0 < 0 */
2228 case OPC_TLTU
: /* rs < rs unsigned */
2229 case OPC_TLTIU
: /* r0 < 0 unsigned */
2230 case OPC_TNE
: /* rs != rs */
2231 case OPC_TNEI
: /* r0 != 0 */
2232 /* Never trap: treat as NOP. */
2236 generate_exception(ctx
, EXCP_RI
);
2267 generate_exception(ctx
, EXCP_RI
);
2271 save_cpu_state(ctx
, 1);
2273 int l1
= gen_new_label();
2275 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
2276 gen_helper_0i(raise_exception
, EXCP_TRAP
);
2279 ctx
->bstate
= BS_STOP
;
2285 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
2287 TranslationBlock
*tb
;
2289 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
)) {
2292 tcg_gen_exit_tb((long)tb
+ n
);
2299 /* Branches (before delay slot) */
2300 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
2301 int rs
, int rt
, int32_t offset
)
2303 target_ulong btgt
= -1;
2305 int bcond_compute
= 0;
2306 TCGv t0
= tcg_temp_local_new();
2307 TCGv t1
= tcg_temp_local_new();
2309 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2310 #ifdef MIPS_DEBUG_DISAS
2311 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
2313 generate_exception(ctx
, EXCP_RI
);
2317 /* Load needed operands */
2323 /* Compare two registers */
2325 gen_load_gpr(t0
, rs
);
2326 gen_load_gpr(t1
, rt
);
2329 btgt
= ctx
->pc
+ 4 + offset
;
2343 /* Compare to zero */
2345 gen_load_gpr(t0
, rs
);
2348 btgt
= ctx
->pc
+ 4 + offset
;
2352 /* Jump to immediate */
2353 btgt
= ((ctx
->pc
+ 4) & (int32_t)0xF0000000) | (uint32_t)offset
;
2357 /* Jump to register */
2358 if (offset
!= 0 && offset
!= 16) {
2359 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2360 others are reserved. */
2361 MIPS_INVAL("jump hint");
2362 generate_exception(ctx
, EXCP_RI
);
2365 gen_load_gpr(btarget
, rs
);
2368 MIPS_INVAL("branch/jump");
2369 generate_exception(ctx
, EXCP_RI
);
2372 if (bcond_compute
== 0) {
2373 /* No condition to be computed */
2375 case OPC_BEQ
: /* rx == rx */
2376 case OPC_BEQL
: /* rx == rx likely */
2377 case OPC_BGEZ
: /* 0 >= 0 */
2378 case OPC_BGEZL
: /* 0 >= 0 likely */
2379 case OPC_BLEZ
: /* 0 <= 0 */
2380 case OPC_BLEZL
: /* 0 <= 0 likely */
2382 ctx
->hflags
|= MIPS_HFLAG_B
;
2383 MIPS_DEBUG("balways");
2385 case OPC_BGEZAL
: /* 0 >= 0 */
2386 case OPC_BGEZALL
: /* 0 >= 0 likely */
2387 /* Always take and link */
2389 ctx
->hflags
|= MIPS_HFLAG_B
;
2390 MIPS_DEBUG("balways and link");
2392 case OPC_BNE
: /* rx != rx */
2393 case OPC_BGTZ
: /* 0 > 0 */
2394 case OPC_BLTZ
: /* 0 < 0 */
2396 MIPS_DEBUG("bnever (NOP)");
2398 case OPC_BLTZAL
: /* 0 < 0 */
2399 tcg_gen_movi_tl(t0
, ctx
->pc
+ 8);
2400 gen_store_gpr(t0
, 31);
2401 MIPS_DEBUG("bnever and link");
2403 case OPC_BLTZALL
: /* 0 < 0 likely */
2404 tcg_gen_movi_tl(t0
, ctx
->pc
+ 8);
2405 gen_store_gpr(t0
, 31);
2406 /* Skip the instruction in the delay slot */
2407 MIPS_DEBUG("bnever, link and skip");
2410 case OPC_BNEL
: /* rx != rx likely */
2411 case OPC_BGTZL
: /* 0 > 0 likely */
2412 case OPC_BLTZL
: /* 0 < 0 likely */
2413 /* Skip the instruction in the delay slot */
2414 MIPS_DEBUG("bnever and skip");
2418 ctx
->hflags
|= MIPS_HFLAG_B
;
2419 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
2423 ctx
->hflags
|= MIPS_HFLAG_B
;
2424 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
2427 ctx
->hflags
|= MIPS_HFLAG_BR
;
2428 MIPS_DEBUG("jr %s", regnames
[rs
]);
2432 ctx
->hflags
|= MIPS_HFLAG_BR
;
2433 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
2436 MIPS_INVAL("branch/jump");
2437 generate_exception(ctx
, EXCP_RI
);
2444 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
2445 regnames
[rs
], regnames
[rt
], btgt
);
2449 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
2450 regnames
[rs
], regnames
[rt
], btgt
);
2454 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
2455 regnames
[rs
], regnames
[rt
], btgt
);
2459 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
2460 regnames
[rs
], regnames
[rt
], btgt
);
2464 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2468 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2472 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2478 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2482 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2486 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2490 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2494 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2498 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2502 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2507 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2509 ctx
->hflags
|= MIPS_HFLAG_BC
;
2510 tcg_gen_trunc_tl_i32(bcond
, t0
);
2515 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
2517 ctx
->hflags
|= MIPS_HFLAG_BL
;
2518 tcg_gen_trunc_tl_i32(bcond
, t0
);
2521 MIPS_INVAL("conditional branch/jump");
2522 generate_exception(ctx
, EXCP_RI
);
2526 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
2527 blink
, ctx
->hflags
, btgt
);
2529 ctx
->btarget
= btgt
;
2531 tcg_gen_movi_tl(t0
, ctx
->pc
+ 8);
2532 gen_store_gpr(t0
, blink
);
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
        tcg_gen_shri_tl(t0, t1, lsb + 32);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb + 32);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        gen_load_gpr(t0, rt);
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        MIPS_INVAL("bitops");
        generate_exception(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
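/*
 * The byte/halfword shuffles below use the classic shift-and-mask idiom:
 * swapping the two bytes of each halfword shifts the value right by 8 and
 * keeps 0x00FF00FF, shifts left by 8 and keeps the complementary bytes,
 * then ORs the two halves together.  The 64-bit variants apply the same
 * pattern with wider masks, plus a 32-bit swap built from a
 * shift-right/shift-left/OR sequence.
 */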
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rt);
        tcg_gen_shri_tl(t0, t1, 8);
        tcg_gen_andi_tl(t0, t0, 0x00FF00FF);
        tcg_gen_shli_tl(t1, t1, 8);
        tcg_gen_andi_tl(t1, t1, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext8s_tl(t0, t1);
        tcg_gen_ext16s_tl(t0, t1);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t1, rt);
        tcg_gen_shri_tl(t0, t1, 8);
        tcg_gen_andi_tl(t0, t0, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t1, t1, 8);
        tcg_gen_andi_tl(t1, t1, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(t0, t0, t1);
        gen_load_gpr(t1, rt);
        tcg_gen_shri_tl(t0, t1, 16);
        tcg_gen_andi_tl(t0, t0, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t1, t1, 16);
        tcg_gen_andi_tl(t1, t1, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t1, t0, t1);
        tcg_gen_shri_tl(t0, t1, 32);
        tcg_gen_shli_tl(t1, t1, 32);
        tcg_gen_or_tl(t0, t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception(ctx, EXCP_RI);
    gen_store_gpr(t0, rd);
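/*
 * CP0 registers that are architecturally 32 bits wide are either held as
 * 32-bit fields in the CPU state (loaded with tcg_gen_ld_i32 and extended
 * to target width) or as target_ulong fields that must stay
 * sign-extended; the small helpers below centralise those two access
 * patterns for mfc0/mtc0.
 */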
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv t, target_ulong off)
{
    TCGv_i32 r_tmp = tcg_temp_new_i32();

    tcg_gen_ld_i32(r_tmp, cpu_env, off);
    tcg_gen_ext_i32_tl(t, r_tmp);
    tcg_temp_free_i32(r_tmp);
}

static inline void gen_mfc0_load64 (TCGv t, target_ulong off)
{
    tcg_gen_ld_tl(t, cpu_env, off);
    tcg_gen_ext32s_tl(t, t);
}

static inline void gen_mtc0_store32 (TCGv t, target_ulong off)
{
    TCGv_i32 r_tmp = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_tmp, t);
    tcg_gen_st_i32(r_tmp, cpu_env, off);
    tcg_temp_free_i32(r_tmp);
}

static inline void gen_mtc0_store64 (TCGv t, target_ulong off)
{
    tcg_gen_ext32s_tl(t, t);
    tcg_gen_st_tl(t, cpu_env, off);
}
static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS32);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Index));
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_mvpcontrol(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_mvpconf0(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_mvpconf1(t0);
        gen_helper_mfc0_random(t0);
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEControl));
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf0));
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf1));
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load64(t0, offsetof(CPUState, CP0_YQMask));
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load64(t0, offsetof(CPUState, CP0_VPESchedule));
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load64(t0, offsetof(CPUState, CP0_VPEScheFBack));
        rn = "VPEScheFBack";
        check_insn(env, ctx, ASE_MT);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEOpt));
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo0));
        tcg_gen_ext32s_tl(t0, t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tcstatus(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tcbind(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tcrestart(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tchalt(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tccontext(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tcschedule(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mfc0_tcschefback(t0);
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo1));
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_Context));
        tcg_gen_ext32s_tl(t0, t0);
//      gen_helper_mfc0_contextconfig(t0); /* SmartMIPS ASE */
        rn = "ContextConfig";
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageMask));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageGrain));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Wired));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf0));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf1));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf2));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf3));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf4));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_HWREna));
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr));
        tcg_gen_ext32s_tl(t0, t0);
        /* Mark as an IO operation because we read the time. */
        gen_helper_mfc0_count(t0);
        ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryHi));
        tcg_gen_ext32s_tl(t0, t0);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Compare));
        /* 6,7 are implementation dependent */
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Status));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_IntCtl));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSCtl));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSMap));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Cause));
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EPC));
        tcg_gen_ext32s_tl(t0, t0);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_PRid));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_EBase));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config0));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config1));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config2));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config3));
        /* 4,5 are reserved */
        /* 6,7 are implementation dependent */
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config6));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config7));
        gen_helper_mfc0_lladdr(t0);
        gen_helper_1i(mfc0_watchlo, t0, sel);
        gen_helper_1i(mfc0_watchhi, t0, sel);
#if defined(TARGET_MIPS64)
        check_insn(env, ctx, ISA_MIPS3);
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_XContext));
        tcg_gen_ext32s_tl(t0, t0);
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Framemask));
        tcg_gen_movi_tl(t0, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
        gen_helper_mfc0_debug(t0); /* EJTAG support */
//      gen_helper_mfc0_tracecontrol(t0); /* PDtrace support */
        rn = "TraceControl";
//      gen_helper_mfc0_tracecontrol2(t0); /* PDtrace support */
        rn = "TraceControl2";
//      gen_helper_mfc0_usertracedata(t0); /* PDtrace support */
        rn = "UserTraceData";
//      gen_helper_mfc0_tracebpc(t0); /* PDtrace support */
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_DEPC));
        tcg_gen_ext32s_tl(t0, t0);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_Performance0));
        rn = "Performance0";
//      gen_helper_mfc0_performance1(t0);
        rn = "Performance1";
//      gen_helper_mfc0_performance2(t0);
        rn = "Performance2";
//      gen_helper_mfc0_performance3(t0);
        rn = "Performance3";
//      gen_helper_mfc0_performance4(t0);
        rn = "Performance4";
//      gen_helper_mfc0_performance5(t0);
        rn = "Performance5";
//      gen_helper_mfc0_performance6(t0);
        rn = "Performance6";
//      gen_helper_mfc0_performance7(t0);
        rn = "Performance7";
        tcg_gen_movi_tl(t0, 0); /* unimplemented */
        tcg_gen_movi_tl(t0, 0); /* unimplemented */
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagLo));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataLo));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagHi));
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataHi));
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
        tcg_gen_ext32s_tl(t0, t0);
        gen_mfc0_load32(t0, offsetof(CPUState, CP0_DESAVE));
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
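/*
 * gen_mtc0 mirrors gen_mfc0 for writes.  Most writes go through helpers
 * because they can change interrupt or translation-relevant state;
 * whenever that is possible the block ends with BS_STOP, and writes that
 * may change hflags (Status, Debug) additionally save the PC and use
 * BS_EXCP so translation restarts cleanly at the next instruction.
 */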
static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS32);
        gen_helper_mtc0_index(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_mvpcontrol(t0);
        check_insn(env, ctx, ASE_MT);
        check_insn(env, ctx, ASE_MT);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_vpecontrol(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_vpeconf0(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_vpeconf1(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_yqmask(t0);
        check_insn(env, ctx, ASE_MT);
        gen_mtc0_store64(t0, offsetof(CPUState, CP0_VPESchedule));
        check_insn(env, ctx, ASE_MT);
        gen_mtc0_store64(t0, offsetof(CPUState, CP0_VPEScheFBack));
        rn = "VPEScheFBack";
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_vpeopt(t0);
        gen_helper_mtc0_entrylo0(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tcstatus(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tcbind(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tcrestart(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tchalt(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tccontext(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tcschedule(t0);
        check_insn(env, ctx, ASE_MT);
        gen_helper_mtc0_tcschefback(t0);
        gen_helper_mtc0_entrylo1(t0);
        gen_helper_mtc0_context(t0);
//      gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
        rn = "ContextConfig";
        gen_helper_mtc0_pagemask(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_pagegrain(t0);
        gen_helper_mtc0_wired(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf0(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf1(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf2(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf3(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsconf4(t0);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_hwrena(t0);
        gen_helper_mtc0_count(t0);
        /* 6,7 are implementation dependent */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
        gen_helper_mtc0_entryhi(t0);
        gen_helper_mtc0_compare(t0);
        /* 6,7 are implementation dependent */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
        gen_helper_mtc0_status(t0);
        /* BS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_intctl(t0);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_srsctl(t0);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_mtc0_store32(t0, offsetof(CPUState, CP0_SRSMap));
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        gen_helper_mtc0_cause(t0);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
        gen_mtc0_store64(t0, offsetof(CPUState, CP0_EPC));
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_helper_mtc0_ebase(t0);
        gen_helper_mtc0_config0(t0);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* ignored, read only */
        gen_helper_mtc0_config2(t0);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* ignored, read only */
        /* 4,5 are reserved */
        /* 6,7 are implementation dependent */
        rn = "Invalid config selector";
        gen_helper_1i(mtc0_watchlo, t0, sel);
        gen_helper_1i(mtc0_watchhi, t0, sel);
#if defined(TARGET_MIPS64)
        check_insn(env, ctx, ISA_MIPS3);
        gen_helper_mtc0_xcontext(t0);
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        gen_helper_mtc0_framemask(t0);
        rn = "Diagnostic"; /* implementation dependent */
        gen_helper_mtc0_debug(t0); /* EJTAG support */
        /* BS_STOP isn't good enough here, hflags may have changed. */
        gen_save_pc(ctx->pc + 4);
        ctx->bstate = BS_EXCP;
//      gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
        rn = "TraceControl";
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
//      gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
        rn = "TraceControl2";
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
//      gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
        rn = "UserTraceData";
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
//      gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        gen_mtc0_store64(t0, offsetof(CPUState, CP0_DEPC));
        gen_helper_mtc0_performance0(t0);
        rn = "Performance0";
//      gen_helper_mtc0_performance1(t0);
        rn = "Performance1";
//      gen_helper_mtc0_performance2(t0);
        rn = "Performance2";
//      gen_helper_mtc0_performance3(t0);
        rn = "Performance3";
//      gen_helper_mtc0_performance4(t0);
        rn = "Performance4";
//      gen_helper_mtc0_performance5(t0);
        rn = "Performance5";
//      gen_helper_mtc0_performance6(t0);
        rn = "Performance6";
//      gen_helper_mtc0_performance7(t0);
        rn = "Performance7";
        gen_helper_mtc0_taglo(t0);
        gen_helper_mtc0_datalo(t0);
        gen_helper_mtc0_taghi(t0);
        gen_helper_mtc0_datahi(t0);
        gen_mtc0_store64(t0, offsetof(CPUState, CP0_ErrorEPC));
        gen_mtc0_store32(t0, offsetof(CPUState, CP0_DESAVE));
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts. */
    ctx->bstate = BS_STOP;

    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
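/*
 * The TARGET_MIPS64-only gen_dmfc0/gen_dmtc0 variants below follow the
 * same reg/sel dispatch as gen_mfc0/gen_mtc0; the visible difference is
 * that 64-bit CP0 registers (EntryLo, Context, EPC, XContext, ...) are
 * moved with full-width tcg_gen_ld_tl/tcg_gen_st_tl instead of being
 * truncated or sign-extended to 32 bits.
 */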
3895 #if defined(TARGET_MIPS64)
3896 static void gen_dmfc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
3898 const char *rn
= "invalid";
3901 check_insn(env
, ctx
, ISA_MIPS64
);
3907 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Index
));
3911 check_insn(env
, ctx
, ASE_MT
);
3912 gen_helper_mfc0_mvpcontrol(t0
);
3916 check_insn(env
, ctx
, ASE_MT
);
3917 gen_helper_mfc0_mvpconf0(t0
);
3921 check_insn(env
, ctx
, ASE_MT
);
3922 gen_helper_mfc0_mvpconf1(t0
);
3932 gen_helper_mfc0_random(t0
);
3936 check_insn(env
, ctx
, ASE_MT
);
3937 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEControl
));
3941 check_insn(env
, ctx
, ASE_MT
);
3942 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf0
));
3946 check_insn(env
, ctx
, ASE_MT
);
3947 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf1
));
3951 check_insn(env
, ctx
, ASE_MT
);
3952 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_YQMask
));
3956 check_insn(env
, ctx
, ASE_MT
);
3957 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
3961 check_insn(env
, ctx
, ASE_MT
);
3962 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
3963 rn
= "VPEScheFBack";
3966 check_insn(env
, ctx
, ASE_MT
);
3967 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEOpt
));
3977 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo0
));
3981 check_insn(env
, ctx
, ASE_MT
);
3982 gen_helper_mfc0_tcstatus(t0
);
3986 check_insn(env
, ctx
, ASE_MT
);
3987 gen_helper_mfc0_tcbind(t0
);
3991 check_insn(env
, ctx
, ASE_MT
);
3992 gen_helper_dmfc0_tcrestart(t0
);
3996 check_insn(env
, ctx
, ASE_MT
);
3997 gen_helper_dmfc0_tchalt(t0
);
4001 check_insn(env
, ctx
, ASE_MT
);
4002 gen_helper_dmfc0_tccontext(t0
);
4006 check_insn(env
, ctx
, ASE_MT
);
4007 gen_helper_dmfc0_tcschedule(t0
);
4011 check_insn(env
, ctx
, ASE_MT
);
4012 gen_helper_dmfc0_tcschefback(t0
);
4022 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo1
));
4032 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_Context
));
4036 // gen_helper_dmfc0_contextconfig(t0); /* SmartMIPS ASE */
4037 rn
= "ContextConfig";
4046 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageMask
));
4050 check_insn(env
, ctx
, ISA_MIPS32R2
);
4051 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageGrain
));
4061 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Wired
));
4065 check_insn(env
, ctx
, ISA_MIPS32R2
);
4066 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf0
));
4070 check_insn(env
, ctx
, ISA_MIPS32R2
);
4071 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf1
));
4075 check_insn(env
, ctx
, ISA_MIPS32R2
);
4076 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf2
));
4080 check_insn(env
, ctx
, ISA_MIPS32R2
);
4081 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf3
));
4085 check_insn(env
, ctx
, ISA_MIPS32R2
);
4086 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf4
));
4096 check_insn(env
, ctx
, ISA_MIPS32R2
);
4097 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_HWREna
));
4107 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_BadVAddr
));
4117 /* Mark as an IO operation because we read the time. */
4120 gen_helper_mfc0_count(t0
);
4123 ctx
->bstate
= BS_STOP
;
4127 /* 6,7 are implementation dependent */
4135 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryHi
));
4145 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Compare
));
4148 /* 6,7 are implementation dependent */
4156 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Status
));
4160 check_insn(env
, ctx
, ISA_MIPS32R2
);
4161 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_IntCtl
));
4165 check_insn(env
, ctx
, ISA_MIPS32R2
);
4166 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSCtl
));
4170 check_insn(env
, ctx
, ISA_MIPS32R2
);
4171 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSMap
));
4181 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Cause
));
4191 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
4201 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PRid
));
4205 check_insn(env
, ctx
, ISA_MIPS32R2
);
4206 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_EBase
));
4216 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config0
));
4220 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config1
));
4224 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config2
));
4228 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config3
));
4231 /* 6,7 are implementation dependent */
4233 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config6
));
4237 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config7
));
4247 gen_helper_dmfc0_lladdr(t0
);
4257 gen_helper_1i(dmfc0_watchlo
, t0
, sel
);
4267 gen_helper_1i(mfc0_watchhi
, t0
, sel
);
4277 check_insn(env
, ctx
, ISA_MIPS3
);
4278 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_XContext
));
4286 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4289 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Framemask
));
4297 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4298 rn
= "'Diagnostic"; /* implementation dependent */
4303 gen_helper_mfc0_debug(t0
); /* EJTAG support */
4307 // gen_helper_dmfc0_tracecontrol(t0); /* PDtrace support */
4308 rn
= "TraceControl";
4311 // gen_helper_dmfc0_tracecontrol2(t0); /* PDtrace support */
4312 rn
= "TraceControl2";
4315 // gen_helper_dmfc0_usertracedata(t0); /* PDtrace support */
4316 rn
= "UserTraceData";
4319 // gen_helper_dmfc0_tracebpc(t0); /* PDtrace support */
4330 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
4340 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Performance0
));
4341 rn
= "Performance0";
4344 // gen_helper_dmfc0_performance1(t0);
4345 rn
= "Performance1";
4348 // gen_helper_dmfc0_performance2(t0);
4349 rn
= "Performance2";
4352 // gen_helper_dmfc0_performance3(t0);
4353 rn
= "Performance3";
4356 // gen_helper_dmfc0_performance4(t0);
4357 rn
= "Performance4";
4360 // gen_helper_dmfc0_performance5(t0);
4361 rn
= "Performance5";
4364 // gen_helper_dmfc0_performance6(t0);
4365 rn
= "Performance6";
4368 // gen_helper_dmfc0_performance7(t0);
4369 rn
= "Performance7";
4376 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4383 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4396 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagLo
));
4403 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataLo
));
4416 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagHi
));
4423 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataHi
));
4433 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
4444 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DESAVE
));
4454 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4458 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4459 generate_exception(ctx
, EXCP_RI
);
4462 static void gen_dmtc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
4464 const char *rn
= "invalid";
4467 check_insn(env
, ctx
, ISA_MIPS64
);
4476 gen_helper_mtc0_index(t0
);
4480 check_insn(env
, ctx
, ASE_MT
);
4481 gen_helper_mtc0_mvpcontrol(t0
);
4485 check_insn(env
, ctx
, ASE_MT
);
4490 check_insn(env
, ctx
, ASE_MT
);
4505 check_insn(env
, ctx
, ASE_MT
);
4506 gen_helper_mtc0_vpecontrol(t0
);
4510 check_insn(env
, ctx
, ASE_MT
);
4511 gen_helper_mtc0_vpeconf0(t0
);
4515 check_insn(env
, ctx
, ASE_MT
);
4516 gen_helper_mtc0_vpeconf1(t0
);
4520 check_insn(env
, ctx
, ASE_MT
);
4521 gen_helper_mtc0_yqmask(t0
);
4525 check_insn(env
, ctx
, ASE_MT
);
4526 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
4530 check_insn(env
, ctx
, ASE_MT
);
4531 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
4532 rn
= "VPEScheFBack";
4535 check_insn(env
, ctx
, ASE_MT
);
4536 gen_helper_mtc0_vpeopt(t0
);
4546 gen_helper_mtc0_entrylo0(t0
);
4550 check_insn(env
, ctx
, ASE_MT
);
4551 gen_helper_mtc0_tcstatus(t0
);
4555 check_insn(env
, ctx
, ASE_MT
);
4556 gen_helper_mtc0_tcbind(t0
);
4560 check_insn(env
, ctx
, ASE_MT
);
4561 gen_helper_mtc0_tcrestart(t0
);
4565 check_insn(env
, ctx
, ASE_MT
);
4566 gen_helper_mtc0_tchalt(t0
);
4570 check_insn(env
, ctx
, ASE_MT
);
4571 gen_helper_mtc0_tccontext(t0
);
4575 check_insn(env
, ctx
, ASE_MT
);
4576 gen_helper_mtc0_tcschedule(t0
);
4580 check_insn(env
, ctx
, ASE_MT
);
4581 gen_helper_mtc0_tcschefback(t0
);
4591 gen_helper_mtc0_entrylo1(t0
);
4601 gen_helper_mtc0_context(t0
);
4605 // gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
4606 rn
= "ContextConfig";
4615 gen_helper_mtc0_pagemask(t0
);
4619 check_insn(env
, ctx
, ISA_MIPS32R2
);
4620 gen_helper_mtc0_pagegrain(t0
);
4630 gen_helper_mtc0_wired(t0
);
4634 check_insn(env
, ctx
, ISA_MIPS32R2
);
4635 gen_helper_mtc0_srsconf0(t0
);
4639 check_insn(env
, ctx
, ISA_MIPS32R2
);
4640 gen_helper_mtc0_srsconf1(t0
);
4644 check_insn(env
, ctx
, ISA_MIPS32R2
);
4645 gen_helper_mtc0_srsconf2(t0
);
4649 check_insn(env
, ctx
, ISA_MIPS32R2
);
4650 gen_helper_mtc0_srsconf3(t0
);
4654 check_insn(env
, ctx
, ISA_MIPS32R2
);
4655 gen_helper_mtc0_srsconf4(t0
);
4665 check_insn(env
, ctx
, ISA_MIPS32R2
);
4666 gen_helper_mtc0_hwrena(t0
);
4680 gen_helper_mtc0_count(t0
);
4683 /* 6,7 are implementation dependent */
4687 /* Stop translation as we may have switched the execution mode */
4688 ctx
->bstate
= BS_STOP
;
4693 gen_helper_mtc0_entryhi(t0
);
4703 gen_helper_mtc0_compare(t0
);
4706 /* 6,7 are implementation dependent */
4710 /* Stop translation as we may have switched the execution mode */
4711 ctx
->bstate
= BS_STOP
;
4716 gen_helper_mtc0_status(t0
);
4717 /* BS_STOP isn't good enough here, hflags may have changed. */
4718 gen_save_pc(ctx
->pc
+ 4);
4719 ctx
->bstate
= BS_EXCP
;
4723 check_insn(env
, ctx
, ISA_MIPS32R2
);
4724 gen_helper_mtc0_intctl(t0
);
4725 /* Stop translation as we may have switched the execution mode */
4726 ctx
->bstate
= BS_STOP
;
4730 check_insn(env
, ctx
, ISA_MIPS32R2
);
4731 gen_helper_mtc0_srsctl(t0
);
4732 /* Stop translation as we may have switched the execution mode */
4733 ctx
->bstate
= BS_STOP
;
4737 check_insn(env
, ctx
, ISA_MIPS32R2
);
4738 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_SRSMap
));
4739 /* Stop translation as we may have switched the execution mode */
4740 ctx
->bstate
= BS_STOP
;
4750 gen_helper_mtc0_cause(t0
);
4756 /* Stop translation as we may have switched the execution mode */
4757 ctx
->bstate
= BS_STOP
;
4762 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
4776 check_insn(env
, ctx
, ISA_MIPS32R2
);
4777 gen_helper_mtc0_ebase(t0
);
4787 gen_helper_mtc0_config0(t0
);
4789 /* Stop translation as we may have switched the execution mode */
4790 ctx
->bstate
= BS_STOP
;
4797 gen_helper_mtc0_config2(t0
);
4799 /* Stop translation as we may have switched the execution mode */
4800 ctx
->bstate
= BS_STOP
;
4806 /* 6,7 are implementation dependent */
4808 rn
= "Invalid config selector";
4825 gen_helper_1i(mtc0_watchlo
, t0
, sel
);
4835 gen_helper_1i(mtc0_watchhi
, t0
, sel
);
4845 check_insn(env
, ctx
, ISA_MIPS3
);
4846 gen_helper_mtc0_xcontext(t0
);
4854 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4857 gen_helper_mtc0_framemask(t0
);
4866 rn
= "Diagnostic"; /* implementation dependent */
4871 gen_helper_mtc0_debug(t0
); /* EJTAG support */
4872 /* BS_STOP isn't good enough here, hflags may have changed. */
4873 gen_save_pc(ctx
->pc
+ 4);
4874 ctx
->bstate
= BS_EXCP
;
4878 // gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
4879 /* Stop translation as we may have switched the execution mode */
4880 ctx
->bstate
= BS_STOP
;
4881 rn
= "TraceControl";
4884 // gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
4885 /* Stop translation as we may have switched the execution mode */
4886 ctx
->bstate
= BS_STOP
;
4887 rn
= "TraceControl2";
4890 // gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
4891 /* Stop translation as we may have switched the execution mode */
4892 ctx
->bstate
= BS_STOP
;
4893 rn
= "UserTraceData";
4896 // gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
4897 /* Stop translation as we may have switched the execution mode */
4898 ctx
->bstate
= BS_STOP
;
4909 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
4919 gen_helper_mtc0_performance0(t0
);
4920 rn
= "Performance0";
4923 // gen_helper_mtc0_performance1(t0);
4924 rn
= "Performance1";
4927 // gen_helper_mtc0_performance2(t0);
4928 rn
= "Performance2";
4931 // gen_helper_mtc0_performance3(t0);
4932 rn
= "Performance3";
4935 // gen_helper_mtc0_performance4(t0);
4936 rn
= "Performance4";
4939 // gen_helper_mtc0_performance5(t0);
4940 rn
= "Performance5";
4943 // gen_helper_mtc0_performance6(t0);
4944 rn
= "Performance6";
4947 // gen_helper_mtc0_performance7(t0);
4948 rn
= "Performance7";
4974 gen_helper_mtc0_taglo(t0
);
4981 gen_helper_mtc0_datalo(t0
);
4994 gen_helper_mtc0_taghi(t0
);
5001 gen_helper_mtc0_datahi(t0
);
5012 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
5023 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_DESAVE
));
5029 /* Stop translation as we may have switched the execution mode */
5030 ctx
->bstate
= BS_STOP
;
5035 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5036 /* For simplicity assume that all writes can cause interrupts. */
5039 ctx
->bstate
= BS_STOP
;
5044 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5045 generate_exception(ctx
, EXCP_RI
);
5047 #endif /* TARGET_MIPS64 */
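/*
 * MFTR/MTTR (MT ASE) access a register belonging to another thread
 * context.  The target TC comes from CP0_VPEControl.TargTC; if the
 * current VPE may not see that TC (no MVP privilege and a different VPE
 * binding, or a TC number above MVPConf0.PTC) the read yields -1.
 * Otherwise the "u"/"sel"/"h" fields select CP0 registers, GPRs,
 * HI/LO/ACX and DSP state, or FPU registers of the target context.
 */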
static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
            gen_helper_mftc0_tcstatus(t0);
            gen_helper_mftc0_tcbind(t0);
            gen_helper_mftc0_tcrestart(t0);
            gen_helper_mftc0_tchalt(t0);
            gen_helper_mftc0_tccontext(t0);
            gen_helper_mftc0_tcschedule(t0);
            gen_helper_mftc0_tcschefback(t0);
            gen_mfc0(env, ctx, t0, rt, sel);
            gen_helper_mftc0_entryhi(t0);
            gen_mfc0(env, ctx, t0, rt, sel);
            gen_helper_mftc0_status(t0);
            gen_mfc0(env, ctx, t0, rt, sel);
            gen_helper_mftc0_debug(t0);
            gen_mfc0(env, ctx, t0, rt, sel);
            gen_mfc0(env, ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
            gen_helper_1i(mftlo, t0, 0);
            gen_helper_1i(mfthi, t0, 0);
            gen_helper_1i(mftacx, t0, 0);
            gen_helper_1i(mftlo, t0, 1);
            gen_helper_1i(mfthi, t0, 1);
            gen_helper_1i(mftacx, t0, 1);
            gen_helper_1i(mftlo, t0, 2);
            gen_helper_1i(mfthi, t0, 2);
            gen_helper_1i(mftacx, t0, 2);
            gen_helper_1i(mftlo, t0, 3);
            gen_helper_1i(mfthi, t0, 3);
            gen_helper_1i(mftacx, t0, 3);
            gen_helper_mftdsp(t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(fp0, rt);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);

    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception(ctx, EXCP_RI);
static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
            gen_helper_mttc0_tcstatus(t0);
            gen_helper_mttc0_tcbind(t0);
            gen_helper_mttc0_tcrestart(t0);
            gen_helper_mttc0_tchalt(t0);
            gen_helper_mttc0_tccontext(t0);
            gen_helper_mttc0_tcschedule(t0);
            gen_helper_mttc0_tcschefback(t0);
            gen_mtc0(env, ctx, t0, rd, sel);
            gen_helper_mttc0_entryhi(t0);
            gen_mtc0(env, ctx, t0, rd, sel);
            gen_helper_mttc0_status(t0);
            gen_mtc0(env, ctx, t0, rd, sel);
            gen_helper_mttc0_debug(t0);
            gen_mtc0(env, ctx, t0, rd, sel);
            gen_mtc0(env, ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
        gen_helper_1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
            gen_helper_1i(mttlo, t0, 0);
            gen_helper_1i(mtthi, t0, 0);
            gen_helper_1i(mttacx, t0, 0);
            gen_helper_1i(mttlo, t0, 1);
            gen_helper_1i(mtthi, t0, 1);
            gen_helper_1i(mttacx, t0, 1);
            gen_helper_1i(mttlo, t0, 2);
            gen_helper_1i(mtthi, t0, 2);
            gen_helper_1i(mttacx, t0, 2);
            gen_helper_1i(mttlo, t0, 3);
            gen_helper_1i(mtthi, t0, 3);
            gen_helper_1i(mttacx, t0, 3);
            gen_helper_mttdsp(t0);
    /* Floating point (COP1). */
        /* XXX: For now we support only a single FPU context. */
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(fp0, rd);
            tcg_temp_free_i32(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(fp0, rd);
            tcg_temp_free_i32(fp0);
        /* XXX: For now we support only a single FPU context. */
        gen_helper_1i(ctc1, t0, rd);
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);

    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception(ctx, EXCP_RI);
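/*
 * gen_cp0 is the COP0 major-opcode decoder: it routes MFC0/MTC0 (and the
 * 64-bit and MT ASE forms) to the move generators above and handles the
 * TLB maintenance instructions plus the privileged control-flow
 * operations.  CP0 writes and the privileged operations save the CPU
 * state first and end the translation block, since they can change the
 * execution mode.
 */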
static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
    const char *opn = "ldst";

            TCGv t0 = tcg_temp_local_new();

            gen_mfc0(env, ctx, t0, rd, ctx->opcode & 0x7);
            gen_store_gpr(t0, rt);
            TCGv t0 = tcg_temp_local_new();

            gen_load_gpr(t0, rt);
            save_cpu_state(ctx, 1);
            gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
#if defined(TARGET_MIPS64)
        check_insn(env, ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_local_new();

            gen_dmfc0(env, ctx, t0, rd, ctx->opcode & 0x7);
            gen_store_gpr(t0, rt);
        check_insn(env, ctx, ISA_MIPS3);
            TCGv t0 = tcg_temp_local_new();

            gen_load_gpr(t0, rt);
            save_cpu_state(ctx, 1);
            gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
        check_insn(env, ctx, ASE_MT);
        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        check_insn(env, ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
        if (!env->tlb->helper_tlbwi)
        if (!env->tlb->helper_tlbwr)
        if (!env->tlb->helper_tlbp)
        if (!env->tlb->helper_tlbr)
        check_insn(env, ctx, ISA_MIPS2);
        save_cpu_state(ctx, 1);
        ctx->bstate = BS_EXCP;
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_RI);
            save_cpu_state(ctx, 1);
            ctx->bstate = BS_EXCP;
        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        /* If we get an exception, we want to restart at next instruction */
        save_cpu_state(ctx, 1);
        ctx->bstate = BS_EXCP;
        generate_exception(ctx, EXCP_RI);
    MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
#endif /* !CONFIG_USER_ONLY */
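/*
 * FP branches test condition-code bits that the FP compare operations
 * left in fcr31.  BC1F/BC1T (and their "likely" forms) look at a single
 * CC bit, while the BC1ANY2/BC1ANY4 forms test a group of 2 or 4
 * consecutive bits, which is why the masks below are 0x1, 0x3 and 0xf
 * shifted by "cc".  The result lands in "bcond" and the branch itself is
 * emitted after the delay slot, based on MIPS_HFLAG_BC/MIPS_HFLAG_BL.
 */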
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
                                 int32_t cc, int32_t offset)
    target_ulong btarget;
    const char *opn = "cp1 cond branch";
    TCGv_i32 t0 = tcg_temp_new_i32();

    check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x1 << cc);
            tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x1 << cc);
            tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x1 << cc);
            tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x1 << cc);
            tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
        ctx->hflags |= MIPS_HFLAG_BL;
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x3 << cc);
            tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0x3 << cc);
            tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0xf << cc);
            tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
            int l1 = gen_new_label();
            int l2 = gen_new_label();

            tcg_gen_andi_i32(t0, t0, 0xf << cc);
            tcg_gen_brcondi_i32(TCG_COND_NE, t0, 0, l1);
            tcg_gen_movi_i32(bcond, 0);
            tcg_gen_movi_i32(bcond, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        generate_exception (ctx, EXCP_RI);
    MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
               ctx->hflags, btarget);
    ctx->btarget = btarget;
    tcg_temp_free_i32(t0);
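/*
 * gen_cp1 covers the raw moves between GPRs and FPRs: MFC1/MTC1 move the
 * low 32 bits, DMFC1/DMTC1 move a full 64-bit FPR, MFHC1/MTHC1 move the
 * high half of a register pair, and CFC1/CTC1 access the FP control
 * registers through the cfc1/ctc1 helpers.
 */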
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    const char *opn = "cp1 move";
    TCGv t0 = tcg_temp_local_new();

            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32(fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(fp0, fs);
            tcg_temp_free_i32(fp0);
        gen_helper_1i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_helper_1i(ctc1, t0, fs);
            TCGv_i64 fp0 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_trunc_i64_tl(t0, fp0);
            tcg_temp_free_i64(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_extu_tl_i64(fp0, t0);
            gen_store_fpr64(ctx, fp0, fs);
            tcg_temp_free_i64(fp0);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            gen_load_fpr32h(fp0, fs);
            tcg_gen_ext_i32_tl(t0, fp0);
            tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32h(fp0, fs);
            tcg_temp_free_i32(fp0);
        generate_exception (ctx, EXCP_RI);
    MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
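/*
 * MOVF/MOVT and their FP-register forms are conditional moves on an FPU
 * condition code: the generators below AND fcr31 with the CC bit selected
 * by "cc", branch over the move when the condition does not match "tf",
 * and otherwise copy the source into the destination.  The paired-single
 * variant tests two CC bits, one for each 32-bit half.
 */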
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv_i32 r_tmp = tcg_temp_new_i32();

        ccbit = 1 << (24 + cc);
    gen_load_gpr(t0, rd);
    tcg_gen_andi_i32(r_tmp, fpu_fcr31, ccbit);
    tcg_gen_brcondi_i32(cond, r_tmp, 0, l1);
    tcg_temp_free_i32(r_tmp);
    gen_load_gpr(t0, rs);
    gen_store_gpr(t0, rd);

static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
    TCGv_i32 fp0 = tcg_temp_local_new_i32();
    int l1 = gen_new_label();

        ccbit = 1 << (24 + cc);
    gen_load_fpr32(fp0, fd);
    tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit);
    tcg_gen_brcondi_i32(cond, r_tmp1, 0, l1);
    tcg_temp_free_i32(r_tmp1);
    gen_load_fpr32(fp0, fs);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
    TCGv_i64 fp0 = tcg_temp_local_new_i64();
    int l1 = gen_new_label();

        ccbit = 1 << (24 + cc);
    gen_load_fpr64(ctx, fp0, fd);
    tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit);
    tcg_gen_brcondi_i32(cond, r_tmp1, 0, l1);
    tcg_temp_free_i32(r_tmp1);
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
    uint32_t ccbit1, ccbit2;

    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
    TCGv_i32 fp0 = tcg_temp_local_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

        ccbit1 = 1 << (24 + cc);
        ccbit2 = 1 << (25 + cc);
    gen_load_fpr32(fp0, fd);
    tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit1);
    tcg_gen_brcondi_i32(cond, r_tmp1, 0, l1);
    gen_load_fpr32(fp0, fs);
    gen_store_fpr32(fp0, fd);
    gen_load_fpr32h(fp0, fd);
    tcg_gen_andi_i32(r_tmp1, fpu_fcr31, ccbit2);
    tcg_gen_brcondi_i32(cond, r_tmp1, 0, l2);
    gen_load_fpr32h(fp0, fs);
    gen_store_fpr32h(fp0, fd);
    tcg_temp_free_i32(r_tmp1);
    tcg_temp_free_i32(fp0);
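/*
 * gen_farith handles the CP1 arithmetic and conversion operations.  The
 * FOP(func, fmt) macro above merges the 6-bit function field with the
 * format field (S, D, W, L, PS), so a single switch can select e.g.
 * add.s vs. add.d.  Single-precision ops work on TCGv_i32 temporaries,
 * double and paired-single ops on TCGv_i64, and operations that need
 * 64-bit FPRs first call check_cp1_registers()/check_cp1_64bitmode().
 */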
5895 static void gen_farith (DisasContext
*ctx
, uint32_t op1
,
5896 int ft
, int fs
, int fd
, int cc
)
5898 const char *opn
= "farith";
5899 const char *condnames
[] = {
5917 const char *condnames_abs
[] = {
5935 enum { BINOP
, CMPOP
, OTHEROP
} optype
= OTHEROP
;
5936 uint32_t func
= ctx
->opcode
& 0x3f;
5938 switch (ctx
->opcode
& FOP(0x3f, 0x1f)) {
5941 TCGv_i32 fp0
= tcg_temp_new_i32();
5942 TCGv_i32 fp1
= tcg_temp_new_i32();
5944 gen_load_fpr32(fp0
, fs
);
5945 gen_load_fpr32(fp1
, ft
);
5946 gen_helper_float_add_s(fp0
, fp0
, fp1
);
5947 tcg_temp_free_i32(fp1
);
5948 gen_store_fpr32(fp0
, fd
);
5949 tcg_temp_free_i32(fp0
);
5956 TCGv_i32 fp0
= tcg_temp_new_i32();
5957 TCGv_i32 fp1
= tcg_temp_new_i32();
5959 gen_load_fpr32(fp0
, fs
);
5960 gen_load_fpr32(fp1
, ft
);
5961 gen_helper_float_sub_s(fp0
, fp0
, fp1
);
5962 tcg_temp_free_i32(fp1
);
5963 gen_store_fpr32(fp0
, fd
);
5964 tcg_temp_free_i32(fp0
);
5971 TCGv_i32 fp0
= tcg_temp_new_i32();
5972 TCGv_i32 fp1
= tcg_temp_new_i32();
5974 gen_load_fpr32(fp0
, fs
);
5975 gen_load_fpr32(fp1
, ft
);
5976 gen_helper_float_mul_s(fp0
, fp0
, fp1
);
5977 tcg_temp_free_i32(fp1
);
5978 gen_store_fpr32(fp0
, fd
);
5979 tcg_temp_free_i32(fp0
);
5986 TCGv_i32 fp0
= tcg_temp_new_i32();
5987 TCGv_i32 fp1
= tcg_temp_new_i32();
5989 gen_load_fpr32(fp0
, fs
);
5990 gen_load_fpr32(fp1
, ft
);
5991 gen_helper_float_div_s(fp0
, fp0
, fp1
);
5992 tcg_temp_free_i32(fp1
);
5993 gen_store_fpr32(fp0
, fd
);
5994 tcg_temp_free_i32(fp0
);
6001 TCGv_i32 fp0
= tcg_temp_new_i32();
6003 gen_load_fpr32(fp0
, fs
);
6004 gen_helper_float_sqrt_s(fp0
, fp0
);
6005 gen_store_fpr32(fp0
, fd
);
6006 tcg_temp_free_i32(fp0
);
6012 TCGv_i32 fp0
= tcg_temp_new_i32();
6014 gen_load_fpr32(fp0
, fs
);
6015 gen_helper_float_abs_s(fp0
, fp0
);
6016 gen_store_fpr32(fp0
, fd
);
6017 tcg_temp_free_i32(fp0
);
6023 TCGv_i32 fp0
= tcg_temp_new_i32();
6025 gen_load_fpr32(fp0
, fs
);
6026 gen_store_fpr32(fp0
, fd
);
6027 tcg_temp_free_i32(fp0
);
6033 TCGv_i32 fp0
= tcg_temp_new_i32();
6035 gen_load_fpr32(fp0
, fs
);
6036 gen_helper_float_chs_s(fp0
, fp0
);
6037 gen_store_fpr32(fp0
, fd
);
6038 tcg_temp_free_i32(fp0
);
6043 check_cp1_64bitmode(ctx
);
6045 TCGv_i32 fp32
= tcg_temp_new_i32();
6046 TCGv_i64 fp64
= tcg_temp_new_i64();
6048 gen_load_fpr32(fp32
, fs
);
6049 gen_helper_float_roundl_s(fp64
, fp32
);
6050 tcg_temp_free_i32(fp32
);
6051 gen_store_fpr64(ctx
, fp64
, fd
);
6052 tcg_temp_free_i64(fp64
);
6057 check_cp1_64bitmode(ctx
);
6059 TCGv_i32 fp32
= tcg_temp_new_i32();
6060 TCGv_i64 fp64
= tcg_temp_new_i64();
6062 gen_load_fpr32(fp32
, fs
);
6063 gen_helper_float_truncl_s(fp64
, fp32
);
6064 tcg_temp_free_i32(fp32
);
6065 gen_store_fpr64(ctx
, fp64
, fd
);
6066 tcg_temp_free_i64(fp64
);
6071 check_cp1_64bitmode(ctx
);
6073 TCGv_i32 fp32
= tcg_temp_new_i32();
6074 TCGv_i64 fp64
= tcg_temp_new_i64();
6076 gen_load_fpr32(fp32
, fs
);
6077 gen_helper_float_ceill_s(fp64
, fp32
);
6078 tcg_temp_free_i32(fp32
);
6079 gen_store_fpr64(ctx
, fp64
, fd
);
6080 tcg_temp_free_i64(fp64
);
6085 check_cp1_64bitmode(ctx
);
6087 TCGv_i32 fp32
= tcg_temp_new_i32();
6088 TCGv_i64 fp64
= tcg_temp_new_i64();
6090 gen_load_fpr32(fp32
, fs
);
6091 gen_helper_float_floorl_s(fp64
, fp32
);
6092 tcg_temp_free_i32(fp32
);
6093 gen_store_fpr64(ctx
, fp64
, fd
);
6094 tcg_temp_free_i64(fp64
);
6100 TCGv_i32 fp0
= tcg_temp_new_i32();
6102 gen_load_fpr32(fp0
, fs
);
6103 gen_helper_float_roundw_s(fp0
, fp0
);
6104 gen_store_fpr32(fp0
, fd
);
6105 tcg_temp_free_i32(fp0
);
6111 TCGv_i32 fp0
= tcg_temp_new_i32();
6113 gen_load_fpr32(fp0
, fs
);
6114 gen_helper_float_truncw_s(fp0
, fp0
);
6115 gen_store_fpr32(fp0
, fd
);
6116 tcg_temp_free_i32(fp0
);
6122 TCGv_i32 fp0
= tcg_temp_new_i32();
6124 gen_load_fpr32(fp0
, fs
);
6125 gen_helper_float_ceilw_s(fp0
, fp0
);
6126 gen_store_fpr32(fp0
, fd
);
6127 tcg_temp_free_i32(fp0
);
6133 TCGv_i32 fp0
= tcg_temp_new_i32();
6135 gen_load_fpr32(fp0
, fs
);
6136 gen_helper_float_floorw_s(fp0
, fp0
);
6137 gen_store_fpr32(fp0
, fd
);
6138 tcg_temp_free_i32(fp0
);
6143 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6148 int l1
= gen_new_label();
6149 TCGv t0
= tcg_temp_new();
6150 TCGv_i32 fp0
= tcg_temp_local_new_i32();
6152 gen_load_gpr(t0
, ft
);
6153 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
6154 gen_load_fpr32(fp0
, fs
);
6155 gen_store_fpr32(fp0
, fd
);
6156 tcg_temp_free_i32(fp0
);
6164 int l1
= gen_new_label();
6165 TCGv t0
= tcg_temp_new();
6166 TCGv_i32 fp0
= tcg_temp_local_new_i32();
6168 gen_load_gpr(t0
, ft
);
6169 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
6170 gen_load_fpr32(fp0
, fs
);
6171 gen_store_fpr32(fp0
, fd
);
6172 tcg_temp_free_i32(fp0
);
6181 TCGv_i32 fp0
= tcg_temp_new_i32();
6183 gen_load_fpr32(fp0
, fs
);
6184 gen_helper_float_recip_s(fp0
, fp0
);
6185 gen_store_fpr32(fp0
, fd
);
6186 tcg_temp_free_i32(fp0
);
6193 TCGv_i32 fp0
= tcg_temp_new_i32();
6195 gen_load_fpr32(fp0
, fs
);
6196 gen_helper_float_rsqrt_s(fp0
, fp0
);
6197 gen_store_fpr32(fp0
, fd
);
6198 tcg_temp_free_i32(fp0
);
6203 check_cp1_64bitmode(ctx
);
6205 TCGv_i32 fp0
= tcg_temp_new_i32();
6206 TCGv_i32 fp1
= tcg_temp_new_i32();
6208 gen_load_fpr32(fp0
, fs
);
6209 gen_load_fpr32(fp1
, fd
);
6210 gen_helper_float_recip2_s(fp0
, fp0
, fp1
);
6211 tcg_temp_free_i32(fp1
);
6212 gen_store_fpr32(fp0
, fd
);
6213 tcg_temp_free_i32(fp0
);
6218 check_cp1_64bitmode(ctx
);
6220 TCGv_i32 fp0
= tcg_temp_new_i32();
6222 gen_load_fpr32(fp0
, fs
);
6223 gen_helper_float_recip1_s(fp0
, fp0
);
6224 gen_store_fpr32(fp0
, fd
);
6225 tcg_temp_free_i32(fp0
);
6230 check_cp1_64bitmode(ctx
);
6232 TCGv_i32 fp0
= tcg_temp_new_i32();
6234 gen_load_fpr32(fp0
, fs
);
6235 gen_helper_float_rsqrt1_s(fp0
, fp0
);
6236 gen_store_fpr32(fp0
, fd
);
6237 tcg_temp_free_i32(fp0
);
6242 check_cp1_64bitmode(ctx
);
6244 TCGv_i32 fp0
= tcg_temp_new_i32();
6245 TCGv_i32 fp1
= tcg_temp_new_i32();
6247 gen_load_fpr32(fp0
, fs
);
6248 gen_load_fpr32(fp1
, ft
);
6249 gen_helper_float_rsqrt2_s(fp0
, fp0
, fp1
);
6250 tcg_temp_free_i32(fp1
);
6251 gen_store_fpr32(fp0
, fd
);
6252 tcg_temp_free_i32(fp0
);
6257 check_cp1_registers(ctx
, fd
);
6259 TCGv_i32 fp32
= tcg_temp_new_i32();
6260 TCGv_i64 fp64
= tcg_temp_new_i64();
6262 gen_load_fpr32(fp32
, fs
);
6263 gen_helper_float_cvtd_s(fp64
, fp32
);
6264 tcg_temp_free_i32(fp32
);
6265 gen_store_fpr64(ctx
, fp64
, fd
);
6266 tcg_temp_free_i64(fp64
);
6272 TCGv_i32 fp0
= tcg_temp_new_i32();
6274 gen_load_fpr32(fp0
, fs
);
6275 gen_helper_float_cvtw_s(fp0
, fp0
);
6276 gen_store_fpr32(fp0
, fd
);
6277 tcg_temp_free_i32(fp0
);
6282 check_cp1_64bitmode(ctx
);
6284 TCGv_i32 fp32
= tcg_temp_new_i32();
6285 TCGv_i64 fp64
= tcg_temp_new_i64();
6287 gen_load_fpr32(fp32
, fs
);
6288 gen_helper_float_cvtl_s(fp64
, fp32
);
6289 tcg_temp_free_i32(fp32
);
6290 gen_store_fpr64(ctx
, fp64
, fd
);
6291 tcg_temp_free_i64(fp64
);
6296 check_cp1_64bitmode(ctx
);
6298 TCGv_i64 fp64
= tcg_temp_new_i64();
6299 TCGv_i32 fp32_0
= tcg_temp_new_i32();
6300 TCGv_i32 fp32_1
= tcg_temp_new_i32();
6302 gen_load_fpr32(fp32_0
, fs
);
6303 gen_load_fpr32(fp32_1
, ft
);
6304 tcg_gen_concat_i32_i64(fp64
, fp32_0
, fp32_1
);
6305 tcg_temp_free_i32(fp32_1
);
6306 tcg_temp_free_i32(fp32_0
);
6307 gen_store_fpr64(ctx
, fp64
, fd
);
6308 tcg_temp_free_i64(fp64
);
6329 TCGv_i32 fp0
= tcg_temp_new_i32();
6330 TCGv_i32 fp1
= tcg_temp_new_i32();
6332 gen_load_fpr32(fp0
, fs
);
6333 gen_load_fpr32(fp1
, ft
);
6334 if (ctx
->opcode
& (1 << 6)) {
6336 gen_cmpabs_s(func
-48, fp0
, fp1
, cc
);
6337 opn
= condnames_abs
[func
-48];
6339 gen_cmp_s(func
-48, fp0
, fp1
, cc
);
6340 opn
= condnames
[func
-48];
6342 tcg_temp_free_i32(fp0
);
6343 tcg_temp_free_i32(fp1
);
6347 check_cp1_registers(ctx
, fs
| ft
| fd
);
6349 TCGv_i64 fp0
= tcg_temp_new_i64();
6350 TCGv_i64 fp1
= tcg_temp_new_i64();
6352 gen_load_fpr64(ctx
, fp0
, fs
);
6353 gen_load_fpr64(ctx
, fp1
, ft
);
6354 gen_helper_float_add_d(fp0
, fp0
, fp1
);
6355 tcg_temp_free_i64(fp1
);
6356 gen_store_fpr64(ctx
, fp0
, fd
);
6357 tcg_temp_free_i64(fp0
);
6363 check_cp1_registers(ctx
, fs
| ft
| fd
);
6365 TCGv_i64 fp0
= tcg_temp_new_i64();
6366 TCGv_i64 fp1
= tcg_temp_new_i64();
6368 gen_load_fpr64(ctx
, fp0
, fs
);
6369 gen_load_fpr64(ctx
, fp1
, ft
);
6370 gen_helper_float_sub_d(fp0
, fp0
, fp1
);
6371 tcg_temp_free_i64(fp1
);
6372 gen_store_fpr64(ctx
, fp0
, fd
);
6373 tcg_temp_free_i64(fp0
);
6379 check_cp1_registers(ctx
, fs
| ft
| fd
);
6381 TCGv_i64 fp0
= tcg_temp_new_i64();
6382 TCGv_i64 fp1
= tcg_temp_new_i64();
6384 gen_load_fpr64(ctx
, fp0
, fs
);
6385 gen_load_fpr64(ctx
, fp1
, ft
);
6386 gen_helper_float_mul_d(fp0
, fp0
, fp1
);
6387 tcg_temp_free_i64(fp1
);
6388 gen_store_fpr64(ctx
, fp0
, fd
);
6389 tcg_temp_free_i64(fp0
);
6395 check_cp1_registers(ctx
, fs
| ft
| fd
);
6397 TCGv_i64 fp0
= tcg_temp_new_i64();
6398 TCGv_i64 fp1
= tcg_temp_new_i64();
6400 gen_load_fpr64(ctx
, fp0
, fs
);
6401 gen_load_fpr64(ctx
, fp1
, ft
);
6402 gen_helper_float_div_d(fp0
, fp0
, fp1
);
6403 tcg_temp_free_i64(fp1
);
6404 gen_store_fpr64(ctx
, fp0
, fd
);
6405 tcg_temp_free_i64(fp0
);
6411 check_cp1_registers(ctx
, fs
| fd
);
6413 TCGv_i64 fp0
= tcg_temp_new_i64();
6415 gen_load_fpr64(ctx
, fp0
, fs
);
6416 gen_helper_float_sqrt_d(fp0
, fp0
);
6417 gen_store_fpr64(ctx
, fp0
, fd
);
6418 tcg_temp_free_i64(fp0
);
6423 check_cp1_registers(ctx
, fs
| fd
);
6425 TCGv_i64 fp0
= tcg_temp_new_i64();
6427 gen_load_fpr64(ctx
, fp0
, fs
);
6428 gen_helper_float_abs_d(fp0
, fp0
);
6429 gen_store_fpr64(ctx
, fp0
, fd
);
6430 tcg_temp_free_i64(fp0
);
6435 check_cp1_registers(ctx
, fs
| fd
);
6437 TCGv_i64 fp0
= tcg_temp_new_i64();
6439 gen_load_fpr64(ctx
, fp0
, fs
);
6440 gen_store_fpr64(ctx
, fp0
, fd
);
6441 tcg_temp_free_i64(fp0
);
6446 check_cp1_registers(ctx
, fs
| fd
);
6448 TCGv_i64 fp0
= tcg_temp_new_i64();
6450 gen_load_fpr64(ctx
, fp0
, fs
);
6451 gen_helper_float_chs_d(fp0
, fp0
);
6452 gen_store_fpr64(ctx
, fp0
, fd
);
6453 tcg_temp_free_i64(fp0
);
6458 check_cp1_64bitmode(ctx
);
6460 TCGv_i64 fp0
= tcg_temp_new_i64();
6462 gen_load_fpr64(ctx
, fp0
, fs
);
6463 gen_helper_float_roundl_d(fp0
, fp0
);
6464 gen_store_fpr64(ctx
, fp0
, fd
);
6465 tcg_temp_free_i64(fp0
);
6470 check_cp1_64bitmode(ctx
);
6472 TCGv_i64 fp0
= tcg_temp_new_i64();
6474 gen_load_fpr64(ctx
, fp0
, fs
);
6475 gen_helper_float_truncl_d(fp0
, fp0
);
6476 gen_store_fpr64(ctx
, fp0
, fd
);
6477 tcg_temp_free_i64(fp0
);
6482 check_cp1_64bitmode(ctx
);
6484 TCGv_i64 fp0
= tcg_temp_new_i64();
6486 gen_load_fpr64(ctx
, fp0
, fs
);
6487 gen_helper_float_ceill_d(fp0
, fp0
);
6488 gen_store_fpr64(ctx
, fp0
, fd
);
6489 tcg_temp_free_i64(fp0
);
6494 check_cp1_64bitmode(ctx
);
6496 TCGv_i64 fp0
= tcg_temp_new_i64();
6498 gen_load_fpr64(ctx
, fp0
, fs
);
6499 gen_helper_float_floorl_d(fp0
, fp0
);
6500 gen_store_fpr64(ctx
, fp0
, fd
);
6501 tcg_temp_free_i64(fp0
);
6506 check_cp1_registers(ctx
, fs
);
6508 TCGv_i32 fp32
= tcg_temp_new_i32();
6509 TCGv_i64 fp64
= tcg_temp_new_i64();
6511 gen_load_fpr64(ctx
, fp64
, fs
);
6512 gen_helper_float_roundw_d(fp32
, fp64
);
6513 tcg_temp_free_i64(fp64
);
6514 gen_store_fpr32(fp32
, fd
);
6515 tcg_temp_free_i32(fp32
);
6520 check_cp1_registers(ctx
, fs
);
6522 TCGv_i32 fp32
= tcg_temp_new_i32();
6523 TCGv_i64 fp64
= tcg_temp_new_i64();
6525 gen_load_fpr64(ctx
, fp64
, fs
);
6526 gen_helper_float_truncw_d(fp32
, fp64
);
6527 tcg_temp_free_i64(fp64
);
6528 gen_store_fpr32(fp32
, fd
);
6529 tcg_temp_free_i32(fp32
);
6534 check_cp1_registers(ctx
, fs
);
6536 TCGv_i32 fp32
= tcg_temp_new_i32();
6537 TCGv_i64 fp64
= tcg_temp_new_i64();
6539 gen_load_fpr64(ctx
, fp64
, fs
);
6540 gen_helper_float_ceilw_d(fp32
, fp64
);
6541 tcg_temp_free_i64(fp64
);
6542 gen_store_fpr32(fp32
, fd
);
6543 tcg_temp_free_i32(fp32
);
6548 check_cp1_registers(ctx
, fs
);
6550 TCGv_i32 fp32
= tcg_temp_new_i32();
6551 TCGv_i64 fp64
= tcg_temp_new_i64();
6553 gen_load_fpr64(ctx
, fp64
, fs
);
6554 gen_helper_float_floorw_d(fp32
, fp64
);
6555 tcg_temp_free_i64(fp64
);
6556 gen_store_fpr32(fp32
, fd
);
6557 tcg_temp_free_i32(fp32
);
6562 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6567 int l1 = gen_new_label();
6568 TCGv t0 = tcg_temp_new();
6569 TCGv_i64 fp0 = tcg_temp_local_new_i64();
6571 gen_load_gpr(t0, ft);
6572 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
6573 gen_load_fpr64(ctx, fp0, fs);
6574 gen_store_fpr64(ctx, fp0, fd);
6575 tcg_temp_free_i64(fp0);
6583 int l1 = gen_new_label();
6584 TCGv t0 = tcg_temp_new();
6585 TCGv_i64 fp0 = tcg_temp_local_new_i64();
6587 gen_load_gpr(t0, ft);
6588 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6589 gen_load_fpr64(ctx, fp0, fs);
6590 gen_store_fpr64(ctx, fp0, fd);
6591 tcg_temp_free_i64(fp0);
6598 check_cp1_64bitmode(ctx);
6600 TCGv_i64 fp0 = tcg_temp_new_i64();
6602 gen_load_fpr64(ctx, fp0, fs);
6603 gen_helper_float_recip_d(fp0, fp0);
6604 gen_store_fpr64(ctx, fp0, fd);
6605 tcg_temp_free_i64(fp0);
6610 check_cp1_64bitmode(ctx);
6612 TCGv_i64 fp0 = tcg_temp_new_i64();
6614 gen_load_fpr64(ctx, fp0, fs);
6615 gen_helper_float_rsqrt_d(fp0, fp0);
6616 gen_store_fpr64(ctx, fp0, fd);
6617 tcg_temp_free_i64(fp0);
6622 check_cp1_64bitmode(ctx);
6624 TCGv_i64 fp0 = tcg_temp_new_i64();
6625 TCGv_i64 fp1 = tcg_temp_new_i64();
6627 gen_load_fpr64(ctx, fp0, fs);
6628 gen_load_fpr64(ctx, fp1, ft);
6629 gen_helper_float_recip2_d(fp0, fp0, fp1);
6630 tcg_temp_free_i64(fp1);
6631 gen_store_fpr64(ctx, fp0, fd);
6632 tcg_temp_free_i64(fp0);
6637 check_cp1_64bitmode(ctx);
6639 TCGv_i64 fp0 = tcg_temp_new_i64();
6641 gen_load_fpr64(ctx, fp0, fs);
6642 gen_helper_float_recip1_d(fp0, fp0);
6643 gen_store_fpr64(ctx, fp0, fd);
6644 tcg_temp_free_i64(fp0);
6649 check_cp1_64bitmode(ctx);
6651 TCGv_i64 fp0 = tcg_temp_new_i64();
6653 gen_load_fpr64(ctx, fp0, fs);
6654 gen_helper_float_rsqrt1_d(fp0, fp0);
6655 gen_store_fpr64(ctx, fp0, fd);
6656 tcg_temp_free_i64(fp0);
6661 check_cp1_64bitmode(ctx);
6663 TCGv_i64 fp0 = tcg_temp_new_i64();
6664 TCGv_i64 fp1 = tcg_temp_new_i64();
6666 gen_load_fpr64(ctx, fp0, fs);
6667 gen_load_fpr64(ctx, fp1, ft);
6668 gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
6669 tcg_temp_free_i64(fp1);
6670 gen_store_fpr64(ctx, fp0, fd);
6671 tcg_temp_free_i64(fp0);
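/* C.cond.D below: func-48 indexes the 16 FP compare conditions; bit 6 of
   the opcode selects the CABS (absolute-value) compare variants, and the
   result is recorded in condition code cc. */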
6692 TCGv_i64 fp0 = tcg_temp_new_i64();
6693 TCGv_i64 fp1 = tcg_temp_new_i64();
6695 gen_load_fpr64(ctx, fp0, fs);
6696 gen_load_fpr64(ctx, fp1, ft);
6697 if (ctx->opcode & (1 << 6)) {
6699 check_cp1_registers(ctx, fs | ft);
6700 gen_cmpabs_d(func-48, fp0, fp1, cc);
6701 opn = condnames_abs[func-48];
6703 check_cp1_registers(ctx, fs | ft);
6704 gen_cmp_d(func-48, fp0, fp1, cc);
6705 opn = condnames[func-48];
6707 tcg_temp_free_i64(fp0);
6708 tcg_temp_free_i64(fp1);
6712 check_cp1_registers(ctx, fs);
6714 TCGv_i32 fp32 = tcg_temp_new_i32();
6715 TCGv_i64 fp64 = tcg_temp_new_i64();
6717 gen_load_fpr64(ctx, fp64, fs);
6718 gen_helper_float_cvts_d(fp32, fp64);
6719 tcg_temp_free_i64(fp64);
6720 gen_store_fpr32(fp32, fd);
6721 tcg_temp_free_i32(fp32);
6726 check_cp1_registers(ctx, fs);
6728 TCGv_i32 fp32 = tcg_temp_new_i32();
6729 TCGv_i64 fp64 = tcg_temp_new_i64();
6731 gen_load_fpr64(ctx, fp64, fs);
6732 gen_helper_float_cvtw_d(fp32, fp64);
6733 tcg_temp_free_i64(fp64);
6734 gen_store_fpr32(fp32, fd);
6735 tcg_temp_free_i32(fp32);
6740 check_cp1_64bitmode(ctx);
6742 TCGv_i64 fp0 = tcg_temp_new_i64();
6744 gen_load_fpr64(ctx, fp0, fs);
6745 gen_helper_float_cvtl_d(fp0, fp0);
6746 gen_store_fpr64(ctx, fp0, fd);
6747 tcg_temp_free_i64(fp0);
6753 TCGv_i32 fp0 = tcg_temp_new_i32();
6755 gen_load_fpr32(fp0, fs);
6756 gen_helper_float_cvts_w(fp0, fp0);
6757 gen_store_fpr32(fp0, fd);
6758 tcg_temp_free_i32(fp0);
6763 check_cp1_registers(ctx, fd);
6765 TCGv_i32 fp32 = tcg_temp_new_i32();
6766 TCGv_i64 fp64 = tcg_temp_new_i64();
6768 gen_load_fpr32(fp32, fs);
6769 gen_helper_float_cvtd_w(fp64, fp32);
6770 tcg_temp_free_i32(fp32);
6771 gen_store_fpr64(ctx, fp64, fd);
6772 tcg_temp_free_i64(fp64);
6777 check_cp1_64bitmode(ctx);
6779 TCGv_i32 fp32 = tcg_temp_new_i32();
6780 TCGv_i64 fp64 = tcg_temp_new_i64();
6782 gen_load_fpr64(ctx, fp64, fs);
6783 gen_helper_float_cvts_l(fp32, fp64);
6784 tcg_temp_free_i64(fp64);
6785 gen_store_fpr32(fp32, fd);
6786 tcg_temp_free_i32(fp32);
6791 check_cp1_64bitmode(ctx);
6793 TCGv_i64 fp0 = tcg_temp_new_i64();
6795 gen_load_fpr64(ctx, fp0, fs);
6796 gen_helper_float_cvtd_l(fp0, fp0);
6797 gen_store_fpr64(ctx, fp0, fd);
6798 tcg_temp_free_i64(fp0);
6803 check_cp1_64bitmode(ctx);
6805 TCGv_i64 fp0 = tcg_temp_new_i64();
6807 gen_load_fpr64(ctx, fp0, fs);
6808 gen_helper_float_cvtps_pw(fp0, fp0);
6809 gen_store_fpr64(ctx, fp0, fd);
6810 tcg_temp_free_i64(fp0);
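/* Paired-single (PS) format: one 64-bit FPR holds two single-precision
   values, so the PS cases below require 64-bit FPU mode and use the
   fpr32/fpr32h accessors to reach the low and high halves. */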
6815 check_cp1_64bitmode(ctx);
6817 TCGv_i64 fp0 = tcg_temp_new_i64();
6818 TCGv_i64 fp1 = tcg_temp_new_i64();
6820 gen_load_fpr64(ctx, fp0, fs);
6821 gen_load_fpr64(ctx, fp1, ft);
6822 gen_helper_float_add_ps(fp0, fp0, fp1);
6823 tcg_temp_free_i64(fp1);
6824 gen_store_fpr64(ctx, fp0, fd);
6825 tcg_temp_free_i64(fp0);
6830 check_cp1_64bitmode(ctx);
6832 TCGv_i64 fp0 = tcg_temp_new_i64();
6833 TCGv_i64 fp1 = tcg_temp_new_i64();
6835 gen_load_fpr64(ctx, fp0, fs);
6836 gen_load_fpr64(ctx, fp1, ft);
6837 gen_helper_float_sub_ps(fp0, fp0, fp1);
6838 tcg_temp_free_i64(fp1);
6839 gen_store_fpr64(ctx, fp0, fd);
6840 tcg_temp_free_i64(fp0);
6845 check_cp1_64bitmode(ctx);
6847 TCGv_i64 fp0 = tcg_temp_new_i64();
6848 TCGv_i64 fp1 = tcg_temp_new_i64();
6850 gen_load_fpr64(ctx, fp0, fs);
6851 gen_load_fpr64(ctx, fp1, ft);
6852 gen_helper_float_mul_ps(fp0, fp0, fp1);
6853 tcg_temp_free_i64(fp1);
6854 gen_store_fpr64(ctx, fp0, fd);
6855 tcg_temp_free_i64(fp0);
6860 check_cp1_64bitmode(ctx);
6862 TCGv_i64 fp0 = tcg_temp_new_i64();
6864 gen_load_fpr64(ctx, fp0, fs);
6865 gen_helper_float_abs_ps(fp0, fp0);
6866 gen_store_fpr64(ctx, fp0, fd);
6867 tcg_temp_free_i64(fp0);
6872 check_cp1_64bitmode(ctx);
6874 TCGv_i64 fp0 = tcg_temp_new_i64();
6876 gen_load_fpr64(ctx, fp0, fs);
6877 gen_store_fpr64(ctx, fp0, fd);
6878 tcg_temp_free_i64(fp0);
6883 check_cp1_64bitmode(ctx);
6885 TCGv_i64 fp0 = tcg_temp_new_i64();
6887 gen_load_fpr64(ctx, fp0, fs);
6888 gen_helper_float_chs_ps(fp0, fp0);
6889 gen_store_fpr64(ctx, fp0, fd);
6890 tcg_temp_free_i64(fp0);
6895 check_cp1_64bitmode(ctx);
6896 gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6900 check_cp1_64bitmode(ctx);
6902 int l1 = gen_new_label();
6903 TCGv t0 = tcg_temp_new();
6904 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6905 TCGv_i32 fph0 = tcg_temp_local_new_i32();
6907 gen_load_gpr(t0, ft);
6908 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
6909 gen_load_fpr32(fp0, fs);
6910 gen_load_fpr32h(fph0, fs);
6911 gen_store_fpr32(fp0, fd);
6912 gen_store_fpr32h(fph0, fd);
6913 tcg_temp_free_i32(fp0);
6914 tcg_temp_free_i32(fph0);
6921 check_cp1_64bitmode(ctx);
6923 int l1 = gen_new_label();
6924 TCGv t0 = tcg_temp_new();
6925 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6926 TCGv_i32 fph0 = tcg_temp_local_new_i32();
6928 gen_load_gpr(t0, ft);
6929 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6930 gen_load_fpr32(fp0, fs);
6931 gen_load_fpr32h(fph0, fs);
6932 gen_store_fpr32(fp0, fd);
6933 gen_store_fpr32h(fph0, fd);
6934 tcg_temp_free_i32(fp0);
6935 tcg_temp_free_i32(fph0);
6942 check_cp1_64bitmode(ctx);
6944 TCGv_i64 fp0 = tcg_temp_new_i64();
6945 TCGv_i64 fp1 = tcg_temp_new_i64();
6947 gen_load_fpr64(ctx, fp0, ft);
6948 gen_load_fpr64(ctx, fp1, fs);
6949 gen_helper_float_addr_ps(fp0, fp0, fp1);
6950 tcg_temp_free_i64(fp1);
6951 gen_store_fpr64(ctx, fp0, fd);
6952 tcg_temp_free_i64(fp0);
6957 check_cp1_64bitmode(ctx);
6959 TCGv_i64 fp0 = tcg_temp_new_i64();
6960 TCGv_i64 fp1 = tcg_temp_new_i64();
6962 gen_load_fpr64(ctx, fp0, ft);
6963 gen_load_fpr64(ctx, fp1, fs);
6964 gen_helper_float_mulr_ps(fp0, fp0, fp1);
6965 tcg_temp_free_i64(fp1);
6966 gen_store_fpr64(ctx, fp0, fd);
6967 tcg_temp_free_i64(fp0);
6972 check_cp1_64bitmode(ctx);
6974 TCGv_i64 fp0 = tcg_temp_new_i64();
6975 TCGv_i64 fp1 = tcg_temp_new_i64();
6977 gen_load_fpr64(ctx, fp0, fs);
6978 gen_load_fpr64(ctx, fp1, fd);
6979 gen_helper_float_recip2_ps(fp0, fp0, fp1);
6980 tcg_temp_free_i64(fp1);
6981 gen_store_fpr64(ctx, fp0, fd);
6982 tcg_temp_free_i64(fp0);
6987 check_cp1_64bitmode(ctx);
6989 TCGv_i64 fp0 = tcg_temp_new_i64();
6991 gen_load_fpr64(ctx, fp0, fs);
6992 gen_helper_float_recip1_ps(fp0, fp0);
6993 gen_store_fpr64(ctx, fp0, fd);
6994 tcg_temp_free_i64(fp0);
6999 check_cp1_64bitmode(ctx);
7001 TCGv_i64 fp0 = tcg_temp_new_i64();
7003 gen_load_fpr64(ctx, fp0, fs);
7004 gen_helper_float_rsqrt1_ps(fp0, fp0);
7005 gen_store_fpr64(ctx, fp0, fd);
7006 tcg_temp_free_i64(fp0);
7011 check_cp1_64bitmode(ctx);
7013 TCGv_i64 fp0 = tcg_temp_new_i64();
7014 TCGv_i64 fp1 = tcg_temp_new_i64();
7016 gen_load_fpr64(ctx, fp0, fs);
7017 gen_load_fpr64(ctx, fp1, ft);
7018 gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
7019 tcg_temp_free_i64(fp1);
7020 gen_store_fpr64(ctx, fp0, fd);
7021 tcg_temp_free_i64(fp0);
7026 check_cp1_64bitmode(ctx);
7028 TCGv_i32 fp0 = tcg_temp_new_i32();
7030 gen_load_fpr32h(fp0, fs);
7031 gen_helper_float_cvts_pu(fp0, fp0);
7032 gen_store_fpr32(fp0, fd);
7033 tcg_temp_free_i32(fp0);
7038 check_cp1_64bitmode(ctx);
7040 TCGv_i64 fp0 = tcg_temp_new_i64();
7042 gen_load_fpr64(ctx, fp0, fs);
7043 gen_helper_float_cvtpw_ps(fp0, fp0);
7044 gen_store_fpr64(ctx, fp0, fd);
7045 tcg_temp_free_i64(fp0);
7050 check_cp1_64bitmode(ctx);
7052 TCGv_i32 fp0 = tcg_temp_new_i32();
7054 gen_load_fpr32(fp0, fs);
7055 gen_helper_float_cvts_pl(fp0, fp0);
7056 gen_store_fpr32(fp0, fd);
7057 tcg_temp_free_i32(fp0);
7062 check_cp1_64bitmode(ctx);
7064 TCGv_i32 fp0 = tcg_temp_new_i32();
7065 TCGv_i32 fp1 = tcg_temp_new_i32();
7067 gen_load_fpr32(fp0, fs);
7068 gen_load_fpr32(fp1, ft);
7069 gen_store_fpr32h(fp0, fd);
7070 gen_store_fpr32(fp1, fd);
7071 tcg_temp_free_i32(fp0);
7072 tcg_temp_free_i32(fp1);
7077 check_cp1_64bitmode(ctx);
7079 TCGv_i32 fp0 = tcg_temp_new_i32();
7080 TCGv_i32 fp1 = tcg_temp_new_i32();
7082 gen_load_fpr32(fp0, fs);
7083 gen_load_fpr32h(fp1, ft);
7084 gen_store_fpr32(fp1, fd);
7085 gen_store_fpr32h(fp0, fd);
7086 tcg_temp_free_i32(fp0);
7087 tcg_temp_free_i32(fp1);
7092 check_cp1_64bitmode(ctx);
7094 TCGv_i32 fp0 = tcg_temp_new_i32();
7095 TCGv_i32 fp1 = tcg_temp_new_i32();
7097 gen_load_fpr32h(fp0, fs);
7098 gen_load_fpr32(fp1, ft);
7099 gen_store_fpr32(fp1, fd);
7100 gen_store_fpr32h(fp0, fd);
7101 tcg_temp_free_i32(fp0);
7102 tcg_temp_free_i32(fp1);
7107 check_cp1_64bitmode(ctx);
7109 TCGv_i32 fp0 = tcg_temp_new_i32();
7110 TCGv_i32 fp1 = tcg_temp_new_i32();
7112 gen_load_fpr32h(fp0, fs);
7113 gen_load_fpr32h(fp1, ft);
7114 gen_store_fpr32(fp1, fd);
7115 gen_store_fpr32h(fp0, fd);
7116 tcg_temp_free_i32(fp0);
7117 tcg_temp_free_i32(fp1);
7137 check_cp1_64bitmode(ctx);
7139 TCGv_i64 fp0 = tcg_temp_new_i64();
7140 TCGv_i64 fp1 = tcg_temp_new_i64();
7142 gen_load_fpr64(ctx, fp0, fs);
7143 gen_load_fpr64(ctx, fp1, ft);
7144 if (ctx->opcode & (1 << 6)) {
7145 gen_cmpabs_ps(func-48, fp0, fp1, cc);
7146 opn = condnames_abs[func-48];
7148 gen_cmp_ps(func-48, fp0, fp1, cc);
7149 opn = condnames[func-48];
7151 tcg_temp_free_i64(fp0);
7152 tcg_temp_free_i64(fp1);
7157 generate_exception (ctx, EXCP_RI);
7162 MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
7165 MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
7168 MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
7173 /* Coprocessor 3 (FPU) */
7174 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
7175 int fd, int fs, int base, int index)
7177 const char *opn = "extended float load/store";
7179 TCGv t0 = tcg_temp_local_new();
7180 TCGv t1 = tcg_temp_local_new();
7183 gen_load_gpr(t0, index);
7184 } else if (index == 0) {
7185 gen_load_gpr(t0, base);
7187 gen_load_gpr(t0, index);
7188 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
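/* Indexed FPU load/store: the effective address is GPR[base] + GPR[index],
   with base == 0 using the index register alone and index == 0 using the
   base register alone. The LUXC1/SUXC1 cases further down clear the low
   three address bits to force 8-byte alignment. */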
7190 /* Don't do NOP if destination is zero: we must perform the actual
7196 TCGv_i32 fp0 = tcg_temp_new_i32();
7198 tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
7199 tcg_gen_trunc_tl_i32(fp0, t1);
7200 gen_store_fpr32(fp0, fd);
7201 tcg_temp_free_i32(fp0);
7207 check_cp1_registers(ctx, fd);
7209 TCGv_i64 fp0 = tcg_temp_new_i64();
7211 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7212 gen_store_fpr64(ctx, fp0, fd);
7213 tcg_temp_free_i64(fp0);
7218 check_cp1_64bitmode(ctx);
7219 tcg_gen_andi_tl(t0, t0, ~0x7);
7221 TCGv_i64 fp0 = tcg_temp_new_i64();
7223 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7224 gen_store_fpr64(ctx, fp0, fd);
7225 tcg_temp_free_i64(fp0);
7232 TCGv_i32 fp0 = tcg_temp_new_i32();
7234 gen_load_fpr32(fp0, fs);
7235 tcg_gen_extu_i32_tl(t1, fp0);
7236 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
7237 tcg_temp_free_i32(fp0);
7244 check_cp1_registers(ctx, fs);
7246 TCGv_i64 fp0 = tcg_temp_new_i64();
7248 gen_load_fpr64(ctx, fp0, fs);
7249 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7250 tcg_temp_free_i64(fp0);
7256 check_cp1_64bitmode(ctx);
7257 tcg_gen_andi_tl(t0, t0, ~0x7);
7259 TCGv_i64 fp0 = tcg_temp_new_i64();
7261 gen_load_fpr64(ctx, fp0, fs);
7262 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7263 tcg_temp_free_i64(fp0);
7270 generate_exception(ctx, EXCP_RI);
7277 MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
7278 regnames[index], regnames[base]);
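/* gen_flt3_arith handles the four-operand COP1X group. For ALNV.PS the low
   bits of GPR[fr] select the alignment: 0 copies fs to fd unchanged, 4
   merges halves of fs and ft (order depending on TARGET_WORDS_BIGENDIAN),
   and any other value skips the stores via labels l1/l2. */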
7281 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
7282 int fd, int fr, int fs, int ft)
7284 const char *opn = "flt3_arith";
7288 check_cp1_64bitmode(ctx);
7290 TCGv t0 = tcg_temp_local_new();
7291 TCGv_i32 fp0 = tcg_temp_local_new_i32();
7292 TCGv_i32 fph0 = tcg_temp_local_new_i32();
7293 TCGv_i32 fp1 = tcg_temp_local_new_i32();
7294 TCGv_i32 fph1 = tcg_temp_local_new_i32();
7295 int l1 = gen_new_label();
7296 int l2 = gen_new_label();
7298 gen_load_gpr(t0, fr);
7299 tcg_gen_andi_tl(t0, t0, 0x7);
7300 gen_load_fpr32(fp0, fs);
7301 gen_load_fpr32h(fph0, fs);
7302 gen_load_fpr32(fp1, ft);
7303 gen_load_fpr32h(fph1, ft);
7305 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
7306 gen_store_fpr32(fp0, fd);
7307 gen_store_fpr32h(fph0, fd);
7310 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
7312 #ifdef TARGET_WORDS_BIGENDIAN
7313 gen_store_fpr32(fph1, fd);
7314 gen_store_fpr32h(fp0, fd);
7316 gen_store_fpr32(fph0, fd);
7317 gen_store_fpr32h(fp1, fd);
7320 tcg_temp_free_i32(fp0);
7321 tcg_temp_free_i32(fph0);
7322 tcg_temp_free_i32(fp1);
7323 tcg_temp_free_i32(fph1);
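/* MADD/MSUB/NMADD/NMSUB: the fused multiply-add variants below all follow
   the same pattern -- load fs, ft and fr, call the matching helper, store
   the result in fd. The .D forms call check_cp1_registers() and the .PS
   forms require 64-bit FPU mode. */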
7330 TCGv_i32 fp0 = tcg_temp_new_i32();
7331 TCGv_i32 fp1 = tcg_temp_new_i32();
7332 TCGv_i32 fp2 = tcg_temp_new_i32();
7334 gen_load_fpr32(fp0, fs);
7335 gen_load_fpr32(fp1, ft);
7336 gen_load_fpr32(fp2, fr);
7337 gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
7338 tcg_temp_free_i32(fp0);
7339 tcg_temp_free_i32(fp1);
7340 gen_store_fpr32(fp2, fd);
7341 tcg_temp_free_i32(fp2);
7347 check_cp1_registers(ctx, fd | fs | ft | fr);
7349 TCGv_i64 fp0 = tcg_temp_new_i64();
7350 TCGv_i64 fp1 = tcg_temp_new_i64();
7351 TCGv_i64 fp2 = tcg_temp_new_i64();
7353 gen_load_fpr64(ctx, fp0, fs);
7354 gen_load_fpr64(ctx, fp1, ft);
7355 gen_load_fpr64(ctx, fp2, fr);
7356 gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
7357 tcg_temp_free_i64(fp0);
7358 tcg_temp_free_i64(fp1);
7359 gen_store_fpr64(ctx, fp2, fd);
7360 tcg_temp_free_i64(fp2);
7365 check_cp1_64bitmode(ctx);
7367 TCGv_i64 fp0 = tcg_temp_new_i64();
7368 TCGv_i64 fp1 = tcg_temp_new_i64();
7369 TCGv_i64 fp2 = tcg_temp_new_i64();
7371 gen_load_fpr64(ctx, fp0, fs);
7372 gen_load_fpr64(ctx, fp1, ft);
7373 gen_load_fpr64(ctx, fp2, fr);
7374 gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
7375 tcg_temp_free_i64(fp0);
7376 tcg_temp_free_i64(fp1);
7377 gen_store_fpr64(ctx, fp2, fd);
7378 tcg_temp_free_i64(fp2);
7385 TCGv_i32 fp0 = tcg_temp_new_i32();
7386 TCGv_i32 fp1 = tcg_temp_new_i32();
7387 TCGv_i32 fp2 = tcg_temp_new_i32();
7389 gen_load_fpr32(fp0, fs);
7390 gen_load_fpr32(fp1, ft);
7391 gen_load_fpr32(fp2, fr);
7392 gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
7393 tcg_temp_free_i32(fp0);
7394 tcg_temp_free_i32(fp1);
7395 gen_store_fpr32(fp2, fd);
7396 tcg_temp_free_i32(fp2);
7402 check_cp1_registers(ctx, fd | fs | ft | fr);
7404 TCGv_i64 fp0 = tcg_temp_new_i64();
7405 TCGv_i64 fp1 = tcg_temp_new_i64();
7406 TCGv_i64 fp2 = tcg_temp_new_i64();
7408 gen_load_fpr64(ctx, fp0, fs);
7409 gen_load_fpr64(ctx, fp1, ft);
7410 gen_load_fpr64(ctx, fp2, fr);
7411 gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
7412 tcg_temp_free_i64(fp0);
7413 tcg_temp_free_i64(fp1);
7414 gen_store_fpr64(ctx, fp2, fd);
7415 tcg_temp_free_i64(fp2);
7420 check_cp1_64bitmode(ctx);
7422 TCGv_i64 fp0 = tcg_temp_new_i64();
7423 TCGv_i64 fp1 = tcg_temp_new_i64();
7424 TCGv_i64 fp2 = tcg_temp_new_i64();
7426 gen_load_fpr64(ctx, fp0, fs);
7427 gen_load_fpr64(ctx, fp1, ft);
7428 gen_load_fpr64(ctx, fp2, fr);
7429 gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
7430 tcg_temp_free_i64(fp0);
7431 tcg_temp_free_i64(fp1);
7432 gen_store_fpr64(ctx, fp2, fd);
7433 tcg_temp_free_i64(fp2);
7440 TCGv_i32 fp0 = tcg_temp_new_i32();
7441 TCGv_i32 fp1 = tcg_temp_new_i32();
7442 TCGv_i32 fp2 = tcg_temp_new_i32();
7444 gen_load_fpr32(fp0, fs);
7445 gen_load_fpr32(fp1, ft);
7446 gen_load_fpr32(fp2, fr);
7447 gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
7448 tcg_temp_free_i32(fp0);
7449 tcg_temp_free_i32(fp1);
7450 gen_store_fpr32(fp2, fd);
7451 tcg_temp_free_i32(fp2);
7457 check_cp1_registers(ctx, fd | fs | ft | fr);
7459 TCGv_i64 fp0 = tcg_temp_new_i64();
7460 TCGv_i64 fp1 = tcg_temp_new_i64();
7461 TCGv_i64 fp2 = tcg_temp_new_i64();
7463 gen_load_fpr64(ctx, fp0, fs);
7464 gen_load_fpr64(ctx, fp1, ft);
7465 gen_load_fpr64(ctx, fp2, fr);
7466 gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
7467 tcg_temp_free_i64(fp0);
7468 tcg_temp_free_i64(fp1);
7469 gen_store_fpr64(ctx, fp2, fd);
7470 tcg_temp_free_i64(fp2);
7475 check_cp1_64bitmode(ctx);
7477 TCGv_i64 fp0 = tcg_temp_new_i64();
7478 TCGv_i64 fp1 = tcg_temp_new_i64();
7479 TCGv_i64 fp2 = tcg_temp_new_i64();
7481 gen_load_fpr64(ctx, fp0, fs);
7482 gen_load_fpr64(ctx, fp1, ft);
7483 gen_load_fpr64(ctx, fp2, fr);
7484 gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
7485 tcg_temp_free_i64(fp0);
7486 tcg_temp_free_i64(fp1);
7487 gen_store_fpr64(ctx, fp2, fd);
7488 tcg_temp_free_i64(fp2);
7495 TCGv_i32 fp0 = tcg_temp_new_i32();
7496 TCGv_i32 fp1 = tcg_temp_new_i32();
7497 TCGv_i32 fp2 = tcg_temp_new_i32();
7499 gen_load_fpr32(fp0, fs);
7500 gen_load_fpr32(fp1, ft);
7501 gen_load_fpr32(fp2, fr);
7502 gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
7503 tcg_temp_free_i32(fp0);
7504 tcg_temp_free_i32(fp1);
7505 gen_store_fpr32(fp2, fd);
7506 tcg_temp_free_i32(fp2);
7512 check_cp1_registers(ctx, fd | fs | ft | fr);
7514 TCGv_i64 fp0 = tcg_temp_new_i64();
7515 TCGv_i64 fp1 = tcg_temp_new_i64();
7516 TCGv_i64 fp2 = tcg_temp_new_i64();
7518 gen_load_fpr64(ctx, fp0, fs);
7519 gen_load_fpr64(ctx, fp1, ft);
7520 gen_load_fpr64(ctx, fp2, fr);
7521 gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
7522 tcg_temp_free_i64(fp0);
7523 tcg_temp_free_i64(fp1);
7524 gen_store_fpr64(ctx, fp2, fd);
7525 tcg_temp_free_i64(fp2);
7530 check_cp1_64bitmode(ctx);
7532 TCGv_i64 fp0 = tcg_temp_new_i64();
7533 TCGv_i64 fp1 = tcg_temp_new_i64();
7534 TCGv_i64 fp2 = tcg_temp_new_i64();
7536 gen_load_fpr64(ctx, fp0, fs);
7537 gen_load_fpr64(ctx, fp1, ft);
7538 gen_load_fpr64(ctx, fp2, fr);
7539 gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
7540 tcg_temp_free_i64(fp0);
7541 tcg_temp_free_i64(fp1);
7542 gen_store_fpr64(ctx, fp2, fd);
7543 tcg_temp_free_i64(fp2);
7549 generate_exception (ctx, EXCP_RI);
7552 MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
7553 fregnames[fs], fregnames[ft]);
7556 /* ISA extensions (ASEs) */
7557 /* MIPS16 extension to MIPS32 */
7558 /* SmartMIPS extension to MIPS32 */
7560 #if defined(TARGET_MIPS64)
7562 /* MDMX extension to MIPS64 */
7566 static void decode_opc (CPUState *env, DisasContext *ctx)
7570 uint32_t op, op1, op2;
7573 /* make sure instructions are on a word boundary */
7574 if (ctx->pc & 0x3) {
7575 env->CP0_BadVAddr = ctx->pc;
7576 generate_exception(ctx, EXCP_AdEL);
7580 /* Handle blikely not taken case */
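/* Branch-likely instructions nullify their delay slot when not taken: if
   bcond is zero here, the code below skips the delay-slot instruction by
   clearing the branch hflags and jumping straight to pc + 4. */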
7581 if ((ctx->hflags & MIPS_HFLAG_BMASK) == MIPS_HFLAG_BL) {
7582 int l1 = gen_new_label();
7584 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
7585 tcg_gen_brcondi_i32(TCG_COND_NE, bcond, 0, l1);
7587 TCGv_i32 r_tmp = tcg_temp_new_i32();
7589 tcg_gen_movi_i32(r_tmp, ctx->hflags & ~MIPS_HFLAG_BMASK);
7590 tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
7591 tcg_temp_free_i32(r_tmp);
7593 gen_goto_tb(ctx, 1, ctx->pc + 4);
7596 op = MASK_OP_MAJOR(ctx->opcode);
7597 rs = (ctx->opcode >> 21) & 0x1f;
7598 rt = (ctx->opcode >> 16) & 0x1f;
7599 rd = (ctx->opcode >> 11) & 0x1f;
7600 sa = (ctx->opcode >> 6) & 0x1f;
7601 imm = (int16_t)ctx->opcode;
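/* MIPS instruction fields: op is bits 31..26, rs 25..21, rt 20..16,
   rd 15..11, sa 10..6 and the function field 5..0; I-type instructions
   reuse bits 15..0 as a sign-extended immediate, hence the int16_t cast. */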
7604 op1 = MASK_SPECIAL(ctx->opcode);
7606 case OPC_SLL: /* Arithmetic with immediate */
7607 case OPC_SRL ... OPC_SRA:
7608 gen_arith_imm(env, ctx, op1, rd, rt, sa);
7610 case OPC_MOVZ ... OPC_MOVN:
7611 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7612 case OPC_SLLV: /* Arithmetic */
7613 case OPC_SRLV ... OPC_SRAV:
7614 case OPC_ADD ... OPC_NOR:
7615 case OPC_SLT ... OPC_SLTU:
7616 gen_arith(env, ctx, op1, rd, rs, rt);
7618 case OPC_MULT ... OPC_DIVU:
7620 check_insn(env, ctx, INSN_VR54XX);
7621 op1 = MASK_MUL_VR54XX(ctx->opcode);
7622 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
7624 gen_muldiv(ctx, op1, rs, rt);
7626 case OPC_JR ... OPC_JALR:
7627 gen_compute_branch(ctx, op1, rs, rd, sa);
7629 case OPC_TGE ... OPC_TEQ: /* Traps */
7631 gen_trap(ctx, op1, rs, rt, -1);
7633 case OPC_MFHI: /* Move from HI/LO */
7635 gen_HILO(ctx, op1, rd);
7638 case OPC_MTLO: /* Move to HI/LO */
7639 gen_HILO(ctx, op1, rs);
7641 case OPC_PMON: /* Pmon entry point, also R4010 selsl */
7642 #ifdef MIPS_STRICT_STANDARD
7643 MIPS_INVAL("PMON / selsl");
7644 generate_exception(ctx, EXCP_RI);
7646 gen_helper_0i(pmon, sa);
7650 generate_exception(ctx, EXCP_SYSCALL);
7653 generate_exception(ctx, EXCP_BREAK);
7656 #ifdef MIPS_STRICT_STANDARD
7658 generate_exception(ctx, EXCP_RI);
7660 /* Implemented as RI exception for now. */
7661 MIPS_INVAL("spim (unofficial)");
7662 generate_exception(ctx, EXCP_RI);
7670 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7671 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7672 save_cpu_state(ctx, 1);
7673 check_cp1_enabled(ctx);
7674 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
7675 (ctx->opcode >> 16) & 1);
7677 generate_exception_err(ctx, EXCP_CpU, 1);
7681 #if defined(TARGET_MIPS64)
7682 /* MIPS64 specific opcodes */
7684 case OPC_DSRL ... OPC_DSRA:
7686 case OPC_DSRL32 ... OPC_DSRA32:
7687 check_insn(env, ctx, ISA_MIPS3);
7689 gen_arith_imm(env, ctx, op1, rd, rt, sa);
7692 case OPC_DSRLV ... OPC_DSRAV:
7693 case OPC_DADD ... OPC_DSUBU:
7694 check_insn(env, ctx, ISA_MIPS3);
7696 gen_arith(env, ctx, op1, rd, rs, rt);
7698 case OPC_DMULT ... OPC_DDIVU:
7699 check_insn(env, ctx, ISA_MIPS3);
7701 gen_muldiv(ctx, op1, rs, rt);
7704 default: /* Invalid */
7705 MIPS_INVAL("special");
7706 generate_exception(ctx, EXCP_RI);
7711 op1 = MASK_SPECIAL2(ctx->opcode);
7713 case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
7714 case OPC_MSUB ... OPC_MSUBU:
7715 check_insn(env, ctx, ISA_MIPS32);
7716 gen_muldiv(ctx, op1, rs, rt);
7719 gen_arith(env, ctx, op1, rd, rs, rt);
7721 case OPC_CLZ ... OPC_CLO:
7722 check_insn(env, ctx, ISA_MIPS32);
7723 gen_cl(ctx, op1, rd, rs);
7726 /* XXX: not clear which exception should be raised
7727 * when in debug mode...
7729 check_insn(env, ctx, ISA_MIPS32);
7730 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
7731 generate_exception(ctx, EXCP_DBp);
7733 generate_exception(ctx, EXCP_DBp);
7737 #if defined(TARGET_MIPS64)
7738 case OPC_DCLZ ... OPC_DCLO:
7739 check_insn(env, ctx, ISA_MIPS64);
7741 gen_cl(ctx, op1, rd, rs);
7744 default: /* Invalid */
7745 MIPS_INVAL("special2");
7746 generate_exception(ctx, EXCP_RI);
7751 op1 = MASK_SPECIAL3(ctx->opcode);
7755 check_insn(env, ctx, ISA_MIPS32R2);
7756 gen_bitops(ctx, op1, rt, rs, sa, rd);
7759 check_insn(env, ctx, ISA_MIPS32R2);
7760 op2 = MASK_BSHFL(ctx->opcode);
7761 gen_bshfl(ctx, op2, rt, rd);
7764 check_insn(env, ctx, ISA_MIPS32R2);
7766 TCGv t0 = tcg_temp_local_new();
7770 save_cpu_state(ctx, 1);
7771 gen_helper_rdhwr_cpunum(t0);
7774 save_cpu_state(ctx, 1);
7775 gen_helper_rdhwr_synci_step(t0);
7778 save_cpu_state(ctx, 1);
7779 gen_helper_rdhwr_cc(t0);
7782 save_cpu_state(ctx, 1);
7783 gen_helper_rdhwr_ccres(t0);
7786 #if defined(CONFIG_USER_ONLY)
7787 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
7790 /* XXX: Some CPUs implement this in hardware.
7791 Not supported yet. */
7793 default: /* Invalid */
7794 MIPS_INVAL("rdhwr");
7795 generate_exception(ctx, EXCP_RI);
7798 gen_store_gpr(t0, rt);
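/* The RDHWR cases above expose a few hardware registers to user mode: the
   CPU number, the SYNCI step size, the cycle counter and its resolution,
   and (in user-only builds) the thread pointer read from tls_value. */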
7803 check_insn(env, ctx, ASE_MT);
7805 TCGv t0 = tcg_temp_local_new();
7806 TCGv t1 = tcg_temp_local_new();
7808 gen_load_gpr(t0, rt);
7809 gen_load_gpr(t1, rs);
7810 gen_helper_fork(t0, t1);
7816 check_insn(env, ctx, ASE_MT);
7818 TCGv t0 = tcg_temp_local_new();
7820 gen_load_gpr(t0, rs);
7821 gen_helper_yield(t0, t0);
7822 gen_store_gpr(t0, rd);
7826 #if defined(TARGET_MIPS64)
7827 case OPC_DEXTM ... OPC_DEXT:
7828 case OPC_DINSM ... OPC_DINS:
7829 check_insn(env, ctx, ISA_MIPS64R2);
7831 gen_bitops(ctx, op1, rt, rs, sa, rd);
7834 check_insn(env, ctx, ISA_MIPS64R2);
7836 op2 = MASK_DBSHFL(ctx->opcode);
7837 gen_bshfl(ctx, op2, rt, rd);
7840 default: /* Invalid */
7841 MIPS_INVAL("special3");
7842 generate_exception(ctx, EXCP_RI);
7847 op1 = MASK_REGIMM(ctx->opcode);
7849 case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
7850 case OPC_BLTZAL ... OPC_BGEZALL:
7851 gen_compute_branch(ctx, op1, rs, -1, imm << 2);
7853 case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
7855 gen_trap(ctx, op1, rs, -1, imm);
7858 check_insn(env, ctx, ISA_MIPS32R2);
7861 default: /* Invalid */
7862 MIPS_INVAL("regimm");
7863 generate_exception(ctx, EXCP_RI);
7868 check_cp0_enabled(ctx);
7869 op1 = MASK_CP0(ctx->opcode);
7875 #if defined(TARGET_MIPS64)
7879 #ifndef CONFIG_USER_ONLY
7880 gen_cp0(env, ctx, op1, rt, rd);
7881 #endif /* !CONFIG_USER_ONLY */
7883 case OPC_C0_FIRST ... OPC_C0_LAST:
7884 #ifndef CONFIG_USER_ONLY
7885 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
7886 #endif /* !CONFIG_USER_ONLY */
7889 #ifndef CONFIG_USER_ONLY
7891 TCGv t0 = tcg_temp_local_new();
7893 op2 = MASK_MFMC0(ctx->opcode);
7896 check_insn(env, ctx, ASE_MT);
7897 gen_helper_dmt(t0, t0);
7900 check_insn(env, ctx, ASE_MT);
7901 gen_helper_emt(t0, t0);
7904 check_insn(env, ctx, ASE_MT);
7905 gen_helper_dvpe(t0, t0);
7908 check_insn(env, ctx, ASE_MT);
7909 gen_helper_evpe(t0, t0);
7912 check_insn(env, ctx, ISA_MIPS32R2);
7913 save_cpu_state(ctx, 1);
7915 /* Stop translation as we may have switched the execution mode */
7916 ctx->bstate = BS_STOP;
7919 check_insn(env, ctx, ISA_MIPS32R2);
7920 save_cpu_state(ctx, 1);
7922 /* Stop translation as we may have switched the execution mode */
7923 ctx->bstate = BS_STOP;
7925 default: /* Invalid */
7926 MIPS_INVAL("mfmc0");
7927 generate_exception(ctx, EXCP_RI);
7930 gen_store_gpr(t0, rt);
7933 #endif /* !CONFIG_USER_ONLY */
7936 check_insn(env, ctx, ISA_MIPS32R2);
7937 gen_load_srsgpr(rt, rd);
7940 check_insn(env, ctx, ISA_MIPS32R2);
7941 gen_store_srsgpr(rt, rd);
7945 generate_exception(ctx, EXCP_RI);
7949 case OPC_ADDI ... OPC_LUI: /* Arithmetic with immediate opcode */
7950 gen_arith_imm(env, ctx, op, rt, rs, imm);
7952 case OPC_J ... OPC_JAL: /* Jump */
7953 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
7954 gen_compute_branch(ctx, op, rs, rt, offset);
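/* J/JAL: the 26-bit instruction index is shifted left by two; the upper
   bits of the delay-slot PC are supplied by gen_compute_branch() to form
   the absolute target. */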
7956 case OPC_BEQ ... OPC_BGTZ: /* Branch */
7957 case OPC_BEQL ... OPC_BGTZL:
7958 gen_compute_branch(ctx, op, rs, rt, imm << 2);
7960 case OPC_LB ... OPC_LWR: /* Load and stores */
7961 case OPC_SB ... OPC_SW:
7965 gen_ldst(ctx, op, rt, rs, imm);
7968 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
7972 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7976 /* Floating point (COP1). */
7981 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7982 save_cpu_state(ctx, 1);
7983 check_cp1_enabled(ctx);
7984 gen_flt_ldst(ctx, op, rt, rs, imm);
7986 generate_exception_err(ctx, EXCP_CpU, 1);
7991 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7992 save_cpu_state(ctx, 1);
7993 check_cp1_enabled(ctx);
7994 op1 = MASK_CP1(ctx->opcode);
7998 check_insn(env, ctx, ISA_MIPS32R2);
8003 gen_cp1(ctx, op1, rt, rd);
8005 #if defined(TARGET_MIPS64)
8008 check_insn(env, ctx, ISA_MIPS3);
8009 gen_cp1(ctx, op1, rt, rd);
8015 check_insn(env, ctx, ASE_MIPS3D);
8018 gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
8019 (rt >> 2) & 0x7, imm << 2);
8026 gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
8031 generate_exception (ctx, EXCP_RI);
8035 generate_exception_err(ctx, EXCP_CpU, 1);
8045 /* COP2: Not implemented. */
8046 generate_exception_err(ctx, EXCP_CpU, 2);
8050 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8051 save_cpu_state(ctx, 1);
8052 check_cp1_enabled(ctx);
8053 op1 = MASK_CP3(ctx->opcode);
8061 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
8079 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
8083 generate_exception (ctx, EXCP_RI);
8087 generate_exception_err(ctx, EXCP_CpU, 1);
8091 #if defined(TARGET_MIPS64)
8092 /* MIPS64 opcodes */
8094 case OPC_LDL ... OPC_LDR:
8095 case OPC_SDL ... OPC_SDR:
8100 check_insn(env, ctx, ISA_MIPS3);
8102 gen_ldst(ctx, op, rt, rs, imm);
8104 case OPC_DADDI ... OPC_DADDIU:
8105 check_insn(env, ctx, ISA_MIPS3);
8107 gen_arith_imm(env, ctx, op, rt, rs, imm);
8111 check_insn(env, ctx, ASE_MIPS16);
8112 /* MIPS16: Not implemented. */
8114 check_insn(env, ctx, ASE_MDMX);
8115 /* MDMX: Not implemented. */
8116 default: /* Invalid */
8117 MIPS_INVAL("major opcode");
8118 generate_exception(ctx, EXCP_RI);
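/* End of the opcode switch. If the previous instruction was a branch, the
   instruction just decoded was its delay slot, and the saved branch hflags
   now decide how the block ends: direct jump to btarget, conditional test
   of bcond with fallthrough to pc + 4, or an indirect jump through
   btarget. */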
8121 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8122 int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
8123 /* Branches completion */
8124 ctx->hflags &= ~MIPS_HFLAG_BMASK;
8125 ctx->bstate = BS_BRANCH;
8126 save_cpu_state(ctx, 0);
8127 /* FIXME: Need to clear can_do_io. */
8130 /* unconditional branch */
8131 MIPS_DEBUG("unconditional branch");
8132 gen_goto_tb(ctx, 0, ctx->btarget);
8135 /* blikely taken case */
8136 MIPS_DEBUG("blikely branch taken");
8137 gen_goto_tb(ctx, 0, ctx->btarget);
8140 /* Conditional branch */
8141 MIPS_DEBUG("conditional branch");
8143 int l1 = gen_new_label();
8145 tcg_gen_brcondi_i32(TCG_COND_NE, bcond, 0, l1);
8146 gen_goto_tb(ctx, 1, ctx->pc + 4);
8148 gen_goto_tb(ctx, 0, ctx->btarget);
8152 /* unconditional branch to register */
8153 MIPS_DEBUG("branch to register");
8154 tcg_gen_mov_tl(cpu_PC, btarget);
8158 MIPS_DEBUG("unknown branch");
8165 gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
8169 target_ulong pc_start;
8170 uint16_t *gen_opc_end;
8177 qemu_log("search pc %d\n", search_pc);
8180 /* Leave some spare opc slots for branch handling. */
8181 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE - 16;
8185 ctx.bstate = BS_NONE;
8186 /* Restore delay slot state from the tb context. */
8187 ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
8188 restore_cpu_state(env, &ctx);
8189 #ifdef CONFIG_USER_ONLY
8190 ctx.mem_idx = MIPS_HFLAG_UM;
8192 ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
8195 max_insns = tb->cflags & CF_COUNT_MASK;
8197 max_insns = CF_COUNT_MASK;
8199 qemu_log_mask(CPU_LOG_TB_CPU, "------------------------------------------------\n");
8200 /* FIXME: This may print out stale hflags from env... */
8201 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
8203 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
8205 while (ctx.bstate == BS_NONE) {
8206 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
8207 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
8208 if (bp->pc == ctx.pc) {
8209 save_cpu_state(&ctx, 1);
8210 ctx.bstate = BS_BRANCH;
8211 gen_helper_0i(raise_exception, EXCP_DEBUG);
8212 /* Include the breakpoint location or the tb won't
* be flushed when it must be. */
8215 goto done_generating;
8221 j = gen_opc_ptr - gen_opc_buf;
8225 gen_opc_instr_start[lj++] = 0;
8227 gen_opc_pc[lj] = ctx.pc;
8228 gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
8229 gen_opc_instr_start[lj] = 1;
8230 gen_opc_icount[lj] = num_insns;
8232 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8234 ctx.opcode = ldl_code(ctx.pc);
8235 decode_opc(env, &ctx);
8239 if (env->singlestep_enabled)
8242 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
8245 if (gen_opc_ptr >= gen_opc_end)
8248 if (num_insns >= max_insns)
8250 #if defined (MIPS_SINGLE_STEP)
8254 if (tb->cflags & CF_LAST_IO)
8256 if (env->singlestep_enabled) {
8257 save_cpu_state(&ctx, ctx.bstate == BS_NONE);
8258 gen_helper_0i(raise_exception, EXCP_DEBUG);
8260 switch (ctx.bstate) {
8262 gen_helper_interrupt_restart();
8263 gen_goto_tb(&ctx, 0, ctx.pc);
8266 save_cpu_state(&ctx, 0);
8267 gen_goto_tb(&ctx, 0, ctx.pc);
8270 gen_helper_interrupt_restart();
8279 gen_icount_end(tb, num_insns);
8280 *gen_opc_ptr = INDEX_op_end;
8282 j = gen_opc_ptr - gen_opc_buf;
8285 gen_opc_instr_start[lj++] = 0;
8287 tb->size = ctx.pc - pc_start;
8288 tb->icount = num_insns;
8292 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
8293 qemu_log("IN: %s\n", lookup_symbol(pc_start));
8294 log_target_disas(pc_start, ctx.pc - pc_start, 0);
8297 qemu_log_mask(CPU_LOG_TB_CPU, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
8301 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
8303 gen_intermediate_code_internal(env, tb, 0);
8306 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
8308 gen_intermediate_code_internal(env, tb, 1);
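/* gen_intermediate_code_pc passes search_pc = 1: in that mode the loop
   above also fills gen_opc_pc and gen_opc_hflags for every instruction so
   a host PC within the TB can be mapped back to a guest PC (see
   gen_pc_load below). */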
8311 static void fpu_dump_state(CPUState *env, FILE *f,
8312 int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
8316 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
8318 #define printfpr(fp) \
8321 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu: %13g\n", \
8322 (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd, \
8323 (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
8326 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
8327 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
8328 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu:%13g\n", \
8329 tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd, \
8330 tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]); \
8335 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
8336 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64, env->active_fpu.fp_status,
8337 get_float_exception_flags(&env->active_fpu.fp_status));
8338 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
8339 fpu_fprintf(f, "%3s: ", fregnames[i]);
8340 printfpr(&env->active_fpu.fpr[i]);
8346 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8347 /* Debug help: The architecture requires 32bit code to maintain proper
8348 sign-extended values on 64bit machines. */
8350 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
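/* SIGN_EXT_P is true when bits 63..31 are either all zero or all one,
   i.e. the 64-bit register still holds a correctly sign-extended 32-bit
   value. */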
8353 cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
8354 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8359 if (!SIGN_EXT_P(env->active_tc.PC))
8360 cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
8361 if (!SIGN_EXT_P(env->active_tc.HI[0]))
8362 cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
8363 if (!SIGN_EXT_P(env->active_tc.LO[0]))
8364 cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
8365 if (!SIGN_EXT_P(env->btarget))
8366 cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
8368 for (i = 0; i < 32; i++) {
8369 if (!SIGN_EXT_P(env->active_tc.gpr[i]))
8370 cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
8373 if (!SIGN_EXT_P(env->CP0_EPC))
8374 cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
8375 if (!SIGN_EXT_P(env->CP0_LLAddr))
8376 cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->CP0_LLAddr);
8380 void cpu_dump_state (CPUState *env, FILE *f,
8381 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8386 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
8387 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
8388 env->hflags, env->btarget, env->bcond);
8389 for (i = 0; i < 32; i++) {
8391 cpu_fprintf(f, "GPR%02d:", i);
8392 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
8394 cpu_fprintf(f, "\n");
8397 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
8398 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
8399 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
8400 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
8401 if (env->hflags & MIPS_HFLAG_FPU)
8402 fpu_dump_state(env, f, cpu_fprintf, flags);
8403 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8404 cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
8408 static void mips_tcg_init(void)
8413 /* Initialize various static tables. */
8417 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8418 for (i = 0; i < 32; i++)
8419 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
8420 offsetof(CPUState, active_tc.gpr[i]),
8422 cpu_PC = tcg_global_mem_new(TCG_AREG0,
8423 offsetof(CPUState, active_tc.PC), "PC");
8424 for (i = 0; i < MIPS_DSP_ACC; i++) {
8425 cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
8426 offsetof(CPUState, active_tc.HI[i]),
8428 cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
8429 offsetof(CPUState, active_tc.LO[i]),
8431 cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
8432 offsetof(CPUState, active_tc.ACX[i]),
8435 cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
8436 offsetof(CPUState, active_tc.DSPControl),
8438 bcond = tcg_global_mem_new_i32(TCG_AREG0,
8439 offsetof(CPUState, bcond), "bcond");
8440 btarget = tcg_global_mem_new(TCG_AREG0,
8441 offsetof(CPUState, btarget), "btarget");
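/* fpu_fpr32, fpu_fpr64 and fpu_fpr32h below are three views of the same
   fpr[] storage in CPUState: the low single word, the whole 64-bit
   register, and the high word, so 32-bit and 64-bit FPU accesses stay
   coherent. */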
8442 for (i = 0; i < 32; i++)
8443 fpu_fpr32[i] = tcg_global_mem_new_i32(TCG_AREG0,
8444 offsetof(CPUState, active_fpu.fpr[i].w[FP_ENDIAN_IDX]),
8446 for (i = 0; i < 32; i++)
8447 fpu_fpr64[i] = tcg_global_mem_new_i64(TCG_AREG0,
8448 offsetof(CPUState, active_fpu.fpr[i]),
8450 for (i = 0; i < 32; i++)
8451 fpu_fpr32h[i] = tcg_global_mem_new_i32(TCG_AREG0,
8452 offsetof(CPUState, active_fpu.fpr[i].w[!FP_ENDIAN_IDX]),
8454 fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
8455 offsetof(CPUState, active_fpu.fcr0),
8457 fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
8458 offsetof(CPUState, active_fpu.fcr31),
8461 /* register helpers */
8462 #define GEN_HELPER 2
8468 #include "translate_init.c"
8470 CPUMIPSState *cpu_mips_init (const char *cpu_model)
8473 const mips_def_t *def;
8475 def = cpu_mips_find_by_name(cpu_model);
8478 env = qemu_mallocz(sizeof(CPUMIPSState));
8479 env->cpu_model = def;
8482 env->cpu_model_str = cpu_model;
8488 void cpu_reset (CPUMIPSState *env)
8490 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
8491 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
8492 log_cpu_state(env, 0);
8495 memset(env, 0, offsetof(CPUMIPSState, breakpoints));
8500 #if defined(CONFIG_USER_ONLY)
8501 env->hflags = MIPS_HFLAG_UM;
8503 if (env->hflags & MIPS_HFLAG_BMASK) {
8504 /* If the exception was raised from a delay slot,
8505 come back to the jump. */
8506 env->CP0_ErrorEPC = env->active_tc.PC - 4;
8508 env->CP0_ErrorEPC = env->active_tc.PC;
8510 env->active_tc.PC = (int32_t)0xBFC00000;
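/* MIPS reset vector: execution restarts at physical 0x1FC00000, which is
   0xBFC00000 when seen through KSEG1; the int32_t cast keeps the value
   sign-extended on 64-bit targets. */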
8512 /* SMP not implemented */
8513 env->CP0_EBase = 0x80000000;
8514 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
8515 /* vectored interrupts not implemented, timer on int 7,
8516 no performance counters. */
8517 env->CP0_IntCtl = 0xe0000000;
8521 for (i = 0; i < 7; i++) {
8522 env->CP0_WatchLo[i] = 0;
8523 env->CP0_WatchHi[i] = 0x80000000;
8525 env->CP0_WatchLo[7] = 0;
8526 env->CP0_WatchHi[7] = 0;
8528 /* Count register increments in debug mode, EJTAG version 1 */
8529 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
8530 env->hflags = MIPS_HFLAG_CP0;
8532 env->exception_index = EXCP_NONE;
8533 cpu_mips_register(env, env->cpu_model);
8536 void gen_pc_load(CPUState *env, TranslationBlock *tb,
8537 unsigned long searched_pc, int pc_pos, void *puc)
8539 env->active_tc.PC = gen_opc_pc[pc_pos];
8540 env->hflags &= ~MIPS_HFLAG_BMASK;
8541 env->hflags |= gen_opc_hflags[pc_pos];