/*---------------------------------------------------------------*/
/*--- begin                              host_nanomips_defs.h ---*/
/*---------------------------------------------------------------*/
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2017-2018 RT-RK

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/
30 #ifndef __VEX_HOST_NANOMIPS_DEFS_H
31 #define __VEX_HOST_NANOMIPS_DEFS_H
33 #include "libvex_basictypes.h"
34 #include "libvex.h" /* VexArch */
35 #include "host_generic_regs.h" /* HReg */
36 #include "common_nanomips_defs.h"
38 /* --------- Registers. --------- */
40 #define ST_IN static inline
42 #define GPR(_enc, _ix) \
43 mkHReg(False, HRcInt32, (_enc), (_ix))
45 ST_IN HReg
hregNANOMIPS_GPR16(void) {
48 ST_IN HReg
hregNANOMIPS_GPR17(void) {
51 ST_IN HReg
hregNANOMIPS_GPR18(void) {
54 ST_IN HReg
hregNANOMIPS_GPR19(void) {
57 ST_IN HReg
hregNANOMIPS_GPR20(void) {
60 ST_IN HReg
hregNANOMIPS_GPR21(void) {
63 ST_IN HReg
hregNANOMIPS_GPR22(void) {
67 ST_IN HReg
hregNANOMIPS_GPR12(void) {
70 ST_IN HReg
hregNANOMIPS_GPR13(void) {
73 ST_IN HReg
hregNANOMIPS_GPR14(void) {
76 ST_IN HReg
hregNANOMIPS_GPR15(void) {
79 ST_IN HReg
hregNANOMIPS_GPR24(void) {
83 ST_IN HReg
hregNANOMIPS_GPR0(void) {
86 ST_IN HReg
hregNANOMIPS_GPR1(void) {
89 ST_IN HReg
hregNANOMIPS_GPR2(void) {
92 ST_IN HReg
hregNANOMIPS_GPR3(void) {
95 ST_IN HReg
hregNANOMIPS_GPR4(void) {
98 ST_IN HReg
hregNANOMIPS_GPR5(void) {
101 ST_IN HReg
hregNANOMIPS_GPR6(void) {
104 ST_IN HReg
hregNANOMIPS_GPR7(void) {
107 ST_IN HReg
hregNANOMIPS_GPR8(void) {
110 ST_IN HReg
hregNANOMIPS_GPR9(void) {
113 ST_IN HReg
hregNANOMIPS_GPR10(void) {
116 ST_IN HReg
hregNANOMIPS_GPR11(void) {
119 ST_IN HReg
hregNANOMIPS_GPR23(void) {
122 ST_IN HReg
hregNANOMIPS_GPR25(void) {
125 ST_IN HReg
hregNANOMIPS_GPR29(void) {
128 ST_IN HReg
hregNANOMIPS_GPR31(void) {
135 #undef GuestStatePointer
136 #undef StackFramePointer
140 #define GuestStatePointer hregNANOMIPS_GPR23()
141 #define StackFramePointer hregNANOMIPS_GPR30()
142 #define StackPointer hregNANOMIPS_GPR29()
143 #define Zero hregNANOMIPS_GPR0()
145 /* Num registers used for function calls */
146 /* a0, a1, a2, a3, a4, a5, a6, a7 */
147 # define NANOMIPS_N_REGPARMS 8
150 NMin_Imm
, /* Operation with word and imm (fake insn). */
151 NMin_Unary
, /* Unary ops: clo, clz, neg and nop. */
152 NMin_Alu
, /* Binary ops: add/sub/and/or/xor/nor/mul/div. */
153 NMin_Cmp
, /* Word compare (fake insn). */
154 NMin_Call
, /* Call to address in register. */
156 /* The following 5 insns are mandated by translation chaining */
157 NMin_XDirect
, /* Direct transfer to GA. */
158 NMin_XIndir
, /* Indirect transfer to GA. */
159 NMin_XAssisted
, /* Assisted transfer to GA. */
160 NMin_EvCheck
, /* Event check. */
161 NMin_ProfInc
, /* 64-bit profile counter increment. */
163 NMin_Load
, /* Sign-extending load a 8|16|32 bit value from mem. */
164 NMin_Store
, /* Store a 8|16|32 bit value to mem. */
165 NMin_Cas
, /* Compare and swap. */
166 NMin_LoadL
, /* Mips Load Linked Word - LL. */
167 NMin_StoreC
, /* Mips Store Conditional Word - SC. */
168 NMin_MoveCond
, /* Move Conditional. */
172 NMimm_INVALID
= -1, /* Invalid / unknown op */
173 NMimm_SLL
= 0x00, /* Shift left */
174 NMimm_SRL
= 0x02, /* Logic shift right */
175 NMimm_LI
= 0x03, /* Load immediate */
176 NMimm_SRA
= 0x04, /* Arithetic shift right */
177 NMimm_SGN
= 0x05, /* Sign extend from imm bits */
178 NMimm_ORI
= 0x06, /* Logical or */
179 NMimm_XORI
= 0x07, /* Logical xor */
180 NMimm_ANDI
= 0x08, /* Logical and */
181 NMimm_ROTX
= 0x09, /* Rotx */
192 NMalu_SLL
= NMimm_SLL
,
193 NMalu_SRL
= NMimm_SRL
,
194 NMalu_SRA
= NMimm_SRA
,
195 NMalu_OR
= NMimm_ORI
,
196 NMalu_XOR
= NMimm_XORI
,
197 NMalu_AND
= NMimm_ANDI
,
213 NMcc_INVALID
, /* Invalid or unknown condition */
215 NMcc_NE
, /* not equal */
217 NMcc_LTS
, /* signed less than */
218 NMcc_LTU
, /* unsigned less than */
220 NMcc_LES
, /* signed less than or equal */
221 NMcc_LEU
, /* unsigned less than or equal */
223 NMcc_AL
, /* always (unconditional) */
224 NMcc_NV
, /* never (unconditional) */
228 NMMoveCond_movn
/* Move Conditional on Not Zero */
229 } NANOMIPSMoveCondOp
;
232 NANOMIPSInstrTag tag
;
246 /* Clz, Clo, not, nop */
252 /* Word compare. Fake instruction, used for basic block ending. */
257 NANOMIPSCondCode cond
;
259 /* Pseudo-insn. Call target (an absolute address), on given
265 RetLoc rloc
; /* Where the return value will be */
267 /* Update the guest EIP value, then exit requesting to chain
268 to it. May be conditional. */
270 Addr dstGA
; /* next guest address */
271 HReg addr
; /* Address register */
272 Int addr_offset
; /* Offset */
273 HReg cond
; /* Condition */
274 Bool toFastEP
; /* Chain to the slow or fast point? */
276 /* Boring transfer to a guest address not known at JIT time.
277 Not chainable. May be conditional. */
280 HReg addr
; /* Address register */
281 Int addr_offset
; /* Offset */
282 HReg cond
; /* Condition */
284 /* Assisted transfer to a guest address, most general case.
285 Not chainable. May be conditional. */
288 HReg addr
; /* Address register */
289 Int addr_offset
; /* Offset */
290 HReg cond
; /* Condition */
291 IRJumpKind jk
; /* Jump kind */
295 Int offset_amCounter
;
297 Int offset_amFailAddr
;
300 /* No fields. The address of the counter to inc is
301 installed later, post-translation, by patching it in,
302 as it is not known at translation time. */
304 /* Sign extending loads. Dst size is host word size */
306 UChar sz
; /* Must be 4 bytes for now. */
307 HReg dst
; /* Destionation register */
308 HReg addr
; /* Address register */
309 Int addr_offset
; /* Offset */
312 UChar sz
; /* Must be 4 bytes for now. */
313 HReg addr
; /* Address register */
314 Int addr_offset
; /* Offset */
315 HReg src
; /* Source register */
318 UChar sz
; /* Must be 4 bytes for now. */
328 UChar sz
; /* Must be 4 bytes for now. */
329 HReg dst
; /* Destination register */
330 HReg addr
; /* Address register */
331 Int addr_offset
; /* Offset */
334 UChar sz
; /* Must be 4 bytes for now. */
335 HReg addr
; /* Address register */
336 Int addr_offset
; /* Offset */
337 HReg src
; /* Sorce register */
339 /* Conditional move. */
341 NANOMIPSMoveCondOp op
;
349 extern NANOMIPSInstr
*NANOMIPSInstr_Imm(NANOMIPSImmOp
, HReg
, HReg
, UInt
);
350 extern NANOMIPSInstr
*NANOMIPSInstr_Unary(NANOMIPSUnaryOp op
, HReg dst
,
352 extern NANOMIPSInstr
*NANOMIPSInstr_Alu(NANOMIPSAluOp
, HReg
, HReg
, HReg
);
353 extern NANOMIPSInstr
*NANOMIPSInstr_Cmp(NANOMIPSCondCode
, HReg
, HReg
, HReg
);
354 extern NANOMIPSInstr
*NANOMIPSInstr_Call(Addr
, UInt
, HReg
, RetLoc
);
355 extern NANOMIPSInstr
*NANOMIPSInstr_XDirect(Addr64 dstGA
, HReg
, Int
,
356 HReg cond
, Bool toFastEP
);
357 extern NANOMIPSInstr
*NANOMIPSInstr_XIndir(HReg dstGA
, HReg
, Int
,
359 extern NANOMIPSInstr
*NANOMIPSInstr_XAssisted(HReg dstGA
, HReg
, Int
,
360 HReg cond
, IRJumpKind jk
);
361 extern NANOMIPSInstr
*NANOMIPSInstr_EvCheck(HReg
, Int
, HReg
, Int
);
362 extern NANOMIPSInstr
*NANOMIPSInstr_ProfInc(void);
363 extern NANOMIPSInstr
*NANOMIPSInstr_Load(UChar sz
, HReg dst
, HReg src
,
365 extern NANOMIPSInstr
*NANOMIPSInstr_Store(UChar sz
, HReg dst
, Int addr_offset
,
367 extern NANOMIPSInstr
*NANOMIPSInstr_Cas(UChar sz
, HReg oldLo
, HReg oldHi
, HReg addr
,
368 HReg expdLo
, HReg expdHi
,
369 HReg dataLo
, HReg dataHi
);
370 extern NANOMIPSInstr
*NANOMIPSInstr_LoadL(UChar sz
, HReg dst
, HReg src
,
372 extern NANOMIPSInstr
*NANOMIPSInstr_StoreC(UChar sz
, HReg dst
, Int addr_offset
,
374 extern NANOMIPSInstr
*NANOMIPSInstr_MoveCond(NANOMIPSMoveCondOp op
, HReg dst
,
375 HReg src
, HReg cond
);
376 extern void ppNANOMIPSInstr(const NANOMIPSInstr
*);
377 extern UInt
ppHRegNANOMIPS(HReg
);
378 extern void getRegUsage_NANOMIPSInstr (HRegUsage
*, const NANOMIPSInstr
*);
379 extern void mapRegs_NANOMIPSInstr (HRegRemap
*, NANOMIPSInstr
*);
380 extern void genSpill_NANOMIPS ( /*OUT*/ HInstr
** i1
, /*OUT*/ HInstr
** i2
,
381 HReg rreg
, Int offset
, Bool mode64
);
382 extern void genReload_NANOMIPS( /*OUT*/ HInstr
** i1
, /*OUT*/ HInstr
** i2
,
383 HReg rreg
, Int offset
, Bool mode64
);
384 extern NANOMIPSInstr
* genMove_NANOMIPS(HReg from
, HReg to
);
385 extern HInstrArray
*iselSB_NANOMIPS(const IRSB
*,
389 Int offs_Host_EvC_Counter
,
390 Int offs_Host_EvC_FailAddr
,
391 Bool chainingAllowed
,
394 extern Int
emit_NANOMIPSInstr (/*MB_MOD*/Bool
* is_profInc
,
395 UChar
* buf
, Int nbuf
,
396 const NANOMIPSInstr
* i
,
398 VexEndness endness_host
,
399 const void* disp_cp_chain_me_to_slowEP
,
400 const void* disp_cp_chain_me_to_fastEP
,
401 const void* disp_cp_xindir
,
402 const void* disp_cp_xassisted
);
403 /* How big is an event check? This is kind of a kludge because it
404 depends on the offsets of host_EvC_FAILADDR and host_EvC_COUNTER,
405 and so assumes that they are both <= 128, and so can use the short
406 offset encoding. This is all checked with assertions, so in the
407 worst case we will merely assert at startup. */
408 extern Int
evCheckSzB_NANOMIPS (void);
409 /* Perform a chaining and unchaining of an XDirect jump. */
410 extern VexInvalRange
chainXDirect_NANOMIPS (VexEndness endness_host
,
411 void* place_to_chain
,
412 const void* disp_cp_chain_me_EXPECTED
,
413 const void* place_to_jump_to
);
414 extern VexInvalRange
unchainXDirect_NANOMIPS(VexEndness endness_host
,
415 void* place_to_unchain
,
416 const void* place_to_jump_to_EXPECTED
,
417 const void* disp_cp_chain_me
);
418 /* Patch the counter location into an existing ProfInc point. */
419 extern VexInvalRange
patchProfInc_NANOMIPS (VexEndness endness_host
,
420 void* place_to_patch
,
421 const ULong
* location_of_counter
);
422 extern const RRegUniverse
* getRRegUniverse_NANOMIPS (Bool mode64
);
424 #endif /* ndef __VEX_HOST_NANOMIPS_DEFS_H */
/*---------------------------------------------------------------*/
/*--- end                                host_nanomips_defs.h ---*/
/*---------------------------------------------------------------*/