/*---------------------------------------------------------------*/
/*--- begin                                guest_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/
28 #ifndef __VEX_GUEST_ARM64_DEFS_H
29 #define __VEX_GUEST_ARM64_DEFS_H
31 #include "libvex_basictypes.h"
32 #include "guest_generic_bb_to_IR.h" // DisResult
34 /*---------------------------------------------------------*/
35 /*--- arm64 to IR conversion ---*/
36 /*---------------------------------------------------------*/
38 /* Convert one ARM64 insn to IR. See the type DisOneInstrFn in
39 guest_generic_bb_to_IR.h. */
41 DisResult
disInstr_ARM64 ( IRSB
* irbb
,
42 const UChar
* guest_code
,
46 const VexArchInfo
* archinfo
,
47 const VexAbiInfo
* abiinfo
,
48 VexEndness host_endness
,
51 /* Used by the optimiser to specialise calls to helpers. */
53 IRExpr
* guest_arm64_spechelper ( const HChar
* function_name
,
55 IRStmt
** precedingStmts
,
56 Int n_precedingStmts
);
58 /* Describes to the optimser which part of the guest state require
59 precise memory exceptions. This is logically part of the guest
62 Bool
guest_arm64_state_requires_precise_mem_exns ( Int
, Int
,
66 VexGuestLayout arm64Guest_layout
;
69 /*---------------------------------------------------------*/
70 /*--- arm64 guest helpers ---*/
71 /*---------------------------------------------------------*/
73 /* --- CLEAN HELPERS --- */
75 /* Calculate NZCV from the supplied thunk components, in the positions
76 they appear in the CPSR, viz bits 31:28 for N Z C V respectively.
77 Returned bits 63:32 and 27:0 are zero. */
79 ULong
arm64g_calculate_flags_nzcv ( ULong cc_op
, ULong cc_dep1
,
80 ULong cc_dep2
, ULong cc_dep3
);
82 /* Calculate the C flag from the thunk components, in the lowest bit
83 of the word (bit 0). */
85 ULong
arm64g_calculate_flag_c ( ULong cc_op
, ULong cc_dep1
,
86 ULong cc_dep2
, ULong cc_dep3
);
88 //ZZ /* Calculate the V flag from the thunk components, in the lowest bit
89 //ZZ of the word (bit 0). */
91 //ZZ UInt armg_calculate_flag_v ( UInt cc_op, UInt cc_dep1,
92 //ZZ UInt cc_dep2, UInt cc_dep3 );
94 /* Calculate the specified condition from the thunk components, in the
95 lowest bit of the word (bit 0). */
97 ULong
arm64g_calculate_condition ( /* ARM64Condcode << 4 | cc_op */
100 ULong cc_dep2
, ULong cc_dep3
);
102 //ZZ /* Calculate the QC flag from the thunk components, in the lowest bit
103 //ZZ of the word (bit 0). */
105 //ZZ UInt armg_calculate_flag_qc ( UInt resL1, UInt resL2,
106 //ZZ UInt resR1, UInt resR2 );
108 extern ULong
arm64g_calc_crc32b ( ULong acc
, ULong bits
);
109 extern ULong
arm64g_calc_crc32h ( ULong acc
, ULong bits
);
110 extern ULong
arm64g_calc_crc32w ( ULong acc
, ULong bits
);
111 extern ULong
arm64g_calc_crc32x ( ULong acc
, ULong bits
);
113 extern ULong
arm64g_calc_crc32cb ( ULong acc
, ULong bits
);
114 extern ULong
arm64g_calc_crc32ch ( ULong acc
, ULong bits
);
115 extern ULong
arm64g_calc_crc32cw ( ULong acc
, ULong bits
);
116 extern ULong
arm64g_calc_crc32cx ( ULong acc
, ULong bits
);
118 /* --- DIRTY HELPERS --- */
119 extern ULong
arm64g_dirtyhelper_MRS_DCZID_EL0 ( void );
121 extern ULong
arm64g_dirtyhelper_MRS_CNTVCT_EL0 ( void );
123 extern ULong
arm64g_dirtyhelper_MRS_CNTFRQ_EL0 ( void );
125 extern ULong
arm64g_dirtyhelper_MRS_MIDR_EL1 ( void );
127 extern ULong
arm64g_dirtyhelper_MRS_ID_AA64PFR0_EL1 ( void );
129 extern ULong
arm64g_dirtyhelper_MRS_ID_AA64MMFR0_EL1 ( void );
130 extern ULong
arm64g_dirtyhelper_MRS_ID_AA64MMFR1_EL1 ( void );
132 extern ULong
arm64g_dirtyhelper_MRS_ID_AA64ISAR0_EL1 ( void );
133 extern ULong
arm64g_dirtyhelper_MRS_ID_AA64ISAR1_EL1 ( void );
135 extern void arm64g_dirtyhelper_PMULLQ ( /*OUT*/V128
* res
,
136 ULong arg1
, ULong arg2
);
138 extern void arm64g_dirtyhelper_AESE ( /*OUT*/V128
* res
,
139 ULong argHi
, ULong argLo
);
140 extern void arm64g_dirtyhelper_AESD ( /*OUT*/V128
* res
,
141 ULong argHi
, ULong argLo
);
142 extern void arm64g_dirtyhelper_AESMC ( /*OUT*/V128
* res
,
143 ULong argHi
, ULong argLo
);
144 extern void arm64g_dirtyhelper_AESIMC ( /*OUT*/V128
* res
,
145 ULong argHi
, ULong argLo
);
148 void arm64g_dirtyhelper_SHA1C ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
149 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
151 void arm64g_dirtyhelper_SHA1H ( /*OUT*/V128
* res
,
152 ULong nHi
, ULong nLo
);
154 void arm64g_dirtyhelper_SHA1M ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
155 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
157 void arm64g_dirtyhelper_SHA1P ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
158 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
160 void arm64g_dirtyhelper_SHA1SU0 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
161 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
163 void arm64g_dirtyhelper_SHA1SU1 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
164 ULong nHi
, ULong nLo
);
166 void arm64g_dirtyhelper_SHA256H2 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
167 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
169 void arm64g_dirtyhelper_SHA256H ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
170 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
172 void arm64g_dirtyhelper_SHA256SU0 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
173 ULong nHi
, ULong nLo
);
175 void arm64g_dirtyhelper_SHA256SU1 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
176 ULong nHi
, ULong nLo
,
177 ULong mHi
, ULong mLo
);
179 void arm64g_dirtyhelper_SHA512H2 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
180 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
182 void arm64g_dirtyhelper_SHA512H ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
183 ULong nHi
, ULong nLo
, ULong mHi
, ULong mLo
);
185 void arm64g_dirtyhelper_SHA512SU0 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
186 ULong nHi
, ULong nLo
);
188 void arm64g_dirtyhelper_SHA512SU1 ( /*OUT*/V128
* res
, ULong dHi
, ULong dLo
,
189 ULong nHi
, ULong nLo
,
190 ULong mHi
, ULong mLo
);
/*---------------------------------------------------------*/
/*--- Condition code stuff                              ---*/
/*---------------------------------------------------------*/

/* Flag masks.  Defines positions of flag bits in the NZCV
   register. */
#define ARM64G_CC_SHIFT_N  31
#define ARM64G_CC_SHIFT_Z  30
#define ARM64G_CC_SHIFT_C  29
#define ARM64G_CC_SHIFT_V  28
//ZZ #define ARMG_CC_SHIFT_Q  27
//ZZ
//ZZ #define ARMG_CC_MASK_N    (1 << ARMG_CC_SHIFT_N)
//ZZ #define ARMG_CC_MASK_Z    (1 << ARMG_CC_SHIFT_Z)
#define ARM64G_CC_MASK_C    (1 << ARM64G_CC_SHIFT_C)
//ZZ #define ARMG_CC_MASK_V    (1 << ARMG_CC_SHIFT_V)
//ZZ #define ARMG_CC_MASK_Q    (1 << ARMG_CC_SHIFT_Q)
/* Flag thunk descriptors.  A four-word thunk is used to record
   details of the most recent flag-setting operation, so NZCV can
   be computed later if needed.

   The four words are:

      CC_OP, which describes the operation.

      CC_DEP1, CC_DEP2, CC_NDEP.  These are arguments to the
      operation.  We want set up the mcx_masks in flag helper calls
      involving these fields so that Memcheck "believes" that the
      resulting flags are data-dependent on both CC_DEP1 and
      CC_DEP2.  Hence the name DEP.

   When building the thunk, it is always necessary to write words into
   CC_DEP1/2 and NDEP, even if those args are not used given the CC_OP
   field.  This is important because otherwise Memcheck could give
   false positives as it does not understand the relationship between
   the CC_OP field and CC_DEP1/2/NDEP, and so believes that the
   definedness of the stored flags always depends on all 3 DEP values.

   A summary of the field usages is:

   OP                DEP1              DEP2              DEP3
   ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

   OP_COPY           curr_NZCV:28x0    unused            unused
   OP_ADD32          argL              argR              unused
   OP_ADD64          argL              argR              unused
   OP_SUB32          argL              argR              unused
   OP_SUB64          argL              argR              unused
   OP_ADC32          argL              argR              63x0:old_C
   OP_ADC64          argL              argR              63x0:old_C
   OP_SBC32          argL              argR              63x0:old_C
   OP_SBC64          argL              argR              63x0:old_C
   OP_LOGIC32        result            unused            unused
   OP_LOGIC64        result            unused            unused
//ZZ    OP_MUL            result            unused            30x0:old_C:old_V
//ZZ    OP_MULL           resLO32           resHI32           30x0:old_C:old_V
*/

enum {
   ARM64G_CC_OP_COPY=0,   /* DEP1 = NZCV in 31:28, DEP2 = 0, DEP3 = 0
                             just copy DEP1 to output */

   ARM64G_CC_OP_ADD32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADD64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB32,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_SUB64,    /* DEP1 = argL (Rn), DEP2 = argR (shifter_op),
                             DEP3 = 0 */

   ARM64G_CC_OP_ADC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_ADC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC32,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_SBC64,    /* DEP1 = argL (Rn), DEP2 = arg2 (shifter_op),
                             DEP3 = oldC (in LSB) */

   ARM64G_CC_OP_LOGIC32,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */
   ARM64G_CC_OP_LOGIC64,  /* DEP1 = result, DEP2 = 0, DEP3 = 0 */

//ZZ    ARMG_CC_OP_MUL,     /* DEP1 = result, DEP2 = 0, DEP3 = oldC:old_V
//ZZ                           (in bits 1:0) */
//ZZ
//ZZ    ARMG_CC_OP_MULL,    /* DEP1 = resLO32, DEP2 = resHI32, DEP3 = oldC:old_V
//ZZ                           (in bits 1:0) */

   ARM64G_CC_OP_NUMBER
};

/* XXXX because of the calling conventions for
   arm64g_calculate_condition, all these OP values MUST be in the range
   0 .. 15 only (viz, 4-bits). */
/* Defines conditions which we can ask for */

typedef
   enum {
      ARM64CondEQ = 0,  /* equal                         : Z=1 */
      ARM64CondNE = 1,  /* not equal                     : Z=0 */

      ARM64CondCS = 2,  /* >=u (higher or same) (aka HS) : C=1 */
      ARM64CondCC = 3,  /* <u  (lower)          (aka LO) : C=0 */

      ARM64CondMI = 4,  /* minus (negative)              : N=1 */
      ARM64CondPL = 5,  /* plus (zero or +ve)            : N=0 */

      ARM64CondVS = 6,  /* overflow                      : V=1 */
      ARM64CondVC = 7,  /* no overflow                   : V=0 */

      ARM64CondHI = 8,  /* >u   (higher)                 : C=1 && Z=0 */
      ARM64CondLS = 9,  /* <=u  (lower or same)          : C=0 || Z=1 */

      ARM64CondGE = 10, /* >=s (signed greater or equal) : N=V */
      ARM64CondLT = 11, /* <s  (signed less than)        : N!=V */

      ARM64CondGT = 12, /* >s  (signed greater)          : Z=0 && N=V */
      ARM64CondLE = 13, /* <=s (signed less or equal)    : Z=1 || N!=V */

      ARM64CondAL = 14, /* always (unconditional)        : 1 */
      ARM64CondNV = 15  /* always (unconditional)        : 1 */
   }
   ARM64Condcode;
/* Vector element size specifiers */
/* NOTE(review): the typedef wrapper was lost in this copy; the name
   ARM64VecESize is reconstructed from the upstream VEX sources --
   confirm against guest_arm64_toIR.c before relying on it. */
typedef
   enum {
      ARM64VSizeH = 0,  /* 16 bits (integer halfword or half-precision FP) */
      ARM64VSizeS = 1,  /* 32 bits (integer shortword or single-precision FP) */
      ARM64VSizeD = 2   /* 64 bits (integer word or double-precision FP) */
   }
   ARM64VecESize;
339 #endif /* ndef __VEX_GUEST_ARM64_DEFS_H */
341 /*---------------------------------------------------------------*/
342 /*--- end guest_arm64_defs.h ---*/
343 /*---------------------------------------------------------------*/