/*---------------------------------------------------------------*/
/*--- begin                                 host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2017 OpenWorks

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/
#ifndef __VEX_HOST_ARM64_DEFS_H
#define __VEX_HOST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "libvex.h"              // VexArch
#include "host_generic_regs.h"   // HReg
/* --------- Registers. --------- */

#define ST_IN static inline
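/* Note on the mkHReg calls below (an interpretation based on the
   argument pattern, not stated in this file): the first argument
   appears to be the is-virtual flag (False here, since these are real
   registers), followed by the register class, the hardware encoding,
   and finally the register's index in the real-register universe --
   hence the dense, sequential numbering in the last argument. */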
ST_IN HReg hregARM64_X22 ( void ) { return mkHReg(False, HRcInt64,  22,  0); }
ST_IN HReg hregARM64_X23 ( void ) { return mkHReg(False, HRcInt64,  23,  1); }
ST_IN HReg hregARM64_X24 ( void ) { return mkHReg(False, HRcInt64,  24,  2); }
ST_IN HReg hregARM64_X25 ( void ) { return mkHReg(False, HRcInt64,  25,  3); }
ST_IN HReg hregARM64_X26 ( void ) { return mkHReg(False, HRcInt64,  26,  4); }
ST_IN HReg hregARM64_X27 ( void ) { return mkHReg(False, HRcInt64,  27,  5); }
ST_IN HReg hregARM64_X28 ( void ) { return mkHReg(False, HRcInt64,  28,  6); }

ST_IN HReg hregARM64_X0  ( void ) { return mkHReg(False, HRcInt64,   0,  7); }
ST_IN HReg hregARM64_X1  ( void ) { return mkHReg(False, HRcInt64,   1,  8); }
ST_IN HReg hregARM64_X2  ( void ) { return mkHReg(False, HRcInt64,   2,  9); }
ST_IN HReg hregARM64_X3  ( void ) { return mkHReg(False, HRcInt64,   3, 10); }
ST_IN HReg hregARM64_X4  ( void ) { return mkHReg(False, HRcInt64,   4, 11); }
ST_IN HReg hregARM64_X5  ( void ) { return mkHReg(False, HRcInt64,   5, 12); }
ST_IN HReg hregARM64_X6  ( void ) { return mkHReg(False, HRcInt64,   6, 13); }
ST_IN HReg hregARM64_X7  ( void ) { return mkHReg(False, HRcInt64,   7, 14); }

ST_IN HReg hregARM64_Q16 ( void ) { return mkHReg(False, HRcVec128, 16, 15); }
ST_IN HReg hregARM64_Q17 ( void ) { return mkHReg(False, HRcVec128, 17, 16); }
ST_IN HReg hregARM64_Q18 ( void ) { return mkHReg(False, HRcVec128, 18, 17); }
ST_IN HReg hregARM64_Q19 ( void ) { return mkHReg(False, HRcVec128, 19, 18); }
ST_IN HReg hregARM64_Q20 ( void ) { return mkHReg(False, HRcVec128, 20, 19); }

ST_IN HReg hregARM64_D8  ( void ) { return mkHReg(False, HRcFlt64,   8, 20); }
ST_IN HReg hregARM64_D9  ( void ) { return mkHReg(False, HRcFlt64,   9, 21); }
ST_IN HReg hregARM64_D10 ( void ) { return mkHReg(False, HRcFlt64,  10, 22); }
ST_IN HReg hregARM64_D11 ( void ) { return mkHReg(False, HRcFlt64,  11, 23); }
ST_IN HReg hregARM64_D12 ( void ) { return mkHReg(False, HRcFlt64,  12, 24); }
ST_IN HReg hregARM64_D13 ( void ) { return mkHReg(False, HRcFlt64,  13, 25); }

ST_IN HReg hregARM64_X8  ( void ) { return mkHReg(False, HRcInt64,   8, 26); }
ST_IN HReg hregARM64_X9  ( void ) { return mkHReg(False, HRcInt64,   9, 27); }
ST_IN HReg hregARM64_X21 ( void ) { return mkHReg(False, HRcInt64,  21, 28); }
// This is the integer register with encoding 31.  Be *very* careful how
// you use it, since its meaning depends on the instruction, and even on
// the position within the instruction, at which it appears.  It denotes
// either the zero register or the stack pointer.
ST_IN HReg hregARM64_XZR_XSP ( void ) { return mkHReg(False, HRcInt64, 31, 29); }
extern UInt ppHRegARM64 ( HReg );

/* Number of registers used for arg passing in function calls */
#define ARM64_N_ARGREGS 8   /* x0 .. x7 */
/* --------- Condition codes. --------- */

ARM64cc_EQ  = 0,  /* equal                          : Z=1 */
ARM64cc_NE  = 1,  /* not equal                      : Z=0 */

ARM64cc_CS  = 2,  /* >=u (higher or same)           : C=1 */
ARM64cc_CC  = 3,  /* <u  (lower)                    : C=0 */

ARM64cc_MI  = 4,  /* minus (negative)               : N=1 */
ARM64cc_PL  = 5,  /* plus (zero or +ve)             : N=0 */

ARM64cc_VS  = 6,  /* overflow                       : V=1 */
ARM64cc_VC  = 7,  /* no overflow                    : V=0 */

ARM64cc_HI  = 8,  /* >u   (higher)                  : C=1 && Z=0 */
ARM64cc_LS  = 9,  /* <=u  (lower or same)           : !(C=1 && Z=0) */

ARM64cc_GE  = 10, /* >=s (signed greater or equal)  : N=V */
ARM64cc_LT  = 11, /* <s  (signed less than)         : !(N=V) */

ARM64cc_GT  = 12, /* >s  (signed greater)           : Z=0 && N=V */
ARM64cc_LE  = 13, /* <=s (signed less or equal)     : !(Z=0 && N=V) */

ARM64cc_AL  = 14, /* always (unconditional) */
ARM64cc_NV  = 15  /* in 64-bit mode also means "always" */
/* --------- Memory address expressions (amodes). --------- */

ARM64am_RI9  = 10, /* reg + simm9 */
ARM64am_RI12,      /* reg + uimm12 * szB (iow, scaled by access size) */
ARM64am_RR         /* reg1 + reg2 */

Int   simm9;   /* -256 .. +255 */

UInt  uimm12;  /* 0 .. 4095 */
UChar szB;     /* 1, 2, 4, 8 (16 ?) */
extern ARM64AMode* ARM64AMode_RI9  ( HReg reg, Int simm9 );
extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
extern ARM64AMode* ARM64AMode_RR   ( HReg base, HReg index );
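/* Example (illustrative sketch, not part of the interface): with the
   scaled RI12 form described above, the offset field counts in units of
   the access size, so a 64-bit access at [x21 + 64] could be described
   as:

      ARM64AMode* am  = ARM64AMode_RI12(hregARM64_X21(), 8, 8);
      // reg + uimm12 * szB  ==  x21 + 8*8  ==  x21 + 64

   whereas the RI9 form takes a byte offset directly:

      ARM64AMode* am2 = ARM64AMode_RI9(hregARM64_X21(), -8);   // x21 - 8
*/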
/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */

ARM64riA_I12 = 20, /* uimm12 << 0 or 12 only */

UShort imm12;  /* 0 .. 4095 */
UChar  shift;  /* 0 or 12 only */

extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
extern ARM64RIA* ARM64RIA_R   ( HReg );
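/* Example (illustrative sketch): the I12 form can hold a 12-bit constant
   either unshifted or shifted left by 12, so

      ARM64RIA* ri1 = ARM64RIA_I12(7, 0);    // the constant 7
      ARM64RIA* ri2 = ARM64RIA_I12(7, 12);   // 7 << 12 == 0x7000

   Per the comment above, any shift amount other than 0 or 12 is invalid. */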
/* --------- Reg or "bitfield" (logic immediate) operands --------- */

ARM64riL_I13 = 6, /* weird-o bitfield immediate, 13 bits in total */

UChar bitN;  /* 0 .. 1 */
UChar immR;  /* 0 .. 63 */
UChar immS;  /* 0 .. 63 */

extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
extern ARM64RIL* ARM64RIL_R   ( HReg );
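/* Example (illustrative sketch, assuming the standard AArch64 "logical
   immediate" decoding of the N:immS:immR fields): bitN=1, immR=0, immS=7
   denotes a run of eight consecutive ones in a 64-bit element, with no
   rotation, i.e. the constant 0xFF:

      ARM64RIL* ril = ARM64RIL_I13(1, 0, 7);   // 0x00000000000000FF
*/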
/* --------------- Reg or uimm6 operands --------------- */

ARM64ri6_I6 = 30, /* uimm6, 1 .. 63 only */

UInt imm6;  /* 1 .. 63 */

extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
extern ARM64RI6* ARM64RI6_R  ( HReg );
/* --------------------- Instructions --------------------- */

ARM64mul_PLAIN = 70, /* lo64(64 * 64) */
ARM64mul_ZX,         /* hi64(64 *u 64) */
ARM64mul_SX          /* hi64(64 *s 64) */

/* These characterise an integer-FP conversion, but don't imply any
   particular direction. */
ARM64cvt_F32_I32S = 80,
ARM64vecb_ADD64x2 = 120, ARM64vecb_ADD32x4,
ARM64vecb_ADD16x8,       ARM64vecb_ADD8x16,
ARM64vecb_SUB64x2,       ARM64vecb_SUB32x4,
ARM64vecb_SUB16x8,       ARM64vecb_SUB8x16,
ARM64vecb_MUL16x8,       ARM64vecb_MUL8x16,
ARM64vecb_FADD64x2,      ARM64vecb_FADD32x4,
ARM64vecb_FSUB64x2,      ARM64vecb_FSUB32x4,
ARM64vecb_FMUL64x2,      ARM64vecb_FMUL32x4,
ARM64vecb_FDIV64x2,      ARM64vecb_FDIV32x4,
ARM64vecb_FMAX64x2,      ARM64vecb_FMAX32x4,
ARM64vecb_FMIN64x2,      ARM64vecb_FMIN32x4,
ARM64vecb_UMAX16x8,      ARM64vecb_UMAX8x16,
ARM64vecb_UMIN16x8,      ARM64vecb_UMIN8x16,
ARM64vecb_SMAX16x8,      ARM64vecb_SMAX8x16,
ARM64vecb_SMIN16x8,      ARM64vecb_SMIN8x16,
ARM64vecb_CMEQ64x2,      ARM64vecb_CMEQ32x4,
ARM64vecb_CMEQ16x8,      ARM64vecb_CMEQ8x16,
ARM64vecb_CMHI64x2,      ARM64vecb_CMHI32x4,  /* >u */
ARM64vecb_CMHI16x8,      ARM64vecb_CMHI8x16,
ARM64vecb_CMGT64x2,      ARM64vecb_CMGT32x4,  /* >s */
ARM64vecb_CMGT16x8,      ARM64vecb_CMGT8x16,
ARM64vecb_FCMEQ64x2,     ARM64vecb_FCMEQ32x4,
ARM64vecb_FCMGE64x2,     ARM64vecb_FCMGE32x4,
ARM64vecb_FCMGT64x2,     ARM64vecb_FCMGT32x4,
ARM64vecb_FCMGE16x8,     ARM64vecb_FCMGT16x8,
ARM64vecb_UZP164x2,      ARM64vecb_UZP132x4,
ARM64vecb_UZP116x8,      ARM64vecb_UZP18x16,
ARM64vecb_UZP264x2,      ARM64vecb_UZP232x4,
ARM64vecb_UZP216x8,      ARM64vecb_UZP28x16,
ARM64vecb_ZIP132x4,      ARM64vecb_ZIP116x8,
ARM64vecb_ZIP18x16,      ARM64vecb_ZIP232x4,
ARM64vecb_ZIP216x8,      ARM64vecb_ZIP28x16,
ARM64vecb_UMULL4SHH,     ARM64vecb_UMULL8HBB,
ARM64vecb_SMULL4SHH,     ARM64vecb_SMULL8HBB,
ARM64vecb_SQADD64x2,     ARM64vecb_SQADD32x4,
ARM64vecb_SQADD16x8,     ARM64vecb_SQADD8x16,
ARM64vecb_UQADD64x2,     ARM64vecb_UQADD32x4,
ARM64vecb_UQADD16x8,     ARM64vecb_UQADD8x16,
ARM64vecb_SQSUB64x2,     ARM64vecb_SQSUB32x4,
ARM64vecb_SQSUB16x8,     ARM64vecb_SQSUB8x16,
ARM64vecb_UQSUB64x2,     ARM64vecb_UQSUB32x4,
ARM64vecb_UQSUB16x8,     ARM64vecb_UQSUB8x16,
ARM64vecb_SQDMULL2DSS,
ARM64vecb_SQDMULL4SHH,
ARM64vecb_SQDMULH32x4,
ARM64vecb_SQDMULH16x8,
ARM64vecb_SQRDMULH32x4,
ARM64vecb_SQRDMULH16x8,
ARM64vecb_SQSHL64x2,     ARM64vecb_SQSHL32x4,
ARM64vecb_SQSHL16x8,     ARM64vecb_SQSHL8x16,
ARM64vecb_UQSHL64x2,     ARM64vecb_UQSHL32x4,
ARM64vecb_UQSHL16x8,     ARM64vecb_UQSHL8x16,
ARM64vecb_SQRSHL64x2,    ARM64vecb_SQRSHL32x4,
ARM64vecb_SQRSHL16x8,    ARM64vecb_SQRSHL8x16,
ARM64vecb_UQRSHL64x2,    ARM64vecb_UQRSHL32x4,
ARM64vecb_UQRSHL16x8,    ARM64vecb_UQRSHL8x16,
ARM64vecb_SSHL64x2,      ARM64vecb_SSHL32x4,
ARM64vecb_SSHL16x8,      ARM64vecb_SSHL8x16,
ARM64vecb_USHL64x2,      ARM64vecb_USHL32x4,
ARM64vecb_USHL16x8,      ARM64vecb_USHL8x16,
ARM64vecb_SRSHL64x2,     ARM64vecb_SRSHL32x4,
ARM64vecb_SRSHL16x8,     ARM64vecb_SRSHL8x16,
ARM64vecb_URSHL64x2,     ARM64vecb_URSHL32x4,
ARM64vecb_URSHL16x8,     ARM64vecb_URSHL8x16,
ARM64vecb_FRECPS64x2,    ARM64vecb_FRECPS32x4,
ARM64vecb_FRSQRTS64x2,   ARM64vecb_FRSQRTS32x4,
ARM64vecmo_SUQADD64x2 = 300, ARM64vecmo_SUQADD32x4,
ARM64vecmo_SUQADD16x8,       ARM64vecmo_SUQADD8x16,
ARM64vecmo_USQADD64x2,       ARM64vecmo_USQADD32x4,
ARM64vecmo_USQADD16x8,       ARM64vecmo_USQADD8x16,
ARM64vecu_FNEG64x2 = 350, ARM64vecu_FNEG32x4,   ARM64vecu_FNEG16x8,
ARM64vecu_FABS64x2,       ARM64vecu_FABS32x4,   ARM64vecu_FABS16x8,
ARM64vecu_ABS64x2,        ARM64vecu_ABS32x4,
ARM64vecu_ABS16x8,        ARM64vecu_ABS8x16,
ARM64vecu_CLS32x4,        ARM64vecu_CLS16x8,    ARM64vecu_CLS8x16,
ARM64vecu_CLZ32x4,        ARM64vecu_CLZ16x8,    ARM64vecu_CLZ8x16,
ARM64vecu_REV3216B,       ARM64vecu_REV328H,
ARM64vecu_REV6416B,       ARM64vecu_REV648H,    ARM64vecu_REV644S,
ARM64vecu_URECPE32x4,
ARM64vecu_URSQRTE32x4,
ARM64vecu_FRECPE64x2,     ARM64vecu_FRECPE32x4,
ARM64vecu_FRSQRTE64x2,    ARM64vecu_FRSQRTE32x4,
ARM64vecu_FSQRT64x2,      ARM64vecu_FSQRT32x4,  ARM64vecu_FSQRT16x8,
ARM64vecshi_USHR64x2 = 400, ARM64vecshi_USHR32x4,
ARM64vecshi_USHR16x8,       ARM64vecshi_USHR8x16,
ARM64vecshi_SSHR64x2,       ARM64vecshi_SSHR32x4,
ARM64vecshi_SSHR16x8,       ARM64vecshi_SSHR8x16,
ARM64vecshi_SHL64x2,        ARM64vecshi_SHL32x4,
ARM64vecshi_SHL16x8,        ARM64vecshi_SHL8x16,
/* These narrowing shifts zero out the top half of the destination
   register. */
ARM64vecshi_SQSHRN2SD,      ARM64vecshi_SQSHRN4HS,   ARM64vecshi_SQSHRN8BH,
ARM64vecshi_UQSHRN2SD,      ARM64vecshi_UQSHRN4HS,   ARM64vecshi_UQSHRN8BH,
ARM64vecshi_SQSHRUN2SD,     ARM64vecshi_SQSHRUN4HS,  ARM64vecshi_SQSHRUN8BH,
ARM64vecshi_SQRSHRN2SD,     ARM64vecshi_SQRSHRN4HS,  ARM64vecshi_SQRSHRN8BH,
ARM64vecshi_UQRSHRN2SD,     ARM64vecshi_UQRSHRN4HS,  ARM64vecshi_UQRSHRN8BH,
ARM64vecshi_SQRSHRUN2SD,    ARM64vecshi_SQRSHRUN4HS, ARM64vecshi_SQRSHRUN8BH,
/* Saturating left shifts, of various flavours. */
ARM64vecshi_UQSHL64x2,      ARM64vecshi_UQSHL32x4,
ARM64vecshi_UQSHL16x8,      ARM64vecshi_UQSHL8x16,
ARM64vecshi_SQSHL64x2,      ARM64vecshi_SQSHL32x4,
ARM64vecshi_SQSHL16x8,      ARM64vecshi_SQSHL8x16,
ARM64vecshi_SQSHLU64x2,     ARM64vecshi_SQSHLU32x4,
ARM64vecshi_SQSHLU16x8,     ARM64vecshi_SQSHLU8x16,
ARM64in_MovI,        /* int reg-reg move */
ARM64in_LdSt32,      /* w/ ZX loads */
ARM64in_LdSt16,      /* w/ ZX loads */
ARM64in_LdSt8,       /* w/ ZX loads */
ARM64in_XDirect,     /* direct transfer to GA */
ARM64in_XIndir,      /* indirect transfer to GA */
ARM64in_XAssisted,   /* assisted transfer to GA */
ARM64in_AddToSP,     /* move SP by small, signed constant */
ARM64in_FromSP,      /* move SP to integer register */
ARM64in_LdrEX,       /* load exclusive, single register */
ARM64in_StrEX,       /* store exclusive, single register */
ARM64in_LdrEXP,      /* load exclusive, register pair, 2x64-bit only */
ARM64in_StrEXP,      /* store exclusive, register pair, 2x64-bit only */
/* ARM64in_V*: scalar ops involving vector registers */
ARM64in_VLdStH,      /* ld/st to/from low 16 bits of vec reg, imm offset */
ARM64in_VLdStS,      /* ld/st to/from low 32 bits of vec reg, imm offset */
ARM64in_VLdStD,      /* ld/st to/from low 64 bits of vec reg, imm offset */
ARM64in_VLdStQ,      /* ld/st to/from all 128 bits of vec reg, no offset */
ARM64in_VCvtSD,      /* scalar 32 bit FP <--> 64 bit FP */
ARM64in_VCvtHS,      /* scalar 16 bit FP <--> 32 bit FP */
ARM64in_VCvtHD,      /* scalar 16 bit FP <--> 64 bit FP */
/* ARM64in_V*V: vector ops on vector registers */
ARM64in_VDfromX,     /* Move an Xreg to a Dreg */
ARM64in_VQfromX,     /* Move an Xreg to a Qreg lo64, and zero hi64 */
ARM64in_VQfromXX,    /* Move 2 Xregs to a Qreg */
ARM64in_VXfromQ,     /* Move half a Qreg to an Xreg */
ARM64in_VXfromDorS,  /* Move Dreg or Sreg(ZX) to an Xreg */
ARM64in_VMov,        /* vector reg-reg move, 16, 8 or 4 bytes */
ARM64in_EvCheck,     /* Event check */
ARM64in_ProfInc      /* 64-bit profile counter increment */
/* Destinations are on the LEFT (first operand) */

/* --- INTEGER INSTRUCTIONS --- */
/* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
/* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
/* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
/* 64 bit AND/OR/XOR/ADD/SUB, reg, reg-with-imm-shift */
ARM64ShiftOp shiftOp;
UChar        amt;      /* 1 to 63 only */
/* 64 bit TST reg, reg or bimm (AND and set flags) */
/* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
/* NOT/NEG/CLZ, 64 bit only */
/* CSET -- Convert a condition code to a 64-bit value (0 or 1). */
/* MOV dst, src -- reg-reg move for integer registers */
/* Pseudo-insn; make a 64-bit immediate */
/* 64-bit load or store */
/* zx-32-to-64-bit load, or 32-bit store */
/* zx-16-to-64-bit load, or 16-bit store */
/* zx-8-to-64-bit load, or 8-bit store */
/* Update the guest PC value, then exit requesting to chain
   to it.  May be conditional.  Urr, use of Addr64 implicitly
   assumes that wordsize(guest) == wordsize(host). */
Addr64        dstGA;    /* next guest address */
ARM64AMode*   amPC;     /* amode in guest state for PC */
ARM64CondCode cond;     /* can be ARM64cc_AL */
Bool          toFastEP; /* chain to the slow or fast point? */
/* Boring transfer to a guest address not known at JIT time.
   Not chainable.  May be conditional. */
ARM64CondCode cond;     /* can be ARM64cc_AL */
/* Assisted transfer to a guest address, most general case.
   Not chainable.  May be conditional. */
ARM64CondCode cond;     /* can be ARM64cc_AL */
/* CSEL: dst = if cond then argL else argR.  cond may be anything. */
/* Pseudo-insn.  Call target (an absolute address), on given
   condition (which could be ARM64cc_AL). */
RetLoc rloc;     /* where the return value will be */
Int    nArgRegs; /* # regs carrying args: 0 .. 8 */
/* move SP by small, signed constant */
Int simm; /* needs to be 0 % 16 and in the range
             -4095 .. 4095 inclusive */
/* move SP to integer register */
/* Integer multiply, with 3 variants:
     (PLAIN) lo64(64 * 64)
     (ZX)    hi64(64 *u 64)
     (SX)    hi64(64 *s 64)
*/
/* LDXR{,H,B} x2, [x4] */
Int szB; /* 1, 2, 4 or 8 */
/* STXR{,H,B} w0, x2, [x4] */
Int szB; /* 1, 2, 4 or 8 */
/* LDXP x2, x3, [x4].  This is 2x64-bit only. */
/* STXP w0, x2, x3, [x4].  This is 2x64-bit only. */
/* x1 = CAS(x3(addr), x5(expected) -> x7(new)),
   where x1[8*szB-1 : 0] == x5[8*szB-1 : 0] indicates success,
         x1[8*szB-1 : 0] != x5[8*szB-1 : 0] indicates failure.
   Uses x8 as scratch (but that's not allocatable).
   Hence: RD x3, x5, x7; WR x1

   (szB=4) and x8, x5, #0xFFFFFFFF
   (szB=2) and x8, x5, #0xFFFF
   (szB=1) and x8, x5, #0xFF
   -- x8 is correctly zero-extended expected value
   -- x1 is correctly zero-extended actual value
   -- if branch taken, failure; x1[8*szB-1 : 0] holds old value
   -- if store successful, x8==0
   -- if store failed, branch back and try again.
*/
Int szB; /* 1, 2, 4 or 8 */
/* Doubleword CAS, 2 x 32 bit or 2 x 64 bit
   x0(oldLSW),x1(oldMSW)
      = DCAS(x2(addr), x4(expectedLSW),x5(expectedMSW)
                    -> x6(newLSW),x7(newMSW))
   and trashes x8, x9 and x3
*/
Int szB; /* 4 or 8 */
/* Mem fence.  An insn which fences all loads and stores as
   much as possible before continuing.  On ARM64 we emit the
   sequence "dsb sy ; dmb sy ; isb sy", which is probably
   total nuclear overkill, but better safe than sorry. */
/* A CLREX instruction. */
/* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
/* ld/st to/from low 16 bits of vec reg, imm offset */
UInt uimm12;  /* 0 .. 8190 inclusive, 0 % 2 */
/* ld/st to/from low 32 bits of vec reg, imm offset */
UInt uimm12;  /* 0 .. 16380 inclusive, 0 % 4 */
/* ld/st to/from low 64 bits of vec reg, imm offset */
UInt uimm12;  /* 0 .. 32760 inclusive, 0 % 8 */
/* ld/st to/from all 128 bits of vec reg, no offset */
/* Scalar conversion of int to float. */
HReg rD;   // dst, a D or S register
HReg rS;   // src, a W or X register
/* Scalar conversion of float to int, w/ specified RM. */
HReg  rD;    // dst, a W or X register
HReg  rS;    // src, a D or S register
UChar armRM; // ARM encoded RM:
             // 00=nearest, 01=+inf, 10=-inf, 11=zero
/* Convert between 32-bit and 64-bit FP values (both ways). (FCVT) */
Bool sToD; /* True: F32->F64.  False: F64->F32 */
/* Convert between 16-bit and 32-bit FP values (both ways). (FCVT) */
Bool hToS; /* True: F16->F32.  False: F32->F16 */
/* Convert between 16-bit and 64-bit FP values (both ways). (FCVT) */
Bool hToD; /* True: F16->F64.  False: F64->F16 */
/* 64-bit FP unary */
/* 32-bit FP unary */
/* 16-bit FP unary */
/* 64-bit FP binary arithmetic */
/* 32-bit FP binary arithmetic */
/* 16-bit FP binary arithmetic */
/* 64-bit FP ternary arithmetic */
/* 32-bit FP ternary arithmetic */
/* 64-bit FP compare */
/* 32-bit FP compare */
/* 16-bit FP compare */
/* 32- or 64-bit FP conditional select */
/* Move a 32-bit value to/from the FPCR */
/* Move a 32-bit value to/from the FPSR */
/* binary vector operation on vector registers */
/* binary vector operation on vector registers.
   Dst reg is also a src. */
/* unary vector operation on vector registers */
/* vector narrowing, Q -> Q.  Result goes in the bottom half
   of dst and the top half is zeroed out.  Iow one of the
   XTN family of narrowing ops. */
UInt dszBlg2; // 0: 16to8_x8  1: 32to16_x4  2: 64to32_x2
/* Vector shift by immediate.  For left shifts, |amt| must be
   >= 0 and < implied lane size of |op|.  For right shifts,
   |amt| must be > 0 and <= implied lane size of |op|.  Shifts
   beyond these ranges are not allowed. */
ARM64VecShiftImmOp op;
UShort imm;    /* Same 1-bit-per-byte encoding as IR */
UInt   laneNo; /* either 0 or 1 */
/* MOV dst, src -- reg-reg move for vector registers */
UInt szB; // 16=mov qD,qS;  8=mov dD,dS;  4=mov sD,sS
ARM64AMode* amCounter;
ARM64AMode* amFailAddr;
/* No fields.  The address of the counter to inc is
   installed later, post-translation, by patching it in,
   as it is not known at translation time. */
extern ARM64Instr* ARM64Instr_Arith     ( HReg, HReg, ARM64RIA*, Bool isAdd );
extern ARM64Instr* ARM64Instr_Cmp       ( HReg, ARM64RIA*, Bool is64 );
extern ARM64Instr* ARM64Instr_Logic     ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
extern ARM64Instr* ARM64Instr_RRS       ( HReg, HReg, HReg, ARM64ShiftOp,
                                          UChar amt, ARM64RRSOp mainOp );
extern ARM64Instr* ARM64Instr_Test      ( HReg, ARM64RIL* );
extern ARM64Instr* ARM64Instr_Shift     ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
extern ARM64Instr* ARM64Instr_Unary     ( HReg, HReg, ARM64UnaryOp );
extern ARM64Instr* ARM64Instr_Set64     ( HReg, ARM64CondCode );
extern ARM64Instr* ARM64Instr_MovI      ( HReg, HReg );
extern ARM64Instr* ARM64Instr_Imm64     ( HReg, ULong );
extern ARM64Instr* ARM64Instr_LdSt64    ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt32    ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt16    ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt8     ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_XDirect   ( Addr64 dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond, Bool toFastEP );
extern ARM64Instr* ARM64Instr_XIndir    ( HReg dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond, IRJumpKind jk );
extern ARM64Instr* ARM64Instr_CSel      ( HReg dst, HReg argL, HReg argR,
                                          ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_Call      ( ARM64CondCode, Addr64, Int nArgRegs,
                                          RetLoc rloc );
extern ARM64Instr* ARM64Instr_AddToSP   ( Int simm );
extern ARM64Instr* ARM64Instr_FromSP    ( HReg dst );
extern ARM64Instr* ARM64Instr_Mul       ( HReg dst, HReg argL, HReg argR,
                                          ARM64MulOp op );
extern ARM64Instr* ARM64Instr_LdrEX     ( Int szB );
extern ARM64Instr* ARM64Instr_StrEX     ( Int szB );
extern ARM64Instr* ARM64Instr_LdrEXP    ( void );
extern ARM64Instr* ARM64Instr_StrEXP    ( void );
extern ARM64Instr* ARM64Instr_CAS       ( Int szB );
extern ARM64Instr* ARM64Instr_CASP      ( Int szB );
extern ARM64Instr* ARM64Instr_MFence    ( void );
extern ARM64Instr* ARM64Instr_ClrEX     ( void );
extern ARM64Instr* ARM64Instr_VLdStH    ( Bool isLoad, HReg sD, HReg rN,
                                          UInt uimm12 /* 0 .. 8190, 0 % 2 */ );
extern ARM64Instr* ARM64Instr_VLdStS    ( Bool isLoad, HReg sD, HReg rN,
                                          UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
extern ARM64Instr* ARM64Instr_VLdStD    ( Bool isLoad, HReg dD, HReg rN,
                                          UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
extern ARM64Instr* ARM64Instr_VLdStQ    ( Bool isLoad, HReg rQ, HReg rN );
extern ARM64Instr* ARM64Instr_VCvtI2F   ( ARM64CvtOp how, HReg rD, HReg rS );
extern ARM64Instr* ARM64Instr_VCvtF2I   ( ARM64CvtOp how, HReg rD, HReg rS,
                                          UChar armRM, Bool tiesToAway );
extern ARM64Instr* ARM64Instr_VCvtSD    ( Bool sToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHS    ( Bool hToS, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VCvtHD    ( Bool hToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryD   ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryS   ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryH   ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VBinD     ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinS     ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinH     ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VTriD     ( ARM64FpTriOp op, HReg dst,
                                          HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VTriS     ( ARM64FpTriOp op, HReg dst,
                                          HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VCmpD     ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpS     ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpH     ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VFCSel    ( HReg dst, HReg argL, HReg argR,
                                          ARM64CondCode cond, Bool isD );
extern ARM64Instr* ARM64Instr_FPCR      ( Bool toFPCR, HReg iReg );
extern ARM64Instr* ARM64Instr_FPSR      ( Bool toFPSR, HReg iReg );
extern ARM64Instr* ARM64Instr_VBinV     ( ARM64VecBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VModifyV  ( ARM64VecModifyOp, HReg, HReg );
extern ARM64Instr* ARM64Instr_VUnaryV   ( ARM64VecUnaryOp op, HReg, HReg );
extern ARM64Instr* ARM64Instr_VNarrowV  ( ARM64VecNarrowOp op, UInt dszBlg2,
                                          HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VShiftImmV( ARM64VecShiftImmOp op,
                                          HReg dst, HReg src, UInt amt );
extern ARM64Instr* ARM64Instr_VExtV     ( HReg dst,
                                          HReg srcLo, HReg srcHi, UInt amtB );
extern ARM64Instr* ARM64Instr_VImmQ     ( HReg, UShort );
extern ARM64Instr* ARM64Instr_VDfromX   ( HReg rD, HReg rX );
extern ARM64Instr* ARM64Instr_VQfromX   ( HReg rQ, HReg rXlo );
extern ARM64Instr* ARM64Instr_VQfromXX  ( HReg rQ, HReg rXhi, HReg rXlo );
extern ARM64Instr* ARM64Instr_VXfromQ   ( HReg rX, HReg rQ, UInt laneNo );
extern ARM64Instr* ARM64Instr_VXfromDorS( HReg rX, HReg rDorS, Bool fromD );
extern ARM64Instr* ARM64Instr_VMov      ( UInt szB, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_EvCheck   ( ARM64AMode* amCounter,
                                          ARM64AMode* amFailAddr );
extern ARM64Instr* ARM64Instr_ProfInc   ( void );
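/* Example (illustrative sketch only, using constructors declared above):
   materialise a 64-bit constant in x9 and store it at [x21 + 0]:

      ARM64Instr* i1 = ARM64Instr_Imm64 ( hregARM64_X9(), 0x1122334455667788ULL );
      ARM64Instr* i2 = ARM64Instr_LdSt64( False/*store*/, hregARM64_X9(),
                                          ARM64AMode_RI12(hregARM64_X21(), 0, 8) );

   The Bool in LdSt64 is the isLoad flag, so False here selects a store. */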
extern void ppARM64Instr ( const ARM64Instr* );

/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void getRegUsage_ARM64Instr ( HRegUsage*, const ARM64Instr*, Bool );
extern void mapRegs_ARM64Instr     ( HRegRemap*, ARM64Instr*, Bool );
extern Int emit_ARM64Instr ( /*MB_MOD*/Bool* is_profInc,
                             UChar* buf, Int nbuf, const ARM64Instr* i,
                             VexEndness endness_host,
                             const void* disp_cp_chain_me_to_slowEP,
                             const void* disp_cp_chain_me_to_fastEP,
                             const void* disp_cp_xindir,
                             const void* disp_cp_xassisted );
extern void genSpill_ARM64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern ARM64Instr* genMove_ARM64 ( HReg from, HReg to, Bool );
extern const RRegUniverse* getRRegUniverse_ARM64 ( void );

extern HInstrArray* iselSB_ARM64 ( const IRSB*,
                                   Int offs_Host_EvC_Counter,
                                   Int offs_Host_EvC_FailAddr,
                                   Bool chainingAllowed,
/* How big is an event check?  This is kind of a kludge because it
   depends on the offsets of host_EvC_FAILADDR and
   host_EvC_COUNTER. */
extern Int evCheckSzB_ARM64 (void);

/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
                                          void* place_to_chain,
                                          const void* disp_cp_chain_me_EXPECTED,
                                          const void* place_to_jump_to );
extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
                                            void* place_to_unchain,
                                            const void* place_to_jump_to_EXPECTED,
                                            const void* disp_cp_chain_me );
/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
                                          void* place_to_patch,
                                          const ULong* location_of_counter );
#endif /* ndef __VEX_HOST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                   host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/