/* -*- mode: C; c-basic-offset: 3; -*- */

/*---------------------------------------------------------------*/
/*--- begin                                  host_s390_defs.c ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright IBM Corp. 2010-2020
   Copyright (C) 2012-2017  Florian Krohm   (britzel@acm.org)

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/

/* Contributed by Florian Krohm */

#include "libvex_basictypes.h"
#include "libvex.h"
#include "libvex_trc_values.h"
#include "libvex_s390x_common.h"

#include "main_util.h"
#include "main_globals.h"
#include "host_generic_regs.h"
#include "host_s390_defs.h"
#include "s390_disasm.h"
#include "guest_s390_defs.h"    /* S390X_GUEST_OFFSET */

/*------------------------------------------------------------*/
/*--- Forward declarations                                  ---*/
/*------------------------------------------------------------*/

static void s390_insn_map_regs(HRegRemap *, s390_insn *);
static void s390_insn_get_reg_usage(HRegUsage *u, const s390_insn *);
static UInt s390_tchain_load64_len(void);


/*------------------------------------------------------------*/
/*--- Registers                                             ---*/
/*------------------------------------------------------------*/

/* A mapping from register number to register index */
static Int gpr_index[16];  // GPR regno -> register index
static Int vr_index[32];   // VR regno  -> register index

HReg
s390_hreg_gpr(UInt regno)
{
   Int ix = gpr_index[regno];
   vassert(ix >= 0);
   return mkHReg(/*virtual*/False, HRcInt64, regno, ix);
}

HReg
s390_hreg_fpr(UInt regno)
{
   Int ix = vr_index[regno];
   vassert(ix >= 0);
   return mkHReg(/*virtual*/False, HRcFlt64, regno, ix);
}

HReg
s390_hreg_vr(UInt regno)
{
   Int ix = vr_index[regno];
   vassert(ix >= 0);
   return mkHReg(/*virtual*/False, HRcVec128, regno, ix);
}

static __inline__ UInt
hregNumber(HReg reg)
{
   return hregEncoding(reg);
}

/* Decompile the given register into a static buffer and return it */
const HChar *
s390_hreg_as_string(HReg reg)
{
   static HChar buf[10];

   static const HChar ireg_names[16][5] = {
      "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
      "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15"
   };

   static const HChar freg_names[16][5] = {
      "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
      "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15"
   };

   static const HChar vreg_names[32][5] = {
      "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
      "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
      "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
      "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31"
   };

   /* Be generic for all virtual regs. */
   if (hregIsVirtual(reg)) {
      UInt r = hregIndex(reg);

      switch (hregClass(reg)) {
      case HRcInt64:  vex_sprintf(buf, "%%vR%u", r); break;
      case HRcFlt64:  vex_sprintf(buf, "%%vF%u", r); break;
      case HRcVec128: vex_sprintf(buf, "%%vV%u", r); break;
      default: goto fail;
      }
      return buf;
   }

   /* But specific for real regs. */
   UInt r = hregNumber(reg);
   switch (hregClass(reg)) {
   case HRcInt64:  vassert(r < 16); return ireg_names[r];
   case HRcFlt64:  vassert(r < 16); return freg_names[r];
   case HRcVec128: vassert(r < 32); return vreg_names[r];
   default: goto fail;
   }

 fail: vpanic("s390_hreg_as_string");
}

/* Return the real register that holds the guest state pointer */
HReg
s390_hreg_guest_state_pointer(void)
{
   return s390_hreg_gpr(S390_REGNO_GUEST_STATE_POINTER);
}


/* Return the real register that holds the stack pointer */
HReg
s390_hreg_stack_pointer(void)
{
   return s390_hreg_gpr(S390_REGNO_STACK_POINTER);
}

/* Is VALUE within the domain of a 20-bit signed integer. */
static __inline__ Bool
fits_signed_20bit(Int value)
{
   UInt uval = value;
   return ((Int)(uval << 12) >> 12) == value;
}


/* Is VALUE within the domain of a 12-bit unsigned integer. */
static __inline__ Bool
fits_unsigned_12bit(Int value)
{
   return (value & 0xFFF) == value;
}
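
/* Worked examples: for the 20-bit check, 0x7FFFF << 12 == 0x7FFFF000, which
   shifts back (arithmetically) to 0x7FFFF, so the value is accepted; 0x80000
   shifts back to -524288 and is rejected.  For the 12-bit check, any value
   with a bit set above bit 11 (e.g. 0x1000) fails the mask comparison. */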

/*------------------------------------------------------------*/
/*--- Addressing modes (amodes)                             ---*/
/*------------------------------------------------------------*/

/* Construct a b12 amode. */
s390_amode *
s390_amode_b12(Int d, HReg b)
{
   s390_amode *am = LibVEX_Alloc_inline(sizeof(s390_amode));

   vassert(fits_unsigned_12bit(d));

   am->tag = S390_AMODE_B12;
   am->d = d;
   am->b = b;
   am->x = s390_hreg_gpr(0);  /* hregNumber(am->x) == 0 */

   return am;
}


/* Construct a b20 amode. */
s390_amode *
s390_amode_b20(Int d, HReg b)
{
   s390_amode *am = LibVEX_Alloc_inline(sizeof(s390_amode));

   vassert(fits_signed_20bit(d));

   am->tag = S390_AMODE_B20;
   am->d = d;
   am->b = b;
   am->x = s390_hreg_gpr(0);  /* hregNumber(am->x) == 0 */

   return am;
}


/* Construct a bx12 amode. */
s390_amode *
s390_amode_bx12(Int d, HReg b, HReg x)
{
   s390_amode *am = LibVEX_Alloc_inline(sizeof(s390_amode));

   vassert(fits_unsigned_12bit(d));
   vassert(hregNumber(b) != 0);
   vassert(hregNumber(x) != 0);

   am->tag = S390_AMODE_BX12;
   am->d = d;
   am->b = b;
   am->x = x;

   return am;
}


/* Construct a bx20 amode. */
s390_amode *
s390_amode_bx20(Int d, HReg b, HReg x)
{
   s390_amode *am = LibVEX_Alloc_inline(sizeof(s390_amode));

   vassert(fits_signed_20bit(d));
   vassert(hregNumber(b) != 0);
   vassert(hregNumber(x) != 0);

   am->tag = S390_AMODE_BX20;
   am->d = d;
   am->b = b;
   am->x = x;

   return am;
}

/* Construct an AMODE for accessing the guest state at OFFSET.
   OFFSET can be at most 3 * sizeof(VexGuestS390XState) + LibVEX_N_SPILL_BYTES
   which may be too large for a B12 addressing mode.
   Use a B20 amode as a fallback which will be safe for any offset. */
s390_amode *
s390_amode_for_guest_state(Int offset)
{
   if (fits_unsigned_12bit(offset))
      return s390_amode_b12(offset, s390_hreg_guest_state_pointer());

   if (fits_signed_20bit(offset))
      return s390_amode_b20(offset, s390_hreg_guest_state_pointer());

   vpanic("invalid guest state offset");
}
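
/* For example, a guest-state field at offset 160 fits the B12 form and is
   rendered as "160(%r13)" (r13 being the guest state pointer), whereas an
   offset of 8000 exceeds the 12-bit range and falls back to the B20 form. */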

/* Construct an AMODE for accessing the stack at OFFSET relative to the
   stack pointer.
   OFFSET can be at most 3 * sizeof(VexGuestS390XState) + LibVEX_N_SPILL_BYTES
   which may be too large for a B12 addressing mode.
   Use a B20 amode as a fallback which will be safe for any offset. */
s390_amode *
s390_amode_for_stack_pointer(Int offset)
{
   if (fits_unsigned_12bit(offset))
      return s390_amode_b12(offset, s390_hreg_stack_pointer());

   if (fits_signed_20bit(offset))
      return s390_amode_b20(offset, s390_hreg_stack_pointer());

   vpanic("invalid stack pointer offset");
}

/* Decompile the given amode into a static buffer and return it. */
const HChar *
s390_amode_as_string(const s390_amode *am)
{
   static HChar buf[30];
   HChar *p;

   buf[0] = 0;
   p = buf;

   switch (am->tag) {
   case S390_AMODE_B12:
   case S390_AMODE_B20:
      vex_sprintf(p, "%d(%s)", am->d, s390_hreg_as_string(am->b));
      break;

   case S390_AMODE_BX12:
   case S390_AMODE_BX20:
      /* s390_hreg_as_string returns pointer to local buffer. Need to
         split this into two printfs */
      p += vex_sprintf(p, "%d(%s,", am->d, s390_hreg_as_string(am->x));
      vex_sprintf(p, "%s)", s390_hreg_as_string(am->b));
      break;

   default:
      vpanic("s390_amode_as_string");
   }

   return buf;
}

/* Helper function for s390_amode_is_sane */
static __inline__ Bool
is_virtual_gpr(HReg reg)
{
   return hregIsVirtual(reg) && hregClass(reg) == HRcInt64;
}


/* Helper function for all vector operations */
static UChar
s390_getM_from_size(const UChar size) {
   switch (size) {
   case 1:
      return 0;
   case 2:
      return 1;
   case 4:
      return 2;
   case 8:
      return 3;
   case 16:
      return 4;
   default:
      vex_printf("size=%d\n", size);
      vpanic("s390_getM_from_size: unknown size");
   }
}

/* Helper for generating RXB field in vector instructions */
static UChar
s390_update_rxb(const UChar rxb, const UChar index, UChar* vr) {
   vassert((index >= 1) && (index <= 4));

   UChar result = rxb;
   if (vr != NULL && *vr >= 16) {
      result |= 1 << (4 - index);
      *vr -= 16;
   }

   return result;
}
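
/* Background: vector registers 16..31 cannot be represented in the 4-bit
   register fields of the vector instruction formats.  The RXB nibble carries
   the missing high-order bit of up to four vector operands; operand 1 maps
   to the most significant RXB bit (1 << (4 - 1)) and operand 4 to the least
   significant one. */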

/* Sanity check for an amode */
Bool
s390_amode_is_sane(const s390_amode *am)
{
   switch (am->tag) {
   case S390_AMODE_B12:
      return (is_virtual_gpr(am->b) || sameHReg(am->b, s390_hreg_gpr(0))) &&
             fits_unsigned_12bit(am->d);

   case S390_AMODE_B20:
      return is_virtual_gpr(am->b) && fits_signed_20bit(am->d);

   case S390_AMODE_BX12:
      return is_virtual_gpr(am->b) && is_virtual_gpr(am->x) &&
             fits_unsigned_12bit(am->d);

   case S390_AMODE_BX20:
      return is_virtual_gpr(am->b) && is_virtual_gpr(am->x) &&
             fits_signed_20bit(am->d);

   default:
      vpanic("s390_amode_is_sane");
   }
}

Bool
s390_amode_is_constant(const s390_amode *am)
{
   return am->tag == S390_AMODE_B12 && sameHReg(am->b, s390_hreg_gpr(0));
}


/* Record the register use of an amode */
static void
s390_amode_get_reg_usage(HRegUsage *u, const s390_amode *am)
{
   if (!sameHReg(am->b, s390_hreg_gpr(0)))
      addHRegUse(u, HRmRead, am->b);
   if (!sameHReg(am->x, s390_hreg_gpr(0)))
      addHRegUse(u, HRmRead, am->x);
}

static void
s390_amode_map_regs(HRegRemap *m, s390_amode *am)
{
   if (!sameHReg(am->b, s390_hreg_gpr(0)))
      am->b = lookupHRegRemap(m, am->b);
   if (!sameHReg(am->x, s390_hreg_gpr(0)))
      am->x = lookupHRegRemap(m, am->x);
}


void
ppS390AMode(const s390_amode *am)
{
   vex_printf("%s", s390_amode_as_string(am));
}

void
ppS390Instr(const s390_insn *insn, Bool mode64)
{
   vex_printf("%s", s390_insn_as_string(insn));
}

UInt
ppHRegS390(HReg reg)
{
   return vex_printf("%s", s390_hreg_as_string(reg));
}

/*------------------------------------------------------------*/
/*--- Helpers for register allocation                       ---*/
/*------------------------------------------------------------*/

/* Initialise and return the "register universe", i.e. a list of
   all hardware registers. Called once. */
const RRegUniverse *
getRRegUniverse_S390(void)
{
   static RRegUniverse all_regs;
   static Bool initialised = False;
   RRegUniverse *ru = &all_regs;

   if (LIKELY(initialised))
      return ru;

   RRegUniverse__init(ru);

   /* Assign invalid values to the gpr/vr_index */
   for (UInt i = 0; i < sizeof gpr_index / sizeof gpr_index[0]; ++i)
      gpr_index[i] = -1;
   for (UInt i = 0; i < sizeof vr_index / sizeof vr_index[0]; ++i)
      vr_index[i] = -1;

   /* Add the registers that are available to the register allocator.
      GPRs:  registers 6..11 are callee saved, list them first
             registers 1..5 are caller saved, list them after
      FPRs:  registers 8..15 are callee saved, list them first
             registers 0..7 are caller saved, list them after
             FPR12 - FPR15 are also used as register pairs for 128-bit
             floating point operations
      VRs:   registers 0..31 are available */
   ru->allocable_start[HRcInt64] = ru->size;
   for (UInt regno = 6; regno <= 11; ++regno) {
      gpr_index[regno] = ru->size;
      ru->regs[ru->size++] = s390_hreg_gpr(regno);
   }
   for (UInt regno = 1; regno <= 5; ++regno) {
      gpr_index[regno] = ru->size;
      ru->regs[ru->size++] = s390_hreg_gpr(regno);
   }
   ru->allocable_end[HRcInt64] = ru->size - 1;

   ru->allocable_start[HRcFlt64] = ru->size;
   for (UInt regno = 8; regno <= 15; ++regno) {
      vr_index[regno] = ru->size;
      ru->regs[ru->size++] = s390_hreg_fpr(regno);
   }
   for (UInt regno = 0; regno <= 7; ++regno) {
      vr_index[regno] = ru->size;
      ru->regs[ru->size++] = s390_hreg_fpr(regno);
   }
   ru->allocable_end[HRcFlt64] = ru->size - 1;

   ru->allocable_start[HRcVec128] = ru->size;
   for (UInt regno = 16; regno <= 31; ++regno) {
      vr_index[regno] = ru->size;
      ru->regs[ru->size++] = s390_hreg_vr(regno);
   }
   ru->allocable_end[HRcVec128] = ru->size - 1;
   ru->allocable = ru->size;

   /* Add the registers that are not available for allocation.
      r0  -- cannot be used as a base or index register
      r12 -- scratch register for translation chaining support
      r13 -- guest state pointer
      r14 -- link register
      r15 -- stack pointer */
   UInt other[] = { 0, 12, 13, 14, 15 };
   for (UInt i = 0; i < sizeof other / sizeof other[0]; ++i) {
      gpr_index[other[i]] = ru->size;
      ru->regs[ru->size++] = s390_hreg_gpr(other[i]);
   }

   /* Sanity checking */
   for (UInt i = 0; i < sizeof gpr_index / sizeof gpr_index[0]; ++i)
      vassert(gpr_index[i] >= 0);
   for (UInt i = 0; i < sizeof vr_index / sizeof vr_index[0]; ++i)
      vassert(vr_index[i] >= 0);

   initialised = True;

   RRegUniverse__check_is_sane(ru);
   return ru;
}
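
/* For reference: the universe built above ends up with indices 0..10 holding
   the 11 allocatable GPRs (r6-r11, then r1-r5), 11..26 the 16 FPRs (f8-f15,
   then f0-f7), 27..42 the 16 allocatable VRs (v16-v31), and 43..47 the
   non-allocatable GPRs r0, r12, r13, r14 and r15. */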

/* Tell the register allocator how the given instruction uses the registers
   it refers to. */
void
getRegUsage_S390Instr(HRegUsage *u, const s390_insn *insn, Bool mode64)
{
   s390_insn_get_reg_usage(u, insn);
}


/* Map the registers of the given instruction */
void
mapRegs_S390Instr(HRegRemap *m, s390_insn *insn, Bool mode64)
{
   s390_insn_map_regs(m, insn);
}


/* Generate s390 spill/reload instructions under the direction of the
   register allocator. Note it's critical these don't write the
   condition codes. This is like an Ist_Put */
void
genSpill_S390(HInstr **i1, HInstr **i2, HReg rreg, Int offsetB, Bool mode64)
{
   s390_amode *am;

   vassert(offsetB >= 0);
   vassert(!hregIsVirtual(rreg));

   *i1 = *i2 = NULL;

   am = s390_amode_for_guest_state(offsetB);

   switch (hregClass(rreg)) {
   case HRcInt64:
   case HRcFlt64:
      *i1 = s390_insn_store(8, am, rreg);
      return;
   case HRcVec128:
      *i1 = s390_insn_store(16, am, rreg);
      return;
   default:
      ppHRegClass(hregClass(rreg));
      vpanic("genSpill_S390: unimplemented regclass");
   }
}
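
/* The register allocator hands out spill slots inside the guest state area
   (after the VexGuestS390XState proper), which is why the spill address is
   formed with s390_amode_for_guest_state(offsetB) rather than relative to
   the stack pointer. */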

/* This is like an Iex_Get */
void
genReload_S390(HInstr **i1, HInstr **i2, HReg rreg, Int offsetB, Bool mode64)
{
   s390_amode *am;

   vassert(offsetB >= 0);
   vassert(!hregIsVirtual(rreg));

   *i1 = *i2 = NULL;

   am = s390_amode_for_guest_state(offsetB);

   switch (hregClass(rreg)) {
   case HRcInt64:
   case HRcFlt64:
      *i1 = s390_insn_load(8, rreg, am);
      return;
   case HRcVec128:
      *i1 = s390_insn_load(16, rreg, am);
      return;
   default:
      ppHRegClass(hregClass(rreg));
      vpanic("genReload_S390: unimplemented regclass");
   }
}

/* Direct reload function. For the given vreg (currently located at the given
   spill offset) and a given instruction that reads vreg exactly once, return a
   variant of the instruction that references the spill slot directly. Return
   NULL if no such instruction is found. */
HInstr *
directReload_S390(HInstr* i, HReg vreg, Short spill_off)
{
   s390_insn* insn = (s390_insn *)i;

   /* For simplicity, reject spill offsets that may cause trouble with 12-bit
      addressing. They probably shouldn't occur anyway. */
   if (!fits_unsigned_12bit(spill_off + 15))
      return NULL;

   /* In case of a spilled GPR, adjust the offset to be right-aligned within
      the spill slot. */
   Int delta = hregClass(vreg) == HRcInt64 ? 8 - insn->size : 0;
   s390_amode* vreg_am = s390_amode_for_guest_state(spill_off + delta);
   s390_opnd_RMI vreg_opnd;
   vreg_opnd.tag = S390_OPND_AMODE;
   vreg_opnd.variant.am = vreg_am;

   /* v-move <reg>,<vreg> */
   if (insn->tag == S390_INSN_MOVE
       && sameHReg(insn->variant.move.src, vreg)) {
      return s390_insn_load(insn->size, insn->variant.move.dst, vreg_am);
   }

   /* v-store <vreg>,<addr> */
   if (insn->tag == S390_INSN_STORE
       && sameHReg(insn->variant.store.src, vreg)
       && insn->variant.store.dst->tag == S390_AMODE_B12) {
      return s390_insn_memcpy(insn->size, insn->variant.store.dst, vreg_am);
   }

   /* v-test <vreg> */
   if (insn->tag == S390_INSN_TEST
       && insn->variant.test.src.tag == S390_OPND_REG
       && sameHReg(insn->variant.test.src.variant.reg, vreg)) {
      return s390_insn_test(insn->size, vreg_opnd);
   }

   /* v-<alu> <reg>,<vreg> */
   if (insn->tag == S390_INSN_ALU
       && insn->variant.alu.op2.tag == S390_OPND_REG
       && sameHReg(insn->variant.alu.op2.variant.reg, vreg)) {
      return s390_insn_alu(insn->size, insn->variant.alu.tag,
                           insn->variant.alu.dst, vreg_opnd);
   }

   /* v-vgetelem <reg>,<vreg> */
   if (insn->tag == S390_INSN_VEC_AMODEOP
       && insn->variant.vec_amodeop.tag == S390_VEC_GET_ELEM
       && sameHReg(insn->variant.vec_amodeop.op1, vreg)
       && s390_amode_is_constant(insn->variant.vec_amodeop.op2)) {
      vreg_am->d += 8 * insn->variant.vec_amodeop.op2->d;
      return s390_insn_load(insn->size, insn->variant.vec_amodeop.dst, vreg_am);
   }

   /* v-<unop> <reg>,<vreg> */
   if (insn->tag == S390_INSN_UNOP
       && insn->variant.unop.src.tag == S390_OPND_REG
       && sameHReg(insn->variant.unop.src.variant.reg, vreg)
       && hregClass(vreg) == HRcInt64) {
      /* Some operations define the input size to be different from the insn's
         `size' field. Adjust the address accordingly. */
      switch (insn->variant.unop.tag) {
      case S390_ZERO_EXTEND_8:
      case S390_SIGN_EXTEND_8:  vreg_am->d = spill_off + 7; break;
      case S390_ZERO_EXTEND_16:
      case S390_SIGN_EXTEND_16: vreg_am->d = spill_off + 6; break;
      case S390_ZERO_EXTEND_32:
      case S390_SIGN_EXTEND_32: vreg_am->d = spill_off + 4; break;
      case S390_NEGATE:         /* Nothing to adjust. */ break;
      default:
         return NULL;
      }
      return s390_insn_unop(insn->size, insn->variant.unop.tag,
                            insn->variant.unop.dst, vreg_opnd);
   }

   /* v-vrep <reg>,<vreg>,<idx> */
   if (insn->tag == S390_INSN_VEC_REPLICATE
       && sameHReg(insn->variant.vec_replicate.op1, vreg)) {
      vreg_am->d += insn->size * insn->variant.vec_replicate.idx;
      return s390_insn_unop(insn->size, S390_VEC_DUPLICATE,
                            insn->variant.vec_replicate.dst, vreg_opnd);
   }

   return NULL;
}

s390_insn *genMove_S390(HReg from, HReg to, Bool mode64)
{
   switch (hregClass(from)) {
   case HRcInt64:
      return s390_insn_move(sizeofIRType(Ity_I64), to, from);
   case HRcFlt64:
      return s390_insn_move(sizeofIRType(Ity_F64), to, from);
   case HRcVec128:
      return s390_insn_move(sizeofIRType(Ity_V128), to, from);
   default:
      ppHRegClass(hregClass(from));
      vpanic("genMove_S390: unimplemented regclass");
   }
}

/* Helper function for s390_insn_get_reg_usage */
static void
s390_opnd_RMI_get_reg_usage(HRegUsage *u, s390_opnd_RMI op)
{
   switch (op.tag) {
   case S390_OPND_REG:
      addHRegUse(u, HRmRead, op.variant.reg);
      break;

   case S390_OPND_AMODE:
      s390_amode_get_reg_usage(u, op.variant.am);
      break;

   case S390_OPND_IMMEDIATE:
      break;

   default:
      vpanic("s390_opnd_RMI_get_reg_usage");
   }
}
722 /* Tell the register allocator how the given insn uses the registers */
724 s390_insn_get_reg_usage(HRegUsage
*u
, const s390_insn
*insn
)
730 addHRegUse(u
, HRmWrite
, insn
->variant
.load
.dst
);
731 s390_amode_get_reg_usage(u
, insn
->variant
.load
.src
);
734 case S390_INSN_LOAD_IMMEDIATE
:
735 addHRegUse(u
, HRmWrite
, insn
->variant
.load_immediate
.dst
);
738 case S390_INSN_STORE
:
739 addHRegUse(u
, HRmRead
, insn
->variant
.store
.src
);
740 s390_amode_get_reg_usage(u
, insn
->variant
.store
.dst
);
744 addHRegUse(u
, HRmRead
, insn
->variant
.move
.src
);
745 addHRegUse(u
, HRmWrite
, insn
->variant
.move
.dst
);
747 if (hregClass(insn
->variant
.move
.src
) == hregClass(insn
->variant
.move
.dst
)) {
748 u
->isRegRegMove
= True
;
749 u
->regMoveSrc
= insn
->variant
.move
.src
;
750 u
->regMoveDst
= insn
->variant
.move
.dst
;
754 case S390_INSN_MEMCPY
:
755 s390_amode_get_reg_usage(u
, insn
->variant
.memcpy
.src
);
756 s390_amode_get_reg_usage(u
, insn
->variant
.memcpy
.dst
);
759 case S390_INSN_COND_MOVE
:
760 switch (insn
->variant
.cond_move
.cond
) {
764 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.cond_move
.src
);
765 addHRegUse(u
, HRmWrite
, insn
->variant
.cond_move
.dst
);
768 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.cond_move
.src
);
769 addHRegUse(u
, HRmModify
, insn
->variant
.cond_move
.dst
);
775 addHRegUse(u
, HRmModify
, insn
->variant
.alu
.dst
); /* op1 */
776 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.alu
.op2
);
781 addHRegUse(u
, HRmModify
, insn
->variant
.mul
.dst_lo
); /* op1 */
782 addHRegUse(u
, HRmWrite
, insn
->variant
.mul
.dst_hi
);
783 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.mul
.op2
);
788 addHRegUse(u
, HRmModify
, insn
->variant
.div
.op1_lo
);
789 addHRegUse(u
, HRmModify
, insn
->variant
.div
.op1_hi
);
790 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.div
.op2
);
794 addHRegUse(u
, HRmModify
, insn
->variant
.divs
.op1
); /* quotient */
795 addHRegUse(u
, HRmWrite
, insn
->variant
.divs
.rem
); /* remainder */
796 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.divs
.op2
);
800 addHRegUse(u
, HRmWrite
, insn
->variant
.clz
.num_bits
);
801 addHRegUse(u
, HRmWrite
, insn
->variant
.clz
.clobber
);
802 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.clz
.src
);
806 addHRegUse(u
, HRmWrite
, insn
->variant
.unop
.dst
);
807 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.unop
.src
);
811 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.test
.src
);
814 case S390_INSN_CC2BOOL
:
815 addHRegUse(u
, HRmWrite
, insn
->variant
.cc2bool
.dst
);
819 addHRegUse(u
, HRmRead
, insn
->variant
.cas
.op1
);
820 s390_amode_get_reg_usage(u
, insn
->variant
.cas
.op2
);
821 addHRegUse(u
, HRmRead
, insn
->variant
.cas
.op3
);
822 addHRegUse(u
, HRmWrite
, insn
->variant
.cas
.old_mem
);
825 case S390_INSN_CDAS
: {
826 s390_cdas
*cdas
= insn
->variant
.cdas
.details
;
828 addHRegUse(u
, HRmRead
, cdas
->op1_high
);
829 addHRegUse(u
, HRmRead
, cdas
->op1_low
);
830 s390_amode_get_reg_usage(u
, cdas
->op2
);
831 addHRegUse(u
, HRmRead
, cdas
->op3_high
);
832 addHRegUse(u
, HRmRead
, cdas
->op3_low
);
833 addHRegUse(u
, HRmWrite
, cdas
->old_mem_high
);
834 addHRegUse(u
, HRmWrite
, cdas
->old_mem_low
);
835 addHRegUse(u
, HRmWrite
, cdas
->scratch
);
839 case S390_INSN_COMPARE
:
840 addHRegUse(u
, HRmRead
, insn
->variant
.compare
.src1
);
841 s390_opnd_RMI_get_reg_usage(u
, insn
->variant
.compare
.src2
);
844 case S390_INSN_HELPER_CALL
: {
847 /* Assume that all volatile registers are clobbered. ABI says,
848 volatile registers are: r0 - r5. Valgrind's register allocator
849 does not know about r0, so we can leave that out */
850 for (i
= 1; i
<= 5; ++i
) {
851 addHRegUse(u
, HRmWrite
, s390_hreg_gpr(i
));
854 /* Ditto for floating point registers. f0 - f7 are volatile */
855 for (i
= 0; i
<= 7; ++i
) {
856 addHRegUse(u
, HRmWrite
, s390_hreg_fpr(i
));
859 /* Ditto for all allocatable vector registers. */
860 for (i
= 16; i
<= 31; ++i
) {
861 addHRegUse(u
, HRmWrite
, s390_hreg_vr(i
));
864 /* The registers that are used for passing arguments will be read.
865 Not all of them may, but in general we need to assume that. */
866 for (i
= 0; i
< insn
->variant
.helper_call
.details
->num_args
; ++i
) {
867 addHRegUse(u
, HRmRead
, s390_hreg_gpr(s390_gprno_from_arg_index(i
)));
870 /* s390_insn_helper_call_emit also reads / writes the link register
871 and stack pointer. But those registers are not visible to the
872 register allocator. So we don't need to do anything for them. */
876 case S390_INSN_BFP_TRIOP
:
877 addHRegUse(u
, HRmModify
, insn
->variant
.bfp_triop
.dst
); /* first */
878 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_triop
.op2
); /* second */
879 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_triop
.op3
); /* third */
882 case S390_INSN_BFP_BINOP
:
883 addHRegUse(u
, HRmModify
, insn
->variant
.bfp_binop
.dst_hi
); /* left */
884 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_binop
.op2_hi
); /* right */
885 if (insn
->size
== 16) {
886 addHRegUse(u
, HRmModify
, insn
->variant
.bfp_binop
.dst_lo
); /* left */
887 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_binop
.op2_lo
); /* right */
891 case S390_INSN_BFP_UNOP
:
892 addHRegUse(u
, HRmWrite
, insn
->variant
.bfp_unop
.dst_hi
);
893 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_unop
.op_hi
); /* operand */
894 if (insn
->size
== 16) {
895 addHRegUse(u
, HRmWrite
, insn
->variant
.bfp_unop
.dst_lo
);
896 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_unop
.op_lo
); /* operand */
900 case S390_INSN_BFP_COMPARE
:
901 addHRegUse(u
, HRmWrite
, insn
->variant
.bfp_compare
.dst
);
902 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_compare
.op1_hi
); /* left */
903 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_compare
.op2_hi
); /* right */
904 if (insn
->size
== 16) {
905 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_compare
.op1_lo
); /* left */
906 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_compare
.op2_lo
); /* right */
910 case S390_INSN_BFP_CONVERT
:
911 addHRegUse(u
, HRmWrite
, insn
->variant
.bfp_convert
.dst_hi
);
912 if (! hregIsInvalid(insn
->variant
.bfp_convert
.dst_lo
))
913 addHRegUse(u
, HRmWrite
, insn
->variant
.bfp_convert
.dst_lo
);
914 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_convert
.op_hi
);
915 if (! hregIsInvalid(insn
->variant
.bfp_convert
.op_lo
))
916 addHRegUse(u
, HRmRead
, insn
->variant
.bfp_convert
.op_lo
);
919 case S390_INSN_DFP_BINOP
: {
920 s390_dfp_binop
*dfp_binop
= insn
->variant
.dfp_binop
.details
;
922 addHRegUse(u
, HRmWrite
, dfp_binop
->dst_hi
);
923 addHRegUse(u
, HRmRead
, dfp_binop
->op2_hi
); /* left */
924 addHRegUse(u
, HRmRead
, dfp_binop
->op3_hi
); /* right */
925 if (insn
->size
== 16) {
926 addHRegUse(u
, HRmWrite
, dfp_binop
->dst_lo
);
927 addHRegUse(u
, HRmRead
, dfp_binop
->op2_lo
); /* left */
928 addHRegUse(u
, HRmRead
, dfp_binop
->op3_lo
); /* right */
933 case S390_INSN_DFP_UNOP
:
934 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_unop
.dst_hi
);
935 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_unop
.op_hi
); /* operand */
936 if (insn
->size
== 16) {
937 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_unop
.dst_lo
);
938 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_unop
.op_lo
); /* operand */
942 case S390_INSN_DFP_INTOP
:
943 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_intop
.dst_hi
);
944 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_intop
.op2
);
945 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_intop
.op3_hi
);
946 if (insn
->size
== 16) {
947 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_intop
.dst_lo
);
948 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_intop
.op3_lo
);
952 case S390_INSN_DFP_COMPARE
:
953 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_compare
.dst
);
954 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_compare
.op1_hi
); /* left */
955 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_compare
.op2_hi
); /* right */
956 if (insn
->size
== 16) {
957 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_compare
.op1_lo
); /* left */
958 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_compare
.op2_lo
); /* right */
962 case S390_INSN_DFP_CONVERT
:
963 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_convert
.dst_hi
);
964 if (! hregIsInvalid(insn
->variant
.dfp_convert
.dst_lo
))
965 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_convert
.dst_lo
);
966 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_convert
.op_hi
); /* operand */
967 if (! hregIsInvalid(insn
->variant
.dfp_convert
.op_lo
))
968 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_convert
.op_lo
); /* operand */
971 case S390_INSN_DFP_REROUND
:
972 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_reround
.dst_hi
);
973 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_reround
.op2
); /* left */
974 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_reround
.op3_hi
); /* right */
975 if (insn
->size
== 16) {
976 addHRegUse(u
, HRmWrite
, insn
->variant
.dfp_reround
.dst_lo
);
977 addHRegUse(u
, HRmRead
, insn
->variant
.dfp_reround
.op3_lo
); /* right */
981 case S390_INSN_FP_CONVERT
: {
982 s390_fp_convert
*fp_convert
= insn
->variant
.fp_convert
.details
;
984 addHRegUse(u
, HRmWrite
, fp_convert
->dst_hi
);
985 if (! hregIsInvalid(fp_convert
->dst_lo
))
986 addHRegUse(u
, HRmWrite
, fp_convert
->dst_lo
);
987 addHRegUse(u
, HRmRead
, fp_convert
->op_hi
);
988 if (! hregIsInvalid(fp_convert
->op_lo
))
989 addHRegUse(u
, HRmRead
, fp_convert
->op_lo
);
990 addHRegUse(u
, HRmWrite
, fp_convert
->r1
);
995 s390_amode_get_reg_usage(u
, insn
->variant
.mimm
.dst
);
999 s390_amode_get_reg_usage(u
, insn
->variant
.madd
.dst
);
1002 case S390_INSN_MFENCE
:
1005 case S390_INSN_SET_FPC_BFPRM
:
1006 addHRegUse(u
, HRmRead
, insn
->variant
.set_fpc_bfprm
.mode
);
1009 case S390_INSN_SET_FPC_DFPRM
:
1010 addHRegUse(u
, HRmRead
, insn
->variant
.set_fpc_dfprm
.mode
);
1013 case S390_INSN_EVCHECK
:
1014 s390_amode_get_reg_usage(u
, insn
->variant
.evcheck
.counter
);
1015 s390_amode_get_reg_usage(u
, insn
->variant
.evcheck
.fail_addr
);
1018 case S390_INSN_PROFINC
:
1019 /* Does not use any register visible to the register allocator */
1022 case S390_INSN_XDIRECT
:
1023 s390_amode_get_reg_usage(u
, insn
->variant
.xdirect
.guest_IA
);
1026 case S390_INSN_XINDIR
:
1027 addHRegUse(u
, HRmRead
, insn
->variant
.xindir
.dst
);
1028 s390_amode_get_reg_usage(u
, insn
->variant
.xindir
.guest_IA
);
1031 case S390_INSN_XASSISTED
:
1032 addHRegUse(u
, HRmRead
, insn
->variant
.xassisted
.dst
);
1033 s390_amode_get_reg_usage(u
, insn
->variant
.xassisted
.guest_IA
);
1036 case S390_INSN_VEC_AMODEOP
:
1037 addHRegUse(u
, HRmWrite
, insn
->variant
.vec_amodeop
.dst
);
1038 addHRegUse(u
, HRmRead
, insn
->variant
.vec_amodeop
.op1
);
1039 s390_amode_get_reg_usage(u
, insn
->variant
.vec_amodeop
.op2
);
1042 case S390_INSN_VEC_AMODEINTOP
:
1043 addHRegUse(u
, HRmModify
, insn
->variant
.vec_amodeintop
.dst
);
1044 s390_amode_get_reg_usage(u
, insn
->variant
.vec_amodeintop
.op2
);
1045 addHRegUse(u
, HRmRead
, insn
->variant
.vec_amodeintop
.op3
);
1048 case S390_INSN_VEC_BINOP
:
1049 addHRegUse(u
, HRmWrite
, insn
->variant
.vec_binop
.dst
);
1050 addHRegUse(u
, HRmRead
, insn
->variant
.vec_binop
.op1
);
1051 addHRegUse(u
, HRmRead
, insn
->variant
.vec_binop
.op2
);
1054 case S390_INSN_VEC_TRIOP
:
1055 addHRegUse(u
, HRmWrite
, insn
->variant
.vec_triop
.dst
);
1056 addHRegUse(u
, HRmRead
, insn
->variant
.vec_triop
.op1
);
1057 addHRegUse(u
, HRmRead
, insn
->variant
.vec_triop
.op2
);
1058 addHRegUse(u
, HRmRead
, insn
->variant
.vec_triop
.op3
);
1061 case S390_INSN_VEC_REPLICATE
:
1062 addHRegUse(u
, HRmWrite
, insn
->variant
.vec_replicate
.dst
);
1063 addHRegUse(u
, HRmRead
, insn
->variant
.vec_replicate
.op1
);
1067 vpanic("s390_insn_get_reg_usage");
1072 /* Helper function for s390_insn_map_regs */
1074 s390_opnd_RMI_map_regs(HRegRemap
*m
, s390_opnd_RMI
*op
)
1078 op
->variant
.reg
= lookupHRegRemap(m
, op
->variant
.reg
);
1081 case S390_OPND_IMMEDIATE
:
1084 case S390_OPND_AMODE
:
1085 s390_amode_map_regs(m
, op
->variant
.am
);
1089 vpanic("s390_opnd_RMI_map_regs");
1095 s390_insn_map_regs(HRegRemap
*m
, s390_insn
*insn
)
1097 switch (insn
->tag
) {
1098 case S390_INSN_LOAD
:
1099 insn
->variant
.load
.dst
= lookupHRegRemap(m
, insn
->variant
.load
.dst
);
1100 s390_amode_map_regs(m
, insn
->variant
.load
.src
);
1103 case S390_INSN_STORE
:
1104 s390_amode_map_regs(m
, insn
->variant
.store
.dst
);
1105 insn
->variant
.store
.src
= lookupHRegRemap(m
, insn
->variant
.store
.src
);
1108 case S390_INSN_MOVE
:
1109 insn
->variant
.move
.dst
= lookupHRegRemap(m
, insn
->variant
.move
.dst
);
1110 insn
->variant
.move
.src
= lookupHRegRemap(m
, insn
->variant
.move
.src
);
1113 case S390_INSN_MEMCPY
:
1114 s390_amode_map_regs(m
, insn
->variant
.memcpy
.dst
);
1115 s390_amode_map_regs(m
, insn
->variant
.memcpy
.src
);
1118 case S390_INSN_COND_MOVE
:
1119 insn
->variant
.cond_move
.dst
= lookupHRegRemap(m
, insn
->variant
.cond_move
.dst
);
1120 s390_opnd_RMI_map_regs(m
, &insn
->variant
.cond_move
.src
);
1123 case S390_INSN_LOAD_IMMEDIATE
:
1124 insn
->variant
.load_immediate
.dst
=
1125 lookupHRegRemap(m
, insn
->variant
.load_immediate
.dst
);
1129 insn
->variant
.alu
.dst
= lookupHRegRemap(m
, insn
->variant
.alu
.dst
);
1130 s390_opnd_RMI_map_regs(m
, &insn
->variant
.alu
.op2
);
1133 case S390_INSN_SMUL
:
1134 case S390_INSN_UMUL
:
1135 insn
->variant
.mul
.dst_hi
= lookupHRegRemap(m
, insn
->variant
.mul
.dst_hi
);
1136 insn
->variant
.mul
.dst_lo
= lookupHRegRemap(m
, insn
->variant
.mul
.dst_lo
);
1137 s390_opnd_RMI_map_regs(m
, &insn
->variant
.mul
.op2
);
1140 case S390_INSN_SDIV
:
1141 case S390_INSN_UDIV
:
1142 insn
->variant
.div
.op1_hi
= lookupHRegRemap(m
, insn
->variant
.div
.op1_hi
);
1143 insn
->variant
.div
.op1_lo
= lookupHRegRemap(m
, insn
->variant
.div
.op1_lo
);
1144 s390_opnd_RMI_map_regs(m
, &insn
->variant
.div
.op2
);
1147 case S390_INSN_DIVS
:
1148 insn
->variant
.divs
.op1
= lookupHRegRemap(m
, insn
->variant
.divs
.op1
);
1149 insn
->variant
.divs
.rem
= lookupHRegRemap(m
, insn
->variant
.divs
.rem
);
1150 s390_opnd_RMI_map_regs(m
, &insn
->variant
.divs
.op2
);
1154 insn
->variant
.clz
.num_bits
= lookupHRegRemap(m
, insn
->variant
.clz
.num_bits
);
1155 insn
->variant
.clz
.clobber
= lookupHRegRemap(m
, insn
->variant
.clz
.clobber
);
1156 s390_opnd_RMI_map_regs(m
, &insn
->variant
.clz
.src
);
1159 case S390_INSN_UNOP
:
1160 insn
->variant
.unop
.dst
= lookupHRegRemap(m
, insn
->variant
.unop
.dst
);
1161 s390_opnd_RMI_map_regs(m
, &insn
->variant
.unop
.src
);
1164 case S390_INSN_TEST
:
1165 s390_opnd_RMI_map_regs(m
, &insn
->variant
.test
.src
);
1168 case S390_INSN_CC2BOOL
:
1169 insn
->variant
.cc2bool
.dst
= lookupHRegRemap(m
, insn
->variant
.cc2bool
.dst
);
1173 insn
->variant
.cas
.op1
= lookupHRegRemap(m
, insn
->variant
.cas
.op1
);
1174 s390_amode_map_regs(m
, insn
->variant
.cas
.op2
);
1175 insn
->variant
.cas
.op3
= lookupHRegRemap(m
, insn
->variant
.cas
.op3
);
1176 insn
->variant
.cas
.old_mem
= lookupHRegRemap(m
, insn
->variant
.cas
.old_mem
);
1179 case S390_INSN_CDAS
: {
1180 s390_cdas
*cdas
= insn
->variant
.cdas
.details
;
1182 cdas
->op1_high
= lookupHRegRemap(m
, cdas
->op1_high
);
1183 cdas
->op1_low
= lookupHRegRemap(m
, cdas
->op1_low
);
1184 s390_amode_map_regs(m
, cdas
->op2
);
1185 cdas
->op3_high
= lookupHRegRemap(m
, cdas
->op3_high
);
1186 cdas
->op3_low
= lookupHRegRemap(m
, cdas
->op3_low
);
1187 cdas
->old_mem_high
= lookupHRegRemap(m
, cdas
->old_mem_high
);
1188 cdas
->old_mem_low
= lookupHRegRemap(m
, cdas
->old_mem_low
);
1189 cdas
->scratch
= lookupHRegRemap(m
, cdas
->scratch
);
1193 case S390_INSN_COMPARE
:
1194 insn
->variant
.compare
.src1
= lookupHRegRemap(m
, insn
->variant
.compare
.src1
);
1195 s390_opnd_RMI_map_regs(m
, &insn
->variant
.compare
.src2
);
1198 case S390_INSN_HELPER_CALL
:
1199 /* s390_insn_helper_call_emit also reads / writes the link register
1200 and stack pointer. But those registers are not visible to the
1201 register allocator. So we don't need to do anything for them.
1202 As for the arguments of the helper call -- they will be loaded into
1203 non-virtual registers. Again, we don't need to do anything for those
1207 case S390_INSN_BFP_TRIOP
:
1208 insn
->variant
.bfp_triop
.dst
=
1209 lookupHRegRemap(m
, insn
->variant
.bfp_triop
.dst
);
1210 insn
->variant
.bfp_triop
.op2
=
1211 lookupHRegRemap(m
, insn
->variant
.bfp_triop
.op2
);
1212 insn
->variant
.bfp_triop
.op3
=
1213 lookupHRegRemap(m
, insn
->variant
.bfp_triop
.op3
);
1216 case S390_INSN_BFP_BINOP
:
1217 insn
->variant
.bfp_binop
.dst_hi
=
1218 lookupHRegRemap(m
, insn
->variant
.bfp_binop
.dst_hi
);
1219 insn
->variant
.bfp_binop
.op2_hi
=
1220 lookupHRegRemap(m
, insn
->variant
.bfp_binop
.op2_hi
);
1221 if (insn
->size
== 16) {
1222 insn
->variant
.bfp_binop
.dst_lo
=
1223 lookupHRegRemap(m
, insn
->variant
.bfp_binop
.dst_lo
);
1224 insn
->variant
.bfp_binop
.op2_lo
=
1225 lookupHRegRemap(m
, insn
->variant
.bfp_binop
.op2_lo
);
1229 case S390_INSN_BFP_UNOP
:
1230 insn
->variant
.bfp_unop
.dst_hi
=
1231 lookupHRegRemap(m
, insn
->variant
.bfp_unop
.dst_hi
);
1232 insn
->variant
.bfp_unop
.op_hi
=
1233 lookupHRegRemap(m
, insn
->variant
.bfp_unop
.op_hi
);
1234 if (insn
->size
== 16) {
1235 insn
->variant
.bfp_unop
.dst_lo
=
1236 lookupHRegRemap(m
, insn
->variant
.bfp_unop
.dst_lo
);
1237 insn
->variant
.bfp_unop
.op_lo
=
1238 lookupHRegRemap(m
, insn
->variant
.bfp_unop
.op_lo
);
1242 case S390_INSN_BFP_COMPARE
:
1243 insn
->variant
.bfp_compare
.dst
=
1244 lookupHRegRemap(m
, insn
->variant
.bfp_compare
.dst
);
1245 insn
->variant
.bfp_compare
.op1_hi
=
1246 lookupHRegRemap(m
, insn
->variant
.bfp_compare
.op1_hi
);
1247 insn
->variant
.bfp_compare
.op2_hi
=
1248 lookupHRegRemap(m
, insn
->variant
.bfp_compare
.op2_hi
);
1249 if (insn
->size
== 16) {
1250 insn
->variant
.bfp_compare
.op1_lo
=
1251 lookupHRegRemap(m
, insn
->variant
.bfp_compare
.op1_lo
);
1252 insn
->variant
.bfp_compare
.op2_lo
=
1253 lookupHRegRemap(m
, insn
->variant
.bfp_compare
.op2_lo
);
1257 case S390_INSN_BFP_CONVERT
:
1258 insn
->variant
.bfp_convert
.dst_hi
=
1259 lookupHRegRemap(m
, insn
->variant
.bfp_convert
.dst_hi
);
1260 if (! hregIsInvalid(insn
->variant
.bfp_convert
.dst_lo
))
1261 insn
->variant
.bfp_convert
.dst_lo
=
1262 lookupHRegRemap(m
, insn
->variant
.bfp_convert
.dst_lo
);
1263 insn
->variant
.bfp_convert
.op_hi
=
1264 lookupHRegRemap(m
, insn
->variant
.bfp_convert
.op_hi
);
1265 if (! hregIsInvalid(insn
->variant
.bfp_convert
.op_lo
))
1266 insn
->variant
.bfp_convert
.op_lo
=
1267 lookupHRegRemap(m
, insn
->variant
.bfp_convert
.op_lo
);
1270 case S390_INSN_DFP_BINOP
: {
1271 s390_dfp_binop
*dfp_binop
= insn
->variant
.dfp_binop
.details
;
1273 dfp_binop
->dst_hi
= lookupHRegRemap(m
, dfp_binop
->dst_hi
);
1274 dfp_binop
->op2_hi
= lookupHRegRemap(m
, dfp_binop
->op2_hi
);
1275 dfp_binop
->op3_hi
= lookupHRegRemap(m
, dfp_binop
->op3_hi
);
1276 if (insn
->size
== 16) {
1277 dfp_binop
->dst_lo
= lookupHRegRemap(m
, dfp_binop
->dst_lo
);
1278 dfp_binop
->op2_lo
= lookupHRegRemap(m
, dfp_binop
->op2_lo
);
1279 dfp_binop
->op3_lo
= lookupHRegRemap(m
, dfp_binop
->op3_lo
);
1284 case S390_INSN_DFP_UNOP
:
1285 insn
->variant
.dfp_unop
.dst_hi
=
1286 lookupHRegRemap(m
, insn
->variant
.dfp_unop
.dst_hi
);
1287 insn
->variant
.dfp_unop
.op_hi
=
1288 lookupHRegRemap(m
, insn
->variant
.dfp_unop
.op_hi
);
1289 if (insn
->size
== 16) {
1290 insn
->variant
.dfp_unop
.dst_lo
=
1291 lookupHRegRemap(m
, insn
->variant
.dfp_unop
.dst_lo
);
1292 insn
->variant
.dfp_unop
.op_lo
=
1293 lookupHRegRemap(m
, insn
->variant
.dfp_unop
.op_lo
);
1297 case S390_INSN_DFP_INTOP
:
1298 insn
->variant
.dfp_intop
.dst_hi
=
1299 lookupHRegRemap(m
, insn
->variant
.dfp_intop
.dst_hi
);
1300 insn
->variant
.dfp_intop
.op2
=
1301 lookupHRegRemap(m
, insn
->variant
.dfp_intop
.op2
);
1302 insn
->variant
.dfp_intop
.op3_hi
=
1303 lookupHRegRemap(m
, insn
->variant
.dfp_intop
.op3_hi
);
1304 if (insn
->size
== 16) {
1305 insn
->variant
.dfp_intop
.dst_lo
=
1306 lookupHRegRemap(m
, insn
->variant
.dfp_intop
.dst_lo
);
1307 insn
->variant
.dfp_intop
.op3_lo
=
1308 lookupHRegRemap(m
, insn
->variant
.dfp_intop
.op3_lo
);
1312 case S390_INSN_DFP_COMPARE
:
1313 insn
->variant
.dfp_compare
.dst
=
1314 lookupHRegRemap(m
, insn
->variant
.dfp_compare
.dst
);
1315 insn
->variant
.dfp_compare
.op1_hi
=
1316 lookupHRegRemap(m
, insn
->variant
.dfp_compare
.op1_hi
);
1317 insn
->variant
.dfp_compare
.op2_hi
=
1318 lookupHRegRemap(m
, insn
->variant
.dfp_compare
.op2_hi
);
1319 if (insn
->size
== 16) {
1320 insn
->variant
.dfp_compare
.op1_lo
=
1321 lookupHRegRemap(m
, insn
->variant
.dfp_compare
.op1_lo
);
1322 insn
->variant
.dfp_compare
.op2_lo
=
1323 lookupHRegRemap(m
, insn
->variant
.dfp_compare
.op2_lo
);
1327 case S390_INSN_DFP_CONVERT
:
1328 insn
->variant
.dfp_convert
.dst_hi
=
1329 lookupHRegRemap(m
, insn
->variant
.dfp_convert
.dst_hi
);
1330 if (! hregIsInvalid(insn
->variant
.dfp_convert
.dst_lo
))
1331 insn
->variant
.dfp_convert
.dst_lo
=
1332 lookupHRegRemap(m
, insn
->variant
.dfp_convert
.dst_lo
);
1333 insn
->variant
.dfp_convert
.op_hi
=
1334 lookupHRegRemap(m
, insn
->variant
.dfp_convert
.op_hi
);
1335 if (! hregIsInvalid(insn
->variant
.dfp_convert
.op_lo
))
1336 insn
->variant
.dfp_convert
.op_lo
=
1337 lookupHRegRemap(m
, insn
->variant
.dfp_convert
.op_lo
);
1340 case S390_INSN_DFP_REROUND
:
1341 insn
->variant
.dfp_reround
.dst_hi
=
1342 lookupHRegRemap(m
, insn
->variant
.dfp_reround
.dst_hi
);
1343 insn
->variant
.dfp_reround
.op2
=
1344 lookupHRegRemap(m
, insn
->variant
.dfp_reround
.op2
);
1345 insn
->variant
.dfp_reround
.op3_hi
=
1346 lookupHRegRemap(m
, insn
->variant
.dfp_reround
.op3_hi
);
1347 if (insn
->size
== 16) {
1348 insn
->variant
.dfp_reround
.dst_lo
=
1349 lookupHRegRemap(m
, insn
->variant
.dfp_reround
.dst_lo
);
1350 insn
->variant
.dfp_reround
.op3_lo
=
1351 lookupHRegRemap(m
, insn
->variant
.dfp_reround
.op3_lo
);
1355 case S390_INSN_FP_CONVERT
: {
1356 s390_fp_convert
*fp_convert
= insn
->variant
.fp_convert
.details
;
1358 fp_convert
->dst_hi
= lookupHRegRemap(m
, fp_convert
->dst_hi
);
1359 if (! hregIsInvalid(fp_convert
->dst_lo
))
1360 fp_convert
->dst_lo
= lookupHRegRemap(m
, fp_convert
->dst_lo
);
1361 fp_convert
->op_hi
= lookupHRegRemap(m
, fp_convert
->op_hi
);
1362 if (! hregIsInvalid(fp_convert
->op_lo
))
1363 fp_convert
->op_lo
= lookupHRegRemap(m
, fp_convert
->op_lo
);
1364 fp_convert
->r1
= lookupHRegRemap(m
, fp_convert
->r1
);
1368 case S390_INSN_MIMM
:
1369 s390_amode_map_regs(m
, insn
->variant
.mimm
.dst
);
1372 case S390_INSN_MADD
:
1373 s390_amode_map_regs(m
, insn
->variant
.madd
.dst
);
1376 case S390_INSN_MFENCE
:
1379 case S390_INSN_SET_FPC_BFPRM
:
1380 insn
->variant
.set_fpc_bfprm
.mode
=
1381 lookupHRegRemap(m
, insn
->variant
.set_fpc_bfprm
.mode
);
1384 case S390_INSN_SET_FPC_DFPRM
:
1385 insn
->variant
.set_fpc_dfprm
.mode
=
1386 lookupHRegRemap(m
, insn
->variant
.set_fpc_dfprm
.mode
);
1389 case S390_INSN_EVCHECK
:
1390 s390_amode_map_regs(m
, insn
->variant
.evcheck
.counter
);
1391 s390_amode_map_regs(m
, insn
->variant
.evcheck
.fail_addr
);
1394 case S390_INSN_PROFINC
:
1395 /* Does not use any register visible to the register allocator */
1398 case S390_INSN_XDIRECT
:
1399 s390_amode_map_regs(m
, insn
->variant
.xdirect
.guest_IA
);
1402 case S390_INSN_XINDIR
:
1403 s390_amode_map_regs(m
, insn
->variant
.xindir
.guest_IA
);
1404 insn
->variant
.xindir
.dst
=
1405 lookupHRegRemap(m
, insn
->variant
.xindir
.dst
);
1408 case S390_INSN_XASSISTED
:
1409 s390_amode_map_regs(m
, insn
->variant
.xassisted
.guest_IA
);
1410 insn
->variant
.xassisted
.dst
=
1411 lookupHRegRemap(m
, insn
->variant
.xassisted
.dst
);
1414 case S390_INSN_VEC_AMODEOP
:
1415 insn
->variant
.vec_amodeop
.dst
=
1416 lookupHRegRemap(m
, insn
->variant
.vec_amodeop
.dst
);
1417 insn
->variant
.vec_amodeop
.op1
=
1418 lookupHRegRemap(m
, insn
->variant
.vec_amodeop
.op1
);
1419 s390_amode_map_regs(m
, insn
->variant
.vec_amodeop
.op2
);
1422 case S390_INSN_VEC_AMODEINTOP
:
1423 insn
->variant
.vec_amodeintop
.dst
=
1424 lookupHRegRemap(m
, insn
->variant
.vec_amodeintop
.dst
);
1425 s390_amode_map_regs(m
, insn
->variant
.vec_amodeintop
.op2
);
1426 insn
->variant
.vec_amodeintop
.op3
=
1427 lookupHRegRemap(m
, insn
->variant
.vec_amodeintop
.op3
);
1430 case S390_INSN_VEC_BINOP
:
1431 insn
->variant
.vec_binop
.dst
=
1432 lookupHRegRemap(m
, insn
->variant
.vec_binop
.dst
);
1433 insn
->variant
.vec_binop
.op1
=
1434 lookupHRegRemap(m
, insn
->variant
.vec_binop
.op1
);
1435 insn
->variant
.vec_binop
.op2
=
1436 lookupHRegRemap(m
, insn
->variant
.vec_binop
.op2
);
1439 case S390_INSN_VEC_TRIOP
:
1440 insn
->variant
.vec_triop
.dst
=
1441 lookupHRegRemap(m
, insn
->variant
.vec_triop
.dst
);
1442 insn
->variant
.vec_triop
.op1
=
1443 lookupHRegRemap(m
, insn
->variant
.vec_triop
.op1
);
1444 insn
->variant
.vec_triop
.op2
=
1445 lookupHRegRemap(m
, insn
->variant
.vec_triop
.op2
);
1446 insn
->variant
.vec_triop
.op3
=
1447 lookupHRegRemap(m
, insn
->variant
.vec_triop
.op3
);
1450 case S390_INSN_VEC_REPLICATE
:
1451 insn
->variant
.vec_replicate
.dst
=
1452 lookupHRegRemap(m
, insn
->variant
.vec_replicate
.dst
);
1453 insn
->variant
.vec_replicate
.op1
=
1454 lookupHRegRemap(m
, insn
->variant
.vec_replicate
.op1
);
1458 vpanic("s390_insn_map_regs");

/*------------------------------------------------------------*/
/*--- Functions to emit a sequence of bytes                 ---*/
/*------------------------------------------------------------*/

static __inline__ UChar *
emit_2bytes(UChar *p, ULong val)
{
   return (UChar *)__builtin_memcpy(p, ((UChar *)&val) + 6, 2) + 2;
}


static __inline__ UChar *
emit_4bytes(UChar *p, ULong val)
{
   return (UChar *)__builtin_memcpy(p, ((UChar *)&val) + 4, 4) + 4;
}


static __inline__ UChar *
emit_6bytes(UChar *p, ULong val)
{
   return (UChar *)__builtin_memcpy(p, ((UChar *)&val) + 2, 6) + 6;
}
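
/* The host is big-endian, so the low-order 2, 4 or 6 bytes of VAL are the
   last bytes of the 8-byte ULong.  Copying from ((UChar *)&val) + 6, + 4 or
   + 2 therefore writes the encoded instruction most-significant byte first
   and returns a pointer just past it. */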

/*------------------------------------------------------------*/
/*--- Functions to emit various instruction formats         ---*/
/*------------------------------------------------------------*/

static UChar *
emit_RI(UChar *p, UInt op, UChar r1, UShort i2)
{
   ULong the_insn = op;

   the_insn |= ((ULong)r1) << 20;
   the_insn |= ((ULong)i2) << 0;

   return emit_4bytes(p, the_insn);
}
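
/* Illustrative example, derived from the field layout above: for
   "ahi %r1, 42" the opcode template is 0xa70a0000 (see s390_emit_AHI below),
   r1 == 1 lands in bits 20..23 and i2 == 42 in the low 16 bits, giving the
   4-byte encoding 0xa71a002a. */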
1505 emit_RIL(UChar
*p
, ULong op
, UChar r1
, UInt i2
)
1507 ULong the_insn
= op
;
1509 the_insn
|= ((ULong
)r1
) << 36;
1510 the_insn
|= ((ULong
)i2
) << 0;
1512 return emit_6bytes(p
, the_insn
);
1517 emit_RIE(UChar
*p
, ULong op
, UChar r1
, UShort i2
, UChar m3
)
1519 ULong the_insn
= op
;
1521 the_insn
|= ((ULong
)r1
) << 36;
1522 the_insn
|= ((ULong
)m3
) << 32;
1523 the_insn
|= ((ULong
)i2
) << 16;
1525 return emit_6bytes(p
, the_insn
);
1530 emit_RIEf(UChar
*p
, ULong op
, UChar r1
, UChar r2
,
1531 UChar i3
, Char i4
, UChar i5
)
1533 ULong the_insn
= op
;
1535 the_insn
|= ((ULong
)r1
) << 36;
1536 the_insn
|= ((ULong
)r2
) << 32;
1537 the_insn
|= ((ULong
)i3
) << 24;
1538 the_insn
|= ((ULong
)i4
) << 16;
1539 the_insn
|= ((ULong
)i5
) << 8;
1541 return emit_6bytes(p
, the_insn
);
1546 emit_RR(UChar
*p
, UInt op
, UChar r1
, UChar r2
)
1548 ULong the_insn
= op
;
1550 the_insn
|= ((ULong
)r1
) << 4;
1551 the_insn
|= ((ULong
)r2
) << 0;
1553 return emit_2bytes(p
, the_insn
);
1558 emit_RRE(UChar
*p
, UInt op
, UChar r1
, UChar r2
)
1560 ULong the_insn
= op
;
1562 the_insn
|= ((ULong
)r1
) << 4;
1563 the_insn
|= ((ULong
)r2
) << 0;
1565 return emit_4bytes(p
, the_insn
);
1570 emit_RRF(UChar
*p
, UInt op
, UChar r1
, UChar r3
, UChar r2
)
1572 ULong the_insn
= op
;
1574 the_insn
|= ((ULong
)r1
) << 12;
1575 the_insn
|= ((ULong
)r3
) << 4;
1576 the_insn
|= ((ULong
)r2
) << 0;
1578 return emit_4bytes(p
, the_insn
);
1583 emit_RRF2(UChar
*p
, UInt op
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
1585 ULong the_insn
= op
;
1587 the_insn
|= ((ULong
)m3
) << 12;
1588 the_insn
|= ((ULong
)m4
) << 8;
1589 the_insn
|= ((ULong
)r1
) << 4;
1590 the_insn
|= ((ULong
)r2
) << 0;
1592 return emit_4bytes(p
, the_insn
);
1597 emit_RRF3(UChar
*p
, UInt op
, UChar r3
, UChar r1
, UChar r2
)
1599 ULong the_insn
= op
;
1601 the_insn
|= ((ULong
)r3
) << 12;
1602 the_insn
|= ((ULong
)r1
) << 4;
1603 the_insn
|= ((ULong
)r2
) << 0;
1605 return emit_4bytes(p
, the_insn
);
1610 emit_RRF4(UChar
*p
, UInt op
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
1612 ULong the_insn
= op
;
1614 the_insn
|= ((ULong
)r3
) << 12;
1615 the_insn
|= ((ULong
)m4
) << 8;
1616 the_insn
|= ((ULong
)r1
) << 4;
1617 the_insn
|= ((ULong
)r2
) << 0;
1619 return emit_4bytes(p
, the_insn
);
1624 emit_RRF5(UChar
*p
, UInt op
, UChar m4
, UChar r1
, UChar r2
)
1626 ULong the_insn
= op
;
1628 the_insn
|= ((ULong
)m4
) << 8;
1629 the_insn
|= ((ULong
)r1
) << 4;
1630 the_insn
|= ((ULong
)r2
) << 0;
1632 return emit_4bytes(p
, the_insn
);
1637 emit_RS(UChar
*p
, UInt op
, UChar r1
, UChar r3
, UChar b2
, UShort d2
)
1639 ULong the_insn
= op
;
1641 the_insn
|= ((ULong
)r1
) << 20;
1642 the_insn
|= ((ULong
)r3
) << 16;
1643 the_insn
|= ((ULong
)b2
) << 12;
1644 the_insn
|= ((ULong
)d2
) << 0;
1646 return emit_4bytes(p
, the_insn
);
1651 emit_RSY(UChar
*p
, ULong op
, UChar r1
, UChar r3
, UChar b2
, UShort dl2
, UChar dh2
)
1653 ULong the_insn
= op
;
1655 the_insn
|= ((ULong
)r1
) << 36;
1656 the_insn
|= ((ULong
)r3
) << 32;
1657 the_insn
|= ((ULong
)b2
) << 28;
1658 the_insn
|= ((ULong
)dl2
) << 16;
1659 the_insn
|= ((ULong
)dh2
) << 8;
1661 return emit_6bytes(p
, the_insn
);
1666 emit_RX(UChar
*p
, UInt op
, UChar r1
, UChar x2
, UChar b2
, UShort d2
)
1668 ULong the_insn
= op
;
1670 the_insn
|= ((ULong
)r1
) << 20;
1671 the_insn
|= ((ULong
)x2
) << 16;
1672 the_insn
|= ((ULong
)b2
) << 12;
1673 the_insn
|= ((ULong
)d2
) << 0;
1675 return emit_4bytes(p
, the_insn
);
1680 emit_RXF(UChar
*p
, ULong op
, UChar r3
, UChar x2
, UChar b2
, UShort d2
, UChar r1
)
1682 ULong the_insn
= op
;
1684 the_insn
|= ((ULong
)r3
) << 36;
1685 the_insn
|= ((ULong
)x2
) << 32;
1686 the_insn
|= ((ULong
)b2
) << 28;
1687 the_insn
|= ((ULong
)d2
) << 16;
1688 the_insn
|= ((ULong
)r1
) << 12;
1690 return emit_6bytes(p
, the_insn
);
1695 emit_RXY(UChar
*p
, ULong op
, UChar r1
, UChar x2
, UChar b2
, UShort dl2
, UChar dh2
)
1697 ULong the_insn
= op
;
1699 the_insn
|= ((ULong
)r1
) << 36;
1700 the_insn
|= ((ULong
)x2
) << 32;
1701 the_insn
|= ((ULong
)b2
) << 28;
1702 the_insn
|= ((ULong
)dl2
) << 16;
1703 the_insn
|= ((ULong
)dh2
) << 8;
1705 return emit_6bytes(p
, the_insn
);
1710 emit_SI(UChar
*p
, UInt op
, UChar i2
, UChar b1
, UShort d1
)
1712 ULong the_insn
= op
;
1714 the_insn
|= ((ULong
)i2
) << 16;
1715 the_insn
|= ((ULong
)b1
) << 12;
1716 the_insn
|= ((ULong
)d1
) << 0;
1718 return emit_4bytes(p
, the_insn
);
1723 emit_SIL(UChar
*p
, ULong op
, UChar b1
, UShort d1
, UShort i2
)
1725 ULong the_insn
= op
;
1727 the_insn
|= ((ULong
)b1
) << 28;
1728 the_insn
|= ((ULong
)d1
) << 16;
1729 the_insn
|= ((ULong
)i2
) << 0;
1731 return emit_6bytes(p
, the_insn
);
1736 emit_SIY(UChar
*p
, ULong op
, UChar i2
, UChar b1
, UShort dl1
, UChar dh1
)
1738 ULong the_insn
= op
;
1740 the_insn
|= ((ULong
)i2
) << 32;
1741 the_insn
|= ((ULong
)b1
) << 28;
1742 the_insn
|= ((ULong
)dl1
) << 16;
1743 the_insn
|= ((ULong
)dh1
) << 8;
1745 return emit_6bytes(p
, the_insn
);
1750 emit_SSa(UChar
*p
, ULong op
, UChar l
, UChar b1
, UShort d1
, UChar b2
, UShort d2
)
1752 ULong the_insn
= op
;
1754 the_insn
|= ((ULong
)l
) << 32;
1755 the_insn
|= ((ULong
)b1
) << 28;
1756 the_insn
|= ((ULong
)d1
) << 16;
1757 the_insn
|= ((ULong
)b2
) << 12;
1758 the_insn
|= ((ULong
)d2
) << 0;
1760 return emit_6bytes(p
, the_insn
);
1765 emit_VRI_VI(UChar
*p
, ULong op
, UChar v1
, UShort i2
)
1767 ULong the_insn
= op
;
1768 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1770 the_insn
|= ((ULong
)v1
) << 36;
1771 the_insn
|= ((ULong
)i2
) << 16;
1772 the_insn
|= ((ULong
)rxb
)<< 8;
1774 return emit_6bytes(p
, the_insn
);
1779 emit_VRI_VIM(UChar
*p
, ULong op
, UChar v1
, UShort i2
, UChar m3
)
1781 ULong the_insn
= op
;
1782 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1784 the_insn
|= ((ULong
)v1
) << 36;
1785 the_insn
|= ((ULong
)i2
) << 16;
1786 the_insn
|= ((ULong
)m3
) << 12;
1787 the_insn
|= ((ULong
)rxb
)<< 8;
1789 return emit_6bytes(p
, the_insn
);
1794 emit_VRI_VVMM(UChar
*p
, ULong op
, UChar v1
, UChar v3
, UShort i2
, UChar m4
)
1796 ULong the_insn
= op
;
1797 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1798 rxb
= s390_update_rxb(rxb
, 2, &v3
);
1800 the_insn
|= ((ULong
)v1
) << 36;
1801 the_insn
|= ((ULong
)v3
) << 32;
1802 the_insn
|= ((ULong
)i2
) << 16;
1803 the_insn
|= ((ULong
)m4
) << 12;
1804 the_insn
|= ((ULong
)rxb
) << 8;
1806 return emit_6bytes(p
, the_insn
);
1811 emit_VRX(UChar
*p
, ULong op
, UChar v1
, UChar x2
, UChar b2
, UShort d2
, UChar m3
)
1813 ULong the_insn
= op
;
1814 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1816 the_insn
|= ((ULong
)v1
) << 36;
1817 the_insn
|= ((ULong
)x2
) << 32;
1818 the_insn
|= ((ULong
)b2
) << 28;
1819 the_insn
|= ((ULong
)d2
) << 16;
1820 the_insn
|= ((ULong
)m3
) << 12;
1821 the_insn
|= ((ULong
)rxb
)<< 8;
1823 return emit_6bytes(p
, the_insn
);
1828 emit_VRS(UChar
*p
, ULong op
, UChar reg1
, UChar b2
, UShort d2
, UChar reg3
, UChar m4
)
1830 ULong the_insn
= op
;
1831 ULong rxb
= s390_update_rxb(0, 1, ®1
);
1832 rxb
= s390_update_rxb(rxb
, 2, ®3
);
1834 the_insn
|= ((ULong
)reg1
) << 36;
1835 the_insn
|= ((ULong
)reg3
) << 32;
1836 the_insn
|= ((ULong
)b2
) << 28;
1837 the_insn
|= ((ULong
)d2
) << 16;
1838 the_insn
|= ((ULong
)m4
) << 12;
1839 the_insn
|= ((ULong
)rxb
) << 8;
1841 return emit_6bytes(p
, the_insn
);
1846 emit_VRR_VVM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar m4
)
1848 ULong the_insn
= op
;
1849 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1850 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1852 the_insn
|= ((ULong
)v1
) << 36;
1853 the_insn
|= ((ULong
)v2
) << 32;
1854 the_insn
|= ((ULong
)m4
) << 12;
1855 the_insn
|= ((ULong
)rxb
)<< 8;
1857 return emit_6bytes(p
, the_insn
);
1861 emit_VRR_VVMMM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar m3
, UChar m4
,
1864 ULong the_insn
= op
;
1865 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1866 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1868 the_insn
|= ((ULong
)v1
) << 36;
1869 the_insn
|= ((ULong
)v2
) << 32;
1870 the_insn
|= ((ULong
)m5
) << 20;
1871 the_insn
|= ((ULong
)m4
) << 16;
1872 the_insn
|= ((ULong
)m3
) << 12;
1873 the_insn
|= ((ULong
)rxb
) << 8;
1875 return emit_6bytes(p
, the_insn
);
1879 emit_VRR_VVVM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
1881 ULong the_insn
= op
;
1882 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1883 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1884 rxb
= s390_update_rxb(rxb
, 3, &v3
);
1886 the_insn
|= ((ULong
)v1
) << 36;
1887 the_insn
|= ((ULong
)v2
) << 32;
1888 the_insn
|= ((ULong
)v3
) << 28;
1889 the_insn
|= ((ULong
)m4
) << 12;
1890 the_insn
|= ((ULong
)rxb
)<< 8;
1892 return emit_6bytes(p
, the_insn
);
1897 emit_VRR_VVV(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
)
1899 return emit_VRR_VVVM(p
, op
, v1
, v2
, v3
, 0);
1904 emit_VRR_VV(UChar
*p
, ULong op
, UChar v1
, UChar v2
)
1906 return emit_VRR_VVM(p
, op
, v1
, v2
, 0);
1911 emit_VRR_VVVV(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
, UChar v4
)
1913 ULong the_insn
= op
;
1914 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1915 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1916 rxb
= s390_update_rxb(rxb
, 3, &v3
);
1917 rxb
= s390_update_rxb(rxb
, 4, &v4
);
1919 the_insn
|= ((ULong
)v1
) << 36;
1920 the_insn
|= ((ULong
)v2
) << 32;
1921 the_insn
|= ((ULong
)v3
) << 28;
1922 the_insn
|= ((ULong
)v4
) << 12;
1923 the_insn
|= ((ULong
)rxb
)<< 8;
1925 return emit_6bytes(p
, the_insn
);
1929 emit_VRRe_VVVVMM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
, UChar v4
,
1932 ULong the_insn
= op
;
1933 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1934 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1935 rxb
= s390_update_rxb(rxb
, 3, &v3
);
1936 rxb
= s390_update_rxb(rxb
, 4, &v4
);
1938 the_insn
|= ((ULong
)v1
) << 36;
1939 the_insn
|= ((ULong
)v2
) << 32;
1940 the_insn
|= ((ULong
)v3
) << 28;
1941 the_insn
|= ((ULong
)m6
) << 24;
1942 the_insn
|= ((ULong
)m5
) << 16;
1943 the_insn
|= ((ULong
)v4
) << 12;
1944 the_insn
|= ((ULong
)rxb
) << 8;
1946 return emit_6bytes(p
, the_insn
);
1950 emit_VRR_VRR(UChar
*p
, ULong op
, UChar v1
, UChar r2
, UChar r3
)
1952 ULong the_insn
= op
;
1953 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1955 the_insn
|= ((ULong
)v1
) << 36;
1956 the_insn
|= ((ULong
)r2
) << 32;
1957 the_insn
|= ((ULong
)r3
) << 28;
1958 the_insn
|= ((ULong
)rxb
)<< 8;
1960 return emit_6bytes(p
, the_insn
);
1964 emit_VRR_VVVMMM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
, UChar m4
,
1967 ULong the_insn
= op
;
1968 ULong rxb
= s390_update_rxb(0, 1, &v1
);
1969 rxb
= s390_update_rxb(rxb
, 2, &v2
);
1970 rxb
= s390_update_rxb(rxb
, 3, &v3
);
1972 the_insn
|= ((ULong
)v1
) << 36;
1973 the_insn
|= ((ULong
)v2
) << 32;
1974 the_insn
|= ((ULong
)v3
) << 28;
1975 the_insn
|= ((ULong
)m6
) << 20;
1976 the_insn
|= ((ULong
)m5
) << 16;
1977 the_insn
|= ((ULong
)m4
) << 12;
1978 the_insn
|= ((ULong
)rxb
) << 8;
1980 return emit_6bytes(p
, the_insn
);
1984 emit_VRR_VVVMM(UChar
*p
, ULong op
, UChar v1
, UChar v2
, UChar v3
, UChar m4
,
1987 return emit_VRR_VVVMMM(p
, op
, v1
, v2
, v3
, m4
, m5
, 0);
1990 /*------------------------------------------------------------*/
1991 /*--- Functions to emit particular instructions ---*/
1992 /*------------------------------------------------------------*/

static UChar *
s390_emit_AR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "ar", r1, r2);

   return emit_RR(p, 0x1a00, r1, r2);
}

static UChar *
s390_emit_AGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "agr", r1, r2);

   return emit_RRE(p, 0xb9080000, r1, r2);
}

static UChar *
s390_emit_A(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "a", r1, d2, x2, b2);

   return emit_RX(p, 0x5a000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_AY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ay", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000005aULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_AG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ag", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000008ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_AFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "afi", r1, i2);

   return emit_RIL(p, 0xc20900000000ULL, r1, i2);
}

static UChar *
s390_emit_AGFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "agfi", r1, i2);

   return emit_RIL(p, 0xc20800000000ULL, r1, i2);
}

static UChar *
s390_emit_AH(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "ah", r1, d2, x2, b2);

   return emit_RX(p, 0x4a000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_AHY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ahy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000007aULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_AHI(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "ahi", r1, (Int)(Short)i2);

   return emit_RI(p, 0xa70a0000, r1, i2);
}

static UChar *
s390_emit_AGHI(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "aghi", r1, (Int)(Short)i2);

   return emit_RI(p, 0xa70b0000, r1, i2);
}

static UChar *
s390_emit_AGSI(UChar *p, UChar i2, UChar b1, UShort dl1, UChar dh1)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, SDXB, INT), "agsi", dh1, dl1, 0, b1, (Int)(Char)i2);

   return emit_SIY(p, 0xeb000000007aULL, i2, b1, dl1, dh1);
}

static UChar *
s390_emit_ASI(UChar *p, UChar i2, UChar b1, UShort dl1, UChar dh1)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, SDXB, INT), "asi", dh1, dl1, 0, b1, (Int)(Char)i2);

   return emit_SIY(p, 0xeb000000006aULL, i2, b1, dl1, dh1);
}

static UChar *
s390_emit_NR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "nr", r1, r2);

   return emit_RR(p, 0x1400, r1, r2);
}

static UChar *
s390_emit_NGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "ngr", r1, r2);

   return emit_RRE(p, 0xb9800000, r1, r2);
}

static UChar *
s390_emit_N(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "n", r1, d2, x2, b2);

   return emit_RX(p, 0x54000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_NY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ny", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000054ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_NG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ng", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000080ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_NIHF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "nihf", r1, i2);

   return emit_RIL(p, 0xc00a00000000ULL, r1, i2);
}

static UChar *
s390_emit_NILF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "nilf", r1, i2);

   return emit_RIL(p, 0xc00b00000000ULL, r1, i2);
}

static UChar *
s390_emit_NILL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "nill", r1, i2);

   return emit_RI(p, 0xa5070000, r1, i2);
}

static UChar *
s390_emit_TM(UChar *p, UChar i2, UChar b1, UShort d1)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDXB, INT), "tm", d1, 0, b1, i2);

   return emit_SI(p, 0x91000000, i2, b1, d1);
}

static UChar *
s390_emit_TMY(UChar *p, UChar i2, UChar b1, UShort dl1, UChar dh1)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, SDXB, INT), "tmy", dh1, dl1, 0, b1, (Int)(Char)i2);

   return emit_SIY(p, 0xeb0000000051ULL, i2, b1, dl1, dh1);
}

static UChar *
s390_emit_TMLL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "tmll", r1, i2);

   return emit_RI(p, 0xa7010000, r1, i2);
}

static UChar *
s390_emit_BASR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "basr", r1, r2);

   return emit_RR(p, 0x0d00, r1, r2);
}

static UChar *
s390_emit_BCR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(XMNM, GPR), S390_XMNM_BCR, r1, r2);

   return emit_RR(p, 0x0700, r1, r2);
}

static UChar *
s390_emit_BRC(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(XMNM, PCREL), S390_XMNM_BRC, r1, (Int)(Short)i2);

   return emit_RI(p, 0xa7040000, r1, i2);
}

static UChar *
s390_emit_BRCL(UChar *p, UChar r1, ULong i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(XMNM, PCREL), S390_XMNM_BRCL, r1, i2);

   return emit_RIL(p, 0xc00400000000ULL, r1, i2);
}

static UChar *
s390_emit_CR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "cr", r1, r2);

   return emit_RR(p, 0x1900, r1, r2);
}

static UChar *
s390_emit_CGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "cgr", r1, r2);

   return emit_RRE(p, 0xb9200000, r1, r2);
}

static UChar *
s390_emit_C(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "c", r1, d2, x2, b2);

   return emit_RX(p, 0x59000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_CY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "cy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000059ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_CG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "cg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000020ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_CFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "cfi", r1, i2);

   return emit_RIL(p, 0xc20d00000000ULL, r1, i2);
}

static UChar *
s390_emit_CGFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "cgfi", r1, i2);

   return emit_RIL(p, 0xc20c00000000ULL, r1, i2);
}

static UChar *
s390_emit_CS(UChar *p, UChar r1, UChar r3, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, UDXB), "cs", r1, r3, d2, 0, b2);

   return emit_RS(p, 0xba000000, r1, r3, b2, d2);
}

static UChar *
s390_emit_CSY(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "csy", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb0000000014ULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_CSG(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "csg", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb0000000030ULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_CDS(UChar *p, UChar r1, UChar r3, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, UDXB), "cds", r1, r3, d2, 0, b2);

   return emit_RS(p, 0xbb000000, r1, r3, b2, d2);
}

static UChar *
s390_emit_CDSY(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "cdsy", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb0000000031ULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_CDSG(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "cdsg", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb000000003eULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_CLR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "clr", r1, r2);

   return emit_RR(p, 0x1500, r1, r2);
}

static UChar *
s390_emit_CLGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "clgr", r1, r2);

   return emit_RRE(p, 0xb9210000, r1, r2);
}

static UChar *
s390_emit_CL(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "cl", r1, d2, x2, b2);

   return emit_RX(p, 0x55000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_CLY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "cly", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000055ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_CLG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "clg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000021ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_CLFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "clfi", r1, i2);

   return emit_RIL(p, 0xc20f00000000ULL, r1, i2);
}

static UChar *
s390_emit_CLGFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "clgfi", r1, i2);

   return emit_RIL(p, 0xc20e00000000ULL, r1, i2);
}

static UChar *
s390_emit_DR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "dr", r1, r2);

   return emit_RR(p, 0x1d00, r1, r2);
}

static UChar *
s390_emit_D(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "d", r1, d2, x2, b2);

   return emit_RX(p, 0x5d000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_DLR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "dlr", r1, r2);

   return emit_RRE(p, 0xb9970000, r1, r2);
}

static UChar *
s390_emit_DLGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "dlgr", r1, r2);

   return emit_RRE(p, 0xb9870000, r1, r2);
}

static UChar *
s390_emit_DL(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "dl", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000097ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_DLG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "dlg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000087ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_DSGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "dsgr", r1, r2);

   return emit_RRE(p, 0xb90d0000, r1, r2);
}

static UChar *
s390_emit_DSG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "dsg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000000dULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_XR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "xr", r1, r2);

   return emit_RR(p, 0x1700, r1, r2);
}

static UChar *
s390_emit_XGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "xgr", r1, r2);

   return emit_RRE(p, 0xb9820000, r1, r2);
}

static UChar *
s390_emit_X(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "x", r1, d2, x2, b2);

   return emit_RX(p, 0x57000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_XY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "xy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000057ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_XG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "xg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000082ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_XIHF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "xihf", r1, i2);

   return emit_RIL(p, 0xc00600000000ULL, r1, i2);
}

static UChar *
s390_emit_XILF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "xilf", r1, i2);

   return emit_RIL(p, 0xc00700000000ULL, r1, i2);
}

static UChar *
s390_emit_XC(UChar *p, UInt l, UChar b1, UShort d1, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDLB, UDXB), "xc", d1, l, b1, d2, 0, b2);

   return emit_SSa(p, 0xd70000000000ULL, l, b1, d1, b2, d2);
}

static UChar *
s390_emit_FLOGR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "flogr", r1, r2);

   return emit_RRE(p, 0xb9830000, r1, r2);
}

static UChar *
s390_emit_IC(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "ic", r1, d2, x2, b2);

   return emit_RX(p, 0x43000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_ICY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "icy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000073ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_IIHF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iihf", r1, i2);

   return emit_RIL(p, 0xc00800000000ULL, r1, i2);
}

static UChar *
s390_emit_IIHH(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iihh", r1, i2);

   return emit_RI(p, 0xa5000000, r1, i2);
}

static UChar *
s390_emit_IIHL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iihl", r1, i2);

   return emit_RI(p, 0xa5010000, r1, i2);
}

static UChar *
s390_emit_IILF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iilf", r1, i2);

   return emit_RIL(p, 0xc00900000000ULL, r1, i2);
}

static UChar *
s390_emit_IILH(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iilh", r1, i2);

   return emit_RI(p, 0xa5020000, r1, i2);
}

static UChar *
s390_emit_IILL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "iill", r1, i2);

   return emit_RI(p, 0xa5030000, r1, i2);
}

static UChar *
s390_emit_IPM(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(MNM, GPR), "ipm", r1);

   return emit_RRE(p, 0xb2220000, r1, r2);
}

static UChar *
s390_emit_LR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lr", r1, r2);

   return emit_RR(p, 0x1800, r1, r2);
}

static UChar *
s390_emit_LGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lgr", r1, r2);

   return emit_RRE(p, 0xb9040000, r1, r2);
}

static UChar *
s390_emit_LGFR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lgfr", r1, r2);

   return emit_RRE(p, 0xb9140000, r1, r2);
}

static UChar *
s390_emit_L(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "l", r1, d2, x2, b2);

   return emit_RX(p, 0x58000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_LY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ly", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000058ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000004ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LGF(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lgf", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000014ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LGFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "lgfi", r1, i2);

   return emit_RIL(p, 0xc00100000000ULL, r1, i2);
}

static UChar *
s390_emit_LTR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "ltr", r1, r2);

   return emit_RR(p, 0x1200, r1, r2);
}

static UChar *
s390_emit_LTGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "ltgr", r1, r2);

   return emit_RRE(p, 0xb9020000, r1, r2);
}

static UChar *
s390_emit_LT(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lt", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000012ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LTG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ltg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000002ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LBR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lbr", r1, r2);

   return emit_RRE(p, 0xb9260000, r1, r2);
}

static UChar *
s390_emit_LGBR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lgbr", r1, r2);

   return emit_RRE(p, 0xb9060000, r1, r2);
}

static UChar *
s390_emit_LB(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lb", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000076ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LGB(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lgb", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000077ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LCR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lcr", r1, r2);

   return emit_RR(p, 0x1300, r1, r2);
}

static UChar *
s390_emit_LCGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lcgr", r1, r2);

   return emit_RRE(p, 0xb9030000, r1, r2);
}

static UChar *
s390_emit_LHR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lhr", r1, r2);

   return emit_RRE(p, 0xb9270000, r1, r2);
}

static UChar *
s390_emit_LGHR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "lghr", r1, r2);

   return emit_RRE(p, 0xb9070000, r1, r2);
}

static UChar *
s390_emit_LH(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "lh", r1, d2, x2, b2);

   return emit_RX(p, 0x48000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_LHY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lhy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000078ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LGH(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "lgh", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000015ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LHI(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "lhi", r1, (Int)(Short)i2);

   return emit_RI(p, 0xa7080000, r1, i2);
}

static UChar *
s390_emit_LGHI(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "lghi", r1, (Int)(Short)i2);

   return emit_RI(p, 0xa7090000, r1, i2);
}

static UChar *
s390_emit_LLGFR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "llgfr", r1, r2);

   return emit_RRE(p, 0xb9160000, r1, r2);
}

static UChar *
s390_emit_LLGF(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "llgf", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000016ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LLCR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "llcr", r1, r2);

   return emit_RRE(p, 0xb9940000, r1, r2);
}

static UChar *
s390_emit_LLGCR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "llgcr", r1, r2);

   return emit_RRE(p, 0xb9840000, r1, r2);
}

static UChar *
s390_emit_LLC(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "llc", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000094ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LLGC(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "llgc", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000090ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LLHR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "llhr", r1, r2);

   return emit_RRE(p, 0xb9950000, r1, r2);
}

static UChar *
s390_emit_LLGHR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "llghr", r1, r2);

   return emit_RRE(p, 0xb9850000, r1, r2);
}

static UChar *
s390_emit_LLH(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "llh", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000095ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LLGH(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "llgh", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000091ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LLILF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "llilf", r1, i2);

   return emit_RIL(p, 0xc00f00000000ULL, r1, i2);
}

static UChar *
s390_emit_LLILH(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "llilh", r1, i2);

   return emit_RI(p, 0xa50e0000, r1, i2);
}

static UChar *
s390_emit_LLILL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "llill", r1, i2);

   return emit_RI(p, 0xa50f0000, r1, i2);
}

static UChar *
s390_emit_MR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "mr", r1, r2);

   return emit_RR(p, 0x1c00, r1, r2);
}

static UChar *
s390_emit_M(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "m", r1, d2, x2, b2);

   return emit_RX(p, 0x5c000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_MFY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "mfy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000005cULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "mg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000084ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MGRK(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, GPR), "mgrk", r1, r2, r3);

   return emit_RRF3(p, 0xb9ec0000, r3, r1, r2);
}

static UChar *
s390_emit_MH(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "mh", r1, d2, x2, b2);

   return emit_RX(p, 0x4c000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_MHY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "mhy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000007cULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MHI(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "mhi", r1, (Int)(Short)i2);

   return emit_RI(p, 0xa70c0000, r1, i2);
}

static UChar *
s390_emit_MLR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "mlr", r1, r2);

   return emit_RRE(p, 0xb9960000, r1, r2);
}

static UChar *
s390_emit_MLGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "mlgr", r1, r2);

   return emit_RRE(p, 0xb9860000, r1, r2);
}

static UChar *
s390_emit_ML(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "ml", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000096ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MLG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "mlg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000086ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MSR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "msr", r1, r2);

   return emit_RRE(p, 0xb2520000, r1, r2);
}

static UChar *
s390_emit_MSGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "msgr", r1, r2);

   return emit_RRE(p, 0xb90c0000, r1, r2);
}

static UChar *
s390_emit_MS(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "ms", r1, d2, x2, b2);

   return emit_RX(p, 0x71000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_MSY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "msy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000051ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MSG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "msg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000000cULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_MSFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "msfi", r1, i2);

   return emit_RIL(p, 0xc20100000000ULL, r1, i2);
}

static UChar *
s390_emit_MSGFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, INT), "msgfi", r1, i2);

   return emit_RIL(p, 0xc20000000000ULL, r1, i2);
}

static UChar *
s390_emit_MVC(UChar *p, UInt l, UChar b1, UShort d1, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDLB, UDXB), "mvc", d1, l, b1, d2, 0, b2);

   return emit_SSa(p, 0xd20000000000ULL, l, b1, d1, b2, d2);
}

static UChar *
s390_emit_MVI(UChar *p, UChar i2, UChar b1, UShort d1)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDXB, INT), "mvi", d1, 0, b1, i2);

   return emit_SI(p, 0x92000000, i2, b1, d1);
}

static UChar *
s390_emit_MVHHI(UChar *p, UChar b1, UShort d1, UShort i2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDXB, INT), "mvhhi", d1, 0, b1, i2);

   return emit_SIL(p, 0xe54400000000ULL, b1, d1, i2);
}

static UChar *
s390_emit_MVHI(UChar *p, UChar b1, UShort d1, UShort i2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDXB, INT), "mvhi", d1, 0, b1, i2);

   return emit_SIL(p, 0xe54c00000000ULL, b1, d1, i2);
}

static UChar *
s390_emit_MVGHI(UChar *p, UChar b1, UShort d1, UShort i2)
{
   vassert(s390_host_has_gie);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, UDXB, INT), "mvghi", d1, 0, b1, i2);

   return emit_SIL(p, 0xe54800000000ULL, b1, d1, i2);
}

static UChar *
s390_emit_OR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "or", r1, r2);

   return emit_RR(p, 0x1600, r1, r2);
}

static UChar *
s390_emit_OGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "ogr", r1, r2);

   return emit_RRE(p, 0xb9810000, r1, r2);
}

static UChar *
s390_emit_O(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "o", r1, d2, x2, b2);

   return emit_RX(p, 0x56000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_OY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "oy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000056ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_OG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "og", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000081ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_OIHF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "oihf", r1, i2);

   return emit_RIL(p, 0xc00c00000000ULL, r1, i2);
}

static UChar *
s390_emit_OILF(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "oilf", r1, i2);

   return emit_RIL(p, 0xc00d00000000ULL, r1, i2);
}

static UChar *
s390_emit_OILL(UChar *p, UChar r1, UShort i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "oill", r1, i2);

   return emit_RI(p, 0xa50b0000, r1, i2);
}

static UChar *
s390_emit_SLL(UChar *p, UChar r1, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "sll", r1, d2, 0, b2);

   return emit_RS(p, 0x89000000, r1, 0, b2, d2);
}

static UChar *
s390_emit_SLLG(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "sllg", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb000000000dULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_SRA(UChar *p, UChar r1, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "sra", r1, d2, 0, b2);

   return emit_RS(p, 0x8a000000, r1, 0, b2, d2);
}

static UChar *
s390_emit_SRAG(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "srag", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb000000000aULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_SRL(UChar *p, UChar r1, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "srl", r1, d2, 0, b2);

   return emit_RS(p, 0x88000000, r1, 0, b2, d2);
}

static UChar *
s390_emit_SRLG(UChar *p, UChar r1, UChar r3, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, GPR, SDXB), "srlg", r1, r3, dh2, dl2, 0, b2);

   return emit_RSY(p, 0xeb000000000cULL, r1, r3, b2, dl2, dh2);
}

static UChar *
s390_emit_ST(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "st", r1, d2, x2, b2);

   return emit_RX(p, 0x50000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_STY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "sty", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000050ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_STG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "stg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000024ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_STC(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "stc", r1, d2, x2, b2);

   return emit_RX(p, 0x42000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_STCY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "stcy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000072ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_STH(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "sth", r1, d2, x2, b2);

   return emit_RX(p, 0x40000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_STHY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "sthy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000070ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_SR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "sr", r1, r2);

   return emit_RR(p, 0x1b00, r1, r2);
}

static UChar *
s390_emit_SGR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, GPR), "sgr", r1, r2);

   return emit_RRE(p, 0xb9090000, r1, r2);
}

static UChar *
s390_emit_S(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "s", r1, d2, x2, b2);

   return emit_RX(p, 0x5b000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_SY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "sy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000005bULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_SG(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "sg", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe30000000009ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_SH(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UDXB), "sh", r1, d2, x2, b2);

   return emit_RX(p, 0x4b000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_SHY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, SDXB), "shy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xe3000000007bULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_SLFI(UChar *p, UChar r1, UInt i2)
{
   vassert(s390_host_has_eimm);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "slfi", r1, i2);

   return emit_RIL(p, 0xc20500000000ULL, r1, i2);
}

static UChar *
s390_emit_SLGFI(UChar *p, UChar r1, UInt i2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, UINT), "slgfi", r1, i2);

   return emit_RIL(p, 0xc20400000000ULL, r1, i2);
}

static UChar *
s390_emit_LDR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "ldr", r1, r2);

   return emit_RR(p, 0x2800, r1, r2);
}

static UChar *
s390_emit_LE(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, UDXB), "le", r1, d2, x2, b2);

   return emit_RX(p, 0x78000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_LD(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, UDXB), "ld", r1, d2, x2, b2);

   return emit_RX(p, 0x68000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_LEY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, SDXB), "ley", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xed0000000064ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LDY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, SDXB), "ldy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xed0000000065ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_LDGR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_fgx);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, GPR), "ldgr", r1, r2);

   return emit_RRE(p, 0xb3c10000, r1, r2);
}

static UChar *
s390_emit_LGDR(UChar *p, UChar r1, UChar r2)
{
   vassert(s390_host_has_fgx);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, GPR, FPR), "lgdr", r1, r2);

   return emit_RRE(p, 0xb3cd0000, r1, r2);
}

static UChar *
s390_emit_LZER(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(MNM, FPR), "lzer", r1);

   return emit_RRE(p, 0xb3740000, r1, r2);
}

static UChar *
s390_emit_LZDR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(MNM, FPR), "lzdr", r1);

   return emit_RRE(p, 0xb3750000, r1, r2);
}

static UChar *
s390_emit_SFPC(UChar *p, UChar r1)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC2(MNM, GPR), "sfpc", r1);

   return emit_RRE(p, 0xb3840000, r1, 0);
}

static UChar *
s390_emit_STE(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, UDXB), "ste", r1, d2, x2, b2);

   return emit_RX(p, 0x70000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_STD(UChar *p, UChar r1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, UDXB), "std", r1, d2, x2, b2);

   return emit_RX(p, 0x60000000, r1, x2, b2, d2);
}

static UChar *
s390_emit_STEY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, SDXB), "stey", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xed0000000066ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_STDY(UChar *p, UChar r1, UChar x2, UChar b2, UShort dl2, UChar dh2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, SDXB), "stdy", r1, dh2, dl2, x2, b2);

   return emit_RXY(p, 0xed0000000067ULL, r1, x2, b2, dl2, dh2);
}

static UChar *
s390_emit_AEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "aebr", r1, r2);

   return emit_RRE(p, 0xb30a0000, r1, r2);
}

static UChar *
s390_emit_ADBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "adbr", r1, r2);

   return emit_RRE(p, 0xb31a0000, r1, r2);
}

static UChar *
s390_emit_AXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "axbr", r1, r2);

   return emit_RRE(p, 0xb34a0000, r1, r2);
}

static UChar *
s390_emit_CEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "cebr", r1, r2);

   return emit_RRE(p, 0xb3090000, r1, r2);
}

static UChar *
s390_emit_CDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "cdbr", r1, r2);

   return emit_RRE(p, 0xb3190000, r1, r2);
}

static UChar *
s390_emit_CXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "cxbr", r1, r2);

   return emit_RRE(p, 0xb3490000, r1, r2);
}

static UChar *
s390_emit_CEFBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cefbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cefbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3940000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CDFBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cdfbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cdfbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3950000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CXFBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cxfbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cxfbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3960000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CEGBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cegbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cegbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3a40000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CDGBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cdgbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cdgbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3a50000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CXGBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, GPR), "cxgbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT),
                     "cxgbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3a60000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CELFBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "celfbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3900000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CDLFBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "cdlfbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3910000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CXLFBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "cxlfbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3920000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CELGBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "celgbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3a00000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CDLGBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "cdlgbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3a10000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CXLGBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, FPR, UINT, GPR, UINT), "cxlgbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3a20000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLFEBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clfebr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb39c0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLFDBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clfdbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb39d0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLFXBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clfxbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb39e0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLGEBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clgebr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3ac0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLGDBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clgdbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3ad0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CLGXBR(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC5(MNM, GPR, UINT, FPR, UINT), "clgxbr", r1, m3, r2, m4);

   return emit_RRF2(p, 0xb3ae0000, m3, m4, r1, r2);
}

static UChar *
s390_emit_CFEBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cfebr", r1, r3, r2);

   return emit_RRF3(p, 0xb3980000, r3, r1, r2);
}

static UChar *
s390_emit_CFDBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cfdbr", r1, r3, r2);

   return emit_RRF3(p, 0xb3990000, r3, r1, r2);
}

static UChar *
s390_emit_CFXBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cfxbr", r1, r3, r2);

   return emit_RRF3(p, 0xb39a0000, r3, r1, r2);
}

static UChar *
s390_emit_CGEBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cgebr", r1, r3, r2);

   return emit_RRF3(p, 0xb3a80000, r3, r1, r2);
}

static UChar *
s390_emit_CGDBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cgdbr", r1, r3, r2);

   return emit_RRF3(p, 0xb3a90000, r3, r1, r2);
}

static UChar *
s390_emit_CGXBR(UChar *p, UChar r3, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, GPR, UINT, FPR), "cgxbr", r1, r3, r2);

   return emit_RRF3(p, 0xb3aa0000, r3, r1, r2);
}

static UChar *
s390_emit_DEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "debr", r1, r2);

   return emit_RRE(p, 0xb30d0000, r1, r2);
}

static UChar *
s390_emit_DDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "ddbr", r1, r2);

   return emit_RRE(p, 0xb31d0000, r1, r2);
}

static UChar *
s390_emit_DXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "dxbr", r1, r2);

   return emit_RRE(p, 0xb34d0000, r1, r2);
}

static UChar *
s390_emit_LCEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lcebr", r1, r2);

   return emit_RRE(p, 0xb3030000, r1, r2);
}

static UChar *
s390_emit_LCDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lcdbr", r1, r2);

   return emit_RRE(p, 0xb3130000, r1, r2);
}

static UChar *
s390_emit_LCXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lcxbr", r1, r2);

   return emit_RRE(p, 0xb3430000, r1, r2);
}

static UChar *
s390_emit_LDEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "ldebr", r1, r2);

   return emit_RRE(p, 0xb3040000, r1, r2);
}

static UChar *
s390_emit_LXDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lxdbr", r1, r2);

   return emit_RRE(p, 0xb3050000, r1, r2);
}

static UChar *
s390_emit_LXEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lxebr", r1, r2);

   return emit_RRE(p, 0xb3060000, r1, r2);
}

static UChar *
s390_emit_LNEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lnebr", r1, r2);

   return emit_RRE(p, 0xb3010000, r1, r2);
}

static UChar *
s390_emit_LNDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lndbr", r1, r2);

   return emit_RRE(p, 0xb3110000, r1, r2);
}

static UChar *
s390_emit_LNXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lnxbr", r1, r2);

   return emit_RRE(p, 0xb3410000, r1, r2);
}

static UChar *
s390_emit_LPEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lpebr", r1, r2);

   return emit_RRE(p, 0xb3000000, r1, r2);
}

static UChar *
s390_emit_LPDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lpdbr", r1, r2);

   return emit_RRE(p, 0xb3100000, r1, r2);
}

static UChar *
s390_emit_LPXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "lpxbr", r1, r2);

   return emit_RRE(p, 0xb3400000, r1, r2);
}

static UChar *
s390_emit_LEDBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, FPR), "ledbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "ledbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3440000, m3, m4, r1, r2);
}

static UChar *
s390_emit_LDXBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, FPR), "ldxbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "ldxbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3450000, m3, m4, r1, r2);
}

static UChar *
s390_emit_LEXBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m3 == 0)
         s390_disasm(ENC3(MNM, FPR, FPR), "lexbr", r1, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "lexbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3460000, m3, m4, r1, r2);
}
static UChar *
s390_emit_FIEBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m4 == 0)
         s390_disasm(ENC4(MNM, FPR, UINT, FPR), "fiebr", r1, m3, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "fiebra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3570000, m3, m4, r1, r2);
}


static UChar *
s390_emit_FIDBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m4 == 0)
         s390_disasm(ENC4(MNM, FPR, UINT, FPR), "fidbr", r1, m3, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "fidbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb35f0000, m3, m4, r1, r2);
}


static UChar *
s390_emit_FIXBRA(UChar *p, UChar m3, UChar m4, UChar r1, UChar r2)
{
   vassert(m3 == 0 || s390_host_has_fpext);

   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      if (m4 == 0)
         s390_disasm(ENC4(MNM, FPR, UINT, FPR), "fixbr", r1, m3, r2);
      else
         s390_disasm(ENC5(MNM, FPR, UINT, FPR, UINT),
                     "fixbra", r1, m3, r2, m4);
   }

   return emit_RRF2(p, 0xb3470000, m3, m4, r1, r2);
}
static UChar *
s390_emit_MEEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "meebr", r1, r2);

   return emit_RRE(p, 0xb3170000, r1, r2);
}


static UChar *
s390_emit_MDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "mdbr", r1, r2);

   return emit_RRE(p, 0xb31c0000, r1, r2);
}


static UChar *
s390_emit_MXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "mxbr", r1, r2);

   return emit_RRE(p, 0xb34c0000, r1, r2);
}


static UChar *
s390_emit_MAEBR(UChar *p, UChar r1, UChar r3, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, FPR, FPR, FPR), "maebr", r1, r3, r2);

   return emit_RRF(p, 0xb30e0000, r1, r3, r2);
}


static UChar *
s390_emit_MADBR(UChar *p, UChar r1, UChar r3, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, FPR, FPR, FPR), "madbr", r1, r3, r2);

   return emit_RRF(p, 0xb31e0000, r1, r3, r2);
}


static UChar *
s390_emit_MSEBR(UChar *p, UChar r1, UChar r3, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, FPR, FPR, FPR), "msebr", r1, r3, r2);

   return emit_RRF(p, 0xb30f0000, r1, r3, r2);
}


static UChar *
s390_emit_MSDBR(UChar *p, UChar r1, UChar r3, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, FPR, FPR, FPR), "msdbr", r1, r3, r2);

   return emit_RRF(p, 0xb31f0000, r1, r3, r2);
}
static UChar *
s390_emit_SQEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sqebr", r1, r2);

   return emit_RRE(p, 0xb3140000, r1, r2);
}


static UChar *
s390_emit_SQDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sqdbr", r1, r2);

   return emit_RRE(p, 0xb3150000, r1, r2);
}


static UChar *
s390_emit_SQXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sqxbr", r1, r2);

   return emit_RRE(p, 0xb3160000, r1, r2);
}


static UChar *
s390_emit_SEBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sebr", r1, r2);

   return emit_RRE(p, 0xb30b0000, r1, r2);
}


static UChar *
s390_emit_SDBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sdbr", r1, r2);

   return emit_RRE(p, 0xb31b0000, r1, r2);
}


static UChar *
s390_emit_SXBR(UChar *p, UChar r1, UChar r2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, FPR, FPR), "sxbr", r1, r2);

   return emit_RRE(p, 0xb34b0000, r1, r2);
}
4646 s390_emit_ADTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
4648 vassert(s390_host_has_dfp
);
4649 vassert(m4
== 0 || s390_host_has_fpext
);
4650 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
4652 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "adtr", r1
, r2
, r3
);
4654 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "adtra", r1
, r2
, r3
, m4
);
4657 return emit_RRF4(p
, 0xb3d20000, r3
, m4
, r1
, r2
);
4662 s390_emit_AXTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
4664 vassert(s390_host_has_dfp
);
4665 vassert(m4
== 0 || s390_host_has_fpext
);
4666 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
4668 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "axtr", r1
, r2
, r3
);
4670 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "axtra", r1
, r2
, r3
, m4
);
4673 return emit_RRF4(p
, 0xb3da0000, r3
, m4
, r1
, r2
);
4678 s390_emit_CDTR(UChar
*p
, UChar r1
, UChar r2
)
4680 vassert(s390_host_has_dfp
);
4681 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4682 s390_disasm(ENC3(MNM
, FPR
, FPR
), "cdtr", r1
, r2
);
4684 return emit_RRE(p
, 0xb3e40000, r1
, r2
);
4689 s390_emit_CXTR(UChar
*p
, UChar r1
, UChar r2
)
4691 vassert(s390_host_has_dfp
);
4692 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4693 s390_disasm(ENC3(MNM
, FPR
, FPR
), "cxtr", r1
, r2
);
4695 return emit_RRE(p
, 0xb3ec0000, r1
, r2
);
4700 s390_emit_CDGTRA(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4702 vassert(s390_host_has_dfp
);
4704 vassert(m3
== 0 || s390_host_has_fpext
);
4706 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
4708 s390_disasm(ENC3(MNM
, FPR
, GPR
), "cdgtr", r1
, r2
);
4710 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cdgtra", r1
, m3
, r2
, m4
);
4713 return emit_RRF2(p
, 0xb3f10000, m3
, m4
, r1
, r2
);
4718 s390_emit_CXGTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4720 vassert(s390_host_has_dfp
);
4722 /* rounding mode m3 is not considered, as the corresponding
4723 IRop (Iop_I64StoD128) does not take rounding mode. */
4726 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4727 s390_disasm(ENC3(MNM
, FPR
, GPR
), "cxgtr", r1
, r2
);
4729 return emit_RRF2(p
, 0xb3f90000, m3
, m4
, r1
, r2
);
4734 s390_emit_CDFTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4737 vassert(s390_host_has_dfp
);
4738 vassert(s390_host_has_fpext
);
4740 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4741 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cdftr", r1
, m3
, r2
, m4
);
4743 return emit_RRF2(p
, 0xb9510000, m3
, m4
, r1
, r2
);
4748 s390_emit_CXFTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4751 vassert(s390_host_has_dfp
);
4752 vassert(s390_host_has_fpext
);
4754 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4755 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cxftr", r1
, m3
, r2
, m4
);
4757 return emit_RRF2(p
, 0xb9590000, m3
, m4
, r1
, r2
);
4762 s390_emit_CDLFTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4765 vassert(s390_host_has_dfp
);
4766 vassert(s390_host_has_fpext
);
4768 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4769 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cdlftr", r1
, m3
, r2
, m4
);
4771 return emit_RRF2(p
, 0xb9530000, m3
, m4
, r1
, r2
);
4776 s390_emit_CXLFTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4779 vassert(s390_host_has_dfp
);
4780 vassert(s390_host_has_fpext
);
4782 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4783 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cxlftr", r1
, m3
, r2
, m4
);
4785 return emit_RRF2(p
, 0xb95b0000, m3
, m4
, r1
, r2
);
4790 s390_emit_CDLGTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4793 vassert(s390_host_has_dfp
);
4794 vassert(s390_host_has_fpext
);
4796 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4797 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cdlgtr", r1
, m3
, r2
, m4
);
4799 return emit_RRF2(p
, 0xb9520000, m3
, m4
, r1
, r2
);
4804 s390_emit_CXLGTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4807 vassert(s390_host_has_dfp
);
4808 vassert(s390_host_has_fpext
);
4810 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4811 s390_disasm(ENC5(MNM
, FPR
, UINT
, GPR
, UINT
), "cxlgtr", r1
, m3
, r2
, m4
);
4813 return emit_RRF2(p
, 0xb95a0000, m3
, m4
, r1
, r2
);
4818 s390_emit_CEDTR(UChar
*p
, UChar r1
, UChar r2
)
4820 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4821 s390_disasm(ENC3(MNM
, FPR
, FPR
), "cedtr", r1
, r2
);
4823 return emit_RRE(p
, 0xb3f40000, r1
, r2
);
4828 s390_emit_CEXTR(UChar
*p
, UChar r1
, UChar r2
)
4830 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4831 s390_disasm(ENC3(MNM
, FPR
, FPR
), "cextr", r1
, r2
);
4833 return emit_RRE(p
, 0xb3fc0000, r1
, r2
);
4838 s390_emit_CFDTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4841 vassert(s390_host_has_dfp
);
4842 vassert(s390_host_has_fpext
);
4844 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4845 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "cfdtr", r1
, m3
, r2
, m4
);
4847 return emit_RRF2(p
, 0xb9410000, m3
, m4
, r1
, r2
);
4852 s390_emit_CFXTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4855 vassert(s390_host_has_dfp
);
4856 vassert(s390_host_has_fpext
);
4858 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4859 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "cfxtr", r1
, m3
, r2
, m4
);
4861 return emit_RRF2(p
, 0xb9490000, m3
, m4
, r1
, r2
);
4866 s390_emit_CGDTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4868 vassert(s390_host_has_dfp
);
4870 vassert(s390_host_has_fpext
|| m3
< 1 || m3
> 7);
4872 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4873 s390_disasm(ENC4(MNM
, GPR
, UINT
, FPR
), "cgdtr", r1
, m3
, r2
);
4875 return emit_RRF2(p
, 0xb3e10000, m3
, m4
, r1
, r2
);
4880 s390_emit_CGXTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4882 vassert(s390_host_has_dfp
);
4884 vassert(s390_host_has_fpext
|| m3
< 1 || m3
> 7);
4886 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4887 s390_disasm(ENC4(MNM
, GPR
, UINT
, FPR
), "cgxtr", r1
, m3
, r2
);
4889 return emit_RRF2(p
, 0xb3e90000, m3
, m4
, r1
, r2
);
4894 s390_emit_CLFDTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4897 vassert(s390_host_has_dfp
);
4898 vassert(s390_host_has_fpext
);
4900 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4901 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "clfdtr", r1
, m3
, r2
, m4
);
4903 return emit_RRF2(p
, 0xb9430000, m3
, m4
, r1
, r2
);
4908 s390_emit_CLFXTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4911 vassert(s390_host_has_dfp
);
4912 vassert(s390_host_has_fpext
);
4914 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4915 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "clfxtr", r1
, m3
, r2
, m4
);
4917 return emit_RRF2(p
, 0xb94b0000, m3
, m4
, r1
, r2
);
4922 s390_emit_CLGDTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4925 vassert(s390_host_has_dfp
);
4926 vassert(s390_host_has_fpext
);
4928 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4929 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "clgdtr", r1
, m3
, r2
, m4
);
4931 return emit_RRF2(p
, 0xb9420000, m3
, m4
, r1
, r2
);
4936 s390_emit_CLGXTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
4939 vassert(s390_host_has_dfp
);
4940 vassert(s390_host_has_fpext
);
4942 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4943 s390_disasm(ENC5(MNM
, GPR
, UINT
, FPR
, UINT
), "clgxtr", r1
, m3
, r2
, m4
);
4945 return emit_RRF2(p
, 0xb94a0000, m3
, m4
, r1
, r2
);
4950 s390_emit_DDTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
4952 vassert(s390_host_has_dfp
);
4953 vassert(m4
== 0 || s390_host_has_fpext
);
4954 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
4956 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "ddtr", r1
, r2
, r3
);
4958 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "ddtra", r1
, r2
, r3
, m4
);
4961 return emit_RRF4(p
, 0xb3d10000, r3
, m4
, r1
, r2
);
4966 s390_emit_DXTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
4968 vassert(s390_host_has_dfp
);
4969 vassert(m4
== 0 || s390_host_has_fpext
);
4970 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
4972 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "dxtr", r1
, r2
, r3
);
4974 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "dxtra", r1
, r2
, r3
, m4
);
4977 return emit_RRF4(p
, 0xb3d90000, r3
, m4
, r1
, r2
);
4982 s390_emit_EEDTR(UChar
*p
, UChar r1
, UChar r2
)
4984 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4985 s390_disasm(ENC3(MNM
, GPR
, FPR
), "eedtr", r1
, r2
);
4987 return emit_RRE(p
, 0xb3e50000, r1
, r2
);
4992 s390_emit_EEXTR(UChar
*p
, UChar r1
, UChar r2
)
4994 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
4995 s390_disasm(ENC3(MNM
, GPR
, FPR
), "eextr", r1
, r2
);
4997 return emit_RRE(p
, 0xb3ed0000, r1
, r2
);
5002 s390_emit_ESDTR(UChar
*p
, UChar r1
, UChar r2
)
5004 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5005 s390_disasm(ENC3(MNM
, GPR
, FPR
), "esdtr", r1
, r2
);
5007 return emit_RRE(p
, 0xb3e70000, r1
, r2
);
5012 s390_emit_ESXTR(UChar
*p
, UChar r1
, UChar r2
)
5014 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5015 s390_disasm(ENC3(MNM
, GPR
, FPR
), "esxtr", r1
, r2
);
5017 return emit_RRE(p
, 0xb3ef0000, r1
, r2
);
5022 s390_emit_IEDTR(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5024 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5025 s390_disasm(ENC4(MNM
, FPR
, FPR
, GPR
), "iedtr", r1
, r3
, r2
);
5027 return emit_RRF(p
, 0xb3f60000, r3
, r1
, r2
);
5032 s390_emit_IEXTR(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5034 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5035 s390_disasm(ENC4(MNM
, FPR
, FPR
, GPR
), "iextr", r1
, r3
, r2
);
5037 return emit_RRF(p
, 0xb3fe0000, r3
, r1
, r2
);
5042 s390_emit_LDETR(UChar
*p
, UChar m4
, UChar r1
, UChar r2
)
5044 vassert(s390_host_has_dfp
);
5045 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5046 s390_disasm(ENC4(MNM
, FPR
, FPR
, UINT
), "ldetr", r1
, r2
, m4
);
5048 return emit_RRF5(p
, 0xb3d40000, m4
, r1
, r2
);
5053 s390_emit_LXDTR(UChar
*p
, UChar m4
, UChar r1
, UChar r2
)
5055 vassert(s390_host_has_dfp
);
5056 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5057 s390_disasm(ENC4(MNM
, FPR
, FPR
, UINT
), "lxdtr", r1
, r2
, m4
);
5059 return emit_RRF5(p
, 0xb3dc0000, m4
, r1
, r2
);
5064 s390_emit_LEDTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
5066 vassert(s390_host_has_dfp
);
5068 vassert(s390_host_has_fpext
|| m3
< 1 || m3
> 7);
5070 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5071 s390_disasm(ENC5(MNM
, FPR
, UINT
, FPR
, UINT
), "ledtr", r1
, m3
, r2
, m4
);
5073 return emit_RRF2(p
, 0xb3d50000, m3
, m4
, r1
, r2
);
5078 s390_emit_LDXTR(UChar
*p
, UChar m3
, UChar m4
, UChar r1
, UChar r2
)
5080 vassert(s390_host_has_dfp
);
5082 vassert(s390_host_has_fpext
|| m3
< 1 || m3
> 7);
5084 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5085 s390_disasm(ENC5(MNM
, FPR
, UINT
, FPR
, UINT
), "ldxtr", r1
, m3
, r2
, m4
);
5087 return emit_RRF2(p
, 0xb3dd0000, m3
, m4
, r1
, r2
);
5092 s390_emit_MDTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5094 vassert(s390_host_has_dfp
);
5095 vassert(m4
== 0 || s390_host_has_fpext
);
5096 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
5098 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "mdtr", r1
, r2
, r3
);
5100 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "mdtra", r1
, r2
, r3
, m4
);
5103 return emit_RRF4(p
, 0xb3d00000, r3
, m4
, r1
, r2
);
5108 s390_emit_MXTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5110 vassert(s390_host_has_dfp
);
5111 vassert(m4
== 0 || s390_host_has_fpext
);
5112 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
5114 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "mxtr", r1
, r2
, r3
);
5116 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "mxtra", r1
, r2
, r3
, m4
);
5119 return emit_RRF4(p
, 0xb3d80000, r3
, m4
, r1
, r2
);
static UChar *
emit_E(UChar *p, UInt op)
{
   ULong the_insn = op;

   return emit_2bytes(p, the_insn);
}


static UChar *
s390_emit_PFPO(UChar *p)
{
   vassert(s390_host_has_pfpo);
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM)) {
      s390_disasm(ENC1(MNM), "pfpo");
   }

   return emit_E(p, 0x010a);
}
5145 s390_emit_QADTR(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5147 vassert(s390_host_has_dfp
);
5148 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5149 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "qadtr", r1
, r3
, r2
, m4
);
5151 return emit_RRF4(p
, 0xb3f50000, r3
, m4
, r1
, r2
);
5156 s390_emit_QAXTR(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5158 vassert(s390_host_has_dfp
);
5159 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5160 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "qaxtr", r1
, r3
, r2
, m4
);
5162 return emit_RRF4(p
, 0xb3fd0000, r3
, m4
, r1
, r2
);
5167 s390_emit_RRDTR(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5169 vassert(s390_host_has_dfp
);
5170 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5171 s390_disasm(ENC5(MNM
, FPR
, FPR
, GPR
, UINT
), "rrdtr", r1
, r3
, r2
, m4
);
5173 return emit_RRF4(p
, 0xb3f70000, r3
, m4
, r1
, r2
);
5178 s390_emit_RRXTR(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5180 vassert(s390_host_has_dfp
);
5181 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5182 s390_disasm(ENC5(MNM
, FPR
, FPR
, GPR
, UINT
), "rrxtr", r1
, r3
, r2
, m4
);
5184 return emit_RRF4(p
, 0xb3ff0000, r3
, m4
, r1
, r2
);
5189 s390_emit_SDTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5191 vassert(s390_host_has_dfp
);
5192 vassert(m4
== 0 || s390_host_has_fpext
);
5193 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
5195 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "sdtr", r1
, r2
, r3
);
5197 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "sdtra", r1
, r2
, r3
, m4
);
5200 return emit_RRF4(p
, 0xb3d30000, r3
, m4
, r1
, r2
);
5205 s390_emit_SXTRA(UChar
*p
, UChar r3
, UChar m4
, UChar r1
, UChar r2
)
5207 vassert(s390_host_has_dfp
);
5208 vassert(m4
== 0 || s390_host_has_fpext
);
5209 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
)) {
5211 s390_disasm(ENC4(MNM
, FPR
, FPR
, FPR
), "sxtr", r1
, r2
, r3
);
5213 s390_disasm(ENC5(MNM
, FPR
, FPR
, FPR
, UINT
), "sxtra", r1
, r2
, r3
, m4
);
5216 return emit_RRF4(p
, 0xb3db0000, r3
, m4
, r1
, r2
);
5221 s390_emit_SLDT(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5223 vassert(s390_host_has_dfp
);
5224 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5225 s390_disasm(ENC4(MNM
, FPR
, FPR
, UDXB
), "sldt", r1
, r3
, 0, 0, r2
);
5227 return emit_RXF(p
, 0xED0000000040ULL
, r3
, 0, r2
, 0, r1
);
5232 s390_emit_SLXT(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5234 vassert(s390_host_has_dfp
);
5235 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5236 s390_disasm(ENC4(MNM
, FPR
, FPR
, UDXB
), "slxt", r1
, r3
, 0, 0, r2
);
5238 return emit_RXF(p
, 0xED0000000048ULL
, r3
, 0, r2
, 0, r1
);
5243 s390_emit_SRDT(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5245 vassert(s390_host_has_dfp
);
5246 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5247 s390_disasm(ENC4(MNM
, FPR
, FPR
, UDXB
), "srdt", r1
, r3
, 0, 0, r2
);
5249 return emit_RXF(p
, 0xED0000000041ULL
, r3
, 0, r2
, 0, r1
);
5254 s390_emit_SRXT(UChar
*p
, UChar r3
, UChar r1
, UChar r2
)
5256 vassert(s390_host_has_dfp
);
5257 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5258 s390_disasm(ENC4(MNM
, FPR
, FPR
, UDXB
), "srxt", r1
, r3
, 0, 0, r2
);
5260 return emit_RXF(p
, 0xED0000000049ULL
, r3
, 0, r2
, 0, r1
);
5265 s390_emit_LOCGR(UChar
*p
, UChar m3
, UChar r1
, UChar r2
)
5267 vassert(s390_host_has_lsc
);
5268 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5269 s390_disasm(ENC4(MNM
, GPR
, GPR
, UINT
), "locgr", r1
, r2
, m3
);
5271 return emit_RRF3(p
, 0xb9e20000, m3
, r1
, r2
);
5276 s390_emit_LOC(UChar
*p
, UChar r1
, UChar m3
, UChar b2
, UShort dl2
, UChar dh2
)
5278 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5279 s390_disasm(ENC4(MNM
, GPR
, UINT
, SDXB
), "loc", r1
, m3
, dh2
, dl2
, 0, b2
);
5281 return emit_RSY(p
, 0xeb00000000f2ULL
, r1
, m3
, b2
, dl2
, dh2
);
5286 s390_emit_LOCG(UChar
*p
, UChar r1
, UChar m3
, UChar b2
, UShort dl2
, UChar dh2
)
5288 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5289 s390_disasm(ENC4(MNM
, GPR
, UINT
, SDXB
), "locg", r1
, m3
, dh2
, dl2
, 0, b2
);
5291 return emit_RSY(p
, 0xeb00000000e2ULL
, r1
, m3
, b2
, dl2
, dh2
);
5295 s390_emit_LOCGHI(UChar
*p
, UChar r1
, UShort i2
, UChar m3
)
5297 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5298 s390_disasm(ENC4(MNM
, GPR
, INT
, UINT
), "locghi", r1
, (Int
)(Short
)i2
, m3
);
5300 return emit_RIE(p
, 0xec0000000046ULL
, r1
, i2
, m3
);
5304 s390_emit_RISBG(UChar
*p
, UChar r1
, UChar r2
, UChar i3
, Char i4
, UChar i5
)
5306 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5307 s390_disasm(ENC6(MNM
, GPR
, GPR
, UINT
, UINT
, UINT
),
5308 "risbg", r1
, r2
, i3
, i4
, i5
);
5310 return emit_RIEf(p
, 0xec0000000055ULL
, r1
, r2
, i3
, i4
, i5
);
/* Provide a symbolic name for register "R0" */
#define R0 0

/* Split up a 20-bit displacement into its high and low piece
   suitable for passing as function arguments */
#define DISP20(d) (((UInt)d) & 0xFFF), ((((UInt)d) >> 12) & 0xFF)

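/* Example (for illustration): a displacement of -8 is 0xFFFF8 as a 20-bit
   two's-complement value, so DISP20(-8) expands to the argument pair
   0xFF8 (DL, the low 12 bits) and 0xFF (DH, the high 8 bits). */
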
/*---------------------------------------------------------------*/
/*--- Helper functions                                        ---*/
/*---------------------------------------------------------------*/

static __inline__ Bool
uint_fits_signed_16bit(UInt val)
{
   UInt v = val & 0xFFFFu;

   /* sign extend the low halfword */
   v = (Int)(v << 16) >> 16;

   return val == v;
}


static __inline__ Bool
ulong_fits_signed_16bit(ULong val)
{
   ULong v = val & 0xFFFFu;

   /* sign extend the low halfword */
   v = (Long)(v << 48) >> 48;

   return val == v;
}


static __inline__ Bool
ulong_fits_signed_32bit(ULong val)
{
   ULong v = val & 0xFFFFFFFFu;

   /* sign extend the low word */
   v = (Long)(v << 32) >> 32;

   return val == v;
}


static __inline__ Bool
ulong_fits_unsigned_32bit(ULong val)
{
   return (val & 0xFFFFFFFFu) == val;
}

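/* Examples (for illustration): uint_fits_signed_16bit(0xFFFFFFF0) is True,
   because the low halfword 0xFFF0 sign-extends back to 0xFFFFFFF0, whereas
   uint_fits_signed_16bit(0x8000) is False, because 0x8000 sign-extends to
   0xFFFF8000. */
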
/* Load a 64-bit immediate VAL into register REG. */
static UChar *
s390_emit_load_64imm(UChar *p, UChar reg, ULong val)
{
   if (ulong_fits_signed_16bit(val)) {
      return s390_emit_LGHI(p, reg, val);
   }

   if (s390_host_has_eimm) {
      if (ulong_fits_unsigned_32bit(val)) {
         return s390_emit_LLILF(p, reg, val);
      }
      if (ulong_fits_signed_32bit(val)) {
         /* LGFI's sign extension will recreate the correct 64-bit value */
         return s390_emit_LGFI(p, reg, val);
      }
      /* Do it in two steps: upper half [0:31] and lower half [32:63] */
      p = s390_emit_IIHF(p, reg, val >> 32);
      return s390_emit_IILF(p, reg, val & 0xFFFFFFFF);
   }

   if (ulong_fits_unsigned_32bit(val)) {
      p = s390_emit_LLILH(p, reg, (val >> 16) & 0xFFFF); /* sets val[32:47] */
      p = s390_emit_IILL(p, reg, val & 0xFFFF);          /* sets val[48:63] */
      return p;
   }

   p = s390_emit_IIHH(p, reg, (val >> 48) & 0xFFFF);
   p = s390_emit_IIHL(p, reg, (val >> 32) & 0xFFFF);
   p = s390_emit_IILH(p, reg, (val >> 16) & 0xFFFF);
   p = s390_emit_IILL(p, reg, val & 0xFFFF);

   return p;
}

/* Load a 32-bit immediate VAL into register REG. */
static UChar *
s390_emit_load_32imm(UChar *p, UChar reg, UInt val)
{
   if (uint_fits_signed_16bit(val)) {
      /* LHI's sign extension will recreate the correct 32-bit value */
      return s390_emit_LHI(p, reg, val);
   }

   if (s390_host_has_eimm) {
      return s390_emit_IILF(p, reg, val);
   }

   /* val[0:15]  --> (val >> 16) & 0xFFFF
      val[16:31] --> val & 0xFFFF */
   p = s390_emit_IILH(p, reg, (val >> 16) & 0xFFFF);
   return s390_emit_IILL(p, reg, val & 0xFFFF);
}

/*------------------------------------------------------------*/
/*--- Wrapper functions                                    ---*/
/*------------------------------------------------------------*/

/* r1[32:63],r1+1[32:63] = r1+1[32:63] * memory[op2addr][0:31] */
static UChar *
s390_emit_MFYw(UChar *p, UChar r1, UChar x, UChar b, UShort dl, UChar dh)
{
   if (s390_host_has_gie) {
      return s390_emit_MFY(p, r1, x, b, dl, dh);
   }

   /* Load from memory into R0, then MULTIPLY with R1 */
   p = s390_emit_LY(p, R0, x, b, dl, dh);
   return s390_emit_MR(p, r1, R0);
}


/* r1[32:63] = r1[32:63] * memory[op2addr][0:15] */
static UChar *
s390_emit_MHYw(UChar *p, UChar r1, UChar x, UChar b, UShort dl, UChar dh)
{
   if (s390_host_has_gie) {
      return s390_emit_MHY(p, r1, x, b, dl, dh);
   }

   /* Load from memory into R0, then MULTIPLY with R1 */
   p = s390_emit_LHY(p, R0, x, b, dl, dh);
   return s390_emit_MSR(p, r1, R0);
}


/* r1[32:63] = r1[32:63] * i2 */
static UChar *
s390_emit_MSFIw(UChar *p, UChar r1, UInt i2)
{
   if (s390_host_has_gie) {
      return s390_emit_MSFI(p, r1, i2);
   }

   /* Load I2 into R0; then MULTIPLY R0 with R1 */
   p = s390_emit_load_32imm(p, R0, i2);
   return s390_emit_MSR(p, r1, R0);
}

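/* Note: the fallback paths of these wrappers clobber R0 as a scratch
   register; presumably that is safe because r0 is not handed out by the
   register allocator in this backend. */
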
/* r1[32:63] = r1[32:63] & i2 */
static UChar *
s390_emit_NILFw(UChar *p, UChar r1, UInt i2)
{
   if (s390_host_has_eimm) {
      return s390_emit_NILF(p, r1, i2);
   }

   /* Load I2 into R0; then AND R0 with R1 */
   p = s390_emit_load_32imm(p, R0, i2);
   return s390_emit_NR(p, r1, R0);
}


/* r1[32:63] = r1[32:63] | i2 */
static UChar *
s390_emit_OILFw(UChar *p, UChar r1, UInt i2)
{
   if (s390_host_has_eimm) {
      return s390_emit_OILF(p, r1, i2);
   }

   /* Load I2 into R0; then OR R0 with R1 */
   p = s390_emit_load_32imm(p, R0, i2);
   return s390_emit_OR(p, r1, R0);
}


/* r1[32:63] = r1[32:63] ^ i2 */
static UChar *
s390_emit_XILFw(UChar *p, UChar r1, UInt i2)
{
   if (s390_host_has_eimm) {
      return s390_emit_XILF(p, r1, i2);
   }

   /* Load I2 into R0; then XOR R0 with R1 */
   p = s390_emit_load_32imm(p, R0, i2);
   return s390_emit_XR(p, r1, R0);
}
5508 /* r1[32:63] = sign_extend(r2[56:63]) */
5510 s390_emit_LBRw(UChar
*p
, UChar r1
, UChar r2
)
5512 if (s390_host_has_eimm
) {
5513 return s390_emit_LBR(p
, r1
, r2
);
5516 p
= s390_emit_LR(p
, r1
, r2
); /* r1 = r2 */
5517 p
= s390_emit_SLL(p
, r1
, R0
, 24); /* r1 = r1 << 24 */
5518 return s390_emit_SRA(p
, r1
, R0
, 24); /* r1 = r1 >>a 24 */
5522 /* r1[0:63] = sign_extend(r2[56:63]) */
5524 s390_emit_LGBRw(UChar
*p
, UChar r1
, UChar r2
)
5526 if (s390_host_has_eimm
) {
5527 return s390_emit_LGBR(p
, r1
, r2
);
5530 p
= s390_emit_LR(p
, r1
, r2
); /* r1 = r2 */
5531 p
= s390_emit_SLLG(p
, r1
, r1
, R0
, DISP20(56)); /* r1 = r1 << 56 */
5532 return s390_emit_SRAG(p
, r1
, r1
, R0
, DISP20(56)); /* r1 = r1 >>a 56 */
5536 /* r1[32:63] = sign_extend(r2[48:63]) */
5538 s390_emit_LHRw(UChar
*p
, UChar r1
, UChar r2
)
5540 if (s390_host_has_eimm
) {
5541 return s390_emit_LHR(p
, r1
, r2
);
5544 p
= s390_emit_LR(p
, r1
, r2
); /* r1 = r2 */
5545 p
= s390_emit_SLL(p
, r1
, R0
, 16); /* r1 = r1 << 16 */
5546 return s390_emit_SRA(p
, r1
, R0
, 16); /* r1 = r1 >>a 16 */
5550 /* r1[0:63] = sign_extend(r2[48:63]) */
5552 s390_emit_LGHRw(UChar
*p
, UChar r1
, UChar r2
)
5554 if (s390_host_has_eimm
) {
5555 return s390_emit_LGHR(p
, r1
, r2
);
5558 p
= s390_emit_LR(p
, r1
, r2
); /* r1 = r2 */
5559 p
= s390_emit_SLLG(p
, r1
, r1
, R0
, DISP20(48)); /* r1 = r1 << 48 */
5560 return s390_emit_SRAG(p
, r1
, r1
, R0
, DISP20(48)); /* r1 = r1 >>a 48 */
5564 /* r1[0:63] = sign_extend(i2) */
5566 s390_emit_LGFIw(UChar
*p
, UChar r1
, UInt i2
)
5568 if (s390_host_has_eimm
) {
5569 return s390_emit_LGFI(p
, r1
, i2
);
5572 p
= s390_emit_load_32imm(p
, R0
, i2
);
5573 return s390_emit_LGFR(p
, r1
, R0
);
5577 /* r1[32:63] = zero_extend($r2[56:63]) */
5579 s390_emit_LLCRw(UChar
*p
, UChar r1
, UChar r2
)
5581 if (s390_host_has_eimm
) {
5582 return s390_emit_LLCR(p
, r1
, r2
);
5585 p
= s390_emit_LR(p
, r1
, r2
);
5586 p
= s390_emit_LHI(p
, R0
, 0xFF);
5587 return s390_emit_NR(p
, r1
, R0
);
5591 /* r1[0:63] = zero_extend($r2[56:63]) */
5593 s390_emit_LLGCRw(UChar
*p
, UChar r1
, UChar r2
)
5595 if (s390_host_has_eimm
) {
5596 return s390_emit_LLGCR(p
, r1
, r2
);
5599 p
= s390_emit_LR(p
, r1
, r2
);
5600 p
= s390_emit_LLILL(p
, R0
, 0xFF);
5601 return s390_emit_NGR(p
, r1
, R0
);
5605 /* r1[32:63] = zero_extend(r2[48:63]) */
5607 s390_emit_LLHRw(UChar
*p
, UChar r1
, UChar r2
)
5609 if (s390_host_has_eimm
) {
5610 return s390_emit_LLHR(p
, r1
, r2
);
5613 p
= s390_emit_LR(p
, r1
, r2
);
5614 p
= s390_emit_LLILL(p
, R0
, 0xFFFF);
5615 return s390_emit_NR(p
, r1
, R0
);
5619 /* r1[0:63] = zero_extend(r2[48:63]) */
5621 s390_emit_LLGHRw(UChar
*p
, UChar r1
, UChar r2
)
5623 if (s390_host_has_eimm
) {
5624 return s390_emit_LLGHR(p
, r1
, r2
);
5627 p
= s390_emit_LR(p
, r1
, r2
);
5628 p
= s390_emit_LLILL(p
, R0
, 0xFFFF);
5629 return s390_emit_NGR(p
, r1
, R0
);
5633 /* r1[32:63] = zero_extend(mem[op2addr][0:7]) */
5635 s390_emit_LLCw(UChar
*p
, UChar r1
, UChar x2
, UChar b2
, UShort dl
, UChar dh
)
5637 if (s390_host_has_eimm
) {
5638 return s390_emit_LLC(p
, r1
, x2
, b2
, dl
, dh
);
5642 p
= s390_emit_IC(p
, r1
, x2
, b2
, dl
);
5644 p
= s390_emit_ICY(p
, r1
, x2
, b2
, dl
, dh
);
5646 p
= s390_emit_LLILL(p
, R0
, 0xFF);
5647 return s390_emit_NR(p
, r1
, R0
);
5651 /* r1[32:63] = zero_extend(mem[op2addr][0:15]) */
5653 s390_emit_LLHw(UChar
*p
, UChar r1
, UChar x2
, UChar b2
, UShort dl
, UChar dh
)
5655 if (s390_host_has_eimm
) {
5656 return s390_emit_LLH(p
, r1
, x2
, b2
, dl
, dh
);
5659 p
= s390_emit_LLGH(p
, r1
, x2
, b2
, dl
, dh
);
5660 p
= s390_emit_LLILL(p
, R0
, 0xFFFF);
5661 return s390_emit_NR(p
, r1
, R0
);
5665 /* r1[0:63] = zero_extend(i2) */
5667 s390_emit_LLILFw(UChar
*p
, UChar r1
, UInt i2
)
5669 if (s390_host_has_eimm
) {
5670 return s390_emit_LLILF(p
, r1
, i2
);
5673 p
= s390_emit_LLILH(p
, r1
, (i2
>> 16) & 0xFFFF); /* i2[0:15] */
5674 return s390_emit_OILL(p
, r1
, i2
& 0xFFFF);
5678 /* r1[32:63] = r1[32:63] + i2 */
5680 s390_emit_AFIw(UChar
*p
, UChar r1
, UInt i2
)
5682 if (s390_host_has_eimm
) {
5683 return s390_emit_AFI(p
, r1
, i2
);
5685 /* Load 32 bit immediate to R0 then add */
5686 p
= s390_emit_load_32imm(p
, R0
, i2
);
5687 return s390_emit_AR(p
, r1
, R0
);
5691 /* r1[32:63] = r1[32:63] - i2 */
5693 s390_emit_SLFIw(UChar
*p
, UChar r1
, UInt i2
)
5695 if (s390_host_has_eimm
) {
5696 return s390_emit_SLFI(p
, r1
, i2
);
5699 /* Load 32 bit immediate to R0 then subtract */
5700 p
= s390_emit_load_32imm(p
, R0
, i2
);
5701 return s390_emit_SR(p
, r1
, R0
);
5705 /* r1[0:63] = r1[0:63] - zero_extend(i2) */
5707 s390_emit_SLGFIw(UChar
*p
, UChar r1
, UInt i2
)
5709 if (s390_host_has_eimm
) {
5710 return s390_emit_SLGFI(p
, r1
, i2
);
5713 /* Load zero-extended 32 bit immediate to R0 then subtract */
5714 p
= s390_emit_load_64imm(p
, R0
, i2
);
5715 return s390_emit_SGR(p
, r1
, R0
);
5720 s390_emit_LTw(UChar
*p
, UChar r1
, UChar x2
, UChar b2
, UShort dl
, UChar dh
)
5722 if (s390_host_has_eimm
) {
5723 return s390_emit_LT(p
, r1
, x2
, b2
, dl
, dh
);
5725 /* Load 32 bit from memory to R0 then compare */
5727 p
= s390_emit_L(p
, R0
, x2
, b2
, dl
);
5729 p
= s390_emit_LY(p
, R0
, x2
, b2
, dl
, dh
);
5731 return s390_emit_LTR(p
, r1
, R0
);
5736 s390_emit_LTGw(UChar
*p
, UChar r1
, UChar x2
, UChar b2
, UShort dl
, UChar dh
)
5738 if (s390_host_has_eimm
) {
5739 return s390_emit_LTG(p
, r1
, x2
, b2
, dl
, dh
);
5741 /* Load 64 bit from memory to R0 then compare */
5742 p
= s390_emit_LG(p
, R0
, x2
, b2
, dl
, dh
);
5743 return s390_emit_LTGR(p
, r1
, R0
);
5748 s390_emit_CFIw(UChar
*p
, UChar r1
, UInt i2
)
5750 if (s390_host_has_eimm
) {
5751 return s390_emit_CFI(p
, r1
, i2
);
5753 /* Load 32 bit immediate to R0 then compare */
5754 p
= s390_emit_load_32imm(p
, R0
, i2
);
5755 return s390_emit_CR(p
, r1
, R0
);
5760 s390_emit_CLFIw(UChar
*p
, UChar r1
, UInt i2
)
5762 if (s390_host_has_eimm
) {
5763 return s390_emit_CLFI(p
, r1
, i2
);
5765 /* Load 32 bit immediate to R0 then compare */
5766 p
= s390_emit_load_32imm(p
, R0
, i2
);
5767 return s390_emit_CLR(p
, r1
, R0
);
static UChar *
s390_emit_LGDRw(UChar *p, UChar r1, UChar r2)
{
   if (s390_host_has_fgx) {
      return s390_emit_LGDR(p, r1, r2);
   }

   /* Store the FPR at memory[sp - 8]. This is safe because SP grows towards
      smaller addresses and is 8-byte aligned. Then load the GPR from that
      memory location. */
   p = s390_emit_STDY(p, r2, R0, S390_REGNO_STACK_POINTER, DISP20(-8));
   return s390_emit_LG(p, r1, R0, S390_REGNO_STACK_POINTER, DISP20(-8));
}


static UChar *
s390_emit_LDGRw(UChar *p, UChar r1, UChar r2)
{
   if (s390_host_has_fgx) {
      return s390_emit_LDGR(p, r1, r2);
   }

   /* Store the GPR at memory[sp - 8]. This is safe because SP grows towards
      smaller addresses and is 8-byte aligned. Then load the FPR from that
      memory location. */
   p = s390_emit_STG(p, r2, R0, S390_REGNO_STACK_POINTER, DISP20(-8));
   return s390_emit_LDY(p, r1, R0, S390_REGNO_STACK_POINTER, DISP20(-8));
}

static UChar *
s390_emit_VL(UChar *p, UChar v1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, VR, UDXB), "vl", v1, d2, x2, b2);

   return emit_VRX(p, 0xE70000000006ULL, v1, x2, b2, d2, 0);
}


static UChar *
s390_emit_VLR(UChar *p, UChar v1, UChar v2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, VR, UDXB), "vlr", v1, v2);

   return emit_VRR_VV(p, 0xE70000000056ULL, v1, v2);
}


static UChar *
s390_emit_VLREP(UChar *p, UChar v1, UChar x2, UChar b2, UShort d2, UShort m3)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC4(MNM, VR, UDXB, UINT), "vlrep", v1, d2, x2, b2, m3);

   return emit_VRX(p, 0xE70000000005ULL, v1, x2, b2, d2, m3);
}


static UChar *
s390_emit_VST(UChar *p, UChar v1, UChar x2, UChar b2, UShort d2)
{
   if (UNLIKELY(vex_traceflags & VEX_TRACE_ASM))
      s390_disasm(ENC3(MNM, VR, UDXB), "vst", v1, d2, x2, b2);

   return emit_VRX(p, 0xE7000000000eULL, v1, x2, b2, d2, 0);
}
5841 s390_emit_VLGV(UChar
*p
, UChar r1
, UChar b2
, UShort d2
, UChar v3
, UChar m4
)
5843 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5844 s390_disasm(ENC5(MNM
, GPR
, UDXB
, VR
, UINT
), "vlgv", r1
, d2
, 0, b2
, v3
, m4
);
5846 return emit_VRS(p
, 0xE70000000021ULL
, r1
, b2
, d2
, v3
, m4
);
5851 s390_emit_VLVG(UChar
*p
, UChar v1
, UChar b2
, UShort d2
, UChar r3
, UChar m4
)
5853 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5854 s390_disasm(ENC5(MNM
, VR
, UDXB
, GPR
, UINT
), "vlvg", v1
, d2
, 0, b2
, r3
, m4
);
5856 return emit_VRS(p
, 0xE70000000022ULL
, v1
, b2
, d2
, r3
, m4
);
5861 s390_emit_VPERM(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar v4
)
5863 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5864 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, VR
), "vperm", v1
, v2
, v3
, v4
);
5866 return emit_VRR_VVVV(p
, 0xE7000000008cULL
, v1
, v2
, v3
, v4
);
5870 s390_emit_VO(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
5872 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5873 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vo", v1
, v2
, v3
);
5875 return emit_VRR_VVV(p
, 0xE7000000006aULL
, v1
, v2
, v3
);
5879 s390_emit_VOC(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
5881 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5882 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "voc", v1
, v2
, v3
);
5884 return emit_VRR_VVV(p
, 0xE7000000006fULL
, v1
, v2
, v3
);
5888 s390_emit_VX(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
5890 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5891 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vx", v1
, v2
, v3
);
5893 return emit_VRR_VVV(p
, 0xE7000000006dULL
, v1
, v2
, v3
);
5897 s390_emit_VN(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
5899 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5900 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vn", v1
, v2
, v3
);
5902 return emit_VRR_VVV(p
, 0xE70000000068ULL
, v1
, v2
, v3
);
5906 s390_emit_VCEQ(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
5908 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5909 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vceq", v1
, v2
, v3
, m4
);
5911 return emit_VRR_VVVM(p
, 0xE700000000f8ULL
, v1
, v2
, v3
, m4
);
5916 s390_emit_VGBM(UChar
*p
, UChar v1
, UShort i2
)
5918 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5919 s390_disasm(ENC3(MNM
, VR
, UINT
), "vgbm", v1
, i2
);
5921 return emit_VRI_VI(p
, 0xE70000000044ULL
, v1
, i2
);
5926 s390_emit_VPK(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
5928 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5929 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vpk", v1
, v2
, v3
, m4
);
5931 return emit_VRR_VVVM(p
, 0xE70000000094ULL
, v1
, v2
, v3
, m4
);
5936 s390_emit_VPKS(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
5938 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5939 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vpks", v1
, v2
, v3
, m4
, 0);
5941 return emit_VRR_VVVM(p
, 0xE70000000097ULL
, v1
, v2
, v3
, m4
);
5946 s390_emit_VPKLS(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
5948 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5949 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vpkls", v1
, v2
, v3
, m4
, 0);
5951 return emit_VRR_VVVM(p
, 0xE70000000095ULL
, v1
, v2
, v3
, m4
);
5956 s390_emit_VREP(UChar
*p
, UChar v1
, UChar v3
, UShort i2
, UChar m4
)
5958 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5959 s390_disasm(ENC5(MNM
, VR
, VR
, UINT
, UINT
), "vrep", v1
, v3
, i2
, m4
);
5961 return emit_VRI_VVMM(p
, 0xE7000000004DULL
, v1
, v3
, i2
, m4
);
5966 s390_emit_VREPI(UChar
*p
, UChar v1
, UShort i2
, UChar m3
)
5968 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5969 s390_disasm(ENC4(MNM
, VR
, UINT
, UINT
), "vrepi", v1
, i2
, m3
);
5971 return emit_VRI_VIM(p
, 0xE70000000045ULL
, v1
, i2
, m3
);
5976 s390_emit_VUPH(UChar
*p
, UChar v1
, UChar v3
, UChar m3
)
5978 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5979 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vuph", v1
, v3
, m3
);
5981 return emit_VRR_VVM(p
, 0xE700000000D7ULL
, v1
, v3
, m3
);
5986 s390_emit_VUPLH(UChar
*p
, UChar v1
, UChar v3
, UChar m3
)
5988 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5989 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vuplh", v1
, v3
, m3
);
5991 return emit_VRR_VVM(p
, 0xE700000000D5ULL
, v1
, v3
, m3
);
5996 s390_emit_VMRH(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
5998 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
5999 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmrh", v1
, v2
, v3
, m4
);
6001 return emit_VRR_VVVM(p
, 0xE70000000061ULL
, v1
, v2
, v3
, m4
);
6006 s390_emit_VMRL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6008 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6009 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmrl", v1
, v2
, v3
, m4
);
6011 return emit_VRR_VVVM(p
, 0xE70000000060ULL
, v1
, v2
, v3
, m4
);
6015 s390_emit_VA(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6017 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6018 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "va", v1
, v2
, v3
, m4
);
6020 return emit_VRR_VVVM(p
, 0xE700000000f3ULL
, v1
, v2
, v3
, m4
);
6024 s390_emit_VS(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6026 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6027 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vs", v1
, v2
, v3
, m4
);
6029 return emit_VRR_VVVM(p
, 0xE700000000f7ULL
, v1
, v2
, v3
, m4
);
6033 s390_emit_VNO(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6035 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6036 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vno", v1
, v2
, v3
);
6038 return emit_VRR_VVV(p
, 0xE7000000006bULL
, v1
, v2
, v3
);
6042 s390_emit_VCH(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6044 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6045 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vch", v1
, v2
, v3
, m4
);
6047 return emit_VRR_VVVM(p
, 0xE700000000fbULL
, v1
, v2
, v3
, m4
);
6051 s390_emit_VCHL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6053 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6054 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vchl", v1
, v2
, v3
, m4
);
6056 return emit_VRR_VVVM(p
, 0xE700000000f9ULL
, v1
, v2
, v3
, m4
);
6060 s390_emit_VCLZ(UChar
*p
, UChar v1
, UChar v2
, UChar m4
)
6062 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6063 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vclz", v1
, v2
, m4
);
6065 return emit_VRR_VVM(p
, 0xE70000000053ULL
, v1
, v2
, m4
);
6069 s390_emit_VCTZ(UChar
*p
, UChar v1
, UChar v2
, UChar m4
)
6071 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6072 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vctz", v1
, v2
, m4
);
6074 return emit_VRR_VVM(p
, 0xE70000000052ULL
, v1
, v2
, m4
);
6078 s390_emit_VPOPCT(UChar
*p
, UChar v1
, UChar v2
, UChar m4
)
6080 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6081 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vpopct", v1
, v2
, m4
);
6083 return emit_VRR_VVM(p
, 0xE70000000050ULL
, v1
, v2
, m4
);
6087 s390_emit_VMX(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6089 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6090 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmx", v1
, v2
, v3
, m4
);
6092 return emit_VRR_VVVM(p
, 0xE700000000ffULL
, v1
, v2
, v3
, m4
);
6096 s390_emit_VMXL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6098 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6099 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmxl", v1
, v2
, v3
, m4
);
6101 return emit_VRR_VVVM(p
, 0xE700000000fdULL
, v1
, v2
, v3
, m4
);
6105 s390_emit_VMN(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6107 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6108 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmn", v1
, v2
, v3
, m4
);
6110 return emit_VRR_VVVM(p
, 0xE700000000feULL
, v1
, v2
, v3
, m4
);
6114 s390_emit_VMNL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6116 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6117 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmnl", v1
, v2
, v3
, m4
);
6119 return emit_VRR_VVVM(p
, 0xE700000000fcULL
, v1
, v2
, v3
, m4
);
6123 s390_emit_VAVG(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6125 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6126 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vavg", v1
, v2
, v3
, m4
);
6128 return emit_VRR_VVVM(p
, 0xE700000000f2ULL
, v1
, v2
, v3
, m4
);
6132 s390_emit_VAVGL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6134 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6135 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vavgl", v1
, v2
, v3
, m4
);
6137 return emit_VRR_VVVM(p
, 0xE700000000f0ULL
, v1
, v2
, v3
, m4
);
6141 s390_emit_VLP(UChar
*p
, UChar v1
, UChar v2
, UChar m3
)
6143 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6144 s390_disasm(ENC4(MNM
, VR
, VR
, UINT
), "vlp", v1
, v2
, m3
);
6146 return emit_VRR_VVM(p
, 0xE700000000DFULL
, v1
, v2
, m3
);
6150 s390_emit_VMH(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6152 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6153 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmh", v1
, v2
, v3
, m4
);
6155 return emit_VRR_VVVM(p
, 0xE700000000a3ULL
, v1
, v2
, v3
, m4
);
6159 s390_emit_VMLH(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6161 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6162 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmlh", v1
, v2
, v3
, m4
);
6164 return emit_VRR_VVVM(p
, 0xE700000000a1ULL
, v1
, v2
, v3
, m4
);
6168 s390_emit_VML(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6170 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6171 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vml", v1
, v2
, v3
, m4
);
6173 return emit_VRR_VVVM(p
, 0xE700000000a2ULL
, v1
, v2
, v3
, m4
);
6177 s390_emit_VME(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6179 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6180 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vme", v1
, v2
, v3
, m4
);
6182 return emit_VRR_VVVM(p
, 0xE700000000a6ULL
, v1
, v2
, v3
, m4
);
6186 s390_emit_VMLE(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6188 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6189 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vmle", v1
, v2
, v3
, m4
);
6191 return emit_VRR_VVVM(p
, 0xE700000000a4ULL
, v1
, v2
, v3
, m4
);
6195 s390_emit_VESLV(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6197 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6198 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "veslv", v1
, v2
, v3
, m4
);
6200 return emit_VRR_VVVM(p
, 0xE70000000070ULL
, v1
, v2
, v3
, m4
);
6204 s390_emit_VESRAV(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6206 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6207 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vesrav", v1
, v2
, v3
, m4
);
6209 return emit_VRR_VVVM(p
, 0xE7000000007aULL
, v1
, v2
, v3
, m4
);
6213 s390_emit_VESRLV(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6215 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6216 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vesrlv", v1
, v2
, v3
, m4
);
6218 return emit_VRR_VVVM(p
, 0xE70000000078ULL
, v1
, v2
, v3
, m4
);
6222 s390_emit_VESL(UChar
*p
, UChar v1
, UChar b2
, UShort d2
, UChar v3
, UChar m4
)
6224 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6225 s390_disasm(ENC5(MNM
, VR
, UDXB
, VR
, UINT
), "vesl", v1
, d2
, 0, b2
, v3
, m4
);
6227 return emit_VRS(p
, 0xE70000000030ULL
, v1
, b2
, d2
, v3
, m4
);
6231 s390_emit_VESRA(UChar
*p
, UChar v1
, UChar b2
, UShort d2
, UChar v3
, UChar m4
)
6233 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6234 s390_disasm(ENC5(MNM
, VR
, UDXB
, VR
, UINT
), "vesra", v1
, d2
, 0, b2
, v3
, m4
);
6236 return emit_VRS(p
, 0xE7000000003aULL
, v1
, b2
, d2
, v3
, m4
);
6240 s390_emit_VESRL(UChar
*p
, UChar v1
, UChar b2
, UShort d2
, UChar v3
, UChar m4
)
6242 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6243 s390_disasm(ENC5(MNM
, VR
, UDXB
, VR
, UINT
), "vesrl", v1
, d2
, 0, b2
, v3
, m4
);
6245 return emit_VRS(p
, 0xE70000000038ULL
, v1
, b2
, d2
, v3
, m4
);
6249 s390_emit_VERLLV(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6251 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6252 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "verllv", v1
, v2
, v3
, m4
);
6254 return emit_VRR_VVVM(p
, 0xE70000000073ULL
, v1
, v2
, v3
, m4
);
6258 s390_emit_VSL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6260 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6261 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vsl", v1
, v2
, v3
);
6263 return emit_VRR_VVV(p
, 0xE70000000074ULL
, v1
, v2
, v3
);
6267 s390_emit_VSRL(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6269 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6270 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vsrl", v1
, v2
, v3
);
6272 return emit_VRR_VVV(p
, 0xE7000000007cULL
, v1
, v2
, v3
);
6276 s390_emit_VSRA(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6278 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6279 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vsra", v1
, v2
, v3
);
6281 return emit_VRR_VVV(p
, 0xE7000000007eULL
, v1
, v2
, v3
);
6285 s390_emit_VSLB(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6287 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6288 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vslb", v1
, v2
, v3
);
6290 return emit_VRR_VVV(p
, 0xE70000000075ULL
, v1
, v2
, v3
);
6294 s390_emit_VSRLB(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6296 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6297 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vsrlb", v1
, v2
, v3
);
6299 return emit_VRR_VVV(p
, 0xE7000000007dULL
, v1
, v2
, v3
);
6303 s390_emit_VSRAB(UChar
*p
, UChar v1
, UChar v2
, UChar v3
)
6305 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6306 s390_disasm(ENC4(MNM
, VR
, VR
, VR
), "vsrab", v1
, v2
, v3
);
6308 return emit_VRR_VVV(p
, 0xE7000000007fULL
, v1
, v2
, v3
);
6312 s390_emit_VSUM(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6314 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6315 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vsum", v1
, v2
, v3
, m4
);
6317 return emit_VRR_VVVM(p
, 0xE70000000064ULL
, v1
, v2
, v3
, m4
);
6321 s390_emit_VSUMG(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6323 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6324 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vsumg", v1
, v2
, v3
, m4
);
6326 return emit_VRR_VVVM(p
, 0xE70000000065ULL
, v1
, v2
, v3
, m4
);
6330 s390_emit_VSUMQ(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
)
6332 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6333 s390_disasm(ENC5(MNM
, VR
, VR
, VR
, UINT
), "vsumq", v1
, v2
, v3
, m4
);
6335 return emit_VRR_VVVM(p
, 0xE70000000067ULL
, v1
, v2
, v3
, m4
);
6339 s390_emit_VLVGP(UChar
*p
, UChar v1
, UChar r2
, UChar r3
)
6341 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6342 s390_disasm(ENC4(MNM
, VR
, GPR
, GPR
), "vlvgp", v1
, r2
, r3
);
6344 return emit_VRR_VRR(p
, 0xE70000000062ULL
, v1
, r2
, r3
);
6348 s390_emit_VFPSO(UChar
*p
, UChar v1
, UChar v2
, UChar m3
, UChar m4
, UChar m5
)
6350 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6351 s390_disasm(ENC6(MNM
, VR
, VR
, UINT
, UINT
, UINT
), "vfpso", v1
, v2
, m3
, m4
,
6354 return emit_VRR_VVMMM(p
, 0xE700000000CCULL
, v1
, v2
, m3
, m4
, m5
);
6358 s390_emit_VFA(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
)
6360 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6361 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vfa", v1
, v2
, v3
, m4
, m5
);
6363 return emit_VRR_VVVMM(p
, 0xE700000000e3ULL
, v1
, v2
, v3
, m4
, m5
);
6367 s390_emit_VFS(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
)
6369 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6370 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vfs", v1
, v2
, v3
, m4
, m5
);
6372 return emit_VRR_VVVMM(p
, 0xE700000000e2ULL
, v1
, v2
, v3
, m4
, m5
);
6376 s390_emit_VFM(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
)
6378 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6379 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vfm", v1
, v2
, v3
, m4
, m5
);
6381 return emit_VRR_VVVMM(p
, 0xE700000000e7ULL
, v1
, v2
, v3
, m4
, m5
);
6385 s390_emit_VFD(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
)
6387 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6388 s390_disasm(ENC6(MNM
, VR
, VR
, VR
, UINT
, UINT
), "vfd", v1
, v2
, v3
, m4
, m5
);
6390 return emit_VRR_VVVMM(p
, 0xE700000000e5ULL
, v1
, v2
, v3
, m4
, m5
);
6394 s390_emit_VFSQ(UChar
*p
, UChar v1
, UChar v2
, UChar m3
, UChar m4
)
6396 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6397 s390_disasm(ENC5(MNM
, VR
, VR
, UINT
, UINT
), "vfsq", v1
, v2
, m3
, m4
);
6399 return emit_VRR_VVMMM(p
, 0xE700000000CEULL
, v1
, v2
, m3
, m4
, 0);
6403 s390_emit_VFMA(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar v4
, UChar m5
,
6406 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6407 s390_disasm(ENC7(MNM
, VR
, VR
, VR
, VR
, UINT
, UINT
), "vfma",
6408 v1
, v2
, v3
, v4
, m5
, m6
);
6410 return emit_VRRe_VVVVMM(p
, 0xE7000000008fULL
, v1
, v2
, v3
, v4
, m5
, m6
);
6414 s390_emit_VFMS(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar v4
, UChar m5
,
6417 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6418 s390_disasm(ENC7(MNM
, VR
, VR
, VR
, VR
, UINT
, UINT
), "vfms",
6419 v1
, v2
, v3
, v4
, m5
, m6
);
6421 return emit_VRRe_VVVVMM(p
, 0xE7000000008eULL
, v1
, v2
, v3
, v4
, m5
, m6
);
6425 s390_emit_VFCE(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
,
6428 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6429 s390_disasm(ENC7(MNM
, VR
, VR
, VR
, UINT
, UINT
, UINT
), "vfce",
6430 v1
, v2
, v3
, m4
, m5
, m6
);
6432 return emit_VRR_VVVMMM(p
, 0xE700000000e8ULL
, v1
, v2
, v3
, m4
, m5
, m6
);
6436 s390_emit_VFCH(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
,
6439 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6440 s390_disasm(ENC7(MNM
, VR
, VR
, VR
, UINT
, UINT
, UINT
), "vfch",
6441 v1
, v2
, v3
, m4
, m5
, m6
);
6443 return emit_VRR_VVVMMM(p
, 0xE700000000ebULL
, v1
, v2
, v3
, m4
, m5
, m6
);
6447 s390_emit_VFCHE(UChar
*p
, UChar v1
, UChar v2
, UChar v3
, UChar m4
, UChar m5
,
6450 if (UNLIKELY(vex_traceflags
& VEX_TRACE_ASM
))
6451 s390_disasm(ENC7(MNM
, VR
, VR
, VR
, UINT
, UINT
, UINT
), "vfche",
6452 v1
, v2
, v3
, m4
, m5
, m6
);
6454 return emit_VRR_VVVMMM(p
, 0xE700000000eaULL
, v1
, v2
, v3
, m4
, m5
, m6
);
/*---------------------------------------------------------------*/
/*--- Constructors for the various s390_insn kinds            ---*/
/*---------------------------------------------------------------*/

s390_insn *
s390_insn_load(UChar size, HReg dst, s390_amode *src)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_LOAD;
   insn->size = size;
   insn->variant.load.src = src;
   insn->variant.load.dst = dst;

   vassert(size == 1 || size == 2 || size == 4 || size == 8 || size == 16);

   return insn;
}


s390_insn *
s390_insn_store(UChar size, s390_amode *dst, HReg src)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_STORE;
   insn->size = size;
   insn->variant.store.src = src;
   insn->variant.store.dst = dst;

   vassert(size == 1 || size == 2 || size == 4 || size == 8 || size == 16);

   return insn;
}


s390_insn *
s390_insn_move(UChar size, HReg dst, HReg src)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_MOVE;
   insn->size = size;
   insn->variant.move.src = src;
   insn->variant.move.dst = dst;

   vassert(size == 1 || size == 2 || size == 4 || size == 8 || size == 16);

   return insn;
}
6510 s390_insn_memcpy(UChar size
, s390_amode
*dst
, s390_amode
*src
)
6512 s390_insn
*insn
= LibVEX_Alloc_inline(sizeof(s390_insn
));
6514 /* This insn will be mapped to MVC which requires base register
6515 plus 12-bit displacement */
6516 vassert(src
->tag
== S390_AMODE_B12
);
6517 vassert(dst
->tag
== S390_AMODE_B12
);
6519 insn
->tag
= S390_INSN_MEMCPY
;
6521 insn
->variant
.memcpy
.src
= src
;
6522 insn
->variant
.memcpy
.dst
= dst
;
6524 vassert(size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6531 s390_insn_cond_move(UChar size
, s390_cc_t cond
, HReg dst
, s390_opnd_RMI src
)
6533 s390_insn
*insn
= LibVEX_Alloc_inline(sizeof(s390_insn
));
6535 insn
->tag
= S390_INSN_COND_MOVE
;
6537 insn
->variant
.cond_move
.cond
= cond
;
6538 insn
->variant
.cond_move
.src
= src
;
6539 insn
->variant
.cond_move
.dst
= dst
;
6541 vassert(size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6548 s390_insn_load_immediate(UChar size
, HReg dst
, ULong value
)
6550 s390_insn
*insn
= LibVEX_Alloc_inline(sizeof(s390_insn
));
6552 insn
->tag
= S390_INSN_LOAD_IMMEDIATE
;
6554 insn
->variant
.load_immediate
.dst
= dst
;
6555 insn
->variant
.load_immediate
.value
= value
;
6562 s390_insn_alu(UChar size
, s390_alu_t tag
, HReg dst
, s390_opnd_RMI op2
)
6564 s390_insn
*insn
= LibVEX_Alloc_inline(sizeof(s390_insn
));
6566 insn
->tag
= S390_INSN_ALU
;
6568 insn
->variant
.alu
.tag
= tag
;
6569 insn
->variant
.alu
.dst
= dst
;
6570 insn
->variant
.alu
.op2
= op2
;
6577 s390_insn_mul(UChar size
, HReg dst_hi
, HReg dst_lo
, s390_opnd_RMI op2
,
6578 Bool signed_multiply
)
6580 s390_insn
*insn
= LibVEX_Alloc_inline(sizeof(s390_insn
));
6582 vassert(! hregIsVirtual(dst_hi
));
6583 vassert(! hregIsVirtual(dst_lo
));
6585 insn
->tag
= signed_multiply
? S390_INSN_SMUL
: S390_INSN_UMUL
;
6587 insn
->variant
.mul
.dst_hi
= dst_hi
;
6588 insn
->variant
.mul
.dst_lo
= dst_lo
;
6589 insn
->variant
.mul
.op2
= op2
;
s390_insn *
s390_insn_div(UChar size, HReg op1_hi, HReg op1_lo, s390_opnd_RMI op2,
              Bool signed_divide)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);
   vassert(! hregIsVirtual(op1_hi));
   vassert(! hregIsVirtual(op1_lo));

   insn->tag  = signed_divide ? S390_INSN_SDIV : S390_INSN_UDIV;
   insn->size = size;
   insn->variant.div.op1_hi = op1_hi;
   insn->variant.div.op1_lo = op1_lo;
   insn->variant.div.op2    = op2;

   return insn;
}


s390_insn *
s390_insn_divs(UChar size, HReg rem, HReg op1, s390_opnd_RMI op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(! hregIsVirtual(op1));
   vassert(! hregIsVirtual(rem));

   insn->tag  = S390_INSN_DIVS;
   insn->size = size;
   insn->variant.divs.rem = rem;   /* remainder */
   insn->variant.divs.op1 = op1;   /* also quotient */
   insn->variant.divs.op2 = op2;

   return insn;
}


s390_insn *
s390_insn_clz(UChar size, HReg num_bits, HReg clobber, s390_opnd_RMI src)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(! hregIsVirtual(num_bits));
   vassert(! hregIsVirtual(clobber));

   insn->tag  = S390_INSN_CLZ;
   insn->size = size;
   insn->variant.clz.num_bits = num_bits;
   insn->variant.clz.clobber  = clobber;
   insn->variant.clz.src      = src;

   return insn;
}


s390_insn *
s390_insn_unop(UChar size, s390_unop_t tag, HReg dst, s390_opnd_RMI opnd)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_UNOP;
   insn->size = size;
   insn->variant.unop.tag = tag;
   insn->variant.unop.dst = dst;
   insn->variant.unop.src = opnd;

   return insn;
}


s390_insn *
s390_insn_test(UChar size, s390_opnd_RMI src)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 1 || size == 2 || size == 4 || size == 8);

   insn->tag  = S390_INSN_TEST;
   insn->size = size;
   insn->variant.test.src = src;

   return insn;
}


s390_insn *
s390_insn_cc2bool(HReg dst, s390_cc_t cond)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_CC2BOOL;
   insn->size = 0;   /* does not matter */
   insn->variant.cc2bool.cond = cond;
   insn->variant.cc2bool.dst  = dst;

   return insn;
}
s390_insn *
s390_insn_cas(UChar size, HReg op1, s390_amode *op2, HReg op3, HReg old_mem)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);
   vassert(hregNumber(op2->x) == 0);
   vassert(op2->tag == S390_AMODE_B12 || op2->tag == S390_AMODE_B20);

   insn->tag  = S390_INSN_CAS;
   insn->size = size;
   insn->variant.cas.op1 = op1;
   insn->variant.cas.op2 = op2;
   insn->variant.cas.op3 = op3;
   insn->variant.cas.old_mem = old_mem;

   return insn;
}


s390_insn *
s390_insn_cdas(UChar size, HReg op1_high, HReg op1_low, s390_amode *op2,
               HReg op3_high, HReg op3_low, HReg old_mem_high,
               HReg old_mem_low, HReg scratch)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_cdas *cdas = LibVEX_Alloc_inline(sizeof(s390_cdas));

   vassert(size == 4 || size == 8);
   vassert(hregNumber(op2->x) == 0);
   vassert(hregNumber(scratch) == 1);   /* r0,r1 used as scratch reg pair */
   vassert(op2->tag == S390_AMODE_B12 || op2->tag == S390_AMODE_B20);

   insn->tag  = S390_INSN_CDAS;
   insn->size = size;
   insn->variant.cdas.details = cdas;

   cdas->op1_high = op1_high;
   cdas->op1_low  = op1_low;
   cdas->op2      = op2;
   cdas->op3_high = op3_high;
   cdas->op3_low  = op3_low;
   cdas->old_mem_high = old_mem_high;
   cdas->old_mem_low  = old_mem_low;
   cdas->scratch = scratch;

   return insn;
}


s390_insn *
s390_insn_compare(UChar size, HReg src1, s390_opnd_RMI src2,
                  Bool signed_comparison)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_COMPARE;
   insn->size = size;
   insn->variant.compare.src1 = src1;
   insn->variant.compare.src2 = src2;
   insn->variant.compare.signed_comparison = signed_comparison;

   return insn;
}


s390_insn *
s390_insn_helper_call(s390_cc_t cond, Addr64 target, UInt num_args,
                      const HChar *name, RetLoc rloc)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_helper_call *helper_call =
      LibVEX_Alloc_inline(sizeof(s390_helper_call));

   insn->tag  = S390_INSN_HELPER_CALL;
   insn->size = 0;   /* does not matter */
   insn->variant.helper_call.details = helper_call;

   helper_call->cond = cond;
   helper_call->target = target;
   helper_call->num_args = num_args;
   helper_call->name = name;
   helper_call->rloc = rloc;

   vassert(is_sane_RetLoc(rloc));

   return insn;
}
s390_insn *
s390_insn_bfp_triop(UChar size, s390_bfp_triop_t tag, HReg dst, HReg op2,
                    HReg op3)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_BFP_TRIOP;
   insn->size = size;
   insn->variant.bfp_triop.tag = tag;
   insn->variant.bfp_triop.dst = dst;
   insn->variant.bfp_triop.op2 = op2;
   insn->variant.bfp_triop.op3 = op3;

   return insn;
}


s390_insn *
s390_insn_bfp_binop(UChar size, s390_bfp_binop_t tag, HReg dst, HReg op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_BFP_BINOP;
   insn->size = size;
   insn->variant.bfp_binop.tag = tag;
   insn->variant.bfp_binop.dst_hi = dst;
   insn->variant.bfp_binop.op2_hi = op2;
   insn->variant.bfp_binop.dst_lo = INVALID_HREG;
   insn->variant.bfp_binop.op2_lo = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_bfp_unop(UChar size, s390_bfp_unop_t tag, HReg dst, HReg op)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_BFP_UNOP;
   insn->size = size;
   insn->variant.bfp_unop.tag = tag;
   insn->variant.bfp_unop.dst_hi = dst;
   insn->variant.bfp_unop.op_hi  = op;
   insn->variant.bfp_unop.dst_lo = INVALID_HREG;
   insn->variant.bfp_unop.op_lo  = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_bfp_compare(UChar size, HReg dst, HReg op1, HReg op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_BFP_COMPARE;
   insn->size = size;
   insn->variant.bfp_compare.dst = dst;
   insn->variant.bfp_compare.op1_hi = op1;
   insn->variant.bfp_compare.op2_hi = op2;
   insn->variant.bfp_compare.op1_lo = INVALID_HREG;
   insn->variant.bfp_compare.op2_lo = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_bfp_convert(UChar size, s390_bfp_conv_t tag, HReg dst, HReg op,
                      s390_bfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_BFP_CONVERT;
   insn->size = size;
   insn->variant.bfp_convert.tag = tag;
   insn->variant.bfp_convert.dst_hi = dst;
   insn->variant.bfp_convert.op_hi  = op;
   insn->variant.bfp_convert.dst_lo = INVALID_HREG;
   insn->variant.bfp_convert.op_lo  = INVALID_HREG;
   insn->variant.bfp_convert.rounding_mode = rounding_mode;

   return insn;
}


/* Check validity of a register pair for 128-bit FP. Valid register
   pairs are (0,2), (1,3), (4,6), (5,7), (8,10), (9,11), (12,14),
   and (13,15). */
static Bool
is_valid_fp128_regpair(HReg hi, HReg lo)
{
   UInt hi_regno = hregNumber(hi);
   UInt lo_regno = hregNumber(lo);

   if (lo_regno != hi_regno + 2) return False;
   if ((hi_regno & 0x2) != 0) return False;

   return True;
}
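/* Worked examples of the rule above: (hi=0,lo=2), (hi=4,lo=6) and
   (hi=13,lo=15) pass, since lo == hi + 2 and bit 0x2 of hi is clear.
   (hi=2,lo=4) fails the second test, and (hi=0,lo=1) fails the first. */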
s390_insn *
s390_insn_bfp128_binop(UChar size, s390_bfp_binop_t tag, HReg dst_hi,
                       HReg dst_lo, HReg op2_hi, HReg op2_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
   vassert(is_valid_fp128_regpair(op2_hi, op2_lo));

   insn->tag  = S390_INSN_BFP_BINOP;
   insn->size = size;
   insn->variant.bfp_binop.tag = tag;
   insn->variant.bfp_binop.dst_hi = dst_hi;
   insn->variant.bfp_binop.dst_lo = dst_lo;
   insn->variant.bfp_binop.op2_hi = op2_hi;
   insn->variant.bfp_binop.op2_lo = op2_lo;

   return insn;
}


s390_insn *
s390_insn_bfp128_unop(UChar size, s390_bfp_unop_t tag, HReg dst_hi,
                      HReg dst_lo, HReg op_hi, HReg op_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
   vassert(is_valid_fp128_regpair(op_hi, op_lo));

   insn->tag  = S390_INSN_BFP_UNOP;
   insn->size = size;
   insn->variant.bfp_unop.tag = tag;
   insn->variant.bfp_unop.dst_hi = dst_hi;
   insn->variant.bfp_unop.dst_lo = dst_lo;
   insn->variant.bfp_unop.op_hi = op_hi;
   insn->variant.bfp_unop.op_lo = op_lo;

   return insn;
}


s390_insn *
s390_insn_bfp128_compare(UChar size, HReg dst, HReg op1_hi, HReg op1_lo,
                         HReg op2_hi, HReg op2_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(op1_hi, op1_lo));
   vassert(is_valid_fp128_regpair(op2_hi, op2_lo));

   insn->tag  = S390_INSN_BFP_COMPARE;
   insn->size = size;
   insn->variant.bfp_compare.dst = dst;
   insn->variant.bfp_compare.op1_hi = op1_hi;
   insn->variant.bfp_compare.op1_lo = op1_lo;
   insn->variant.bfp_compare.op2_hi = op2_hi;
   insn->variant.bfp_compare.op2_lo = op2_lo;

   return insn;
}


static s390_insn *
s390_insn_bfp128_convert(UChar size, s390_bfp_conv_t tag, HReg dst_hi,
                         HReg dst_lo, HReg op_hi, HReg op_lo,
                         s390_bfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   if (size == 16) {
      /* From smaller or equal size to 16 bytes */
      vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
      vassert(hregIsInvalid(op_lo)
              || is_valid_fp128_regpair(op_hi, op_lo));
   } else {
      /* From 16 bytes to smaller size */
      vassert(is_valid_fp128_regpair(op_hi, op_lo));
   }

   insn->tag  = S390_INSN_BFP_CONVERT;
   insn->size = size;
   insn->variant.bfp_convert.tag = tag;
   insn->variant.bfp_convert.dst_hi = dst_hi;
   insn->variant.bfp_convert.dst_lo = dst_lo;
   insn->variant.bfp_convert.op_hi = op_hi;
   insn->variant.bfp_convert.op_lo = op_lo;
   insn->variant.bfp_convert.rounding_mode = rounding_mode;

   return insn;
}
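/* In other words: when converting towards 128 bits (size == 16) the
   destination is the register pair and the source may be a single
   register, which is why op_lo is allowed to be invalid there; when
   converting away from 128 bits the source must be a valid pair. */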
s390_insn *
s390_insn_bfp128_convert_to(UChar size, s390_bfp_conv_t tag, HReg dst_hi,
                            HReg dst_lo, HReg op)
{
   /* Conversion to bfp128 never requires a rounding mode. Provide default
      rounding mode. It will not be used when emitting insns. */
   s390_bfp_round_t rounding_mode = S390_BFP_ROUND_NEAREST_EVEN;

   return s390_insn_bfp128_convert(size, tag, dst_hi, dst_lo, op,
                                   INVALID_HREG, rounding_mode);
}
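/* The INVALID_HREG passed as op_lo here is exactly what the
   hregIsInvalid(op_lo) case in s390_insn_bfp128_convert permits:
   conversions to bfp128 take a single source register, not a pair. */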
s390_insn *
s390_insn_bfp128_convert_from(UChar size, s390_bfp_conv_t tag, HReg dst_hi,
                              HReg dst_lo, HReg op_hi, HReg op_lo,
                              s390_bfp_round_t rounding_mode)
{
   return s390_insn_bfp128_convert(size, tag, dst_hi, dst_lo, op_hi, op_lo,
                                   rounding_mode);
}


s390_insn *
s390_insn_dfp_binop(UChar size, s390_dfp_binop_t tag, HReg dst, HReg op2,
                    HReg op3, s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_dfp_binop *dfp_binop = LibVEX_Alloc_inline(sizeof(s390_dfp_binop));

   insn->tag  = S390_INSN_DFP_BINOP;
   insn->size = size;
   insn->variant.dfp_binop.details = dfp_binop;

   dfp_binop->tag = tag;
   dfp_binop->dst_hi = dst;
   dfp_binop->op2_hi = op2;
   dfp_binop->op3_hi = op3;
   dfp_binop->dst_lo = INVALID_HREG;
   dfp_binop->op2_lo = INVALID_HREG;
   dfp_binop->op3_lo = INVALID_HREG;
   dfp_binop->rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_dfp_unop(UChar size, s390_dfp_unop_t tag, HReg dst, HReg op)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_DFP_UNOP;
   insn->size = size;
   insn->variant.dfp_unop.tag = tag;
   insn->variant.dfp_unop.dst_hi = dst;
   insn->variant.dfp_unop.op_hi  = op;
   insn->variant.dfp_unop.dst_lo = INVALID_HREG;
   insn->variant.dfp_unop.op_lo  = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_dfp_intop(UChar size, s390_dfp_intop_t tag, HReg dst, HReg op2,
                    HReg op3)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_DFP_INTOP;
   insn->size = size;
   insn->variant.dfp_intop.tag = tag;
   insn->variant.dfp_intop.dst_hi = dst;
   insn->variant.dfp_intop.op2    = op2;
   insn->variant.dfp_intop.op3_hi = op3;
   insn->variant.dfp_intop.dst_lo = INVALID_HREG;
   insn->variant.dfp_intop.op3_lo = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_dfp_compare(UChar size, s390_dfp_cmp_t tag, HReg dst,
                      HReg op1, HReg op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_DFP_COMPARE;
   insn->size = size;
   insn->variant.dfp_compare.tag = tag;
   insn->variant.dfp_compare.dst = dst;
   insn->variant.dfp_compare.op1_hi = op1;
   insn->variant.dfp_compare.op2_hi = op2;
   insn->variant.dfp_compare.op1_lo = INVALID_HREG;
   insn->variant.dfp_compare.op2_lo = INVALID_HREG;

   return insn;
}


s390_insn *
s390_insn_dfp_convert(UChar size, s390_dfp_conv_t tag, HReg dst, HReg op,
                      s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_DFP_CONVERT;
   insn->size = size;
   insn->variant.dfp_convert.tag = tag;
   insn->variant.dfp_convert.dst_hi = dst;
   insn->variant.dfp_convert.op_hi  = op;
   insn->variant.dfp_convert.dst_lo = INVALID_HREG;
   insn->variant.dfp_convert.op_lo  = INVALID_HREG;
   insn->variant.dfp_convert.rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_dfp_reround(UChar size, HReg dst, HReg op2, HReg op3,
                      s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_DFP_REROUND;
   insn->size = size;
   insn->variant.dfp_reround.dst_hi = dst;
   insn->variant.dfp_reround.op2    = op2;
   insn->variant.dfp_reround.op3_hi = op3;
   insn->variant.dfp_reround.dst_lo = INVALID_HREG;
   insn->variant.dfp_reround.op3_lo = INVALID_HREG;
   insn->variant.dfp_reround.rounding_mode = rounding_mode;

   return insn;
}
s390_insn *
s390_insn_fp_convert(UChar size, s390_fp_conv_t tag, HReg dst, HReg op,
                     HReg r1, s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_fp_convert *fp_convert = LibVEX_Alloc_inline(sizeof(s390_fp_convert));

   vassert(size == 4 || size == 8);

   insn->tag  = S390_INSN_FP_CONVERT;
   insn->size = size;
   insn->variant.fp_convert.details = fp_convert;

   fp_convert->tag = tag;
   fp_convert->dst_hi = dst;
   fp_convert->op_hi  = op;
   fp_convert->r1 = r1;
   fp_convert->dst_lo = INVALID_HREG;
   fp_convert->op_lo  = INVALID_HREG;
   fp_convert->rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_fp128_convert(UChar size, s390_fp_conv_t tag, HReg dst_hi,
                        HReg dst_lo, HReg op_hi, HReg op_lo, HReg r1,
                        s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_fp_convert *fp_convert = LibVEX_Alloc_inline(sizeof(s390_fp_convert));

   vassert(size == 16);

   insn->tag  = S390_INSN_FP_CONVERT;
   insn->size = size;
   insn->variant.fp_convert.details = fp_convert;

   fp_convert->tag = tag;
   fp_convert->dst_hi = dst_hi;
   fp_convert->dst_lo = dst_lo;
   fp_convert->op_hi  = op_hi;
   fp_convert->r1 = r1;
   fp_convert->op_lo  = op_lo;
   fp_convert->rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_dfp128_binop(UChar size, s390_dfp_binop_t tag, HReg dst_hi,
                       HReg dst_lo, HReg op2_hi, HReg op2_lo, HReg op3_hi,
                       HReg op3_lo, s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));
   s390_dfp_binop *dfp_binop = LibVEX_Alloc_inline(sizeof(s390_dfp_binop));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
   vassert(is_valid_fp128_regpair(op2_hi, op2_lo));
   vassert(is_valid_fp128_regpair(op3_hi, op3_lo));

   insn->tag  = S390_INSN_DFP_BINOP;
   insn->size = size;
   insn->variant.dfp_binop.details = dfp_binop;

   dfp_binop->tag = tag;
   dfp_binop->dst_hi = dst_hi;
   dfp_binop->dst_lo = dst_lo;
   dfp_binop->op2_hi = op2_hi;
   dfp_binop->op2_lo = op2_lo;
   dfp_binop->op3_hi = op3_hi;
   dfp_binop->op3_lo = op3_lo;
   dfp_binop->rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_dfp128_unop(UChar size, s390_dfp_unop_t tag, HReg dst,
                      HReg op_hi, HReg op_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   /* destination is an 8 byte integer value */
   vassert(is_valid_fp128_regpair(op_hi, op_lo));

   insn->tag  = S390_INSN_DFP_UNOP;
   insn->size = size;
   insn->variant.dfp_unop.tag = tag;
   insn->variant.dfp_unop.dst_hi = dst;
   insn->variant.dfp_unop.dst_lo = INVALID_HREG;
   insn->variant.dfp_unop.op_hi = op_hi;
   insn->variant.dfp_unop.op_lo = op_lo;

   return insn;
}


s390_insn *
s390_insn_dfp128_intop(UChar size, s390_dfp_intop_t tag, HReg dst_hi,
                       HReg dst_lo, HReg op2, HReg op3_hi, HReg op3_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
   vassert(is_valid_fp128_regpair(op3_hi, op3_lo));

   insn->tag  = S390_INSN_DFP_INTOP;
   insn->size = size;
   insn->variant.dfp_intop.tag = tag;
   insn->variant.dfp_intop.dst_hi = dst_hi;
   insn->variant.dfp_intop.dst_lo = dst_lo;
   insn->variant.dfp_intop.op2    = op2;
   insn->variant.dfp_intop.op3_hi = op3_hi;
   insn->variant.dfp_intop.op3_lo = op3_lo;

   return insn;
}
s390_insn *
s390_insn_dfp128_compare(UChar size, s390_dfp_cmp_t tag, HReg dst, HReg op1_hi,
                         HReg op1_lo, HReg op2_hi, HReg op2_lo)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(op1_hi, op1_lo));
   vassert(is_valid_fp128_regpair(op2_hi, op2_lo));

   insn->tag  = S390_INSN_DFP_COMPARE;
   insn->size = size;
   insn->variant.dfp_compare.tag = tag;
   insn->variant.dfp_compare.dst = dst;
   insn->variant.dfp_compare.op1_hi = op1_hi;
   insn->variant.dfp_compare.op1_lo = op1_lo;
   insn->variant.dfp_compare.op2_hi = op2_hi;
   insn->variant.dfp_compare.op2_lo = op2_lo;

   return insn;
}


static s390_insn *
s390_insn_dfp128_convert(UChar size, s390_dfp_conv_t tag, HReg dst_hi,
                         HReg dst_lo, HReg op_hi, HReg op_lo,
                         s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   if (size == 16) {
      /* From smaller size to 16 bytes */
      vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
      vassert(hregIsInvalid(op_lo));
   } else {
      /* From 16 bytes to smaller size */
      vassert(is_valid_fp128_regpair(op_hi, op_lo));
   }

   insn->tag  = S390_INSN_DFP_CONVERT;
   insn->size = size;
   insn->variant.dfp_convert.tag = tag;
   insn->variant.dfp_convert.dst_hi = dst_hi;
   insn->variant.dfp_convert.dst_lo = dst_lo;
   insn->variant.dfp_convert.op_hi = op_hi;
   insn->variant.dfp_convert.op_lo = op_lo;
   insn->variant.dfp_convert.rounding_mode = rounding_mode;

   return insn;
}


s390_insn *
s390_insn_dfp128_convert_to(UChar size, s390_dfp_conv_t tag, HReg dst_hi,
                            HReg dst_lo, HReg op)
{
   /* Conversion to dfp128 never requires a rounding mode. Provide default
      rounding mode. It will not be used when emitting insns. */
   s390_dfp_round_t rounding_mode = S390_DFP_ROUND_NEAREST_EVEN_4;

   return s390_insn_dfp128_convert(size, tag, dst_hi, dst_lo, op,
                                   INVALID_HREG, rounding_mode);
}


s390_insn *
s390_insn_dfp128_convert_from(UChar size, s390_dfp_conv_t tag, HReg dst_hi,
                              HReg dst_lo, HReg op_hi, HReg op_lo,
                              s390_dfp_round_t rounding_mode)
{
   return s390_insn_dfp128_convert(size, tag, dst_hi, dst_lo, op_hi, op_lo,
                                   rounding_mode);
}


s390_insn *
s390_insn_dfp128_reround(UChar size, HReg dst_hi, HReg dst_lo, HReg op2,
                         HReg op3_hi, HReg op3_lo,
                         s390_dfp_round_t rounding_mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 16);
   vassert(is_valid_fp128_regpair(dst_hi, dst_lo));
   vassert(is_valid_fp128_regpair(op3_hi, op3_lo));

   insn->tag  = S390_INSN_DFP_REROUND;
   insn->size = size;
   insn->variant.dfp_reround.dst_hi = dst_hi;
   insn->variant.dfp_reround.dst_lo = dst_lo;
   insn->variant.dfp_reround.op2    = op2;
   insn->variant.dfp_reround.op3_hi = op3_hi;
   insn->variant.dfp_reround.op3_lo = op3_lo;
   insn->variant.dfp_reround.rounding_mode = rounding_mode;

   return insn;
}
s390_insn *
s390_insn_mfence(void)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_MFENCE;
   insn->size = 0;   /* not needed */

   return insn;
}


s390_insn *
s390_insn_mimm(UChar size, s390_amode *dst, ULong value)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   /* This insn will be mapped to insns that require base register
      plus 12-bit displacement */
   vassert(dst->tag == S390_AMODE_B12);

   insn->tag  = S390_INSN_MIMM;
   insn->size = size;
   insn->variant.mimm.dst = dst;
   insn->variant.mimm.value = value;

   return insn;
}


s390_insn *
s390_insn_madd(UChar size, s390_amode *dst, UChar delta, ULong value)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 4 || size == 8);

   /* This insn will be mapped to an ASI or AGSI so we can only allow base
      register plus 12-bit / 20-bit displacement. */
   vassert(dst->tag == S390_AMODE_B12 || dst->tag == S390_AMODE_B20);
   /* ASI and AGSI require the GIE facility */
   vassert(s390_host_has_gie);

   insn->tag  = S390_INSN_MADD;
   insn->size = size;
   insn->variant.madd.dst   = dst;
   insn->variant.madd.delta = delta;
   insn->variant.madd.value = value;

   return insn;
}
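/* DELTA is an 8-bit quantity that ASI/AGSI treat as a signed immediate;
   the debug printer below accordingly shows it as (Long)(Char)delta.
   VALUE appears only in the "v-madd" trace output, printed as "(= value)". */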
s390_insn *
s390_insn_set_fpc_bfprm(UChar size, HReg mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_SET_FPC_BFPRM;
   insn->size = size;
   insn->variant.set_fpc_bfprm.mode = mode;

   return insn;
}


s390_insn *
s390_insn_set_fpc_dfprm(UChar size, HReg mode)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_SET_FPC_DFPRM;
   insn->size = size;
   insn->variant.set_fpc_dfprm.mode = mode;

   return insn;
}


s390_insn *
s390_insn_xdirect(s390_cc_t cond, Addr64 dst, s390_amode *guest_IA,
                  Bool to_fast_entry)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(guest_IA->tag == S390_AMODE_B12);

   insn->tag  = S390_INSN_XDIRECT;
   insn->size = 0;   /* does not matter */

   insn->variant.xdirect.cond = cond;
   insn->variant.xdirect.dst = dst;
   insn->variant.xdirect.guest_IA = guest_IA;
   insn->variant.xdirect.to_fast_entry = to_fast_entry;

   return insn;
}


s390_insn *
s390_insn_xindir(s390_cc_t cond, HReg dst, s390_amode *guest_IA)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(guest_IA->tag == S390_AMODE_B12);

   insn->tag  = S390_INSN_XINDIR;
   insn->size = 0;   /* does not matter */

   insn->variant.xindir.cond = cond;
   insn->variant.xindir.dst = dst;
   insn->variant.xindir.guest_IA = guest_IA;

   return insn;
}


s390_insn *
s390_insn_xassisted(s390_cc_t cond, HReg dst, s390_amode *guest_IA,
                    IRJumpKind kind)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(guest_IA->tag == S390_AMODE_B12);

   insn->tag  = S390_INSN_XASSISTED;
   insn->size = 0;   /* does not matter */

   insn->variant.xassisted.cond = cond;
   insn->variant.xassisted.dst = dst;
   insn->variant.xassisted.guest_IA = guest_IA;
   insn->variant.xassisted.kind = kind;

   return insn;
}


s390_insn *
s390_insn_evcheck(s390_amode *counter, s390_amode *fail_addr)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(counter->tag == S390_AMODE_B12);
   vassert(fail_addr->tag == S390_AMODE_B12);

   insn->tag  = S390_INSN_EVCHECK;
   insn->size = 0;   /* does not matter */

   insn->variant.evcheck.counter = counter;
   insn->variant.evcheck.fail_addr = fail_addr;

   return insn;
}


s390_insn *
s390_insn_profinc(void)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_PROFINC;
   insn->size = 0;   /* does not matter */

   return insn;
}
s390_insn *
s390_insn_vec_amodeop(UChar size, s390_vec_amodeop_t tag, HReg dst, HReg op1,
                      s390_amode *op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 1 || size == 2 || size == 4 || size == 8);

   insn->tag  = S390_INSN_VEC_AMODEOP;
   insn->size = size;
   insn->variant.vec_amodeop.tag = tag;
   insn->variant.vec_amodeop.dst = dst;
   insn->variant.vec_amodeop.op1 = op1;
   insn->variant.vec_amodeop.op2 = op2;

   return insn;
}


s390_insn *s390_insn_vec_amodeintop(UChar size, s390_vec_amodeintop_t tag,
                                    HReg dst, s390_amode *op2, HReg op3)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 1 || size == 2 || size == 4 || size == 8);

   insn->tag  = S390_INSN_VEC_AMODEINTOP;
   insn->size = size;
   insn->variant.vec_amodeintop.tag = tag;
   insn->variant.vec_amodeintop.dst = dst;
   insn->variant.vec_amodeintop.op2 = op2;
   insn->variant.vec_amodeintop.op3 = op3;

   return insn;
}


s390_insn *s390_insn_vec_binop(UChar size, s390_vec_binop_t tag, HReg dst,
                               HReg op1, HReg op2)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   vassert(size == 1 || size == 2 || size == 4 || size == 8 || size == 16);

   insn->tag  = S390_INSN_VEC_BINOP;
   insn->size = size;
   insn->variant.vec_binop.tag = tag;
   insn->variant.vec_binop.dst = dst;
   insn->variant.vec_binop.op1 = op1;
   insn->variant.vec_binop.op2 = op2;

   return insn;
}


s390_insn *s390_insn_vec_triop(UChar size, s390_vec_triop_t tag, HReg dst,
                               HReg op1, HReg op2, HReg op3)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_VEC_TRIOP;
   insn->size = size;
   insn->variant.vec_triop.tag = tag;
   insn->variant.vec_triop.dst = dst;
   insn->variant.vec_triop.op1 = op1;
   insn->variant.vec_triop.op2 = op2;
   insn->variant.vec_triop.op3 = op3;

   return insn;
}


s390_insn *s390_insn_vec_replicate(UChar size, HReg dst, HReg op1,
                                   UChar idx)
{
   s390_insn *insn = LibVEX_Alloc_inline(sizeof(s390_insn));

   insn->tag  = S390_INSN_VEC_REPLICATE;
   insn->size = size;
   insn->variant.vec_replicate.dst = dst;
   insn->variant.vec_replicate.op1 = op1;
   insn->variant.vec_replicate.idx = idx;

   return insn;
}
/*---------------------------------------------------------------*/
/*--- Debug print                                             ---*/
/*---------------------------------------------------------------*/

static const HChar *
s390_cc_as_string(s390_cc_t cc)
{
   switch (cc) {
   case S390_CC_NEVER:  return "never";
   case S390_CC_OVFL:   return "overflow";
   case S390_CC_H:      return "greater than";      /* A > B ; high */
   case S390_CC_NLE:    return "not low or equal";
   case S390_CC_L:      return "less than";         /* A < B ; low */
   case S390_CC_NHE:    return "not high or equal";
   case S390_CC_LH:     return "low or high";
   case S390_CC_NE:     return "not equal";         /* A != B ; not zero */
   case S390_CC_E:      return "equal";             /* A == B ; zero */
   case S390_CC_NLH:    return "not low or high";
   case S390_CC_HE:     return "greater or equal";  /* A >= B ; high or equal*/
   case S390_CC_NL:     return "not low";           /* not low */
   case S390_CC_LE:     return "less or equal";     /* A <= B ; low or equal */
   case S390_CC_NH:     return "not high";
   case S390_CC_NO:     return "not overflow";
   case S390_CC_ALWAYS: return "always";
   }
   vpanic("s390_cc_as_string");
}


static const HChar *
s390_jump_kind_as_string(IRJumpKind kind)
{
   switch (kind) {
   case Ijk_Boring:      return "Boring";
   case Ijk_Call:        return "Call";
   case Ijk_Ret:         return "Return";
   case Ijk_ClientReq:   return "ClientReq";
   case Ijk_Yield:       return "Yield";
   case Ijk_EmWarn:      return "EmWarn";
   case Ijk_EmFail:      return "EmFail";
   case Ijk_NoDecode:    return "NoDecode";
   case Ijk_MapFail:     return "MapFail";
   case Ijk_InvalICache: return "Invalidate";
   case Ijk_NoRedir:     return "NoRedir";
   case Ijk_SigTRAP:     return "SigTRAP";
   case Ijk_SigFPE:      return "SigFPE";
   case Ijk_SigSEGV:     return "SigSEGV";
   case Ijk_SigBUS:      return "SigBUS";
   case Ijk_Sys_syscall: return "Sys_syscall";
   case Ijk_Extension:   return "Extension";
   }
   vpanic("s390_jump_kind_as_string");
}
/* Helper function for writing out a V insn */
static void
s390_sprintf(HChar *buf, const HChar *fmt, ...)
{
   HChar *p;
   ULong value;
   va_list args;

   va_start(args, fmt);

   p = buf;
   for ( ; *fmt; ++fmt) {
      HChar c = *fmt;

      if (c != '%') {
         *p++ = c;
         continue;
      }

      c = *++fmt;   /* next char */

      switch (c) {
      case 's':     /* %s = string */
         p += vex_sprintf(p, "%s", va_arg(args, HChar *));
         break;

      case 'M':     /* %M = mnemonic */
         p += vex_sprintf(p, "%-8s", va_arg(args, HChar *));
         break;

      case 'R':     /* %R = register */
         p += vex_sprintf(p, "%s", s390_hreg_as_string(va_arg(args, HReg)));
         break;

      case 'A':     /* %A = amode */
         p += vex_sprintf(p, "%s",
                          s390_amode_as_string(va_arg(args, s390_amode *)));
         break;

      case 'G':     /* %G = guest state @ offset */
         p += vex_sprintf(p, "guest[%u]", va_arg(args, UInt));
         break;

      case 'C':     /* %C = condition code */
         p += vex_sprintf(p, "%s", s390_cc_as_string(va_arg(args, s390_cc_t)));
         break;

      case 'J':     /* %J = jump kind */
         p += vex_sprintf(p, "%s",
                          s390_jump_kind_as_string(va_arg(args, IRJumpKind)));
         break;

      case 'L': {   /* %L = argument list in helper call */
         UInt i, num_args;

         num_args = va_arg(args, UInt);

         for (i = 0; i < num_args; ++i) {
            if (i != 0) p += vex_sprintf(p, ", ");
            p += vex_sprintf(p, "r%u", s390_gprno_from_arg_index(i));
         }
         break;
      }

      case 'O': {   /* %O = RMI operand */
         s390_opnd_RMI *op = va_arg(args, s390_opnd_RMI *);

         switch (op->tag) {
         case S390_OPND_REG:
            p += vex_sprintf(p, "%s", s390_hreg_as_string(op->variant.reg));
            break;

         case S390_OPND_AMODE:
            p += vex_sprintf(p, "%s", s390_amode_as_string(op->variant.am));
            break;

         case S390_OPND_IMMEDIATE:
            value = op->variant.imm;
            goto print_value;

         default:
            goto fail;
         }
         break;
      }

      case 'I':     /* %I = immediate value */
         value = va_arg(args, ULong);
         /* fall through */

      print_value:
         if ((Long)value < 0)
            p += vex_sprintf(p, "%lld", (Long)value);
         else if (value < 100)
            p += vex_sprintf(p, "%llu", value);
         else
            p += vex_sprintf(p, "0x%llx", value);
         break;

      default:
         goto fail;
      }
   }

   *p = '\0';
   va_end(args);

   return;

 fail: vpanic("s390_printf");
}
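/* Illustrative example (not from the original source): a call such as

      s390_sprintf(buf, "%M %R,%I", "v-loadi", dst, (ULong)42);

   where dst denotes %r3 would produce something like "v-loadi  %r3,42":
   the mnemonic is left-padded to 8 characters by the %M conversion and a
   small non-negative immediate is printed in decimal by %I. */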
/* Decompile the given insn into a static buffer and return it */
const HChar *
s390_insn_as_string(const s390_insn *insn)
{
   static HChar buf[300];   // large enough
   const HChar *op;
   HChar *p;

   switch (insn->tag) {
   case S390_INSN_LOAD:
      s390_sprintf(buf, "%M %R,%A", "v-load", insn->variant.load.dst,
                   insn->variant.load.src);
      break;

   case S390_INSN_STORE:
      s390_sprintf(buf, "%M %R,%A", "v-store", insn->variant.store.src,
                   insn->variant.store.dst);
      break;

   case S390_INSN_MOVE:
      s390_sprintf(buf, "%M %R,%R", "v-move", insn->variant.move.dst,
                   insn->variant.move.src);
      break;

   case S390_INSN_MEMCPY:
      s390_sprintf(buf, "%M %A,%A", "v-memcpy", insn->variant.memcpy.dst,
                   insn->variant.memcpy.src);
      break;

   case S390_INSN_COND_MOVE:
      s390_sprintf(buf, "%M if (%C) %R,%O", "v-move",
                   insn->variant.cond_move.cond, insn->variant.cond_move.dst,
                   &insn->variant.cond_move.src);
      break;

   case S390_INSN_LOAD_IMMEDIATE:
      s390_sprintf(buf, "%M %R,%I", "v-loadi",
                   insn->variant.load_immediate.dst,
                   insn->variant.load_immediate.value);
      break;

   case S390_INSN_ALU:
      switch (insn->variant.alu.tag) {
      case S390_ALU_ADD:  op = "v-add";  break;
      case S390_ALU_SUB:  op = "v-sub";  break;
      case S390_ALU_MUL:  op = "v-mul";  break;
      case S390_ALU_AND:  op = "v-and";  break;
      case S390_ALU_OR:   op = "v-or";   break;
      case S390_ALU_XOR:  op = "v-xor";  break;
      case S390_ALU_LSH:  op = "v-lsh";  break;
      case S390_ALU_RSH:  op = "v-rsh";  break;
      case S390_ALU_RSHA: op = "v-rsha"; break;
      case S390_ALU_ILIH: op = "v-ilih"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%O", op, insn->variant.alu.dst, /* also op1 */
                   &insn->variant.alu.op2);
      break;

   case S390_INSN_SMUL:
   case S390_INSN_UMUL:
      if (insn->tag == S390_INSN_SMUL) {
         op = "v-muls";
      } else {
         op = "v-mulu";
      }
      s390_sprintf(buf, "%M %R,%O", op, insn->variant.mul.dst_hi,
                   &insn->variant.mul.op2);
      break;

   case S390_INSN_SDIV:
   case S390_INSN_UDIV:
      if (insn->tag == S390_INSN_SDIV) {
         op = "v-divs";
      } else {
         op = "v-divu";
      }
      s390_sprintf(buf, "%M %R,%O", op, insn->variant.div.op1_hi,
                   &insn->variant.div.op2);
      break;

   case S390_INSN_DIVS:
      s390_sprintf(buf, "%M %R,%O", "v-divsi", insn->variant.divs.op1,
                   &insn->variant.divs.op2);
      break;

   case S390_INSN_CLZ:
      s390_sprintf(buf, "%M %R,%O", "v-clz", insn->variant.clz.num_bits,
                   &insn->variant.clz.src);
      break;

   case S390_INSN_UNOP:
      switch (insn->variant.unop.tag) {
      case S390_ZERO_EXTEND_8:
      case S390_ZERO_EXTEND_16:
      case S390_ZERO_EXTEND_32:
         op = "v-zerox";
         break;

      case S390_SIGN_EXTEND_8:
      case S390_SIGN_EXTEND_16:
      case S390_SIGN_EXTEND_32:
         op = "v-signx";
         break;

      case S390_VEC_DUPLICATE:
         op = "v-vdup";
         break;

      case S390_VEC_UNPACKLOWS:
         op = "v-vunpacks";
         break;

      case S390_VEC_UNPACKLOWU:
         op = "v-vunpacku";
         break;

      case S390_VEC_COUNT_LEADING_ZEROES:
         op = "v-vclz";
         break;

      case S390_VEC_COUNT_TRAILING_ZEROES:
         op = "v-vctz";
         break;

      case S390_VEC_COUNT_ONES:
         op = "v-vpopct";
         break;

      case S390_VEC_FLOAT_NEG:
         op = "v-vfloatneg";
         break;

      case S390_VEC_FLOAT_ABS:
         op = "v-vfloatabs";
         break;

      case S390_VEC_FLOAT_NABS:
         op = "v-vfloatnabs";
         break;

      case S390_VEC_FLOAT_SQRT:
         op = "v-vfloatsqrt";
         break;

      default:
         goto fail;
      }
      s390_sprintf(buf, "%M %R,%O", op, insn->variant.unop.dst,
                   &insn->variant.unop.src);
      break;

   case S390_INSN_TEST:
      s390_sprintf(buf, "%M %O", "v-test", &insn->variant.test.src);
      break;

   case S390_INSN_CC2BOOL:
      s390_sprintf(buf, "%M %R,%C", "v-cc2b", insn->variant.cc2bool.dst,
                   insn->variant.cc2bool.cond);
      break;

   case S390_INSN_CAS:
      s390_sprintf(buf, "%M %R,%A,%R,%R", "v-cas", insn->variant.cas.op1,
                   insn->variant.cas.op2, insn->variant.cas.op3,
                   insn->variant.cas.old_mem);
      break;

   case S390_INSN_CDAS: {
      s390_cdas *cdas = insn->variant.cdas.details;

      s390_sprintf(buf, "%M %R,%R,%A,%R,%R,%R,%R", "v-cdas",
                   cdas->op1_high, cdas->op1_low, cdas->op2, cdas->op3_high,
                   cdas->op3_low, cdas->old_mem_high, cdas->old_mem_low);
      break;
   }

   case S390_INSN_COMPARE:
      if (insn->variant.compare.signed_comparison) {
         op = "v-cmps";
      } else {
         op = "v-cmpu";
      }
      s390_sprintf(buf, "%M %R,%O", op, insn->variant.compare.src1,
                   &insn->variant.compare.src2);
      break;

   case S390_INSN_HELPER_CALL: {
      s390_helper_call *helper_call = insn->variant.helper_call.details;
      s390_sprintf(buf, "%M if (%C) %s{%I}(%L)", "v-call",
                   helper_call->cond,
                   helper_call->name,
                   helper_call->target,
                   helper_call->num_args);
      return buf;   /* avoid printing "size = ..." which is meaningless */
   }
   case S390_INSN_BFP_TRIOP:
      switch (insn->variant.bfp_triop.tag) {
      case S390_BFP_MADD: op = "v-fmadd"; break;
      case S390_BFP_MSUB: op = "v-fmsub"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R,%R", op,
                   insn->variant.bfp_triop.dst /* op1 same as dst */,
                   insn->variant.bfp_triop.op2, insn->variant.bfp_triop.op3);
      break;

   case S390_INSN_BFP_BINOP:
      switch (insn->variant.bfp_binop.tag) {
      case S390_BFP_ADD: op = "v-fadd"; break;
      case S390_BFP_SUB: op = "v-fsub"; break;
      case S390_BFP_MUL: op = "v-fmul"; break;
      case S390_BFP_DIV: op = "v-fdiv"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op,
                   insn->variant.bfp_binop.dst_hi /* op1 same as dst */,
                   insn->variant.bfp_binop.op2_hi);
      break;

   case S390_INSN_BFP_COMPARE:
      s390_sprintf(buf, "%M %R,%R,%R", "v-fcmp", insn->variant.bfp_compare.dst,
                   insn->variant.bfp_compare.op1_hi,
                   insn->variant.bfp_compare.op2_hi);
      break;

   case S390_INSN_BFP_UNOP:
      switch (insn->variant.bfp_unop.tag) {
      case S390_BFP_ABS:  op = "v-fabs";  break;
      case S390_BFP_NABS: op = "v-fnabs"; break;
      case S390_BFP_NEG:  op = "v-fneg";  break;
      case S390_BFP_SQRT: op = "v-fsqrt"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op, insn->variant.bfp_unop.dst_hi,
                   insn->variant.bfp_unop.op_hi);
      break;

   case S390_INSN_BFP_CONVERT:
      switch (insn->variant.bfp_convert.tag) {
      case S390_BFP_I32_TO_F32:
      case S390_BFP_I32_TO_F64:
      case S390_BFP_I32_TO_F128:
      case S390_BFP_I64_TO_F32:
      case S390_BFP_I64_TO_F64:
      case S390_BFP_I64_TO_F128: op = "v-i2f"; break;
      case S390_BFP_U32_TO_F32:
      case S390_BFP_U32_TO_F64:
      case S390_BFP_U32_TO_F128:
      case S390_BFP_U64_TO_F32:
      case S390_BFP_U64_TO_F64:
      case S390_BFP_U64_TO_F128: op = "v-u2f"; break;
      case S390_BFP_F32_TO_I32:
      case S390_BFP_F32_TO_I64:
      case S390_BFP_F64_TO_I32:
      case S390_BFP_F64_TO_I64:
      case S390_BFP_F128_TO_I32:
      case S390_BFP_F128_TO_I64: op = "v-f2i"; break;
      case S390_BFP_F32_TO_U32:
      case S390_BFP_F32_TO_U64:
      case S390_BFP_F64_TO_U32:
      case S390_BFP_F64_TO_U64:
      case S390_BFP_F128_TO_U32:
      case S390_BFP_F128_TO_U64: op = "v-f2u"; break;
      case S390_BFP_F32_TO_F64:
      case S390_BFP_F32_TO_F128:
      case S390_BFP_F64_TO_F32:
      case S390_BFP_F64_TO_F128:
      case S390_BFP_F128_TO_F32:
      case S390_BFP_F128_TO_F64: op = "v-f2f"; break;
      case S390_BFP_F32_TO_F32I:
      case S390_BFP_F64_TO_F64I:
      case S390_BFP_F128_TO_F128I: op = "v-f2fi"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op, insn->variant.bfp_convert.dst_hi,
                   insn->variant.bfp_convert.op_hi);
      break;

   case S390_INSN_DFP_BINOP: {
      s390_dfp_binop *dfp_binop = insn->variant.dfp_binop.details;

      switch (dfp_binop->tag) {
      case S390_DFP_ADD:      op = "v-dadd"; break;
      case S390_DFP_SUB:      op = "v-dsub"; break;
      case S390_DFP_MUL:      op = "v-dmul"; break;
      case S390_DFP_DIV:      op = "v-ddiv"; break;
      case S390_DFP_QUANTIZE: op = "v-dqua"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R,%R", op, dfp_binop->dst_hi,
                   dfp_binop->op2_hi, dfp_binop->op3_hi);
      break;
   }

   case S390_INSN_DFP_UNOP:
      switch (insn->variant.dfp_unop.tag) {
      case S390_DFP_EXTRACT_EXP_D64:
      case S390_DFP_EXTRACT_EXP_D128: op = "v-d2exp"; break;
      case S390_DFP_EXTRACT_SIG_D64:
      case S390_DFP_EXTRACT_SIG_D128: op = "v-d2sig"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op, insn->variant.dfp_unop.dst_hi,
                   insn->variant.dfp_unop.op_hi);
      break;

   case S390_INSN_DFP_INTOP:
      switch (insn->variant.dfp_intop.tag) {
      case S390_DFP_SHIFT_LEFT:  op = "v-dshl";  break;
      case S390_DFP_SHIFT_RIGHT: op = "v-dshr";  break;
      case S390_DFP_INSERT_EXP:  op = "v-diexp"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R,%R", op, insn->variant.dfp_intop.dst_hi,
                   insn->variant.dfp_intop.op2,
                   insn->variant.dfp_intop.op3_hi);
      break;

   case S390_INSN_DFP_COMPARE:
      switch (insn->variant.dfp_compare.tag) {
      case S390_DFP_COMPARE:     op = "v-dcmp";    break;
      case S390_DFP_COMPARE_EXP: op = "v-dcmpexp"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R,%R", op, insn->variant.dfp_compare.dst,
                   insn->variant.dfp_compare.op1_hi,
                   insn->variant.dfp_compare.op2_hi);
      break;

   case S390_INSN_DFP_CONVERT:
      switch (insn->variant.dfp_convert.tag) {
      case S390_DFP_D32_TO_D64:
      case S390_DFP_D64_TO_D32:
      case S390_DFP_D64_TO_D128:
      case S390_DFP_D128_TO_D64: op = "v-d2d"; break;
      case S390_DFP_I32_TO_D64:
      case S390_DFP_I32_TO_D128:
      case S390_DFP_I64_TO_D64:
      case S390_DFP_I64_TO_D128: op = "v-i2d"; break;
      case S390_DFP_U32_TO_D64:
      case S390_DFP_U32_TO_D128:
      case S390_DFP_U64_TO_D64:
      case S390_DFP_U64_TO_D128: op = "v-u2d"; break;
      case S390_DFP_D64_TO_I32:
      case S390_DFP_D128_TO_I32:
      case S390_DFP_D64_TO_I64:
      case S390_DFP_D128_TO_I64: op = "v-d2i"; break;
      case S390_DFP_D64_TO_U32:
      case S390_DFP_D64_TO_U64:
      case S390_DFP_D128_TO_U32:
      case S390_DFP_D128_TO_U64: op = "v-d2u"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op, insn->variant.dfp_convert.dst_hi,
                   insn->variant.dfp_convert.op_hi);
      break;

   case S390_INSN_DFP_REROUND:
      s390_sprintf(buf, "%M %R,%R,%R", "v-drrnd",
                   insn->variant.dfp_reround.dst_hi,
                   insn->variant.dfp_reround.op2,
                   insn->variant.dfp_reround.op3_hi);
      break;

   case S390_INSN_FP_CONVERT: {
      s390_fp_convert *fp_convert = insn->variant.fp_convert.details;

      switch (fp_convert->tag) {
      case S390_FP_F32_TO_D32:
      case S390_FP_F32_TO_D64:
      case S390_FP_F32_TO_D128:
      case S390_FP_F64_TO_D32:
      case S390_FP_F64_TO_D64:
      case S390_FP_F64_TO_D128:
      case S390_FP_F128_TO_D32:
      case S390_FP_F128_TO_D64:
      case S390_FP_F128_TO_D128: op = "v-f2d"; break;
      case S390_FP_D32_TO_F32:
      case S390_FP_D32_TO_F64:
      case S390_FP_D32_TO_F128:
      case S390_FP_D64_TO_F32:
      case S390_FP_D64_TO_F64:
      case S390_FP_D64_TO_F128:
      case S390_FP_D128_TO_F32:
      case S390_FP_D128_TO_F64:
      case S390_FP_D128_TO_F128: op = "v-d2f"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R,%R", op, fp_convert->dst_hi,
                   fp_convert->op_hi);
      break;
   }

   case S390_INSN_MFENCE:
      s390_sprintf(buf, "%M", "v-mfence");
      return buf;   /* avoid printing "size = ..." which is meaningless */

   case S390_INSN_MIMM:
      s390_sprintf(buf, "%M %A,%I", "v-mimm", insn->variant.mimm.dst,
                   insn->variant.mimm.value);
      break;

   case S390_INSN_MADD:
      s390_sprintf(buf, "%M %A += %I (= %I)", "v-madd",
                   insn->variant.madd.dst,
                   (Long)(Char)insn->variant.madd.delta,
                   insn->variant.madd.value);
      break;

   case S390_INSN_SET_FPC_BFPRM:
      s390_sprintf(buf, "%M %R", "v-set-fpc-bfprm",
                   insn->variant.set_fpc_bfprm.mode);
      break;

   case S390_INSN_SET_FPC_DFPRM:
      s390_sprintf(buf, "%M %R", "v-set-fpc-dfprm",
                   insn->variant.set_fpc_dfprm.mode);
      break;

   case S390_INSN_EVCHECK:
      s390_sprintf(buf, "%M counter = %A, fail-addr = %A", "v-evcheck",
                   insn->variant.evcheck.counter,
                   insn->variant.evcheck.fail_addr);
      return buf;   /* avoid printing "size = ..." which is meaningless */

   case S390_INSN_PROFINC:
      s390_sprintf(buf, "%M", "v-profinc");
      return buf;   /* avoid printing "size = ..." which is meaningless */

   case S390_INSN_XDIRECT:
      s390_sprintf(buf, "%M if (%C) %A = %I %s", "v-xdirect",
                   insn->variant.xdirect.cond,
                   insn->variant.xdirect.guest_IA,
                   insn->variant.xdirect.dst,
                   insn->variant.xdirect.to_fast_entry ? "fast" : "slow");
      return buf;   /* avoid printing "size = ..." which is meaningless */

   case S390_INSN_XINDIR:
      s390_sprintf(buf, "%M if (%C) %A = %R", "v-xindir",
                   insn->variant.xindir.cond,
                   insn->variant.xindir.guest_IA,
                   insn->variant.xindir.dst);
      return buf;   /* avoid printing "size = ..." which is meaningless */

   case S390_INSN_XASSISTED:
      s390_sprintf(buf, "%M if (%C) %J %A = %R", "v-xassisted",
                   insn->variant.xassisted.cond,
                   insn->variant.xassisted.kind,
                   insn->variant.xassisted.guest_IA,
                   insn->variant.xassisted.dst);
      return buf;   /* avoid printing "size = ..." which is meaningless */
   case S390_INSN_VEC_AMODEOP:
      switch (insn->variant.vec_amodeop.tag) {
      case S390_VEC_GET_ELEM:      op = "v-vgetelem"; break;
      case S390_VEC_ELEM_SHL_INT:  op = "v-veshl";    break;
      case S390_VEC_ELEM_SHRA_INT: op = "v-veshra";   break;
      case S390_VEC_ELEM_SHRL_INT: op = "v-veshrl";   break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R, %R, %A", op, insn->variant.vec_amodeop.dst,
                   insn->variant.vec_amodeop.op1,
                   insn->variant.vec_amodeop.op2);
      break;

   case S390_INSN_VEC_AMODEINTOP:
      switch (insn->variant.vec_amodeintop.tag) {
      case S390_VEC_SET_ELEM: op = "v-vsetelem"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R, %A, %R", op, insn->variant.vec_amodeintop.dst,
                   insn->variant.vec_amodeintop.op2,
                   insn->variant.vec_amodeintop.op3);
      break;

   case S390_INSN_VEC_BINOP:
      switch (insn->variant.vec_binop.tag) {
      case S390_VEC_PACK:             op = "v-vpack";         break;
      case S390_VEC_PACK_SATURS:      op = "v-vpacksaturs";   break;
      case S390_VEC_PACK_SATURU:      op = "v-vpacksaturu";   break;
      case S390_VEC_COMPARE_EQUAL:    op = "v-vcmpeq";        break;
      case S390_VEC_OR:               op = "v-vor";           break;
      case S390_VEC_ORC:              op = "v-vorc";          break;
      case S390_VEC_XOR:              op = "v-vxor";          break;
      case S390_VEC_AND:              op = "v-vand";          break;
      case S390_VEC_MERGEL:           op = "v-vmergel";       break;
      case S390_VEC_MERGEH:           op = "v-vmergeh";       break;
      case S390_VEC_NOR:              op = "v-vnor";          break;
      case S390_VEC_INT_ADD:          op = "v-vintadd";       break;
      case S390_VEC_INT_SUB:          op = "v-vintsub";       break;
      case S390_VEC_MAXU:             op = "v-vmaxu";         break;
      case S390_VEC_MAXS:             op = "v-vmaxs";         break;
      case S390_VEC_MINU:             op = "v-vminu";         break;
      case S390_VEC_MINS:             op = "v-vmins";         break;
      case S390_VEC_AVGU:             op = "v-vavgu";         break;
      case S390_VEC_AVGS:             op = "v-vavgs";         break;
      case S390_VEC_COMPARE_GREATERS: op = "v-vcmpgts";       break;
      case S390_VEC_COMPARE_GREATERU: op = "v-vcmpgtu";       break;
      case S390_VEC_INT_MUL_HIGHS:    op = "v-vintmulhis";    break;
      case S390_VEC_INT_MUL_HIGHU:    op = "v-vintmulhiu";    break;
      case S390_VEC_INT_MUL_LOW:      op = "v-vintmullo";     break;
      case S390_VEC_INT_MUL_EVENS:    op = "v-vintmulevens";  break;
      case S390_VEC_INT_MUL_EVENU:    op = "v-vintmulevenu";  break;
      case S390_VEC_ELEM_SHL_V:       op = "v-velemshl";      break;
      case S390_VEC_ELEM_SHRA_V:      op = "v-vshrav";        break;
      case S390_VEC_ELEM_SHRL_V:      op = "v-vshrlv";        break;
      case S390_VEC_ELEM_ROLL_V:      op = "v-vrollv";        break;
      case S390_VEC_SHL_BITS:         op = "v-vshlbits";      break;
      case S390_VEC_SHRL_BITS:        op = "v-vshrlbits";     break;
      case S390_VEC_SHRA_BITS:        op = "v-vshrabits";     break;
      case S390_VEC_SHL_BYTES:        op = "v-vshlbytes";     break;
      case S390_VEC_SHRL_BYTES:       op = "v-vshrlbytes";    break;
      case S390_VEC_SHRA_BYTES:       op = "v-vshrabytes";    break;
      case S390_VEC_PWSUM_W:          op = "v-vpwsumw";       break;
      case S390_VEC_PWSUM_DW:         op = "v-vpwsumdw";      break;
      case S390_VEC_PWSUM_QW:         op = "v-vpwsumqw";      break;
      case S390_VEC_INIT_FROM_GPRS:   op = "v-vinitfromgprs"; break;
      case S390_VEC_INIT_FROM_FPRS:   op = "v-vinitfromfprs"; break;
      case S390_VEC_FLOAT_ADD:        op = "v-vfloatadd";     break;
      case S390_VEC_FLOAT_SUB:        op = "v-vfloatsub";     break;
      case S390_VEC_FLOAT_MUL:        op = "v-vfloatmul";     break;
      case S390_VEC_FLOAT_DIV:        op = "v-vfloatdiv";     break;
      case S390_VEC_FLOAT_COMPARE_EQUAL:         op = "v-vfloatcmpeq"; break;
      case S390_VEC_FLOAT_COMPARE_LESS_OR_EQUAL: op = "v-vfloatcmple"; break;
      case S390_VEC_FLOAT_COMPARE_LESS:          op = "v-vfloatcmpl";  break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R, %R, %R", op, insn->variant.vec_binop.dst,
                   insn->variant.vec_binop.op1, insn->variant.vec_binop.op2);
      break;

   case S390_INSN_VEC_TRIOP:
      switch (insn->variant.vec_triop.tag) {
      case S390_VEC_PERM:       op = "v-vperm";      break;
      case S390_VEC_FLOAT_MADD: op = "v-vfloatmadd"; break;
      case S390_VEC_FLOAT_MSUB: op = "v-vfloatmsub"; break;
      default: goto fail;
      }
      s390_sprintf(buf, "%M %R, %R, %R, %R", op, insn->variant.vec_triop.dst,
                   insn->variant.vec_triop.op1, insn->variant.vec_triop.op2,
                   insn->variant.vec_triop.op3);
      break;

   case S390_INSN_VEC_REPLICATE:
      s390_sprintf(buf, "%M %R, %R, %I", "v-vrep",
                   insn->variant.vec_replicate.dst,
                   insn->variant.vec_replicate.op1,
                   insn->variant.vec_replicate.idx);
      break;
   default:
      goto fail;
   }

   /* Write out how many bytes are involved in the operation */

   {
      UInt len, i;

      for (p = buf; *p; ++p)
         continue;

      len = p - buf;

      if (len < 32) {
         for (i = len; i < 32; ++i)
            p += vex_sprintf(p, " ");
      } else {
         p += vex_sprintf(p, "\t");
      }
   }

   /* Special cases first */
   switch (insn->tag) {
   case S390_INSN_UNOP:
      switch (insn->variant.unop.tag) {
      case S390_SIGN_EXTEND_8:
      case S390_ZERO_EXTEND_8:  p += vex_sprintf(p, "1 -> "); goto common;
      case S390_SIGN_EXTEND_16:
      case S390_ZERO_EXTEND_16: p += vex_sprintf(p, "2 -> "); goto common;
      case S390_SIGN_EXTEND_32:
      case S390_ZERO_EXTEND_32: p += vex_sprintf(p, "4 -> "); goto common;
      default:
         goto common;
      }

   case S390_INSN_BFP_CONVERT:
      switch (insn->variant.bfp_convert.tag) {
      case S390_BFP_I32_TO_F32:
      case S390_BFP_I32_TO_F64:
      case S390_BFP_I32_TO_F128:
      case S390_BFP_U32_TO_F32:
      case S390_BFP_U32_TO_F64:
      case S390_BFP_U32_TO_F128:
      case S390_BFP_F32_TO_I32:
      case S390_BFP_F32_TO_I64:
      case S390_BFP_F32_TO_U32:
      case S390_BFP_F32_TO_U64:
      case S390_BFP_F32_TO_F64:
      case S390_BFP_F32_TO_F128:  p += vex_sprintf(p, "4 -> ");  goto common;
      case S390_BFP_I64_TO_F32:
      case S390_BFP_I64_TO_F64:
      case S390_BFP_I64_TO_F128:
      case S390_BFP_U64_TO_F32:
      case S390_BFP_U64_TO_F64:
      case S390_BFP_U64_TO_F128:
      case S390_BFP_F64_TO_I32:
      case S390_BFP_F64_TO_I64:
      case S390_BFP_F64_TO_U32:
      case S390_BFP_F64_TO_U64:
      case S390_BFP_F64_TO_F32:
      case S390_BFP_F64_TO_F128:  p += vex_sprintf(p, "8 -> ");  goto common;
      case S390_BFP_F128_TO_I32:
      case S390_BFP_F128_TO_I64:
      case S390_BFP_F128_TO_U32:
      case S390_BFP_F128_TO_U64:
      case S390_BFP_F128_TO_F32:
      case S390_BFP_F128_TO_F64:  p += vex_sprintf(p, "16 -> "); goto common;
      default:
         goto common;
      }

   case S390_INSN_DFP_CONVERT:
      switch (insn->variant.dfp_convert.tag) {
      case S390_DFP_D32_TO_D64:
      case S390_DFP_I32_TO_D64:
      case S390_DFP_I32_TO_D128:
      case S390_DFP_U32_TO_D64:
      case S390_DFP_U32_TO_D128:  p += vex_sprintf(p, "4 -> ");  goto common;
      case S390_DFP_D64_TO_D32:
      case S390_DFP_D64_TO_D128:
      case S390_DFP_I64_TO_D64:
      case S390_DFP_I64_TO_D128:
      case S390_DFP_U64_TO_D64:
      case S390_DFP_U64_TO_D128:
      case S390_DFP_D64_TO_I32:
      case S390_DFP_D64_TO_I64:
      case S390_DFP_D64_TO_U32:
      case S390_DFP_D64_TO_U64:   p += vex_sprintf(p, "8 -> ");  goto common;
      case S390_DFP_D128_TO_D64:
      case S390_DFP_D128_TO_I32:
      case S390_DFP_D128_TO_I64:
      case S390_DFP_D128_TO_U32:
      case S390_DFP_D128_TO_U64:  p += vex_sprintf(p, "16 -> "); goto common;
      default:
         goto common;
      }

   case S390_INSN_FP_CONVERT: {
      s390_fp_convert *fp_convert = insn->variant.fp_convert.details;

      switch (fp_convert->tag) {
      case S390_FP_F32_TO_D32:
      case S390_FP_F32_TO_D64:
      case S390_FP_F32_TO_D128:
      case S390_FP_D32_TO_F32:
      case S390_FP_D32_TO_F64:
      case S390_FP_D32_TO_F128:   p += vex_sprintf(p, "4 -> ");  goto common;
      case S390_FP_F64_TO_D32:
      case S390_FP_F64_TO_D64:
      case S390_FP_F64_TO_D128:
      case S390_FP_D64_TO_F32:
      case S390_FP_D64_TO_F64:
      case S390_FP_D64_TO_F128:   p += vex_sprintf(p, "8 -> ");  goto common;
      case S390_FP_F128_TO_D32:
      case S390_FP_F128_TO_D64:
      case S390_FP_F128_TO_D128:
      case S390_FP_D128_TO_F32:
      case S390_FP_D128_TO_F64:
      case S390_FP_D128_TO_F128:  p += vex_sprintf(p, "16 -> "); goto common;
      default:
         goto common;
      }
   }

   default:
      goto common;
   }

 common:
   vex_sprintf(p, "%u bytes", (UInt)insn->size);

   return buf;

 fail: vpanic("s390_insn_as_string");
}
/* Load NUM bytes from memory into register REG using addressing mode AM. */
static UChar *
s390_emit_load_mem(UChar *p, UInt num, UChar reg, const s390_amode *am)
{
   UInt b = hregNumber(am->b);
   UInt x = hregNumber(am->x);   /* 0 for B12 and B20 */
   UInt d = am->d;

   switch (am->tag) {
   case S390_AMODE_B12:
   case S390_AMODE_BX12:
      switch (num) {
      case 1:  return s390_emit_IC(p, reg, x, b, d);
      case 2:  return s390_emit_LH(p, reg, x, b, d);
      case 4:  return s390_emit_L(p, reg, x, b, d);
      case 8:  return s390_emit_LG(p, reg, x, b, DISP20(d));
      case 16: return s390_emit_VL(p, reg, x, b, d);
      default: break;
      }
      break;

   case S390_AMODE_B20:
   case S390_AMODE_BX20:
      switch (num) {
      case 1: return s390_emit_ICY(p, reg, x, b, DISP20(d));
      case 2: return s390_emit_LHY(p, reg, x, b, DISP20(d));
      case 4: return s390_emit_LY(p, reg, x, b, DISP20(d));
      case 8: return s390_emit_LG(p, reg, x, b, DISP20(d));
      default: break;
      }
      break;

   default:
      break;
   }

   vpanic("s390_emit_load_mem");
}
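/* Note on the two amode branches above: the short forms (IC, LH, L, VL)
   only encode an unsigned 12-bit displacement, whereas the Y-type forms
   (ICY, LHY, LY) take a signed 20-bit displacement.  LG exists only in
   the long-displacement format, which is why both branches wrap the
   displacement in DISP20 for the 8-byte case. */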
/* Load condition code into register REG */
static UChar *
s390_emit_load_cc(UChar *p, UChar reg)
{
   p = s390_emit_LGHI(p, reg, 0);   /* Clear out, cc not affected */
   p = s390_emit_IPM(p, reg, reg);
   /* Shift 28 bits to the right --> [0,1,2,3] */
   return s390_emit_SRL(p, reg, 0, 28);   /* REG = cc */
}


/*---------------------------------------------------------------*/
/*--- Code generation                                         ---*/
/*---------------------------------------------------------------*/

/* Do not load more bytes than requested. */
static UChar *
s390_insn_load_emit(UChar *buf, const s390_insn *insn)
{
   UInt r, x, b, d;
   const s390_amode *src;

   src = insn->variant.load.src;

   r = hregNumber(insn->variant.load.dst);

   if (hregClass(insn->variant.load.dst) == HRcFlt64) {
      b = hregNumber(src->b);
      x = hregNumber(src->x);   /* 0 for B12 and B20 */
      d = src->d;

      switch (insn->size) {
      case 4:
         switch (src->tag) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            return s390_emit_LE(buf, r, x, b, d);

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            return s390_emit_LEY(buf, r, x, b, DISP20(d));
         }
         break;

      case 8:
         switch (src->tag) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            return s390_emit_LD(buf, r, x, b, d);

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            return s390_emit_LDY(buf, r, x, b, DISP20(d));
         }
         break;
      }
      vpanic("s390_insn_load_emit");
   }

   return s390_emit_load_mem(buf, insn->size, r, src);
}


static UChar *
s390_insn_store_emit(UChar *buf, const s390_insn *insn)
{
   UInt r, x, b, d;
   const s390_amode *dst;

   dst = insn->variant.store.dst;

   r = hregNumber(insn->variant.store.src);
   b = hregNumber(dst->b);
   x = hregNumber(dst->x);   /* 0 for B12 and B20 */
   d = dst->d;

   if (hregClass(insn->variant.store.src) == HRcFlt64) {
      switch (insn->size) {
      case 4:
         switch (dst->tag) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            return s390_emit_STE(buf, r, x, b, d);

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            return s390_emit_STEY(buf, r, x, b, DISP20(d));
         }
         break;

      case 8:
         switch (dst->tag) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            return s390_emit_STD(buf, r, x, b, d);

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            return s390_emit_STDY(buf, r, x, b, DISP20(d));
         }
         break;
      }
      vpanic("s390_insn_store_emit");
   }

   if (hregClass(insn->variant.store.src) == HRcVec128) {
      vassert(insn->size == 16);
      switch (dst->tag) {
      case S390_AMODE_B12:
      case S390_AMODE_BX12:
         return s390_emit_VST(buf, r, x, b, d);

      default:
         vpanic("s390_insn_store_emit: unknown dst->tag for HRcVec128");
      }
   }

   switch (insn->size) {
   case 1:
      switch (dst->tag) {
      case S390_AMODE_B12:
      case S390_AMODE_BX12:
         return s390_emit_STC(buf, r, x, b, d);

      case S390_AMODE_B20:
      case S390_AMODE_BX20:
         return s390_emit_STCY(buf, r, x, b, DISP20(d));
      }
      break;

   case 2:
      switch (dst->tag) {
      case S390_AMODE_B12:
      case S390_AMODE_BX12:
         return s390_emit_STH(buf, r, x, b, d);

      case S390_AMODE_B20:
      case S390_AMODE_BX20:
         return s390_emit_STHY(buf, r, x, b, DISP20(d));
      }
      break;

   case 4:
      switch (dst->tag) {
      case S390_AMODE_B12:
      case S390_AMODE_BX12:
         return s390_emit_ST(buf, r, x, b, d);

      case S390_AMODE_B20:
      case S390_AMODE_BX20:
         return s390_emit_STY(buf, r, x, b, DISP20(d));
      }
      break;

   case 8:
      return s390_emit_STG(buf, r, x, b, DISP20(d));
   }

   vpanic("s390_insn_store_emit");
}
static UChar *
s390_insn_move_emit(UChar *buf, const s390_insn *insn)
{
   UInt dst, src;
   HRegClass dst_class, src_class;

   dst = hregNumber(insn->variant.move.dst);
   src = hregNumber(insn->variant.move.src);

   dst_class = hregClass(insn->variant.move.dst);
   src_class = hregClass(insn->variant.move.src);

   if (dst_class == src_class) {
      if (dst_class == HRcInt64)
         return s390_emit_LGR(buf, dst, src);
      if (dst_class == HRcFlt64)
         return s390_emit_LDR(buf, dst, src);
      if (dst_class == HRcVec128) {
         return s390_emit_VLR(buf, dst, src);
      }
   } else {
      if (dst_class == HRcFlt64 && src_class == HRcInt64) {
         if (insn->size == 4) {
            buf = s390_emit_SLLG(buf, R0, src, 0, DISP20(32)); /* r0 = src << 32 */
            return s390_emit_LDGRw(buf, dst, R0);
         }
         return s390_emit_LDGRw(buf, dst, src);
      }

      if (dst_class == HRcInt64 && src_class == HRcFlt64) {
         if (insn->size == 4) {
            buf = s390_emit_LGDRw(buf, dst, src);
            return s390_emit_SRLG(buf, dst, dst, 0, DISP20(32)); /* dst >>= 32 */
         }
         return s390_emit_LGDRw(buf, dst, src);
      }

      if (dst_class == HRcFlt64 && src_class == HRcVec128) {
         return s390_emit_VLR(buf, dst, src);
      }
      /* A move between floating point registers and general purpose
         registers of different size should never occur and indicates
         an error elsewhere. */
   }

   vpanic("s390_insn_move_emit");
}
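/* For the mixed GPR/FPR cases above: LDGR/LGDR transfer a full 64-bit
   quantity between a GPR and an FPR.  A 32-bit value, however, lives in
   the leftmost half of an FPR, so the 4-byte variants shift the data up
   by 32 bits before LDGR and back down by 32 bits after LGDR. */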
static UChar *
s390_insn_memcpy_emit(UChar *buf, const s390_insn *insn)
{
   s390_amode *dst = insn->variant.memcpy.dst;
   s390_amode *src = insn->variant.memcpy.src;

   return s390_emit_MVC(buf, insn->size - 1, hregNumber(dst->b), dst->d,
                        hregNumber(src->b), src->d);
}
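/* MVC encodes the number of bytes minus one in its length field, which is
   why insn->size - 1 is passed above: an 8-byte copy is emitted with
   length code 7, and the architectural maximum of 256 bytes corresponds
   to length code 255. */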
static UChar *
s390_insn_load_immediate_emit(UChar *buf, const s390_insn *insn)
{
   UInt  r;
   ULong value = insn->variant.load_immediate.value;

   r = hregNumber(insn->variant.load_immediate.dst);

   if (hregClass(insn->variant.load_immediate.dst) == HRcFlt64) {
      vassert(value == 0);
      switch (insn->size) {
      case 4: return s390_emit_LZER(buf, r, value);
      case 8: return s390_emit_LZDR(buf, r, value);
      }
      vpanic("s390_insn_load_immediate_emit");
   }

   switch (insn->size) {
   case 1:
   case 2:
      /* Load the immediate values as a 4 byte value. That does not hurt as
         those extra bytes will not be looked at. Fall through .... */
   case 4:
      return s390_emit_load_32imm(buf, r, value);

   case 8:
      return s390_emit_load_64imm(buf, r, value);
   }

   vpanic("s390_insn_load_immediate_emit");
}


/* Insert low half of r2 into high half of dst. */
static UChar *
s390_emit_ilih(UChar *buf, UChar size, UChar dst, UChar r2)
{
   if (s390_host_has_gie)
      return s390_emit_RISBG(buf, dst, r2, 64 - 8 * size, 63 - 4 * size,
                             4 * size);

   /* Clear dst's upper half. */
   buf = s390_emit_SLLG(buf, dst, dst, 0, DISP20(64 - 4 * size));
   buf = s390_emit_SRLG(buf, dst, dst, 0, DISP20(64 - 4 * size));

   /* Shift r2 by appropriate amount and OR it into dst. */
   buf = s390_emit_SLLG(buf, R0, r2, 0, DISP20(4 * size));
   return s390_emit_OGR(buf, dst, R0);
}
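/* Worked example for the fallback path, assuming size == 4: the two
   64-bit shifts by 48 keep only the low 16 bits of dst, and r2 shifted
   left by 16 is then OR-ed in, giving dst = (r2 << 16) | (dst & 0xFFFF),
   i.e. the low half of r2 ends up in the high half of the 32-bit value. */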
/* There is no easy way to do ALU operations on 1-byte or 2-byte operands.
   So we simply perform a 4-byte operation. Doing so uses possibly undefined
   bits and produces an undefined result in those extra bit positions. But
   upstream does not look at those positions, so this is OK. */
s390_insn_alu_emit(UChar *buf, const s390_insn *insn)

   dst = hregNumber(insn->variant.alu.dst);
   op2 = insn->variant.alu.op2;

   /* Second operand is in a register */
   if (op2.tag == S390_OPND_REG) {
      UInt r2 = hregNumber(op2.variant.reg);

      switch (insn->size) {

         switch (insn->variant.alu.tag) {
         case S390_ALU_ADD:  return s390_emit_AR(buf, dst, r2);
         case S390_ALU_SUB:  return s390_emit_SR(buf, dst, r2);
         case S390_ALU_MUL:  return s390_emit_MSR(buf, dst, r2);
         case S390_ALU_AND:  return s390_emit_NR(buf, dst, r2);
         case S390_ALU_OR:   return s390_emit_OR(buf, dst, r2);
         case S390_ALU_XOR:  return s390_emit_XR(buf, dst, r2);
         case S390_ALU_LSH:  return s390_emit_SLL(buf, dst, r2, 0);
         case S390_ALU_RSH:  return s390_emit_SRL(buf, dst, r2, 0);
         case S390_ALU_RSHA: return s390_emit_SRA(buf, dst, r2, 0);
         case S390_ALU_ILIH: return s390_emit_ilih(buf, insn->size, dst, r2);

         switch (insn->variant.alu.tag) {
         case S390_ALU_ADD:  return s390_emit_AGR(buf, dst, r2);
         case S390_ALU_SUB:  return s390_emit_SGR(buf, dst, r2);
         case S390_ALU_MUL:  return s390_emit_MSGR(buf, dst, r2);
         case S390_ALU_AND:  return s390_emit_NGR(buf, dst, r2);
         case S390_ALU_OR:   return s390_emit_OGR(buf, dst, r2);
         case S390_ALU_XOR:  return s390_emit_XGR(buf, dst, r2);
         case S390_ALU_LSH:  return s390_emit_SLLG(buf, dst, dst, r2, DISP20(0));
         case S390_ALU_RSH:  return s390_emit_SRLG(buf, dst, dst, r2, DISP20(0));
         case S390_ALU_RSHA: return s390_emit_SRAG(buf, dst, dst, r2, DISP20(0));
         case S390_ALU_ILIH: return s390_emit_ilih(buf, 8, dst, r2);

   /* 2nd operand is in memory */
   if (op2.tag == S390_OPND_AMODE) {

      const s390_amode *src = op2.variant.am;

      b = hregNumber(src->b);
      x = hregNumber(src->x);  /* 0 for B12 and B20 */

      /* Shift operands are special here as there are no opcodes that
         allow a memory operand. So we first load the 2nd operand into
         some register. R0 is used to save restore the contents of the
         chosen register.. */

      if (insn->variant.alu.tag == S390_ALU_LSH ||
          insn->variant.alu.tag == S390_ALU_RSH ||
          insn->variant.alu.tag == S390_ALU_RSHA) {

         /* Choose a register (other than DST or R0) into which to stick the
            shift amount. The following works because r15 is reserved and
            thusly dst != 15. */
         vassert(dst != 15);  /* extra paranoia */
         b2 = (dst + 1) % 16;

         buf = s390_emit_LGR(buf, R0, b2);  /* save */

         /* Loading SRC to B2 does not modify R0. */
         buf = s390_emit_load_mem(buf, insn->size, b2, src);

         if (insn->size == 8) {
            switch (insn->variant.alu.tag) {
               buf = s390_emit_SLLG(buf, dst, dst, b2, DISP20(0));
               buf = s390_emit_SRLG(buf, dst, dst, b2, DISP20(0));
               buf = s390_emit_SRAG(buf, dst, dst, b2, DISP20(0));
            default: /* unreachable */
            switch (insn->variant.alu.tag) {
               buf = s390_emit_SLL(buf, dst, b2, 0);
               buf = s390_emit_SRL(buf, dst, b2, 0);
               buf = s390_emit_SRA(buf, dst, b2, 0);
            default: /* unreachable */

         return s390_emit_LGR(buf, b2, R0);  /* restore */

      switch (insn->size) {
         /* Move the byte from memory into scratch register r0 */
         buf = s390_emit_load_mem(buf, 1, R0, src);

         switch (insn->variant.alu.tag) {
         case S390_ALU_ADD: return s390_emit_AR(buf, dst, R0);
         case S390_ALU_SUB: return s390_emit_SR(buf, dst, R0);
         case S390_ALU_MUL: return s390_emit_MSR(buf, dst, R0);
         case S390_ALU_AND: return s390_emit_NR(buf, dst, R0);
         case S390_ALU_OR:  return s390_emit_OR(buf, dst, R0);
         case S390_ALU_XOR: return s390_emit_XR(buf, dst, R0);

         case S390_ALU_RSHA: ;  /* avoid GCC warning */

         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            switch (insn->variant.alu.tag) {
               return s390_emit_AH(buf, dst, x, b, d);
               return s390_emit_SH(buf, dst, x, b, d);
               return s390_emit_MH(buf, dst, x, b, d);
            /* For bitwise operations: Move two bytes from memory into scratch
               register r0; then perform operation */
               buf = s390_emit_LH(buf, R0, x, b, d);
               return s390_emit_NR(buf, dst, R0);
               buf = s390_emit_LH(buf, R0, x, b, d);
               return s390_emit_OR(buf, dst, R0);
               buf = s390_emit_LH(buf, R0, x, b, d);
               return s390_emit_XR(buf, dst, R0);
            case S390_ALU_RSHA: ;  /* avoid GCC warning */

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            switch (insn->variant.alu.tag) {
               return s390_emit_AHY(buf, dst, x, b, DISP20(d));
               return s390_emit_SHY(buf, dst, x, b, DISP20(d));
               return s390_emit_MHYw(buf, dst, x, b, DISP20(d));
            /* For bitwise operations: Move two bytes from memory into scratch
               register r0; then perform operation */
               buf = s390_emit_LHY(buf, R0, x, b, DISP20(d));
               return s390_emit_NR(buf, dst, R0);
               buf = s390_emit_LHY(buf, R0, x, b, DISP20(d));
               return s390_emit_OR(buf, dst, R0);
               buf = s390_emit_LHY(buf, R0, x, b, DISP20(d));
               return s390_emit_XR(buf, dst, R0);
            case S390_ALU_RSHA: ;  /* avoid GCC warning */

         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            switch (insn->variant.alu.tag) {
            case S390_ALU_ADD: return s390_emit_A(buf, dst, x, b, d);
            case S390_ALU_SUB: return s390_emit_S(buf, dst, x, b, d);
            case S390_ALU_MUL: return s390_emit_MS(buf, dst, x, b, d);
            case S390_ALU_AND: return s390_emit_N(buf, dst, x, b, d);
            case S390_ALU_OR:  return s390_emit_O(buf, dst, x, b, d);
            case S390_ALU_XOR: return s390_emit_X(buf, dst, x, b, d);
            case S390_ALU_RSHA: ;  /* avoid GCC warning */

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            switch (insn->variant.alu.tag) {
            case S390_ALU_ADD: return s390_emit_AY(buf, dst, x, b, DISP20(d));
            case S390_ALU_SUB: return s390_emit_SY(buf, dst, x, b, DISP20(d));
            case S390_ALU_MUL: return s390_emit_MSY(buf, dst, x, b, DISP20(d));
            case S390_ALU_AND: return s390_emit_NY(buf, dst, x, b, DISP20(d));
            case S390_ALU_OR:  return s390_emit_OY(buf, dst, x, b, DISP20(d));
            case S390_ALU_XOR: return s390_emit_XY(buf, dst, x, b, DISP20(d));
            case S390_ALU_RSHA: ;  /* avoid GCC warning */

         switch (insn->variant.alu.tag) {
         case S390_ALU_ADD: return s390_emit_AG(buf, dst, x, b, DISP20(d));
         case S390_ALU_SUB: return s390_emit_SG(buf, dst, x, b, DISP20(d));
         case S390_ALU_MUL: return s390_emit_MSG(buf, dst, x, b, DISP20(d));
         case S390_ALU_AND: return s390_emit_NG(buf, dst, x, b, DISP20(d));
         case S390_ALU_OR:  return s390_emit_OG(buf, dst, x, b, DISP20(d));
         case S390_ALU_XOR: return s390_emit_XG(buf, dst, x, b, DISP20(d));
         case S390_ALU_RSHA: ;  /* avoid GCC warning */

   /* 2nd operand is an immediate value */
   if (op2.tag == S390_OPND_IMMEDIATE) {

      /* No masking of the value is required as it is not sign extended */
      value = op2.variant.imm;

      switch (insn->size) {
         /* There is no 1-byte opcode. Do the computation in
            2 bytes. The extra byte will be ignored. */
         switch (insn->variant.alu.tag) {
            return s390_emit_AHI(buf, dst, value);
            return s390_emit_SLFIw(buf, dst, value);
            return s390_emit_MHI(buf, dst, value);
         case S390_ALU_AND: return s390_emit_NILL(buf, dst, value);
         case S390_ALU_OR:  return s390_emit_OILL(buf, dst, value);
            /* There is no XILL instruction. Load the immediate value into
               R0 and combine with the destination register. */
            buf = s390_emit_LHI(buf, R0, value);
            return s390_emit_XR(buf, dst, R0);
            return s390_emit_SLL(buf, dst, 0, value);
            return s390_emit_SRL(buf, dst, 0, value);
            return s390_emit_SRA(buf, dst, 0, value);
         case S390_ALU_ILIH: ;  /* avoid GCC warning */

         switch (insn->variant.alu.tag) {
            if (uint_fits_signed_16bit(value)) {
               return s390_emit_AHI(buf, dst, value);
            return s390_emit_AFIw(buf, dst, value);
         case S390_ALU_SUB:  return s390_emit_SLFIw(buf, dst, value);
         case S390_ALU_MUL:  return s390_emit_MSFIw(buf, dst, value);
         case S390_ALU_AND:  return s390_emit_NILFw(buf, dst, value);
         case S390_ALU_OR:   return s390_emit_OILFw(buf, dst, value);
         case S390_ALU_XOR:  return s390_emit_XILFw(buf, dst, value);
         case S390_ALU_LSH:  return s390_emit_SLL(buf, dst, 0, value);
         case S390_ALU_RSH:  return s390_emit_SRL(buf, dst, 0, value);
         case S390_ALU_RSHA: return s390_emit_SRA(buf, dst, 0, value);
         case S390_ALU_ILIH: ;  /* avoid GCC warning */

         switch (insn->variant.alu.tag) {
            if (ulong_fits_signed_16bit(value)) {
               return s390_emit_AGHI(buf, dst, value);
            if (ulong_fits_signed_32bit(value) && s390_host_has_eimm) {
               return s390_emit_AGFI(buf, dst, value);
            /* Load constant into R0 then add */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_AGR(buf, dst, R0);

            if (ulong_fits_unsigned_32bit(value)) {
               return s390_emit_SLGFIw(buf, dst, value);
            /* Load value into R0; then subtract from destination reg */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_SGR(buf, dst, R0);

            if (ulong_fits_signed_32bit(value) && s390_host_has_gie) {
               return s390_emit_MSGFI(buf, dst, value);
            /* Load constant into R0 then add */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_MSGR(buf, dst, R0);

            /* Do it in two steps: upper half [0:31] and lower half [32:63] */
            if (s390_host_has_eimm) {
               buf = s390_emit_NIHF(buf, dst, value >> 32);
               return s390_emit_NILF(buf, dst, value & 0xFFFFFFFF);
            /* Load value into R0; then combine with destination reg */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_NGR(buf, dst, R0);

            if (s390_host_has_eimm) {
               buf = s390_emit_OIHF(buf, dst, value >> 32);
               return s390_emit_OILF(buf, dst, value & 0xFFFFFFFF);
            /* Load value into R0; then combine with destination reg */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_OGR(buf, dst, R0);

            if (s390_host_has_eimm) {
               buf = s390_emit_XIHF(buf, dst, value >> 32);
               return s390_emit_XILF(buf, dst, value & 0xFFFFFFFF);
            /* Load value into R0; then combine with destination reg */
            buf = s390_emit_load_64imm(buf, R0, value);
            return s390_emit_XGR(buf, dst, R0);

            /* No special considerations for long displacement here. Only the
               six least significant bits of VALUE will be taken; all other
               bits are ignored. So the DH2 bits are irrelevant and do not
               influence the shift operation, independent of whether
               long-displacement is available or not. */
         case S390_ALU_LSH:  return s390_emit_SLLG(buf, dst, dst, 0, DISP20(value));
         case S390_ALU_RSH:  return s390_emit_SRLG(buf, dst, dst, 0, DISP20(value));
         case S390_ALU_RSHA: return s390_emit_SRAG(buf, dst, dst, 0, DISP20(value));
         case S390_ALU_ILIH: ;  /* avoid GCC warning */

   vpanic("s390_insn_alu_emit");
s390_widen_emit(UChar *buf, const s390_insn *insn, UInt from_size,

   s390_opnd_RMI opnd = insn->variant.unop.src;

   case S390_OPND_REG: {
      UChar r1 = hregNumber(insn->variant.unop.dst);
      UChar r2 = hregNumber(opnd.variant.reg);

      switch (from_size) {
         /* Widening to a half-word is implemented like widening to a word
            because the upper half-word will not be looked at. */
         if (insn->size == 4 || insn->size == 2) {  /* 8 --> 32    8 --> 16 */
            return s390_emit_LBRw(buf, r1, r2);
            return s390_emit_LLCRw(buf, r1, r2);
         if (insn->size == 8) {  /* 8 --> 64 */
            return s390_emit_LGBRw(buf, r1, r2);
            return s390_emit_LLGCRw(buf, r1, r2);

         if (insn->size == 4) {  /* 16 --> 32 */
            return s390_emit_LHRw(buf, r1, r2);
            return s390_emit_LLHRw(buf, r1, r2);
         if (insn->size == 8) {  /* 16 --> 64 */
            return s390_emit_LGHRw(buf, r1, r2);
            return s390_emit_LLGHRw(buf, r1, r2);

         if (insn->size == 8) {  /* 32 --> 64 */
            return s390_emit_LGFR(buf, r1, r2);
            return s390_emit_LLGFR(buf, r1, r2);

      default: /* unexpected "from" size */

   case S390_OPND_AMODE: {
      UChar r1 = hregNumber(insn->variant.unop.dst);
      const s390_amode *src = opnd.variant.am;
      UChar b = hregNumber(src->b);
      UChar x = hregNumber(src->x);

      switch (from_size) {
         if (insn->size == 4 || insn->size == 2) {
            return s390_emit_LB(buf, r1, x, b, DISP20(d));
            return s390_emit_LLCw(buf, r1, x, b, DISP20(d));
         if (insn->size == 8) {
            return s390_emit_LGB(buf, r1, x, b, DISP20(d));
            return s390_emit_LLGC(buf, r1, x, b, DISP20(d));

         if (insn->size == 4) {  /* 16 --> 32 */
            if (sign_extend == 0)
               return s390_emit_LLHw(buf, r1, x, b, DISP20(d));

            case S390_AMODE_B12:
            case S390_AMODE_BX12:
               return s390_emit_LH(buf, r1, x, b, d);

            case S390_AMODE_B20:
            case S390_AMODE_BX20:
               return s390_emit_LHY(buf, r1, x, b, DISP20(d));

         if (insn->size == 8) {  /* 16 --> 64 */
            return s390_emit_LGH(buf, r1, x, b, DISP20(d));
            return s390_emit_LLGH(buf, r1, x, b, DISP20(d));

         if (insn->size == 8) {  /* 32 --> 64 */
            return s390_emit_LGF(buf, r1, x, b, DISP20(d));
            return s390_emit_LLGF(buf, r1, x, b, DISP20(d));

      default: /* unexpected "from" size */

   case S390_OPND_IMMEDIATE: {
      UChar r1 = hregNumber(insn->variant.unop.dst);
      ULong value = opnd.variant.imm;

      switch (from_size) {
         if (insn->size == 4 || insn->size == 2) {  /* 8 --> 32   8 --> 16 */
            /* host can do the sign extension to 16-bit; LHI does the rest */
            return s390_emit_LHI(buf, r1, (Short)(Char)(UChar)value);
            return s390_emit_LHI(buf, r1, value);
         if (insn->size == 8) {  /* 8 --> 64 */
            /* host can do the sign extension to 16-bit; LGHI does the rest */
            return s390_emit_LGHI(buf, r1, (Short)(Char)(UChar)value);
            return s390_emit_LGHI(buf, r1, value);

         if (insn->size == 4) {  /* 16 --> 32 */
            return s390_emit_LHI(buf, r1, value);
         if (insn->size == 8) {  /* 16 --> 64 */
            return s390_emit_LGHI(buf, r1, value);
            return s390_emit_LLILL(buf, r1, value);

         if (insn->size == 8) {  /* 32 --> 64 */
            return s390_emit_LGFIw(buf, r1, value);
            return s390_emit_LLILFw(buf, r1, value);

      default: /* unexpected "from" size */

   vpanic("s390_widen_emit");
s390_negate_emit(UChar *buf, const s390_insn *insn)

   opnd = insn->variant.unop.src;

   case S390_OPND_REG: {
      UChar r1 = hregNumber(insn->variant.unop.dst);
      UChar r2 = hregNumber(opnd.variant.reg);

      switch (insn->size) {
         return s390_emit_LCR(buf, r1, r2);
         return s390_emit_LCGR(buf, r1, r2);

   case S390_OPND_AMODE: {
      UChar r1 = hregNumber(insn->variant.unop.dst);

      /* Load bytes into scratch register R0, then negate */
      buf = s390_emit_load_mem(buf, insn->size, R0, opnd.variant.am);

      switch (insn->size) {
         return s390_emit_LCR(buf, r1, R0);
         return s390_emit_LCGR(buf, r1, R0);

   case S390_OPND_IMMEDIATE: {
      UChar r1 = hregNumber(insn->variant.unop.dst);
      ULong value = opnd.variant.imm;

      value = ~value + 1;   /* two's complement */

      switch (insn->size) {
         /* Load the immediate values as a 4 byte value. That does not hurt as
            those extra bytes will not be looked at. Fall through .... */
         return s390_emit_load_32imm(buf, r1, value);

         return s390_emit_load_64imm(buf, r1, value);

   vpanic("s390_negate_emit");
s390_vec_duplicate_emit(UChar *buf, const s390_insn *insn)

   UChar v1 = hregNumber(insn->variant.unop.dst);
   s390_opnd_RMI opnd = insn->variant.unop.src;

   case S390_OPND_AMODE: {
      s390_amode* am = opnd.variant.am;
      UInt b = hregNumber(am->b);
      UInt x = hregNumber(am->x);

      if (fits_unsigned_12bit(d)) {
         return s390_emit_VLREP(buf, v1, x, b, d,
                                s390_getM_from_size(insn->size));
      buf = s390_emit_load_mem(buf, insn->size, R0, am);
      goto duplicate_from_gpr;

   case S390_OPND_IMMEDIATE: {
      ULong val = opnd.variant.imm;

      if (ulong_fits_signed_16bit(val)) {
         return s390_emit_VREPI(buf, v1, val, s390_getM_from_size(insn->size));
      buf = s390_emit_load_64imm(buf, R0, val);
      goto duplicate_from_gpr;

      r2 = hregNumber(opnd.variant.reg);

      buf = s390_emit_VLVGP(buf, v1, r2, r2);
      if (insn->size != 8) {
         buf = s390_emit_VREP(buf, v1, v1, 8 / insn->size - 1,
                              s390_getM_from_size(insn->size));

   vpanic("s390_vec_duplicate_emit");
s390_insn_unop_emit(UChar *buf, const s390_insn *insn)

   switch (insn->variant.unop.tag) {
   case S390_ZERO_EXTEND_8:  return s390_widen_emit(buf, insn, 1, 0);
   case S390_ZERO_EXTEND_16: return s390_widen_emit(buf, insn, 2, 0);
   case S390_ZERO_EXTEND_32: return s390_widen_emit(buf, insn, 4, 0);

   case S390_SIGN_EXTEND_8:  return s390_widen_emit(buf, insn, 1, 1);
   case S390_SIGN_EXTEND_16: return s390_widen_emit(buf, insn, 2, 1);
   case S390_SIGN_EXTEND_32: return s390_widen_emit(buf, insn, 4, 1);

   case S390_NEGATE: return s390_negate_emit(buf, insn);
   case S390_VEC_FILL: {
      vassert(insn->variant.unop.src.tag == S390_OPND_IMMEDIATE);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UShort i2 = insn->variant.unop.src.variant.imm;
      return s390_emit_VGBM(buf, v1, i2);

   case S390_VEC_DUPLICATE: return s390_vec_duplicate_emit(buf, insn);
   case S390_VEC_UNPACKLOWS: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size < 8);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VUPH(buf, v1, v2, s390_getM_from_size(insn->size));

   case S390_VEC_UNPACKLOWU: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size < 8);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VUPLH(buf, v1, v2, s390_getM_from_size(insn->size));

      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VLP(buf, v1, v2, s390_getM_from_size(insn->size));

   case S390_VEC_COUNT_LEADING_ZEROES: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VCLZ(buf, v1, v2, s390_getM_from_size(insn->size));

   case S390_VEC_COUNT_TRAILING_ZEROES: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VCTZ(buf, v1, v2, s390_getM_from_size(insn->size));

   case S390_VEC_COUNT_ONES: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VPOPCT(buf, v1, v2, s390_getM_from_size(insn->size));

   case S390_VEC_FLOAT_NEG: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size >= 4);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VFPSO(buf, v1, v2, s390_getM_from_size(insn->size), 0, 0);

   case S390_VEC_FLOAT_ABS: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size >= 4);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VFPSO(buf, v1, v2, s390_getM_from_size(insn->size), 0, 2);

   case S390_VEC_FLOAT_NABS: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size >= 4);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VFPSO(buf, v1, v2, s390_getM_from_size(insn->size), 0, 1);

   case S390_VEC_FLOAT_SQRT: {
      vassert(insn->variant.unop.src.tag == S390_OPND_REG);
      vassert(insn->size >= 4);
      UChar v1 = hregNumber(insn->variant.unop.dst);
      UChar v2 = hregNumber(insn->variant.unop.src.variant.reg);
      return s390_emit_VFSQ(buf, v1, v2, s390_getM_from_size(insn->size), 0);

   vpanic("s390_insn_unop_emit");
/* Test operand for zero. */
s390_insn_test_emit(UChar *buf, const s390_insn *insn)

   opnd = insn->variant.test.src;

   case S390_OPND_REG: {
      UInt reg = hregNumber(opnd.variant.reg);

      switch (insn->size) {
         return s390_emit_TMLL(buf, reg, 0xff);
         return s390_emit_TMLL(buf, reg, 0xffff);
         return s390_emit_LTR(buf, reg, reg);
         return s390_emit_LTGR(buf, reg, reg);

   case S390_OPND_AMODE: {
      const s390_amode *am = opnd.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      switch (insn->size) {
         case S390_AMODE_B12:
            return s390_emit_TM(buf, 0xff, b, d);
         case S390_AMODE_B20:
            return s390_emit_TMY(buf, 0xff, b, DISP20(d));
            buf = s390_emit_LB(buf, R0, x, b, DISP20(d));
            return s390_emit_TMLL(buf, R0, 0xff);

         case S390_AMODE_B12:
            buf = s390_emit_LH(buf, R0, x, b, d);
            return s390_emit_TMLL(buf, R0, 0xffff);
            buf = s390_emit_LHY(buf, R0, x, b, DISP20(d));
            return s390_emit_TMLL(buf, R0, 0xffff);

         return s390_emit_LTw(buf, R0, x, b, DISP20(d));
         return s390_emit_LTGw(buf, R0, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      if (opnd.variant.imm == 0)
         return s390_emit_CR(buf, R0, R0);
      return s390_emit_OILL(buf, R0, 1);

   vpanic("s390_insn_test_emit");
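
/* The fallback sequence in s390_insn_cc2bool_emit below works as follows:
   the 4-bit condition mask COND is shifted left by (cc + 60), which moves
   the mask bit that corresponds to the current condition code into bit 63
   (the sign bit); the arithmetic right shift by 63 then replicates that
   bit across the register, yielding -1 if the condition holds and 0
   otherwise. */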
s390_insn_cc2bool_emit(UChar *buf, const s390_insn *insn)

   UChar r1 = hregNumber(insn->variant.cc2bool.dst);
   s390_cc_t cond = insn->variant.cc2bool.cond;

   /* Make the destination register be -1 or 0, depending on whether
      the relevant condition holds. A 64-bit value is computed. */
   if (cond == S390_CC_ALWAYS)
      return s390_emit_LGHI(buf, r1, -1);  /* r1 = -1 */

   /* If LOCGHI is available, use it. */
   if (s390_host_has_lsc2) {
      /* Clear r1, then load immediate -1 on condition. */
      buf = s390_emit_LGHI(buf, r1, 0);
      if (cond != S390_CC_NEVER)
         buf = s390_emit_LOCGHI(buf, r1, -1, cond);

   buf = s390_emit_load_cc(buf, r1);                  /* r1 = cc */
   buf = s390_emit_LGHI(buf, R0, cond);               /* r0 = mask */
   buf = s390_emit_SLLG(buf, r1, R0, r1, DISP20(60)); /* r1 = mask << (cc+60) */
   buf = s390_emit_SRAG(buf, r1, r1, 0, DISP20(63));  /* r1 = r1 >> 63 */
/* Only 4-byte and 8-byte operands are handled. */
s390_insn_cas_emit(UChar *buf, const s390_insn *insn)

   UChar r1, r3, b, old;

   r1  = hregNumber(insn->variant.cas.op1); /* expected value */
   r3  = hregNumber(insn->variant.cas.op3);
   old = hregNumber(insn->variant.cas.old_mem);
   am  = insn->variant.cas.op2;
   b   = hregNumber(am->b);

   vassert(am->tag == S390_AMODE_B12 || am->tag == S390_AMODE_B20);

   switch (insn->size) {
      /* r1 must not be overwritten. So copy it to R0 and let CS clobber it */
      buf = s390_emit_LR(buf, R0, r1);
      if (am->tag == S390_AMODE_B12)
         buf = s390_emit_CS(buf, R0, r3, b, d);
         buf = s390_emit_CSY(buf, R0, r3, b, DISP20(d));
      /* Now copy R0 which has the old memory value to OLD */
      return s390_emit_LR(buf, old, R0);

      /* r1 must not be overwritten. So copy it to R0 and let CS clobber it */
      buf = s390_emit_LGR(buf, R0, r1);
      buf = s390_emit_CSG(buf, R0, r3, b, DISP20(d));
      /* Now copy R0 which has the old memory value to OLD */
      return s390_emit_LGR(buf, old, R0);

   vpanic("s390_insn_cas_emit");
/* Only 4-byte and 8-byte operands are handled. */
s390_insn_cdas_emit(UChar *buf, const s390_insn *insn)

   UChar r1, r1p1, r3, /*r3p1,*/ b, old_high, old_low, scratch;

   s390_cdas *cdas = insn->variant.cdas.details;

   r1   = hregNumber(cdas->op1_high); /* expected value */
   r1p1 = hregNumber(cdas->op1_low);  /* expected value */
   r3   = hregNumber(cdas->op3_high);
   /* r3p1 = hregNumber(cdas->op3_low); */ /* unused */
   old_high = hregNumber(cdas->old_mem_high);
   old_low  = hregNumber(cdas->old_mem_low);
   scratch  = hregNumber(cdas->scratch);

   b = hregNumber(am->b);

   vassert(scratch == 1);
   vassert(am->tag == S390_AMODE_B12 || am->tag == S390_AMODE_B20);

   switch (insn->size) {
      /* r1, r1+1 must not be overwritten. So copy them to R0,scratch
         and let CDS/CDSY clobber it */
      buf = s390_emit_LR(buf, R0, r1);
      buf = s390_emit_LR(buf, scratch, r1p1);

      if (am->tag == S390_AMODE_B12)
         buf = s390_emit_CDS(buf, R0, r3, b, d);
         buf = s390_emit_CDSY(buf, R0, r3, b, DISP20(d));

      /* Now copy R0,scratch which has the old memory value to OLD */
      buf = s390_emit_LR(buf, old_high, R0);
      buf = s390_emit_LR(buf, old_low, scratch);

      /* r1, r1+1 must not be overwritten. So copy them to R0,scratch
         and let CDSG clobber it */
      buf = s390_emit_LGR(buf, R0, r1);
      buf = s390_emit_LGR(buf, scratch, r1p1);

      buf = s390_emit_CDSG(buf, R0, r3, b, DISP20(d));

      /* Now copy R0,scratch which has the old memory value to OLD */
      buf = s390_emit_LGR(buf, old_high, R0);
      buf = s390_emit_LGR(buf, old_low, scratch);

   vpanic("s390_insn_cdas_emit");
/* Only 4-byte and 8-byte comparisons are handled. 1-byte and 2-byte
   comparisons will have been converted to 4-byte comparisons in
   s390_isel_cc and should not occur here. */
s390_insn_compare_emit(UChar *buf, const s390_insn *insn)

   Bool signed_comparison;

   op1 = insn->variant.compare.src1;
   op2 = insn->variant.compare.src2;
   signed_comparison = insn->variant.compare.signed_comparison;

   case S390_OPND_REG: {
      UInt r1 = hregNumber(op1);
      UInt r2 = hregNumber(op2.variant.reg);

      switch (insn->size) {
         if (signed_comparison)
            return s390_emit_CR(buf, r1, r2);
            return s390_emit_CLR(buf, r1, r2);

         if (signed_comparison)
            return s390_emit_CGR(buf, r1, r2);
            return s390_emit_CLGR(buf, r1, r2);

   case S390_OPND_AMODE: {
      UChar r1 = hregNumber(op1);
      const s390_amode *am = op2.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      switch (insn->size) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            if (signed_comparison)
               return s390_emit_C(buf, r1, x, b, d);
               return s390_emit_CL(buf, r1, x, b, d);

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            if (signed_comparison)
               return s390_emit_CY(buf, r1, x, b, DISP20(d));
               return s390_emit_CLY(buf, r1, x, b, DISP20(d));

         if (signed_comparison)
            return s390_emit_CG(buf, r1, x, b, DISP20(d));
            return s390_emit_CLG(buf, r1, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      UChar r1 = hregNumber(op1);
      ULong value = op2.variant.imm;

      switch (insn->size) {
         if (signed_comparison)
            return s390_emit_CFIw(buf, r1, value);
            return s390_emit_CLFIw(buf, r1, value);

         if (s390_host_has_eimm) {
            if (signed_comparison) {
               if (ulong_fits_signed_32bit(value))
                  return s390_emit_CGFI(buf, r1, value);
               if (ulong_fits_unsigned_32bit(value))
                  return s390_emit_CLGFI(buf, r1, value);

         buf = s390_emit_load_64imm(buf, R0, value);
         if (signed_comparison)
            return s390_emit_CGR(buf, r1, R0);
            return s390_emit_CLGR(buf, r1, R0);

   vpanic("s390_insn_compare_emit");
s390_insn_mul_emit(UChar *buf, const s390_insn *insn)

   Bool signed_multiply;

   /* The register number identifying the register pair */
   r1 = hregNumber(insn->variant.mul.dst_hi);

   op2 = insn->variant.mul.op2;
   signed_multiply = insn->tag == S390_INSN_SMUL;

   case S390_OPND_REG: {
      UInt r2 = hregNumber(op2.variant.reg);

      switch (insn->size) {
         if (signed_multiply)
            return s390_emit_MR(buf, r1, r2);
            return s390_emit_MLR(buf, r1, r2);

         if (signed_multiply)
            return s390_emit_MGRK(buf, r1 + 1, r1, r2);
            return s390_emit_MLGR(buf, r1, r2);

   case S390_OPND_AMODE: {
      const s390_amode *am = op2.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      switch (insn->size) {
         /* Load bytes into scratch register R0, then multiply */
         buf = s390_emit_load_mem(buf, insn->size, R0, am);
         if (signed_multiply)
            return s390_emit_MR(buf, r1, R0);
            return s390_emit_MLR(buf, r1, R0);

         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            if (signed_multiply)
               return s390_emit_M(buf, r1, x, b, d);
               return s390_emit_ML(buf, r1, x, b, DISP20(d));

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            if (signed_multiply)
               return s390_emit_MFYw(buf, r1, x, b, DISP20(d));
               return s390_emit_ML(buf, r1, x, b, DISP20(d));

         if (signed_multiply)
            return s390_emit_MG(buf, r1, x, b, DISP20(d));
            return s390_emit_MLG(buf, r1, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      ULong value = op2.variant.imm;

      switch (insn->size) {
         buf = s390_emit_load_32imm(buf, R0, value);
         if (signed_multiply)
            return s390_emit_MR(buf, r1, R0);
            return s390_emit_MLR(buf, r1, R0);

         buf = s390_emit_load_64imm(buf, R0, value);
         if (signed_multiply)
            return s390_emit_MGRK(buf, r1 + 1, r1, R0);
            return s390_emit_MLGR(buf, r1, R0);

   vpanic("s390_insn_mul_emit");
s390_insn_div_emit(UChar *buf, const s390_insn *insn)

   Bool signed_divide;

   r1 = hregNumber(insn->variant.div.op1_hi);
   op2 = insn->variant.div.op2;
   signed_divide = insn->tag == S390_INSN_SDIV;

   case S390_OPND_REG: {
      UInt r2 = hregNumber(op2.variant.reg);

      switch (insn->size) {
         return s390_emit_DR(buf, r1, r2);
         return s390_emit_DLR(buf, r1, r2);

         vpanic("s390_insn_div_emit");
         return s390_emit_DLGR(buf, r1, r2);

   case S390_OPND_AMODE: {
      const s390_amode *am = op2.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      switch (insn->size) {
         case S390_AMODE_B12:
         case S390_AMODE_BX12:
            return s390_emit_D(buf, r1, x, b, d);
            return s390_emit_DL(buf, r1, x, b, DISP20(d));

         case S390_AMODE_B20:
         case S390_AMODE_BX20:
            if (signed_divide) {
               buf = s390_emit_LY(buf, R0, x, b, DISP20(d));
               return s390_emit_DR(buf, r1, R0);
            return s390_emit_DL(buf, r1, x, b, DISP20(d));

         vpanic("s390_insn_div_emit");
         return s390_emit_DLG(buf, r1, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      ULong value = op2.variant.imm;

      switch (insn->size) {
         buf = s390_emit_load_32imm(buf, R0, value);
         return s390_emit_DR(buf, r1, R0);
         return s390_emit_DLR(buf, r1, R0);

         buf = s390_emit_load_64imm(buf, R0, value);
         vpanic("s390_insn_div_emit");
         return s390_emit_DLGR(buf, r1, R0);

   vpanic("s390_insn_div_emit");
s390_insn_divs_emit(UChar *buf, const s390_insn *insn)

   r1 = hregNumber(insn->variant.divs.rem);
   op2 = insn->variant.divs.op2;

   case S390_OPND_REG: {
      UInt r2 = hregNumber(op2.variant.reg);

      return s390_emit_DSGR(buf, r1, r2);

   case S390_OPND_AMODE: {
      const s390_amode *am = op2.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      return s390_emit_DSG(buf, r1, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      ULong value = op2.variant.imm;

      buf = s390_emit_load_64imm(buf, R0, value);
      return s390_emit_DSGR(buf, r1, R0);

   vpanic("s390_insn_divs_emit");
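
/* s390_insn_clz_emit below prefers FLOGR (find leftmost one) when the
   extended-immediate facility is available; FLOGR leaves the number of
   leading zero bits in the even register of the pair.  Without that
   facility, a small loop starts the count at 64 and decrements it once per
   right shift of the operand until the operand becomes zero. */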
s390_insn_clz_emit(UChar *buf, const s390_insn *insn)

   UChar r1, r1p1, r2, *p;

   r1   = hregNumber(insn->variant.clz.num_bits);
   r1p1 = hregNumber(insn->variant.clz.clobber);

   vassert((r1 & 0x1) == 0);
   vassert(r1p1 == r1 + 1);

   src = insn->variant.clz.src;

   /* Get operand and move it to r2 */
   case S390_OPND_REG:
      r2 = hregNumber(src.variant.reg);

   case S390_OPND_AMODE: {
      const s390_amode *am = src.variant.am;
      UChar b = hregNumber(am->b);
      UChar x = hregNumber(am->x);

      p = s390_emit_LG(p, R0, x, b, DISP20(d));

   case S390_OPND_IMMEDIATE: {
      ULong value = src.variant.imm;

      p = s390_emit_load_64imm(p, R0, value);

   /* Use FLOGR if you can */
   if (s390_host_has_eimm) {
      return s390_emit_FLOGR(p, r1, r2);

   p = s390_emit_LTGR(p, R0, r2);
   p = s390_emit_LLILL(p, r1, 64);

   p = s390_emit_BRC(p, S390_CC_E, (4 + 4 + 6 + 4 + 4)/ 2);  /* 4 bytes */
   p = s390_emit_AGHI(p, r1, (UShort)-1);          /* r1 -= 1;  4 bytes */
   p = s390_emit_SRLG(p, R0, R0, R0, DISP20(1));   /* r0 >>= 1; 6 bytes */
   p = s390_emit_LTGR(p, R0, R0);                  /* set cc    4 bytes */
   p = s390_emit_BRC(p, S390_CC_NE,                /*           4 bytes */
                     (UShort)(-(4 + 6 + 4) / 2));

   vpanic("s390_insn_clz_emit");
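
/* In s390_insn_helper_call_emit below the branches around the call cannot
   be emitted directly, because their targets are not yet known when the
   branch site is reached.  Space for a 4-byte BRC is reserved instead, and
   the displacement (measured in half-words, as BRC requires) is patched in
   once the code that follows has been emitted. */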
/* Returns a value == BUF to denote failure, != BUF to denote success. */
s390_insn_helper_call_emit(UChar *buf, const s390_insn *insn)

   s390_helper_call *helper_call = insn->variant.helper_call.details;

   cond = helper_call->cond;
   target = helper_call->target;

   const Bool not_always = (cond != S390_CC_ALWAYS);
   const Bool not_void_return = (helper_call->rloc.pri != RLPri_None);

   /* We have this situation:
      ( *** code in this braces is for not_always && not_void_return*** )

      *** load_64imm $0x5555555555555555, %%r2 ***  // e.g. for Int RetLoc
   */

   UChar *pBefore = buf;
   /* 4 bytes (a BRC insn) to be filled in here */

   /* Load the target address into a register, that
      (a) is not used for passing parameters to the helper and
      (b) can be clobbered by the callee
      (c) is not special to the BASR insn
      r1 is the only choice.
      Also, need to arrange for the return address be put into the
      link register. */
   buf = s390_emit_load_64imm(buf, 1, target);
   buf = s390_emit_BASR(buf, S390_REGNO_LINK_REGISTER, 1);

   UChar* pPreElse = buf;
   if (not_always && not_void_return) {
      /* 4 bytes (a BRC insn) to be filled in here */

   UChar* pElse = buf;
   if (not_always && not_void_return) {
      switch (helper_call->rloc.pri) {
         buf = s390_emit_load_64imm(buf, S390_REGNO_RETURN_VALUE,
                                    0x5555555555555555ULL);
         ppS390Instr(insn, True);
         vpanic("s390_insn_helper_call_emit: invalid conditional RetLoc.");

   UChar* pAfter = buf;

   // fill "brc{!cond} else"
      delta = pElse - pBefore;
      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(pBefore, s390_cc_invert(cond), delta);

   // fill "brc{ALWAYS} after"
   if (not_always && not_void_return)
      delta = pAfter - pPreElse;
      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(pPreElse, S390_CC_ALWAYS, delta);
s390_insn_cond_move_emit(UChar *buf, const s390_insn *insn)

   UChar *p, *ptmp = 0;   /* avoid compiler warnings */

   cond = insn->variant.cond_move.cond;
   dst  = insn->variant.cond_move.dst;
   src  = insn->variant.cond_move.src;

   if (cond == S390_CC_NEVER) return buf;

   if (s390_host_has_lsc && hregClass(dst) == HRcInt64) {
      /* LOCx is not the preferred way to implement an unconditional load. */
      if (cond == S390_CC_ALWAYS) goto use_branch_insn;

      case S390_OPND_REG:
         return s390_emit_LOCGR(p, cond, hregNumber(dst),
                                hregNumber(src.variant.reg));

      case S390_OPND_AMODE: {
         const s390_amode *am = src.variant.am;

         /* We cannot use LOCx for loads less than 4 bytes. In that case
            load into R0 and then use LOCGR. Do the same if the amode uses
            an index register. */
         if (insn->size < 4 ||
             am->tag == S390_AMODE_BX12 || am->tag == S390_AMODE_BX20) {
            p = s390_emit_load_mem(p, insn->size, R0, am);
            p = s390_emit_LOCGR(p, cond, hregNumber(dst), R0);

         vassert(am->tag == S390_AMODE_B12 || am->tag == S390_AMODE_B20);
         vassert(insn->size == 4 || insn->size == 8);

         UInt b = hregNumber(am->b);

         if (insn->size == 4) {
            return s390_emit_LOC(p, hregNumber(dst), cond, b, DISP20(d));
         return s390_emit_LOCG(p, hregNumber(dst), cond, b, DISP20(d));

      case S390_OPND_IMMEDIATE: {
         ULong value = src.variant.imm;

         /* If LOCGHI is available, use it. */
         if (s390_host_has_lsc2 && ulong_fits_signed_16bit(value)) {
            return s390_emit_LOCGHI(p, hregNumber(dst), value, cond);

         /* Load value into R0, then use LOCGR */
         if (insn->size <= 4) {
            p = s390_emit_load_32imm(p, R0, value);
            return s390_emit_LOCGR(p, cond, hregNumber(dst), R0);

         vassert(insn->size == 8);
         p = s390_emit_load_64imm(p, R0, value);
         return s390_emit_LOCGR(p, cond, hregNumber(dst), R0);

   /* Branch (if cond fails) over move instrs */
   if (cond != S390_CC_ALWAYS) {
      /* Don't know how many bytes to jump over yet.
         Make space for a BRC instruction (4 bytes) and fill in later. */
      ptmp = p;   /* to be filled in here */

   // cond true: move src => dst

   case S390_OPND_REG:
      switch (hregClass(dst)) {
         p = s390_emit_LGR(p, hregNumber(dst), hregNumber(src.variant.reg));
         p = s390_emit_LDR(p, hregNumber(dst), hregNumber(src.variant.reg));
         p = s390_emit_VLR(p, hregNumber(dst), hregNumber(src.variant.reg));

   case S390_OPND_AMODE:
      if (hregClass(dst) != HRcInt64)

      p = s390_emit_load_mem(p, insn->size, hregNumber(dst), src.variant.am);

   case S390_OPND_IMMEDIATE: {
      if (hregClass(dst) != HRcInt64)

      ULong value = src.variant.imm;
      UInt r = hregNumber(dst);

      switch (insn->size) {
         /* Load the immediate values as a 4 byte value. That does not hurt as
            those extra bytes will not be looked at. Fall through .... */
         p = s390_emit_load_32imm(p, r, value);

         p = s390_emit_load_64imm(p, r, value);

   if (cond != S390_CC_ALWAYS) {
      Int delta = p - ptmp;

      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(ptmp, s390_cc_invert(cond), delta);

   vpanic("s390_insn_cond_move_emit");
s390_insn_bfp_triop_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.bfp_triop.dst);
   UInt r2 = hregNumber(insn->variant.bfp_triop.op2);
   UInt r3 = hregNumber(insn->variant.bfp_triop.op3);

   switch (insn->size) {
      switch (insn->variant.bfp_triop.tag) {
      case S390_BFP_MADD: return s390_emit_MAEBR(buf, r1, r3, r2);
      case S390_BFP_MSUB: return s390_emit_MSEBR(buf, r1, r3, r2);
      default: goto fail;

      switch (insn->variant.bfp_triop.tag) {
      case S390_BFP_MADD: return s390_emit_MADBR(buf, r1, r3, r2);
      case S390_BFP_MSUB: return s390_emit_MSDBR(buf, r1, r3, r2);
      default: goto fail;

   default: goto fail;

   vpanic("s390_insn_bfp_triop_emit");
s390_insn_bfp_binop_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.bfp_binop.dst_hi);
   UInt r2 = hregNumber(insn->variant.bfp_binop.op2_hi);

   switch (insn->size) {
      switch (insn->variant.bfp_binop.tag) {
      case S390_BFP_ADD: return s390_emit_AEBR(buf, r1, r2);
      case S390_BFP_SUB: return s390_emit_SEBR(buf, r1, r2);
      case S390_BFP_MUL: return s390_emit_MEEBR(buf, r1, r2);
      case S390_BFP_DIV: return s390_emit_DEBR(buf, r1, r2);
      default: goto fail;

      switch (insn->variant.bfp_binop.tag) {
      case S390_BFP_ADD: return s390_emit_ADBR(buf, r1, r2);
      case S390_BFP_SUB: return s390_emit_SDBR(buf, r1, r2);
      case S390_BFP_MUL: return s390_emit_MDBR(buf, r1, r2);
      case S390_BFP_DIV: return s390_emit_DDBR(buf, r1, r2);
      default: goto fail;

      switch (insn->variant.bfp_binop.tag) {
      case S390_BFP_ADD: return s390_emit_AXBR(buf, r1, r2);
      case S390_BFP_SUB: return s390_emit_SXBR(buf, r1, r2);
      case S390_BFP_MUL: return s390_emit_MXBR(buf, r1, r2);
      case S390_BFP_DIV: return s390_emit_DXBR(buf, r1, r2);
      default: goto fail;

   default: goto fail;

   vpanic("s390_insn_bfp_binop_emit");
s390_insn_bfp_unop_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.bfp_unop.dst_hi);
   UInt r2 = hregNumber(insn->variant.bfp_unop.op_hi);

   switch (insn->variant.bfp_unop.tag) {
      switch (insn->size) {
      case 4:  return s390_emit_LPEBR(buf, r1, r2);
      case 8:  return s390_emit_LPDBR(buf, r1, r2);
      case 16: return s390_emit_LPXBR(buf, r1, r2);
      default: goto fail;

   case S390_BFP_NABS:
      switch (insn->size) {
      case 4:  return s390_emit_LNEBR(buf, r1, r2);
      case 8:  return s390_emit_LNDBR(buf, r1, r2);
      case 16: return s390_emit_LNXBR(buf, r1, r2);
      default: goto fail;

      switch (insn->size) {
      case 4:  return s390_emit_LCEBR(buf, r1, r2);
      case 8:  return s390_emit_LCDBR(buf, r1, r2);
      case 16: return s390_emit_LCXBR(buf, r1, r2);
      default: goto fail;

   case S390_BFP_SQRT:
      switch (insn->size) {
      case 4:  return s390_emit_SQEBR(buf, r1, r2);
      case 8:  return s390_emit_SQDBR(buf, r1, r2);
      case 16: return s390_emit_SQXBR(buf, r1, r2);
      default: goto fail;

   default: goto fail;

   vpanic("s390_insn_bfp_unop_emit");
s390_insn_bfp_convert_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.bfp_convert.dst_hi);
   UInt r2 = hregNumber(insn->variant.bfp_convert.op_hi);
   s390_bfp_round_t m3 = insn->variant.bfp_convert.rounding_mode;
   /* The IEEE-inexact-exception control is not modelled. So the
      m4 field is 0 (which is what GCC does, too) */

   switch (insn->variant.bfp_convert.tag) {
   /* Convert to fixed */
   case S390_BFP_F32_TO_I32:  return s390_emit_CFEBR(buf, m3, r1, r2);
   case S390_BFP_F64_TO_I32:  return s390_emit_CFDBR(buf, m3, r1, r2);
   case S390_BFP_F128_TO_I32: return s390_emit_CFXBR(buf, m3, r1, r2);
   case S390_BFP_F32_TO_I64:  return s390_emit_CGEBR(buf, m3, r1, r2);
   case S390_BFP_F64_TO_I64:  return s390_emit_CGDBR(buf, m3, r1, r2);
   case S390_BFP_F128_TO_I64: return s390_emit_CGXBR(buf, m3, r1, r2);

   /* Convert to logical */
   case S390_BFP_F32_TO_U32:  return s390_emit_CLFEBR(buf, m3, m4, r1, r2);
   case S390_BFP_F64_TO_U32:  return s390_emit_CLFDBR(buf, m3, m4, r1, r2);
   case S390_BFP_F128_TO_U32: return s390_emit_CLFXBR(buf, m3, m4, r1, r2);
   case S390_BFP_F32_TO_U64:  return s390_emit_CLGEBR(buf, m3, m4, r1, r2);
   case S390_BFP_F64_TO_U64:  return s390_emit_CLGDBR(buf, m3, m4, r1, r2);
   case S390_BFP_F128_TO_U64: return s390_emit_CLGXBR(buf, m3, m4, r1, r2);

   /* Convert from fixed */
   case S390_BFP_I32_TO_F32:  return s390_emit_CEFBRA(buf, m3, m4, r1, r2);
   case S390_BFP_I32_TO_F64:  return s390_emit_CDFBRA(buf,  0, m4, r1, r2);
   case S390_BFP_I32_TO_F128: return s390_emit_CXFBRA(buf,  0, m4, r1, r2);
   case S390_BFP_I64_TO_F32:  return s390_emit_CEGBRA(buf, m3, m4, r1, r2);
   case S390_BFP_I64_TO_F64:  return s390_emit_CDGBRA(buf, m3, m4, r1, r2);
   case S390_BFP_I64_TO_F128: return s390_emit_CXGBRA(buf,  0, m4, r1, r2);

   /* Convert from logical */
   case S390_BFP_U32_TO_F32:  return s390_emit_CELFBR(buf, m3, m4, r1, r2);
   case S390_BFP_U32_TO_F64:  return s390_emit_CDLFBR(buf, m3, m4, r1, r2);
   case S390_BFP_U32_TO_F128: return s390_emit_CXLFBR(buf, m3, m4, r1, r2);
   case S390_BFP_U64_TO_F32:  return s390_emit_CELGBR(buf, m3, m4, r1, r2);
   case S390_BFP_U64_TO_F64:  return s390_emit_CDLGBR(buf, m3, m4, r1, r2);
   case S390_BFP_U64_TO_F128: return s390_emit_CXLGBR(buf, m3, m4, r1, r2);

   /* Load lengthened */
   case S390_BFP_F32_TO_F64:  return s390_emit_LDEBR(buf, r1, r2);
   case S390_BFP_F32_TO_F128: return s390_emit_LXEBR(buf, r1, r2);
   case S390_BFP_F64_TO_F128: return s390_emit_LXDBR(buf, r1, r2);

   case S390_BFP_F64_TO_F32:  return s390_emit_LEDBRA(buf, m3, m4, r1, r2);
   case S390_BFP_F128_TO_F32: return s390_emit_LEXBRA(buf, m3, m4, r1, r2);
   case S390_BFP_F128_TO_F64: return s390_emit_LDXBRA(buf, m3, m4, r1, r2);

   /* Load FP integer */
   case S390_BFP_F32_TO_F32I:   return s390_emit_FIEBRA(buf, m3, m4, r1, r2);
   case S390_BFP_F64_TO_F64I:   return s390_emit_FIDBRA(buf, m3, m4, r1, r2);
   case S390_BFP_F128_TO_F128I: return s390_emit_FIXBRA(buf, m3, m4, r1, r2);

   default: goto fail;

   vpanic("s390_insn_bfp_convert_emit");
s390_insn_bfp_compare_emit(UChar *buf, const s390_insn *insn)

   UInt dst = hregNumber(insn->variant.bfp_compare.dst);
   UInt r1  = hregNumber(insn->variant.bfp_compare.op1_hi);
   UInt r2  = hregNumber(insn->variant.bfp_compare.op2_hi);

   switch (insn->size) {
   case 4:  buf = s390_emit_CEBR(buf, r1, r2); break;
   case 8:  buf = s390_emit_CDBR(buf, r1, r2); break;
   case 16: buf = s390_emit_CXBR(buf, r1, r2); break;
   default: goto fail;

   return s390_emit_load_cc(buf, dst);  /* Load condition code into DST */

   vpanic("s390_insn_bfp_compare_emit");
s390_insn_dfp_binop_emit(UChar *buf, const s390_insn *insn)

   s390_dfp_binop *dfp_binop = insn->variant.dfp_binop.details;

   UInt r1 = hregNumber(dfp_binop->dst_hi);
   UInt r2 = hregNumber(dfp_binop->op2_hi);
   UInt r3 = hregNumber(dfp_binop->op3_hi);
   s390_dfp_round_t m4 = dfp_binop->rounding_mode;

   switch (insn->size) {
      switch (dfp_binop->tag) {
      case S390_DFP_ADD:      return s390_emit_ADTRA(buf, r3, m4, r1, r2);
      case S390_DFP_SUB:      return s390_emit_SDTRA(buf, r3, m4, r1, r2);
      case S390_DFP_MUL:      return s390_emit_MDTRA(buf, r3, m4, r1, r2);
      case S390_DFP_DIV:      return s390_emit_DDTRA(buf, r3, m4, r1, r2);
      case S390_DFP_QUANTIZE: return s390_emit_QADTR(buf, r3, m4, r1, r2);
      default: goto fail;

      switch (dfp_binop->tag) {
      case S390_DFP_ADD:      return s390_emit_AXTRA(buf, r3, m4, r1, r2);
      case S390_DFP_SUB:      return s390_emit_SXTRA(buf, r3, m4, r1, r2);
      case S390_DFP_MUL:      return s390_emit_MXTRA(buf, r3, m4, r1, r2);
      case S390_DFP_DIV:      return s390_emit_DXTRA(buf, r3, m4, r1, r2);
      case S390_DFP_QUANTIZE: return s390_emit_QAXTR(buf, r3, m4, r1, r2);
      default: goto fail;

   default: goto fail;

   vpanic("s390_insn_dfp_binop_emit");
s390_insn_dfp_reround_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.dfp_reround.dst_hi);
   UInt r2 = hregNumber(insn->variant.dfp_reround.op2);
   UInt r3 = hregNumber(insn->variant.dfp_reround.op3_hi);
   s390_dfp_round_t m4 = insn->variant.dfp_reround.rounding_mode;

   switch (insn->size) {
      return s390_emit_RRDTR(buf, r3, m4, r1, r2);

      return s390_emit_RRXTR(buf, r3, m4, r1, r2);

   default: goto fail;

   vpanic("s390_insn_dfp_reround_emit");
s390_insn_dfp_unop_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.dfp_unop.dst_hi);
   UInt r2 = hregNumber(insn->variant.dfp_unop.op_hi);

   switch (insn->variant.dfp_unop.tag) {
   case S390_DFP_EXTRACT_EXP_D64:  return s390_emit_EEDTR(buf, r1, r2); break;
   case S390_DFP_EXTRACT_EXP_D128: return s390_emit_EEXTR(buf, r1, r2); break;
   case S390_DFP_EXTRACT_SIG_D64:  return s390_emit_ESDTR(buf, r1, r2); break;
   case S390_DFP_EXTRACT_SIG_D128: return s390_emit_ESXTR(buf, r1, r2); break;
   default: goto fail;

   vpanic("s390_insn_dfp_unop_emit");
s390_insn_dfp_intop_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.dfp_intop.dst_hi);
   UInt r2 = hregNumber(insn->variant.dfp_intop.op2);
   UInt r3 = hregNumber(insn->variant.dfp_intop.op3_hi);

   switch (insn->size) {
      switch (insn->variant.dfp_intop.tag) {
      case S390_DFP_SHIFT_LEFT:  return s390_emit_SLDT(buf, r3, r1, r2);
      case S390_DFP_SHIFT_RIGHT: return s390_emit_SRDT(buf, r3, r1, r2);
      case S390_DFP_INSERT_EXP:  return s390_emit_IEDTR(buf, r3, r1, r2);
      default: goto fail;

      switch (insn->variant.dfp_intop.tag) {
      case S390_DFP_SHIFT_LEFT:  return s390_emit_SLXT(buf, r3, r1, r2);
      case S390_DFP_SHIFT_RIGHT: return s390_emit_SRXT(buf, r3, r1, r2);
      case S390_DFP_INSERT_EXP:  return s390_emit_IEXTR(buf, r3, r1, r2);
      default: goto fail;

   default: goto fail;

   vpanic("s390_insn_dfp_intop_emit");
s390_insn_dfp_compare_emit(UChar *buf, const s390_insn *insn)

   UInt dst = hregNumber(insn->variant.dfp_compare.dst);
   UInt r1  = hregNumber(insn->variant.dfp_compare.op1_hi);
   UInt r2  = hregNumber(insn->variant.dfp_compare.op2_hi);

   switch (insn->size) {
      switch(insn->variant.dfp_compare.tag) {
      case S390_DFP_COMPARE:     buf = s390_emit_CDTR(buf, r1, r2); break;
      case S390_DFP_COMPARE_EXP: buf = s390_emit_CEDTR(buf, r1, r2); break;
      default: goto fail;

      switch(insn->variant.dfp_compare.tag) {
      case S390_DFP_COMPARE:     buf = s390_emit_CXTR(buf, r1, r2); break;
      case S390_DFP_COMPARE_EXP: buf = s390_emit_CEXTR(buf, r1, r2); break;
      default: goto fail;

   default: goto fail;

   return s390_emit_load_cc(buf, dst);  /* Load condition code into DST */

   vpanic("s390_insn_dfp_compare_emit");
s390_insn_dfp_convert_emit(UChar *buf, const s390_insn *insn)

   UInt r1 = hregNumber(insn->variant.dfp_convert.dst_hi);
   UInt r2 = hregNumber(insn->variant.dfp_convert.op_hi);
   s390_dfp_round_t m3 = insn->variant.dfp_convert.rounding_mode;
   /* The IEEE-inexact-exception control is not modelled. So the
      m4 field is 0 (which is what GCC does, too) */

   switch (insn->variant.dfp_convert.tag) {

   /* Convert to fixed */
   case S390_DFP_D64_TO_I32:  return s390_emit_CFDTR(buf, m3, m4, r1, r2);
   case S390_DFP_D128_TO_I32: return s390_emit_CFXTR(buf, m3, m4, r1, r2);
   case S390_DFP_D64_TO_I64:  return s390_emit_CGDTR(buf, m3, m4, r1, r2);
   case S390_DFP_D128_TO_I64: return s390_emit_CGXTR(buf, m3, m4, r1, r2);

   /* Convert to logical */
   case S390_DFP_D64_TO_U32:  return s390_emit_CLFDTR(buf, m3, m4, r1, r2);
   case S390_DFP_D128_TO_U32: return s390_emit_CLFXTR(buf, m3, m4, r1, r2);
   case S390_DFP_D64_TO_U64:  return s390_emit_CLGDTR(buf, m3, m4, r1, r2);
   case S390_DFP_D128_TO_U64: return s390_emit_CLGXTR(buf, m3, m4, r1, r2);

   /* Convert from fixed */
   case S390_DFP_I32_TO_D64:  return s390_emit_CDFTR(buf, 0, m4, r1, r2);
   case S390_DFP_I32_TO_D128: return s390_emit_CXFTR(buf, 0, m4, r1, r2);
   case S390_DFP_I64_TO_D64:  return s390_emit_CDGTRA(buf, m3, m4, r1, r2);
   case S390_DFP_I64_TO_D128: return s390_emit_CXGTR(buf, 0, m4, r1, r2);

   /* Convert from logical */
   case S390_DFP_U32_TO_D64:  return s390_emit_CDLFTR(buf, m3, m4, r1, r2);
   case S390_DFP_U64_TO_D64:  return s390_emit_CDLGTR(buf, m3, m4, r1, r2);
   case S390_DFP_U32_TO_D128: return s390_emit_CXLFTR(buf, m3, m4, r1, r2);
   case S390_DFP_U64_TO_D128: return s390_emit_CXLGTR(buf, m3, m4, r1, r2);

   /* Load lengthened */
   case S390_DFP_D32_TO_D64:  return s390_emit_LDETR(buf, m4, r1, r2);
   case S390_DFP_D64_TO_D128: return s390_emit_LXDTR(buf, m4, r1, r2);

   case S390_DFP_D64_TO_D32:  return s390_emit_LEDTR(buf, m3, m4, r1, r2);
   case S390_DFP_D128_TO_D64: return s390_emit_LDXTR(buf, m3, m4, r1, r2);

   default: goto fail;

   vpanic("s390_insn_dfp_convert_emit");
s390_insn_fp_convert_emit(UChar *buf, const s390_insn *insn)

   s390_fp_convert *fp_convert = insn->variant.fp_convert.details;
   s390_dfp_round_t rm = fp_convert->rounding_mode;

   vassert(rm < 2 || rm > 7);

   switch (fp_convert->tag) {
   case S390_FP_F32_TO_D32:   pfpo = S390_PFPO_F32_TO_D32 << 8;   break;
   case S390_FP_F32_TO_D64:   pfpo = S390_PFPO_F32_TO_D64 << 8;   break;
   case S390_FP_F32_TO_D128:  pfpo = S390_PFPO_F32_TO_D128 << 8;  break;
   case S390_FP_F64_TO_D32:   pfpo = S390_PFPO_F64_TO_D32 << 8;   break;
   case S390_FP_F64_TO_D64:   pfpo = S390_PFPO_F64_TO_D64 << 8;   break;
   case S390_FP_F64_TO_D128:  pfpo = S390_PFPO_F64_TO_D128 << 8;  break;
   case S390_FP_F128_TO_D32:  pfpo = S390_PFPO_F128_TO_D32 << 8;  break;
   case S390_FP_F128_TO_D64:  pfpo = S390_PFPO_F128_TO_D64 << 8;  break;
   case S390_FP_F128_TO_D128: pfpo = S390_PFPO_F128_TO_D128 << 8; break;
   case S390_FP_D32_TO_F32:   pfpo = S390_PFPO_D32_TO_F32 << 8;   break;
   case S390_FP_D32_TO_F64:   pfpo = S390_PFPO_D32_TO_F64 << 8;   break;
   case S390_FP_D32_TO_F128:  pfpo = S390_PFPO_D32_TO_F128 << 8;  break;
   case S390_FP_D64_TO_F32:   pfpo = S390_PFPO_D64_TO_F32 << 8;   break;
   case S390_FP_D64_TO_F64:   pfpo = S390_PFPO_D64_TO_F64 << 8;   break;
   case S390_FP_D64_TO_F128:  pfpo = S390_PFPO_D64_TO_F128 << 8;  break;
   case S390_FP_D128_TO_F32:  pfpo = S390_PFPO_D128_TO_F32 << 8;  break;
   case S390_FP_D128_TO_F64:  pfpo = S390_PFPO_D128_TO_F64 << 8;  break;
   case S390_FP_D128_TO_F128: pfpo = S390_PFPO_D128_TO_F128 << 8; break;
   default: goto fail;

   buf = s390_emit_load_32imm(buf, R0, pfpo);
   buf = s390_emit_PFPO(buf);

   vpanic("s390_insn_fp_convert_emit");
s390_insn_mfence_emit(UChar *buf, const s390_insn *insn)

   return s390_emit_BCR(buf, 0xF, 0x0);
static UChar *
s390_insn_mimm_emit(UChar *buf, const s390_insn *insn)
{
   s390_amode *am = insn->variant.mimm.dst;
   UChar b = hregNumber(am->b);
   Int   d = am->d;
   ULong value = insn->variant.mimm.value;

   if (value == 0) {
      return s390_emit_XC(buf, insn->size - 1, b, d, b, d);
   }

   if (insn->size == 1) {
      return s390_emit_MVI(buf, value & 0xFF, b, d);
   }

   if (s390_host_has_gie && ulong_fits_signed_16bit(value)) {
      value &= 0xFFFF;

      switch (insn->size) {
      case 2: return s390_emit_MVHHI(buf, b, d, value);
      case 4: return s390_emit_MVHI(buf,  b, d, value);
      case 8: return s390_emit_MVGHI(buf, b, d, value);
      }
   }

   // Load value to R0, then store.
   switch (insn->size) {
   case 2:
      buf = s390_emit_LHI(buf, R0, value & 0xFFFF);
      return s390_emit_STH(buf, R0, 0, b, d);
   case 4:
      buf = s390_emit_load_32imm(buf, R0, value);
      return s390_emit_ST(buf, R0, 0, b, d);
   case 8:
      buf = s390_emit_load_64imm(buf, R0, value);
      return s390_emit_STG(buf, R0, 0, b, DISP20(d));
   }

   vpanic("s390_insn_mimm_emit");
}
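/* Illustrative summary (assumption, not part of the original comments):
   a zero is written with XC over itself (length field encoded as
   size - 1), a single byte with MVI, a value fitting a signed 16-bit
   immediate with MVHHI/MVHI/MVGHI when the general-instructions-extension
   facility is present, and anything else by materialising the value in R0
   and storing it with STH/ST/STG. */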
static UChar *
s390_insn_madd_emit(UChar *buf, const s390_insn *insn)
{
   s390_amode *am = insn->variant.madd.dst;
   UChar b = hregNumber(am->b);
   Int   d = am->d;

   if (insn->size == 4) {
      return s390_emit_ASI(buf, insn->variant.madd.delta, b, DISP20(d));
   }

   return s390_emit_AGSI(buf, insn->variant.madd.delta, b, DISP20(d));
}
static UChar *
s390_insn_set_fpc_bfprm_emit(UChar *buf, const s390_insn *insn)
{
   UInt mode = hregNumber(insn->variant.set_fpc_bfprm.mode);

   /* Copy FPC from guest state to R0 and OR in the new rounding mode */
   buf = s390_emit_L(buf, R0, 0, S390_REGNO_GUEST_STATE_POINTER,
                     S390X_GUEST_OFFSET(guest_fpc));   // r0 = guest_fpc

   buf = s390_emit_NILL(buf, R0, 0xFFF8); /* Clear out right-most 3 bits */
   buf = s390_emit_OR(buf, R0, mode);     /* OR in the new rounding mode */
   buf = s390_emit_SFPC(buf, R0);         /* Load FPC register from R0 */

   return buf;
}
static UChar *
s390_insn_set_fpc_dfprm_emit(UChar *buf, const s390_insn *insn)
{
   UInt mode = hregNumber(insn->variant.set_fpc_dfprm.mode);

   /* Copy FPC from guest state to R0 and OR in the new rounding mode */
   buf = s390_emit_L(buf, R0, 0, S390_REGNO_GUEST_STATE_POINTER,
                     S390X_GUEST_OFFSET(guest_fpc));   // r0 = guest_fpc

   /* DFP rounding mode is set at bit position 25:27 in FPC register */
   buf = s390_emit_NILL(buf, R0, 0xFF8F); /* Clear out 25:27 bits */
   buf = s390_emit_SLL(buf, mode, 0, 4);  /* bring mode to 25:27 bits */
   buf = s390_emit_OR(buf, R0, mode);     /* OR in the new rounding mode */
   buf = s390_emit_SFPC(buf, R0);         /* Load FPC register from R0 */

   return buf;
}
/* Define convenience functions needed for translation chaining.
   Any changes need to be applied to the functions in concert. */

static __inline__ Bool
s390_insn_is_BRCL(const UChar *p, UChar condition)
{
   return p[0] == 0xc0 && p[1] == ((condition << 4) | 0x04);
}

static __inline__ Bool
s390_insn_is_BR(const UChar *p, UChar reg)
{
   return p[0] == 0x07 && p[1] == (0xF0 | reg);  /* BCR 15,reg */
}


/* The length of the BASR insn */
#define S390_BASR_LEN  2
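/* For illustration (assumption, not part of the original comments): an
   unconditional BRCL, i.e. BRCL with condition mask 15, starts with the
   bytes 0xC0 0xF4, and "BR %r1" (really BCR 15,%r1) is 0x07 0xF1. These
   are exactly the byte patterns the two recognizers above test for. */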
/* Load the 64-bit VALUE into REG. Note that this function must NOT
   optimise the generated code by looking at the value. I.e. using
   LGHI if value == 0 would be very wrong. */
static UChar *
s390_tchain_load64(UChar *buf, UChar regno, ULong value)
{
   UChar *begin = buf;

   if (s390_host_has_eimm) {
      /* Do it in two steps: upper half [0:31] and lower half [32:63] */
      buf = s390_emit_IIHF(buf, regno, value >> 32);
      buf = s390_emit_IILF(buf, regno, value & 0xFFFFFFFF);
   } else {
      buf = s390_emit_IILL(buf, regno, value & 0xFFFF);
      value >>= 16;
      buf = s390_emit_IILH(buf, regno, value & 0xFFFF);
      value >>= 16;
      buf = s390_emit_IIHL(buf, regno, value & 0xFFFF);
      value >>= 16;
      buf = s390_emit_IIHH(buf, regno, value & 0xFFFF);
   }

   vassert(buf - begin == s390_tchain_load64_len());

   return buf;
}
/* Return number of bytes generated by s390_tchain_load64 */
static UInt
s390_tchain_load64_len(void)
{
   if (s390_host_has_eimm) {
      return 6 + 6;       /* IIHF + IILF */
   }
   return 4 + 4 + 4 + 4;  /* IIHH + IIHL + IILH + IILL */
}
/* Verify that CODE is the code sequence generated by s390_tchain_load64
   to load VALUE into REGNO. Return pointer to the byte following the
   insn sequence. */
static const UChar *
s390_tchain_verify_load64(const UChar *code, UChar regno, ULong value)
{
   UInt regmask = regno << 4;

   if (s390_host_has_eimm) {
      /* Check for IIHF */
      vassert(code[0]  ==  0xC0);
      vassert(code[1]  == (0x08 | regmask));
      vassert(*(const UInt *)&code[2] == (value >> 32));
      /* Check for IILF */
      vassert(code[6]  ==  0xC0);
      vassert(code[7]  == (0x09 | regmask));
      vassert(*(const UInt *)&code[8] == (value & 0xFFFFFFFF));
   } else {
      UInt hw;

      /* Check for IILL */
      hw = value & 0xFFFF;
      vassert(code[0]  == 0xA5);
      vassert(code[1]  == (0x03 | regmask));
      vassert(code[2]  == (hw >> 8));
      vassert(code[3]  == (hw & 0xFF));

      /* Check for IILH */
      hw = (value >> 16) & 0xFFFF;
      vassert(code[4]  == 0xA5);
      vassert(code[5]  == (0x02 | regmask));
      vassert(code[6]  == (hw >> 8));
      vassert(code[7]  == (hw & 0xFF));

      /* Check for IIHL */
      hw = (value >> 32) & 0xFFFF;
      vassert(code[8]  == 0xA5);
      vassert(code[9]  == (0x01 | regmask));
      vassert(code[10] == (hw >> 8));
      vassert(code[11] == (hw & 0xFF));

      /* Check for IIHH */
      hw = (value >> 48) & 0xFFFF;
      vassert(code[12] == 0xA5);
      vassert(code[13] == (0x00 | regmask));
      vassert(code[14] == (hw >> 8));
      vassert(code[15] == (hw & 0xFF));
   }

   return code + s390_tchain_load64_len();
}
/* CODE points to the code sequence as generated by s390_tchain_load64.
   Change the loaded value to IMM64. Return pointer to the byte following
   the patched code sequence. */
static UChar *
s390_tchain_patch_load64(UChar *code, ULong imm64)
{
   if (s390_host_has_eimm) {
      /* Patch IIHF */
      *(UInt *)&code[2] = imm64 >> 32;
      /* Patch IILF */
      *(UInt *)&code[8] = imm64 & 0xFFFFFFFF;
   } else {
      code[3]  = imm64 & 0xFF; imm64 >>= 8;
      code[2]  = imm64 & 0xFF; imm64 >>= 8;
      code[7]  = imm64 & 0xFF; imm64 >>= 8;
      code[6]  = imm64 & 0xFF; imm64 >>= 8;
      code[11] = imm64 & 0xFF; imm64 >>= 8;
      code[10] = imm64 & 0xFF; imm64 >>= 8;
      code[15] = imm64 & 0xFF; imm64 >>= 8;
      code[14] = imm64 & 0xFF; imm64 >>= 8;
   }

   return code + s390_tchain_load64_len();
}
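/* Byte-layout note (assumption, for illustration only): in the non-EIMM
   form the four II.. insns carry big-endian 16-bit immediates at offsets
   2..3, 6..7, 10..11 and 14..15, so the patch loop writes the low-order
   byte of each halfword first (code[3], then code[2], and so on),
   consuming imm64 from its least significant byte upwards. This mirrors
   the per-byte checks in s390_tchain_verify_load64 above. */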
/* NB: what goes on here has to be very closely coordinated with the
   chainXDirect_S390 and unchainXDirect_S390 below. */
static UChar *
s390_insn_xdirect_emit(UChar *buf, const s390_insn *insn,
                       const void *disp_cp_chain_me_to_slowEP,
                       const void *disp_cp_chain_me_to_fastEP)
{
   /* We're generating chain-me requests here, so we need to be
      sure this is actually allowed -- no-redir translations can't
      use chain-me's.  Hence: */
   vassert(disp_cp_chain_me_to_slowEP != NULL);
   vassert(disp_cp_chain_me_to_fastEP != NULL);

   /* Use ptmp for backpatching conditional jumps. */
   UChar *ptmp = buf;

   /* First off, if this is conditional, create a conditional
      jump over the rest of it. */
   s390_cc_t cond = insn->variant.xdirect.cond;

   if (cond != S390_CC_ALWAYS) {
      /* So we have something like this
            if (cond) do_xdirect;
            Y:
         We convert this into
            if (! cond) goto Y;     // BRC opcode; 4 bytes
            do_xdirect;
            Y:
      */
      /* 4 bytes (a BRC insn) to be filled in here */
      buf += 4;
   }

   /* Update the guest IA. */
   buf = s390_emit_load_64imm(buf, R0, insn->variant.xdirect.dst);

   const s390_amode *amode = insn->variant.xdirect.guest_IA;
   vassert(amode->tag == S390_AMODE_B12);
   UInt b = hregNumber(amode->b);
   UInt d = amode->d;

   buf = s390_emit_STG(buf, R0, 0, b, DISP20(d));

   /* Load the chosen entry point into the scratch reg */
   const void *disp_cp_chain_me;

   disp_cp_chain_me =
      insn->variant.xdirect.to_fast_entry ? disp_cp_chain_me_to_fastEP
                                          : disp_cp_chain_me_to_slowEP;
   /* Get the address of the beginning of the load64 code sequence into %r1.
      Do not change the register! This is part of the protocol with the
      dispatcher. */
   buf = s390_emit_BASR(buf, 1, R0);

   /* --- FIRST PATCHABLE BYTE follows (must not modify %r1) --- */
   Addr64 addr = (Addr)disp_cp_chain_me;
   buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH, addr);

   /* goto *tchain_scratch */
   buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);

   /* --- END of PATCHABLE BYTES --- */

   /* Fix up the conditional jump, if there was one. */
   if (cond != S390_CC_ALWAYS) {
      Int delta = buf - ptmp;

      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(ptmp, s390_cc_invert(cond), delta);
   }

   return buf;
}
/* Return the number of patchable bytes from an xdirect insn. */
static UInt
s390_xdirect_patchable_len(void)
{
   return s390_tchain_load64_len() + S390_BASR_LEN;
}
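/* Size note (assumption, for illustration only): the patchable region is
   the load64 sequence plus S390_BASR_LEN (2 bytes), i.e. 12 + 2 = 14
   bytes with the extended-immediate facility and 16 + 2 = 18 bytes
   without it. A 6-byte BRCL therefore always fits, which
   chainXDirect_S390 below asserts explicitly. */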
static UChar *
s390_insn_xindir_emit(UChar *buf, const s390_insn *insn,
                      const void *disp_cp_xindir)
{
   /* We're generating transfers that could lead indirectly to a
      chain-me, so we need to be sure this is actually allowed --
      no-redir translations are not allowed to reach normal
      translations without going through the scheduler.  That means
      no XDirects or XIndirs out from no-redir translations.
      Hence: */
   vassert(disp_cp_xindir != NULL);

   /* Use ptmp for backpatching conditional jumps. */
   UChar *ptmp = buf;

   /* First off, if this is conditional, create a conditional
      jump over the rest of it. */
   s390_cc_t cond = insn->variant.xdirect.cond;

   if (cond != S390_CC_ALWAYS) {
      /* So we have something like this
            if (cond) do_xdirect;
            Y:
         We convert this into
            if (! cond) goto Y;     // BRC opcode; 4 bytes
            do_xdirect;
            Y:
      */
      /* 4 bytes (a BRC insn) to be filled in here */
      buf += 4;
   }

   /* Update the guest IA with the address in xindir.dst. */
   const s390_amode *amode = insn->variant.xindir.guest_IA;

   vassert(amode->tag == S390_AMODE_B12);
   UInt b = hregNumber(amode->b);
   UInt d = amode->d;
   UInt regno = hregNumber(insn->variant.xindir.dst);

   buf = s390_emit_STG(buf, regno, 0, b, DISP20(d));

   /* load tchain_scratch, #disp_indir */
   buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH,
                            (Addr)disp_cp_xindir);
   /* goto *tchain_direct */
   buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);

   /* Fix up the conditional jump, if there was one. */
   if (cond != S390_CC_ALWAYS) {
      Int delta = buf - ptmp;

      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(ptmp, s390_cc_invert(cond), delta);
   }

   return buf;
}
static UChar *
s390_insn_xassisted_emit(UChar *buf, const s390_insn *insn,
                         const void *disp_cp_xassisted)
{
   /* Use ptmp for backpatching conditional jumps. */
   UChar *ptmp = buf;

   /* First off, if this is conditional, create a conditional
      jump over the rest of it. */
   s390_cc_t cond = insn->variant.xdirect.cond;

   if (cond != S390_CC_ALWAYS) {
      /* So we have something like this
            if (cond) do_xdirect;
            Y:
         We convert this into
            if (! cond) goto Y;     // BRC opcode; 4 bytes
            do_xdirect;
            Y:
      */
      /* 4 bytes (a BRC insn) to be filled in here */
      buf += 4;
   }

   /* Update the guest IA with the address in xassisted.dst. */
   const s390_amode *amode = insn->variant.xassisted.guest_IA;

   vassert(amode->tag == S390_AMODE_B12);
   UInt b = hregNumber(amode->b);
   UInt d = amode->d;
   UInt regno = hregNumber(insn->variant.xassisted.dst);

   buf = s390_emit_STG(buf, regno, 0, b, DISP20(d));

   UInt trcval = 0;

   switch (insn->variant.xassisted.kind) {
   case Ijk_ClientReq:   trcval = VEX_TRC_JMP_CLIENTREQ;   break;
   case Ijk_Sys_syscall: trcval = VEX_TRC_JMP_SYS_SYSCALL; break;
   case Ijk_Extension:   trcval = VEX_TRC_JMP_EXTENSION;   break;
   case Ijk_Yield:       trcval = VEX_TRC_JMP_YIELD;       break;
   case Ijk_EmWarn:      trcval = VEX_TRC_JMP_EMWARN;      break;
   case Ijk_EmFail:      trcval = VEX_TRC_JMP_EMFAIL;      break;
   case Ijk_MapFail:     trcval = VEX_TRC_JMP_MAPFAIL;     break;
   case Ijk_NoDecode:    trcval = VEX_TRC_JMP_NODECODE;    break;
   case Ijk_InvalICache: trcval = VEX_TRC_JMP_INVALICACHE; break;
   case Ijk_NoRedir:     trcval = VEX_TRC_JMP_NOREDIR;     break;
   case Ijk_SigTRAP:     trcval = VEX_TRC_JMP_SIGTRAP;     break;
   case Ijk_SigFPE:      trcval = VEX_TRC_JMP_SIGFPE;      break;
   case Ijk_SigSEGV:     trcval = VEX_TRC_JMP_SIGSEGV;     break;
   case Ijk_Boring:      trcval = VEX_TRC_JMP_BORING;      break;
   /* We don't expect to see the following being assisted. */
   default:
      ppIRJumpKind(insn->variant.xassisted.kind);
      vpanic("s390_insn_xassisted_emit: unexpected jump kind");
   }

   vassert(trcval != 0);

   /* guest_state_pointer = trcval */
   buf = s390_emit_LGHI(buf, S390_REGNO_GUEST_STATE_POINTER, trcval);

   /* load tchain_scratch, #disp_assisted */
   buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH,
                            (Addr)disp_cp_xassisted);

   /* goto *tchain_direct */
   buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);

   /* Fix up the conditional jump, if there was one. */
   if (cond != S390_CC_ALWAYS) {
      Int delta = buf - ptmp;

      delta >>= 1;  /* immediate constant is #half-words */
      vassert(delta > 0 && delta < (1 << 16));
      s390_emit_BRC(ptmp, s390_cc_invert(cond), delta);
   }

   return buf;
}
/* Generate code for an event check. Pseudo code:

      guest_state[host_EvC_COUNTER] -= 1;
      if (guest_state[host_EvC_COUNTER] >= 0) goto nofail;
      goto guest_state[host_EvC_FAILADDR];
   nofail:

   The dispatch counter is a 32-bit value. */
static UChar *
s390_insn_evcheck_emit(UChar *buf, const s390_insn *insn,
                       VexEndness endness_host)
{
   s390_amode *amode;
   UInt b, d;
   UChar *code_begin, *code_end;

   code_begin = buf;

   amode = insn->variant.evcheck.counter;
   vassert(amode->tag == S390_AMODE_B12);
   b = hregNumber(amode->b);
   d = amode->d;

   /* Decrement the dispatch counter in the guest state */
   if (s390_host_has_gie) {
      buf = s390_emit_ASI(buf, -1, b, DISP20(d));          /* 6 bytes */
   } else {
      buf = s390_emit_LHI(buf, R0, -1);                    /* 4 bytes */
      buf = s390_emit_A(buf, R0, 0, b, d);                 /* 4 bytes */
      buf = s390_emit_ST(buf, R0, 0, b, d);                /* 4 bytes */
   }

   /* Jump over the next insn if >= 0 */
   buf = s390_emit_BRC(buf, S390_CC_HE, (4 + 6 + 2) / 2);  /* 4 bytes */

   /* Computed goto to fail_address */
   amode = insn->variant.evcheck.fail_addr;
   b = hregNumber(amode->b);
   d = amode->d;
   buf = s390_emit_LG(buf, S390_REGNO_TCHAIN_SCRATCH, 0, b, DISP20(d)); /* 6 bytes */
   buf = s390_emit_BCR(buf, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH); /* 2 bytes */

   code_end = buf;

   /* Make sure the size of the generated code is identical to the size
      returned by evCheckSzB_S390 */
   vassert(evCheckSzB_S390() == code_end - code_begin);

   return buf;
}
static UChar *
s390_insn_profinc_emit(UChar *buf,
                       const s390_insn *insn __attribute__((unused)))
{
   /* Generate a code template to increment a memory location whose
      address will be known later as an immediate value. This code
      template will be patched once the memory location is known.
      For now we do this with address == 0. */
   buf = s390_tchain_load64(buf, S390_REGNO_TCHAIN_SCRATCH, 0);
   if (s390_host_has_gie) {
      buf = s390_emit_AGSI(buf, 1, S390_REGNO_TCHAIN_SCRATCH, DISP20(0));
   } else {
      buf = s390_emit_LGHI(buf, R0, 1);
      buf = s390_emit_AG( buf, R0, 0, S390_REGNO_TCHAIN_SCRATCH, DISP20(0));
      buf = s390_emit_STG(buf, R0, 0, S390_REGNO_TCHAIN_SCRATCH, DISP20(0));
   }

   return buf;
}
static UChar *
s390_insn_vec_amodeop_emit(UChar *buf, const s390_insn *insn)
{
   UChar v1 = hregNumber(insn->variant.vec_amodeop.dst);
   UChar v2 = hregNumber(insn->variant.vec_amodeop.op1);
   s390_amode *op2 = insn->variant.vec_amodeop.op2;

   vassert(hregNumber(op2->x) == 0);
   vassert(fits_unsigned_12bit(op2->d));

   UChar b = hregNumber(op2->b);
   UShort d = op2->d;

   switch (insn->variant.vec_amodeop.tag) {
   case S390_VEC_GET_ELEM:
      return s390_emit_VLGV(buf, v1, b, d, v2, s390_getM_from_size(insn->size));

   case S390_VEC_ELEM_SHL_INT:
      return s390_emit_VESL(buf, v1, b, d, v2, s390_getM_from_size(insn->size));

   case S390_VEC_ELEM_SHRA_INT:
      return s390_emit_VESRA(buf, v1, b, d, v2, s390_getM_from_size(insn->size));

   case S390_VEC_ELEM_SHRL_INT:
      return s390_emit_VESRL(buf, v1, b, d, v2, s390_getM_from_size(insn->size));

   default: goto fail;
   }

 fail:
   vpanic("s390_insn_vec_amodeop_emit");
}
static UChar *
s390_insn_vec_amodeintop_emit(UChar *buf, const s390_insn *insn)
{
   UChar v1 = hregNumber(insn->variant.vec_amodeintop.dst);
   s390_amode *op2 = insn->variant.vec_amodeintop.op2;
   UChar r3 = hregNumber(insn->variant.vec_amodeintop.op3);

   vassert(hregNumber(op2->x) == 0);
   UChar b = hregNumber(op2->b);
   UShort d = op2->d;

   switch (insn->variant.vec_amodeintop.tag) {
   case S390_VEC_SET_ELEM:
      return s390_emit_VLVG(buf, v1, b, d, r3, s390_getM_from_size(insn->size));
   default: goto fail;
   }

 fail:
   vpanic("s390_insn_vec_amodeop_emit");
}
static UChar *
s390_insn_vec_binop_emit(UChar *buf, const s390_insn *insn)
{
   s390_vec_binop_t tag = insn->variant.vec_binop.tag;
   UChar size = insn->size;
   UChar v1 = hregNumber(insn->variant.vec_binop.dst);
   UChar v2 = hregNumber(insn->variant.vec_binop.op1);
   UChar v3 = hregNumber(insn->variant.vec_binop.op2);

   switch (tag) {
   case S390_VEC_PACK:
      return s390_emit_VPK(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_PACK_SATURU:
      return s390_emit_VPKLS(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_PACK_SATURS:
      return s390_emit_VPKS(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_COMPARE_EQUAL:
      return s390_emit_VCEQ(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_OR:
      return s390_emit_VO(buf, v1, v2, v3);
   case S390_VEC_ORC:
      return s390_emit_VOC(buf, v1, v2, v3);
   case S390_VEC_XOR:
      return s390_emit_VX(buf, v1, v2, v3);
   case S390_VEC_AND:
      return s390_emit_VN(buf, v1, v2, v3);
   case S390_VEC_MERGEL:
      return s390_emit_VMRL(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_MERGEH:
      return s390_emit_VMRH(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_NOR:
      return s390_emit_VNO(buf, v1, v2, v3);
   case S390_VEC_INT_ADD:
      return s390_emit_VA(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_SUB:
      return s390_emit_VS(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_MAXU:
      return s390_emit_VMXL(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_MAXS:
      return s390_emit_VMX(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_MINU:
      return s390_emit_VMNL(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_MINS:
      return s390_emit_VMN(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_AVGU:
      return s390_emit_VAVGL(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_AVGS:
      return s390_emit_VAVG(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_COMPARE_GREATERS:
      return s390_emit_VCH(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_COMPARE_GREATERU:
      return s390_emit_VCHL(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_MUL_HIGHS:
      return s390_emit_VMH(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_MUL_HIGHU:
      return s390_emit_VMLH(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_MUL_LOW:
      return s390_emit_VML(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_MUL_EVENS:
      return s390_emit_VME(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INT_MUL_EVENU:
      return s390_emit_VMLE(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_ELEM_SHL_V:
      return s390_emit_VESLV(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_ELEM_SHRA_V:
      return s390_emit_VESRAV(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_ELEM_SHRL_V:
      return s390_emit_VESRLV(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_ELEM_ROLL_V:
      return s390_emit_VERLLV(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_SHL_BITS:
      return s390_emit_VSL(buf, v1, v2, v3);
   case S390_VEC_SHRL_BITS:
      return s390_emit_VSRL(buf, v1, v2, v3);
   case S390_VEC_SHRA_BITS:
      return s390_emit_VSRA(buf, v1, v2, v3);
   case S390_VEC_SHL_BYTES:
      return s390_emit_VSLB(buf, v1, v2, v3);
   case S390_VEC_SHRL_BYTES:
      return s390_emit_VSRLB(buf, v1, v2, v3);
   case S390_VEC_SHRA_BYTES:
      return s390_emit_VSRAB(buf, v1, v2, v3);
   case S390_VEC_PWSUM_W:
      vassert((size == 1) || (size == 2));
      return s390_emit_VSUM(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_PWSUM_DW:
      vassert((size == 2) || (size == 4));
      return s390_emit_VSUMG(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_PWSUM_QW:
      vassert((size == 4) || (size == 8));
      return s390_emit_VSUMQ(buf, v1, v2, v3, s390_getM_from_size(size));
   case S390_VEC_INIT_FROM_GPRS:
      return s390_emit_VLVGP(buf, v1, v2, v3);
   case S390_VEC_INIT_FROM_FPRS:
      return s390_emit_VMRH(buf, v1, v2, v3, 3);
   case S390_VEC_FLOAT_ADD:
      return s390_emit_VFA(buf, v1, v2, v3, s390_getM_from_size(size), 0);
   case S390_VEC_FLOAT_SUB:
      return s390_emit_VFS(buf, v1, v2, v3, s390_getM_from_size(size), 0);
   case S390_VEC_FLOAT_MUL:
      return s390_emit_VFM(buf, v1, v2, v3, s390_getM_from_size(size), 0);
   case S390_VEC_FLOAT_DIV:
      return s390_emit_VFD(buf, v1, v2, v3, s390_getM_from_size(size), 0);
   case S390_VEC_FLOAT_COMPARE_EQUAL:
      return s390_emit_VFCE(buf, v1, v2, v3, s390_getM_from_size(size), 0, 0);
   case S390_VEC_FLOAT_COMPARE_LESS_OR_EQUAL:
      // PJF I assume that CHE is compare higher or equal so the order needs swapping
      // coverity[SWAPPED_ARGUMENTS:FALSE]
      return s390_emit_VFCHE(buf, v1, v3, v2, s390_getM_from_size(size), 0, 0);
   case S390_VEC_FLOAT_COMPARE_LESS:
      // PJF as above but this time compare higher
      // coverity[SWAPPED_ARGUMENTS:FALSE]
      return s390_emit_VFCH(buf, v1, v3, v2, s390_getM_from_size(size), 0, 0);
   default:
      goto fail;
   }

 fail:
   ppS390Instr(insn, True);
   vpanic("s390_insn_vec_binop_emit");
}
static UChar *
s390_insn_vec_triop_emit(UChar *buf, const s390_insn *insn)
{
   s390_vec_triop_t tag = insn->variant.vec_triop.tag;
   UChar size = insn->size;
   UChar v1 = hregNumber(insn->variant.vec_triop.dst);
   UChar v2 = hregNumber(insn->variant.vec_triop.op1);
   UChar v3 = hregNumber(insn->variant.vec_triop.op2);
   UChar v4 = hregNumber(insn->variant.vec_triop.op3);

   switch (tag) {
   case S390_VEC_PERM: {
      vassert(size == 16);
      return s390_emit_VPERM(buf, v1, v2, v3, v4);
   }
   case S390_VEC_FLOAT_MADD:
      return s390_emit_VFMA(buf, v1, v2, v3, v4, 0,
                            s390_getM_from_size(size));
   case S390_VEC_FLOAT_MSUB:
      return s390_emit_VFMS(buf, v1, v2, v3, v4, 0,
                            s390_getM_from_size(size));
   default:
      goto fail;
   }

 fail:
   vpanic("s390_insn_vec_triop_emit");
}
static UChar *
s390_insn_vec_replicate_emit(UChar *buf, const s390_insn *insn)
{
   UChar v1 = hregNumber(insn->variant.vec_replicate.dst);
   UChar v2 = hregNumber(insn->variant.vec_replicate.op1);
   UShort idx = (UShort)insn->variant.vec_replicate.idx;

   return s390_emit_VREP(buf, v1, v2, idx, s390_getM_from_size(insn->size));
}
Int
emit_S390Instr(Bool *is_profinc, UChar *buf, Int nbuf, const s390_insn *insn,
               Bool mode64, VexEndness endness_host,
               const void *disp_cp_chain_me_to_slowEP,
               const void *disp_cp_chain_me_to_fastEP,
               const void *disp_cp_xindir,
               const void *disp_cp_xassisted)
{
   UChar *end;

   /* Used to be 48 bytes. Make sure it stays low */
   vassert(sizeof(s390_insn) == 32);

   switch (insn->tag) {
   case S390_INSN_LOAD:
      end = s390_insn_load_emit(buf, insn);
      break;

   case S390_INSN_STORE:
      end = s390_insn_store_emit(buf, insn);
      break;

   case S390_INSN_MOVE:
      end = s390_insn_move_emit(buf, insn);
      break;

   case S390_INSN_MEMCPY:
      end = s390_insn_memcpy_emit(buf, insn);
      break;

   case S390_INSN_COND_MOVE:
      end = s390_insn_cond_move_emit(buf, insn);
      break;

   case S390_INSN_LOAD_IMMEDIATE:
      end = s390_insn_load_immediate_emit(buf, insn);
      break;

   case S390_INSN_ALU:
      end = s390_insn_alu_emit(buf, insn);
      break;

   case S390_INSN_SMUL:
   case S390_INSN_UMUL:
      end = s390_insn_mul_emit(buf, insn);
      break;

   case S390_INSN_SDIV:
   case S390_INSN_UDIV:
      end = s390_insn_div_emit(buf, insn);
      break;

   case S390_INSN_DIVS:
      end = s390_insn_divs_emit(buf, insn);
      break;

   case S390_INSN_CLZ:
      end = s390_insn_clz_emit(buf, insn);
      break;

   case S390_INSN_UNOP:
      end = s390_insn_unop_emit(buf, insn);
      break;

   case S390_INSN_TEST:
      end = s390_insn_test_emit(buf, insn);
      break;

   case S390_INSN_CC2BOOL:
      end = s390_insn_cc2bool_emit(buf, insn);
      break;

   case S390_INSN_CAS:
      end = s390_insn_cas_emit(buf, insn);
      break;

   case S390_INSN_CDAS:
      end = s390_insn_cdas_emit(buf, insn);
      break;

   case S390_INSN_COMPARE:
      end = s390_insn_compare_emit(buf, insn);
      break;

   case S390_INSN_HELPER_CALL:
      end = s390_insn_helper_call_emit(buf, insn);
      if (end == buf) goto fail;
      break;

   case S390_INSN_BFP_TRIOP:
      end = s390_insn_bfp_triop_emit(buf, insn);
      break;

   case S390_INSN_BFP_BINOP:
      end = s390_insn_bfp_binop_emit(buf, insn);
      break;

   case S390_INSN_BFP_UNOP:
      end = s390_insn_bfp_unop_emit(buf, insn);
      break;

   case S390_INSN_BFP_COMPARE:
      end = s390_insn_bfp_compare_emit(buf, insn);
      break;

   case S390_INSN_BFP_CONVERT:
      end = s390_insn_bfp_convert_emit(buf, insn);
      break;

   case S390_INSN_DFP_BINOP:
      end = s390_insn_dfp_binop_emit(buf, insn);
      break;

   case S390_INSN_DFP_UNOP:
      end = s390_insn_dfp_unop_emit(buf, insn);
      break;

   case S390_INSN_DFP_INTOP:
      end = s390_insn_dfp_intop_emit(buf, insn);
      break;

   case S390_INSN_DFP_COMPARE:
      end = s390_insn_dfp_compare_emit(buf, insn);
      break;

   case S390_INSN_DFP_CONVERT:
      end = s390_insn_dfp_convert_emit(buf, insn);
      break;

   case S390_INSN_DFP_REROUND:
      end = s390_insn_dfp_reround_emit(buf, insn);
      break;

   case S390_INSN_FP_CONVERT:
      end = s390_insn_fp_convert_emit(buf, insn);
      break;

   case S390_INSN_MFENCE:
      end = s390_insn_mfence_emit(buf, insn);
      break;

   case S390_INSN_MIMM:
      end = s390_insn_mimm_emit(buf, insn);
      break;

   case S390_INSN_MADD:
      end = s390_insn_madd_emit(buf, insn);
      break;

   case S390_INSN_SET_FPC_BFPRM:
      end = s390_insn_set_fpc_bfprm_emit(buf, insn);
      break;

   case S390_INSN_SET_FPC_DFPRM:
      end = s390_insn_set_fpc_dfprm_emit(buf, insn);
      break;

   case S390_INSN_PROFINC:
      end = s390_insn_profinc_emit(buf, insn);
      /* Tell the caller .. */
      vassert(*is_profinc == False);
      *is_profinc = True;
      break;

   case S390_INSN_EVCHECK:
      end = s390_insn_evcheck_emit(buf, insn, endness_host);
      break;

   case S390_INSN_XDIRECT:
      end = s390_insn_xdirect_emit(buf, insn, disp_cp_chain_me_to_slowEP,
                                   disp_cp_chain_me_to_fastEP);
      break;

   case S390_INSN_XINDIR:
      end = s390_insn_xindir_emit(buf, insn, disp_cp_xindir);
      break;

   case S390_INSN_XASSISTED:
      end = s390_insn_xassisted_emit(buf, insn, disp_cp_xassisted);
      break;

   case S390_INSN_VEC_AMODEOP:
      end = s390_insn_vec_amodeop_emit(buf, insn);
      break;

   case S390_INSN_VEC_AMODEINTOP:
      end = s390_insn_vec_amodeintop_emit(buf, insn);
      break;

   case S390_INSN_VEC_BINOP:
      end = s390_insn_vec_binop_emit(buf, insn);
      break;

   case S390_INSN_VEC_TRIOP:
      end = s390_insn_vec_triop_emit(buf, insn);
      break;

   case S390_INSN_VEC_REPLICATE:
      end = s390_insn_vec_replicate_emit(buf, insn);
      break;

   default:
   fail:
      vpanic("emit_S390Instr");
   }

   vassert(end - buf <= nbuf);

   return end - buf;
}
/* Return the number of bytes emitted for an S390_INSN_EVCHECK.
   See s390_insn_evcheck_emit */
Int
evCheckSzB_S390(void)
{
   return s390_host_has_gie ? 18 : 24;
}
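/* Arithmetic note (assumption, for illustration only): with the
   general-instructions-extension facility the emitted sequence is
   ASI(6) + BRC(4) + LG(6) + BCR(2) = 18 bytes; without it the counter is
   decremented via LHI(4) + A(4) + ST(4), giving 12 + 4 + 6 + 2 = 24 bytes.
   These are exactly the per-insn byte counts annotated in
   s390_insn_evcheck_emit above. */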
/* Patch the counter address into CODE_TO_PATCH as previously
   generated by s390_insn_profinc_emit. */
VexInvalRange
patchProfInc_S390(VexEndness endness_host,
                  void *code_to_patch, const ULong *location_of_counter)
{
   vassert(sizeof(ULong *) == 8);

   s390_tchain_verify_load64(code_to_patch, S390_REGNO_TCHAIN_SCRATCH, 0);

   UChar *p = s390_tchain_patch_load64(code_to_patch,
                                       (Addr)location_of_counter);

   UInt len = p - (UChar *)code_to_patch;
   VexInvalRange vir = { (HWord)code_to_patch, len };
   return vir;
}
/* NB: what goes on here has to be very closely coordinated with the
   s390_insn_xdirect_emit code above. */
VexInvalRange
chainXDirect_S390(VexEndness endness_host,
                  void *place_to_chain,
                  const void *disp_cp_chain_me_EXPECTED,
                  const void *place_to_jump_to)
{
   vassert(endness_host == VexEndnessBE);

   /* What we're expecting to see @ PLACE_TO_CHAIN is:

        load  tchain_scratch, #disp_cp_chain_me_EXPECTED
        goto *tchain_scratch
   */
   const UChar *next;
   next = s390_tchain_verify_load64(place_to_chain, S390_REGNO_TCHAIN_SCRATCH,
                                    (Addr)disp_cp_chain_me_EXPECTED);
   vassert(s390_insn_is_BR(next, S390_REGNO_TCHAIN_SCRATCH));

   /* And what we want to change it to is either:

        load  tchain_scratch, #place_to_jump_to
        goto *tchain_scratch

      ---OR---

      in the case where the displacement is small enough

        BRCL delta       where delta is in half-words
        invalid opcodes

      In both cases the replacement has the same length as the original.
      To remain sane & verifiable,
      (1) limit the displacement for the short form to
          (say) +/- one billion, so as to avoid wraparound
          off-by-ones
      (2) even if the short form is applicable, once every (say)
          1024 times use the long form anyway, so as to maintain
          verifiability
   */

   /* This is the delta we need to put into a BRCL insn. Note, that the
      offset in BRCL is in half-words. Hence division by 2. */
   Long delta =
      (Long)((const UChar *)place_to_jump_to - (const UChar *)place_to_chain) / 2;
   Bool shortOK = delta >= -1000*1000*1000 && delta < 1000*1000*1000;

   static UInt shortCTR = 0; /* DO NOT MAKE NON-STATIC */
   if (shortOK) {
      shortCTR++; // thread safety bleh
      if (0 == (shortCTR & 0x3FF)) {
         shortOK = False;
         if (0)
            vex_printf("QQQ chainXDirect_S390: shortCTR = %u, "
                       "using long jmp\n", shortCTR);
      }
   }

   /* And make the modifications. */
   UChar *p = (UChar *)place_to_chain;
   if (shortOK) {
      p = s390_emit_BRCL(p, S390_CC_ALWAYS, delta);  /* 6 bytes */

      /* Make sure that BRCL fits into the patchable part of an xdirect
         code sequence */
      vassert(6 <= s390_xdirect_patchable_len());

      /* Fill remaining bytes with 0x00 (invalid opcode) */
      Int i;
      for (i = 0; i < s390_xdirect_patchable_len() - 6; ++i)
         p[i] = 0x00;
   } else {
      /*
          load  tchain_scratch, #place_to_jump_to
          goto *tchain_scratch
      */
      Addr64 addr = (Addr)place_to_jump_to;
      p = s390_tchain_load64(p, S390_REGNO_TCHAIN_SCRATCH, addr);
      /* There is no need to emit a BCR here, as it is already there. */
   }

   UInt len = p - (UChar *)place_to_chain;
   VexInvalRange vir = { (HWord)place_to_chain, len };
   return vir;
}
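/* Worked example (assumption, for illustration only): if place_to_jump_to
   lies 0x1000 bytes beyond place_to_chain, the BRCL displacement is
   0x1000 / 2 = 0x800 half-words. Being well inside +/- one billion, the
   short form would normally be chosen, except on every 1024th use
   (shortCTR & 0x3FF == 0) when the long load64+BCR form is emitted to
   keep both paths exercised. */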
/* NB: what goes on here has to be very closely coordinated with the
   s390_insn_xdirect_emit code above. */
VexInvalRange
unchainXDirect_S390(VexEndness endness_host,
                    void *place_to_unchain,
                    const void *place_to_jump_to_EXPECTED,
                    const void *disp_cp_chain_me)
{
   vassert(endness_host == VexEndnessBE);

   /* What we're expecting to see @ PLACE_TO_UNCHAIN:

        load  tchain_scratch, #place_to_jump_to_EXPECTED
        goto *tchain_scratch

      ---OR---

      in the case where the displacement falls within 32 bits

        BRCL delta
        invalid opcodes
   */
   UChar *p = place_to_unchain;

   Bool uses_short_form = False;

   if (s390_insn_is_BRCL(p, S390_CC_ALWAYS)) {
      /* Looks like the short form */
      Int num_hw = *(Int *)&p[2];
      Int delta = 2 * num_hw;

      vassert(p + delta == place_to_jump_to_EXPECTED);

      Int i;
      for (i = 0; i < s390_xdirect_patchable_len() - 6; ++i)
         vassert(p[6+i] == 0x00);
      uses_short_form = True;
   } else {
      /* Should be the long form */
      const UChar *next;
      next = s390_tchain_verify_load64(p, S390_REGNO_TCHAIN_SCRATCH,
                                       (Addr)place_to_jump_to_EXPECTED);
      /* Check for BR *tchain_scratch */
      vassert(s390_insn_is_BR(next, S390_REGNO_TCHAIN_SCRATCH));
   }

   /* And what we want to change it to is:

        load  tchain_scratch, #disp_cp_chain_me
        goto *tchain_scratch
   */

   /* Get the address of the beginning of the load64 code sequence into %r1.
      Do not change the register! This is part of the protocol with the
      dispatcher.
      Note: the incoming argument PLACE_TO_CHAIN points to the beginning of the
      load64 insn sequence. That sequence is prefixed with a BASR to get its
      address (see s390_insn_xdirect_emit). */
   p = s390_emit_BASR(p - S390_BASR_LEN, 1, R0);

   Addr64 addr = (Addr)disp_cp_chain_me;
   p = s390_tchain_load64(p, S390_REGNO_TCHAIN_SCRATCH, addr);

   /* Emit the BCR in case the short form was used. In case of the long
      form, the BCR is already there. */
   if (uses_short_form)
      s390_emit_BCR(p, S390_CC_ALWAYS, S390_REGNO_TCHAIN_SCRATCH);

   UInt len = p - (UChar *)place_to_unchain;
   VexInvalRange vir = { (HWord)place_to_unchain, len };
   return vir;
}
/*---------------------------------------------------------------*/
/*--- end                                    host_s390_defs.c ---*/
/*---------------------------------------------------------------*/