/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
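
/* Emit a move of the given operand size from register src to register dest.
   The move is skipped when it would be a no-op (same register, and either
   native size or a floating-point register). */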
static bool attr_w gen_mov(struct codegen_context *ctx, unsigned size, unsigned dest, unsigned src)
{
	if (dest == src && (size == OP_SIZE_NATIVE || reg_is_fp(dest)))
		return true;

	gen_insn(INSN_MOV, size, 0, 0);
	gen_one(dest);
	gen_one(src);

	return true;
}
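
/* On x32 (ARCH_X86_X32), a pointer returned from C code may carry junk in
   the upper 32 bits, so it is re-moved with address size to sanitize it.
   On all other targets this is a no-op. */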
static bool attr_w gen_sanitize_returned_pointer(struct codegen_context attr_unused *ctx, unsigned attr_unused reg)
{
#if defined(ARCH_X86_X32)
	g(gen_mov(ctx, OP_SIZE_ADDRESS, reg, reg));
#endif
	return true;
}
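
/* ALU operations whose operands may be freely swapped. */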
static bool alu_is_commutative(unsigned alu)
{
	return alu == ALU_ADD || alu == ALU_OR || alu == ALU_AND || alu == ALU_XOR || alu == ALU_MUL || alu == ALU_UMULH || alu == ALU_SMULH;
}
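
/* Emit "dest = src1 <alu> src2". On targets without a three-address form
   the operation is rewritten as a move to dest followed by a two-address
   operation; commutative operations have their operands swapped if that
   avoids clobbering src2. */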
static bool attr_w gen_3address_alu(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src1, unsigned src2, unsigned writes_flags)
{
	if (unlikely(dest == src2) && alu_is_commutative(alu)) {
		/* swap the operands so that dest aliases src1 */
		unsigned sw = src1;
		src1 = src2;
		src2 = sw;
	}
	if (!ARCH_IS_3ADDRESS(alu, writes_flags) && unlikely(dest == src2) && unlikely(dest != src1)) {
		internal(file_line, "gen_3address_alu: invalid registers: %u, %u, %x, %x, %x", size, alu, dest, src1, src2);
	}
	if (!ARCH_IS_3ADDRESS(alu, writes_flags) && dest != src1) {
		g(gen_mov(ctx, OP_SIZE_NATIVE, dest, src1));

		gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(size), size, alu, ALU_WRITES_FLAGS(alu, false) | writes_flags);
		gen_one(dest);
		gen_one(dest);
		gen_one(src2);

		return true;
	}
	gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(size), size, alu, ALU_WRITES_FLAGS(alu, false) | writes_flags);
	gen_one(dest);
	gen_one(src1);
	gen_one(src2);

	return true;
}
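
/* Emit "dest = src <alu> imm". The immediate is classified with an
   IMM_PURPOSE_* value so that the backend can decide whether it can be
   encoded directly or must be loaded into a register. */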
static bool attr_w gen_3address_alu_imm(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src, int64_t imm, unsigned writes_flags)
{
	unsigned purpose =
		alu == ALU_ADD ? IMM_PURPOSE_ADD :
		alu == ALU_SUB ? IMM_PURPOSE_SUB :
		alu == ALU_MUL ? IMM_PURPOSE_MUL :
		alu == ALU_UMULH ? IMM_PURPOSE_MUL :
		alu == ALU_SMULH ? IMM_PURPOSE_MUL :
		alu == ALU_ANDN ? IMM_PURPOSE_ANDN :
		alu == ALU_AND ? IMM_PURPOSE_AND :
		alu == ALU_OR ? IMM_PURPOSE_OR :
		alu == ALU_XOR ? IMM_PURPOSE_XOR :
		alu == ALU_EXTBL ? IMM_PURPOSE_OR :
		alu == ALU_EXTWL ? IMM_PURPOSE_OR :
		alu == ALU_EXTLL ? IMM_PURPOSE_OR :
		alu == ALU_EXTLH ? IMM_PURPOSE_OR :
		alu == ALU_INSBL ? IMM_PURPOSE_OR :
		alu == ALU_MSKBL ? IMM_PURPOSE_OR :
		alu == ALU_ZAP ? IMM_PURPOSE_ANDN :
		alu == ALU_ZAPNOT ? IMM_PURPOSE_AND :
		-1U;
	if (unlikely(purpose == -1U))
		internal(file_line, "gen_3address_alu_imm: invalid parameters: size %u, alu %u, dest %u, src %u, imm %"PRIxMAX"", size, alu, dest, src, (uintmax_t)imm);
	if (!ARCH_IS_3ADDRESS_IMM(alu, writes_flags) && dest != src) {
		g(gen_mov(ctx, OP_SIZE_NATIVE, dest, src));

		g(gen_imm(ctx, imm, purpose, i_size(OP_SIZE_ADDRESS)));
		gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(size), size, alu, ALU_WRITES_FLAGS(alu, is_imm()) | writes_flags);
		gen_one(dest);
		gen_one(dest);
		gen_imm_offset();

		return true;
	}
	g(gen_imm(ctx, imm, purpose, i_size(OP_SIZE_ADDRESS)));
	gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(size), size, alu, ALU_WRITES_FLAGS(alu, is_imm()) | writes_flags);
	gen_one(dest);
	gen_one(src);
	gen_imm_offset();

	return true;
}
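
/* Emit a shift or rotate by a register amount, with the same two-address
   fallback as gen_3address_alu. */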
static bool attr_w attr_unused gen_3address_rot(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src1, unsigned src2)
{
#if defined(ARCH_X86)
	if (dest == src1 && src2 == R_CX) {
		gen_insn(INSN_ROT + ARCH_PARTIAL_ALU(size), size, alu, 1);
		gen_one(dest);
		gen_one(src1);
		gen_one(src2);

		return true;
	}
#endif
	if (!ARCH_IS_3ADDRESS_ROT(alu, size) && dest != src1) {
		if (unlikely(dest == src2))
			internal(file_line, "gen_3address_rot: invalid registers: %u, %u, %x, %x, %x", size, alu, dest, src1, src2);

		g(gen_mov(ctx, OP_SIZE_NATIVE, dest, src1));

		gen_insn(INSN_ROT + ARCH_PARTIAL_ALU(size), size, alu, ROT_WRITES_FLAGS(alu, size, false));
		gen_one(dest);
		gen_one(dest);
		gen_one(src2);

		return true;
	}
	gen_insn(INSN_ROT + ARCH_PARTIAL_ALU(size), size, alu, ROT_WRITES_FLAGS(alu, size, false));
	gen_one(dest);
	gen_one(src1);
	gen_one(src2);

	return true;
}
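
/* Emit a shift or rotate by a constant amount. */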
static bool attr_w gen_3address_rot_imm(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src, int64_t imm, unsigned writes_flags)
{
	if (!ARCH_IS_3ADDRESS_ROT_IMM(alu) && dest != src) {
		g(gen_mov(ctx, OP_SIZE_NATIVE, dest, src));

		gen_insn(INSN_ROT + ARCH_PARTIAL_ALU(size), size, alu, ROT_WRITES_FLAGS(alu, size, true) | writes_flags);
		gen_one(dest);
		gen_one(dest);
		gen_one(ARG_IMM);
		gen_eight(imm);

		return true;
	}
	gen_insn(INSN_ROT + ARCH_PARTIAL_ALU(size), size, alu, ROT_WRITES_FLAGS(alu, size, true) | writes_flags);
	gen_one(dest);
	gen_one(src);
	gen_one(ARG_IMM);
	gen_eight(imm);

	return true;
}
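
/* Emit a unary ALU operation (one source, one destination). */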
static bool attr_w gen_2address_alu1(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src, unsigned writes_flags)
{
	if (!ARCH_IS_2ADDRESS(alu) && dest != src) {
		g(gen_mov(ctx, OP_SIZE_NATIVE, dest, src));

		gen_insn(INSN_ALU1 + ARCH_PARTIAL_ALU(size), size, alu, ALU1_WRITES_FLAGS(alu) | writes_flags);
		gen_one(dest);
		gen_one(dest);

		return true;
	}
	gen_insn(INSN_ALU1 + ARCH_PARTIAL_ALU(size), size, alu, ALU1_WRITES_FLAGS(alu) | writes_flags);
	gen_one(dest);
	gen_one(src);

	return true;
}
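
/* Emit a three-operand floating-point ALU operation, falling back to
   move + two-address form where needed. */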
static bool attr_w gen_3address_fp_alu(struct codegen_context *ctx, unsigned size, unsigned alu, unsigned dest, unsigned src1, unsigned src2)
{
	if (!ARCH_IS_3ADDRESS_FP && unlikely(dest == src2) && unlikely(dest != src1)) {
		internal(file_line, "gen_3address_fp_alu: invalid registers: %u, %u, %x, %x, %x", size, alu, dest, src1, src2);
	}
	if (!ARCH_IS_3ADDRESS_FP && dest != src1) {
		g(gen_mov(ctx, size, dest, src1));

		gen_insn(INSN_FP_ALU, size, alu, 0);
		gen_one(dest);
		gen_one(dest);
		gen_one(src2);

		return true;
	}
	gen_insn(INSN_FP_ALU, size, alu, 0);
	gen_one(dest);
	gen_one(src1);
	gen_one(src2);

	return true;
}
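
/* Load a 16-bit value from src + offset. On Alpha without the BWX
   extension this uses an aligned 8-byte load plus extwl; on s390 without
   extended-immediate support a sign-extending load is masked back down
   to 16 bits. */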
static bool attr_w attr_unused gen_load_two(struct codegen_context *ctx, unsigned dest, unsigned src, int64_t offset)
{
	if (!ARCH_HAS_BWX) {
		if (!(offset & 7)) {
			g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
			gen_insn(INSN_MOV_U, OP_SIZE_NATIVE, 0, 0);
			gen_one(dest);
			gen_address_offset();

			g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_EXTWL, dest, dest, src, 0));
		} else {
			/* unaligned offset: compute the address explicitly */
			g(gen_imm(ctx, offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
			gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
			gen_one(R_OFFSET_IMM);
			gen_one(src);
			gen_imm_offset();

			gen_insn(INSN_MOV_U, OP_SIZE_NATIVE, 0, 0);
			gen_one(dest);
			gen_one(ARG_ADDRESS_1);
			gen_one(R_OFFSET_IMM);
			gen_eight(0);

			g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_EXTWL, dest, dest, R_OFFSET_IMM, 0));
		}
#if defined(ARCH_S390)
	} else if (!cpu_test_feature(CPU_FEATURE_extended_imm)) {
		g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_2));
		gen_insn(INSN_MOVSX, OP_SIZE_2, 0, 0);
		gen_one(dest);
		gen_address_offset();

		g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_AND, dest, dest, 0xffff, 0));
#endif
	} else {
		g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_2));
		gen_insn(INSN_MOV, OP_SIZE_2, 0, 0);
		gen_one(dest);
		gen_address_offset();
	}
	return true;
}
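
/* Load 32 bits of code from src + offset, coping with targets that trap
   on unaligned loads. */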
static bool attr_w gen_load_code_32(struct codegen_context *ctx, unsigned dest, unsigned src, int64_t offset)
{
#if ARG_MODE_N == 3 && defined(ARCH_ALPHA) && !(defined(C_BIG_ENDIAN) ^ CODE_ENDIAN)
	if (!ARCH_HAS_BWX && UNALIGNED_TRAP) {
		if (offset & 7) {
			g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_ADD, R_OFFSET_IMM, src, offset, 0));
			src = R_OFFSET_IMM;
			offset = 0;
		}

		g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV_U, OP_SIZE_NATIVE, 0, 0);
		gen_one(dest);
		gen_address_offset();

		g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_EXTLL, dest, dest, src, 0));

		g(gen_address(ctx, src, offset + 3, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV_U, OP_SIZE_NATIVE, 0, 0);
		gen_one(R_CONST_IMM);
		gen_address_offset();

		g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_EXTLH, R_CONST_IMM, R_CONST_IMM, src, 0));

		g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_OR, dest, dest, R_CONST_IMM, 0));

		return true;
	}
#endif
#if ARG_MODE_N == 3 && defined(ARCH_MIPS) && !(defined(C_BIG_ENDIAN) ^ CODE_ENDIAN)
	if (!MIPS_R6 && UNALIGNED_TRAP) {
		/* lwl/lwr pair for an unaligned 32-bit load */
		g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_4));
		gen_insn(INSN_MOV_LR, OP_SIZE_4, !CODE_ENDIAN, 0);
		gen_one(dest);
		gen_one(dest);
		gen_address_offset();

		g(gen_address(ctx, src, offset + 3, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_4));
		gen_insn(INSN_MOV_LR, OP_SIZE_4, CODE_ENDIAN, 0);
		gen_one(dest);
		gen_one(dest);
		gen_address_offset();

		return true;
	}
#endif
#if !(defined(C_BIG_ENDIAN) ^ CODE_ENDIAN)
	if (UNALIGNED_TRAP) {
		/* assemble the 32-bit value from two 16-bit halves */
		g(gen_load_two(ctx, dest, src, offset));
		g(gen_load_two(ctx, R_CONST_IMM, src, offset + 2));
#if defined(C_BIG_ENDIAN)
		g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, dest, dest, 16, false));
#else
		g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_CONST_IMM, R_CONST_IMM, 16, false));
#endif
		g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_OR, dest, dest, R_CONST_IMM, 0));

		return true;
	}
#endif
	g(gen_address(ctx, src, offset, IMM_PURPOSE_LDR_OFFSET, ARG_MODE_N - 1));
	gen_insn(INSN_MOV, ARG_MODE_N - 1, 0, 0);
	gen_one(dest);
	gen_address_offset();

	return true;
}
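
/* Materialize the boolean result of a comparison in reg_dest. reg2 may be
   (unsigned)-1, in which case the immediate imm is compared instead. */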
static bool attr_w attr_unused gen_cmp_dest_reg(struct codegen_context *ctx, unsigned attr_unused size, unsigned reg1, unsigned reg2, unsigned reg_dest, int64_t imm, unsigned cond)
{
	unsigned neg_result = false;

#if defined(ARCH_ALPHA)
	if (cond == COND_NE) {
		/* Alpha has no "compare not equal": compute equality and negate */
		if (reg2 == (unsigned)-1)
			g(gen_imm(ctx, imm, IMM_PURPOSE_CMP, i_size_cmp(size)));
		gen_insn(INSN_CMP_DEST_REG, i_size_cmp(size), COND_E, 0);
		gen_one(reg_dest);
		gen_one(reg1);
		if (reg2 == (unsigned)-1)
			gen_imm_offset();
		else
			gen_one(reg2);

		neg_result = true;
		goto neg;
	}
#endif
#if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
	if (cond == COND_E || cond == COND_NE) {
		/* xor the operands and compare the result against zero */
		if (reg2 == (unsigned)-1 && !imm) {
			g(gen_mov(ctx, OP_SIZE_NATIVE, reg_dest, reg1));
		} else {
			if (reg2 == (unsigned)-1)
				g(gen_imm(ctx, imm, IMM_PURPOSE_XOR, i_size(size)));
			gen_insn(INSN_ALU, i_size(size), ALU_XOR, ALU_WRITES_FLAGS(ALU_XOR, reg2 == (unsigned)-1 ? is_imm() : false));
			gen_one(reg_dest);
			gen_one(reg1);
			if (reg2 == (unsigned)-1)
				gen_imm_offset();
			else
				gen_one(reg2);
		}
		if (cond == COND_E) {
			g(gen_imm(ctx, 1, IMM_PURPOSE_CMP, i_size_cmp(size)));
			gen_insn(INSN_CMP_DEST_REG, i_size_cmp(size), COND_B, 0);
			gen_one(reg_dest);
			gen_one(reg_dest);
			gen_imm_offset();
		} else {
			gen_insn(INSN_CMP_DEST_REG, i_size_cmp(size), COND_B, 0);
			gen_one(reg_dest);
			gen_one(R_ZERO);
			gen_one(reg_dest);
		}
		return true;
	}
#endif
	if (cond == COND_GE || cond == COND_LE || cond == COND_AE || cond == COND_BE) {
		/* compute the strict inverse and negate the result */
		cond ^= 1;
		neg_result = true;
	}

#if defined(ARCH_IA64)
	if (reg2 == (unsigned)-1)
		g(gen_imm(ctx, imm, IMM_PURPOSE_CMP, i_size_cmp(size)));
	gen_insn(INSN_CMP_DEST_REG, i_size_cmp(size), cond, 0);
	gen_one(R_CMP_RESULT);
	gen_one(reg1);
	if (reg2 == (unsigned)-1)
		gen_imm_offset();
	else
		gen_one(reg2);

	g(gen_mov(ctx, OP_SIZE_NATIVE, reg_dest, R_CMP_RESULT));
#else
	if (reg2 == (unsigned)-1)
		g(gen_imm(ctx, imm, IMM_PURPOSE_CMP, i_size_cmp(size)));
	gen_insn(INSN_CMP_DEST_REG, i_size_cmp(size), cond, 0);
	gen_one(reg_dest);
	gen_one(reg1);
	if (reg2 == (unsigned)-1)
		gen_imm_offset();
	else
		gen_one(reg2);
#endif

#if defined(ARCH_ALPHA)
neg:
#endif
	if (neg_result)
		g(gen_3address_alu_imm(ctx, i_size(size), ALU_XOR, reg_dest, reg_dest, 1, 0));

	return true;
}
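
/* Compare or test two registers and jump to label if the condition holds.
   Uses the flags register where the architecture has one, otherwise a
   compare-into-register followed by a jump on that register. */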
static bool attr_w gen_cmp_test_jmp(struct codegen_context *ctx, unsigned insn, unsigned op_size, unsigned reg1, unsigned reg2, unsigned cond, uint32_t label)
{
	bool arch_use_flags = ARCH_HAS_FLAGS;
#if defined(ARCH_ARM64)
	if (insn == INSN_TEST && reg1 == reg2 && (cond == COND_E || cond == COND_NE))
		arch_use_flags = false;
#endif
#if defined(ARCH_SPARC)
	if (insn == INSN_TEST && reg1 == reg2)
		arch_use_flags = false;
#endif
	if (arch_use_flags) {
		if (COND_IS_LOGICAL(cond)) {
			gen_insn(insn, op_size, 0, 2);
			gen_one(reg1);
			gen_one(reg2);

			gen_insn(INSN_JMP_COND_LOGICAL, op_size, cond, 0);
			gen_four(label);

			return true;
		}

		gen_insn(insn, op_size, 0, 1);
		gen_one(reg1);
		gen_one(reg2);

#if defined(ARCH_POWER) || defined(ARCH_S390)
		if (insn == INSN_TEST) {
			gen_insn(INSN_JMP_COND_LOGICAL, op_size, cond, 0);
			gen_four(label);

			return true;
		}
#endif
		gen_insn(INSN_JMP_COND, op_size, cond, 0);
		gen_four(label);
	} else if (insn == INSN_CMP) {
#if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_RISCV64) || (defined(ARCH_MIPS) && MIPS_R6)
		gen_insn(INSN_JMP_2REGS, op_size, cond, 0);
		gen_one(reg1);
		gen_one(reg2);
		gen_four(label);

		return true;
#elif defined(R_CMP_RESULT)
		unsigned jmp_cond = COND_NE;
#if defined(ARCH_MIPS)
		if (cond == COND_E || cond == COND_NE) {
			gen_insn(INSN_JMP_2REGS, op_size, cond, 0);
			gen_one(reg1);
			gen_one(reg2);
			gen_four(label);
			return true;
		}
		if (cond == COND_AE || cond == COND_BE || cond == COND_LE || cond == COND_GE) {
			/* convert to the strict comparison and invert the branch sense */
			cond ^= 1;
			jmp_cond ^= 1;
		}
#endif
#if defined(ARCH_ALPHA)
		if (cond == COND_NE) {
			g(gen_3address_alu(ctx, op_size, ALU_XOR, R_CMP_RESULT, reg1, reg2, 0));
		} else
#endif
		{
			gen_insn(INSN_CMP_DEST_REG, op_size, cond, 0);
			gen_one(R_CMP_RESULT);
			gen_one(reg1);
			gen_one(reg2);
		}

		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, jmp_cond, 0);
		gen_one(R_CMP_RESULT);
		gen_four(label);
#else
		internal(file_line, "gen_cmp_test_jmp: R_CMP_RESULT not defined");
#endif
	} else if (insn == INSN_TEST) {
		if (unlikely(reg1 != reg2))
			internal(file_line, "gen_cmp_test_jmp: INSN_TEST with two distinct registers is unsupported");

#if defined(ARCH_IA64)
		g(gen_imm(ctx, 0, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
		gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, cond, 0);
		gen_one(R_CMP_RESULT);
		gen_one(reg1);
		gen_imm_offset();

		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
		gen_one(R_CMP_RESULT);
		gen_four(label);
#else
		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond, 0);
		gen_one(reg1);
		gen_four(label);
#endif
	}

	return true;
}
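
/* Compare or test a register against an immediate and jump to label if the
   condition holds. Single-bit tests are mapped to branch-on-bit
   instructions where the target supports them. */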
static bool attr_w gen_cmp_test_imm_jmp(struct codegen_context *ctx, unsigned insn, unsigned attr_unused op_size, unsigned reg1, int64_t value, unsigned cond, uint32_t label)
{
	if (insn == INSN_TEST && (cond == COND_E || cond == COND_NE) && is_power_of_2((uint64_t)value)) {
#ifdef HAVE_BUILTIN_CTZ
		unsigned attr_unused bit = __builtin_ctzll(value);
#else
		unsigned attr_unused bit = 0;
		while (!((uint64_t)value & ((uint64_t)1 << bit)))
			bit++;
#endif
#if defined(ARCH_ALPHA) || defined(ARCH_PARISC)
		if (value == 1 && (cond == COND_E || cond == COND_NE)) {
			/* branch on the low bit of the register */
			gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond == COND_E ? COND_BLBC : COND_BLBS, 0);
			gen_one(reg1);
			gen_four(label);

			return true;
		}
#endif
#if defined(ARCH_ARM64) || defined(ARCH_PARISC)
		gen_insn(INSN_JMP_REG_BIT, OP_SIZE_NATIVE, bit | ((cond == COND_NE) << 6), 0);
		gen_one(reg1);
		gen_four(label);

		return true;
#endif
#if defined(ARCH_POWER)
		/* shift the tested bit into the sign position and branch on it */
		g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_CONST_IMM, reg1, (8U << OP_SIZE_NATIVE) - 1 - bit, true));

		gen_insn(INSN_JMP_COND, OP_SIZE_NATIVE, cond == COND_E ? COND_GE : COND_L, 0);
		gen_four(label);

		return true;
#endif
#if defined(ARCH_IA64)
		gen_insn(INSN_TEST_DEST_REG, OP_SIZE_NATIVE, bit | ((cond == COND_NE) << 6), 0);
		gen_one(R_CMP_RESULT);
		gen_one(reg1);

		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
		gen_one(R_CMP_RESULT);
		gen_four(label);

		return true;
#endif
#if defined(R_CMP_RESULT)
		if (!is_direct_const(1ULL << bit, IMM_PURPOSE_AND, OP_SIZE_NATIVE) && ARCH_HAS_BTX(BTX_BTEXT, OP_SIZE_NATIVE, true)) {
			gen_insn(INSN_BTX, OP_SIZE_NATIVE, BTX_BTEXT, 0);
			gen_one(R_CMP_RESULT);
			gen_one(reg1);
			gen_one(ARG_IMM);
			gen_eight(bit);

			gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond, 0);
			gen_one(R_CMP_RESULT);
			gen_four(label);

			return true;
		}
#endif
	}

#if ARCH_HAS_FLAGS
	if (unlikely(insn == INSN_CMP) && COND_IS_LOGICAL(cond)) {
		g(gen_imm(ctx, value, IMM_PURPOSE_CMP_LOGICAL, op_size));
		gen_insn(insn, op_size, 0, 2);
		gen_one(reg1);
		gen_imm_offset();

		gen_insn(INSN_JMP_COND_LOGICAL, op_size, cond, 0);
		gen_four(label);

		return true;
	}
	g(gen_imm(ctx, value, insn == INSN_CMP ? IMM_PURPOSE_CMP : IMM_PURPOSE_TEST, op_size));
	gen_insn(insn, op_size, 0, 1);
	gen_one(reg1);
	gen_imm_offset();

	gen_insn(INSN_JMP_COND, op_size, cond, 0);
	gen_four(label);
#else
	if (insn == INSN_CMP) {
#if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_RISCV64)
		g(gen_imm(ctx, value, IMM_PURPOSE_JMP_2REGS, op_size));
#if defined(ARCH_PARISC)
		gen_insn(INSN_JMP_2REGS, op_size, cond, 0);
#else
		gen_insn(INSN_JMP_2REGS, i_size_cmp(op_size), cond, 0);
#endif
		gen_one(reg1);
		gen_imm_offset();
		gen_four(label);
#else
		unsigned final_cond = COND_NE;
#if defined(ARCH_ALPHA)
		if (cond == COND_AE || cond == COND_A || cond == COND_GE || cond == COND_G) {
			/* Alpha only has "less than" style compares: invert */
			cond ^= 1;
			final_cond = COND_E;
		} else if (cond == COND_NE) {
			g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_CMP_RESULT, reg1, value, 0));

			gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
			gen_one(R_CMP_RESULT);
			gen_four(label);

			return true;
		}
#endif
#if defined(ARCH_MIPS)
		if (cond == COND_E || cond == COND_NE) {
			g(gen_load_constant(ctx, R_CONST_IMM, value));
			gen_insn(INSN_JMP_2REGS, OP_SIZE_NATIVE, cond, 0);
			gen_one(reg1);
			gen_one(R_CONST_IMM);
			gen_four(label);

			return true;
		}
		if (cond == COND_AE || cond == COND_BE || cond == COND_LE || cond == COND_GE) {
			cond ^= 1;
			final_cond = COND_E;
		}
#endif
		if (cond == COND_A || cond == COND_G) {
			/* no immediate form: load the constant into a register */
			g(gen_load_constant(ctx, R_CONST_IMM, value));
			gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, cond, 0);
			gen_one(R_CMP_RESULT);
			gen_one(reg1);
			gen_one(R_CONST_IMM);
		} else {
			g(gen_imm(ctx, value, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
			gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, cond, 0);
			gen_one(R_CMP_RESULT);
			gen_one(reg1);
			gen_imm_offset();
		}

		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, final_cond, 0);
		gen_one(R_CMP_RESULT);
		gen_four(label);
#endif
	} else if (insn == INSN_TEST) {
#if defined(ARCH_IA64)
		internal(file_line, "gen_cmp_test_imm_jmp: value %"PRIxMAX" not supported", (uintmax_t)value);
#endif
		g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_AND, R_CMP_RESULT, reg1, value, 0));

		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond, 0);
		gen_one(R_CMP_RESULT);
		gen_four(label);
	} else {
		internal(file_line, "gen_cmp_test_imm_jmp: invalid insn");
	}
#endif
	return true;
}
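
/* Jump to label according to cond applied to reg compared against zero. */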
static bool attr_w gen_jmp_on_zero(struct codegen_context *ctx, unsigned attr_unused op_size, unsigned reg, unsigned cond, uint32_t label)
{
	bool jmp_reg = false;
#if defined(ARCH_ALPHA) || defined(ARCH_ARM64) || defined(ARCH_LOONGARCH64) || defined(ARCH_RISCV64)
	jmp_reg = true;
#endif
#if defined(ARCH_SPARC)
	jmp_reg = true;
#endif
	if (jmp_reg) {
		gen_insn(INSN_JMP_REG, i_size(op_size), cond, 0);
		gen_one(reg);
		gen_four(label);

		return true;
	}
	g(gen_cmp_test_jmp(ctx, INSN_TEST, i_size(op_size), reg, reg, cond, label));

	return true;
}
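
/* Jump to label if the INT_DEFAULT_BITS-wide value in reg is negative. */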
static bool attr_w gen_jmp_if_negative(struct codegen_context *ctx, unsigned reg, uint32_t label)
{
#if defined(ARCH_ARM64) || defined(ARCH_PARISC)
	gen_insn(INSN_JMP_REG_BIT, OP_SIZE_NATIVE, (INT_DEFAULT_BITS - 1) | ((uint32_t)1 << 6), 0);
	gen_one(reg);
	gen_four(label);
#else
	g(gen_jmp_on_zero(ctx, OP_SIZE_INT, reg, COND_S, label));
#endif
	return true;
}
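
/* x86 only: begin a conditional move into reg. If cmov is unavailable, a
   conditional jump around a plain move is emitted instead and *label is
   set. Either way, one source operand is left open: the caller is expected
   to emit it and then resolve *label if it is non-zero. */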
#if defined(ARCH_X86)
static bool attr_w gen_cmov(struct codegen_context *ctx, unsigned op_size, unsigned cond, unsigned reg, uint32_t *label)
{
	if (unlikely(op_size < OP_SIZE_4))
		internal(file_line, "gen_cmov: unsupported operand size");
	if (likely(cpu_test_feature(CPU_FEATURE_cmov))) {
		gen_insn(INSN_CMOV, op_size, cond, 0);
		gen_one(reg);
		gen_one(reg);
		*label = 0;
	} else {
		*label = alloc_label(ctx);
		if (unlikely(!*label))
			return false;
		gen_insn(INSN_JMP_COND, op_size, cond ^ 1, 0);
		gen_four(*label);
		gen_insn(INSN_MOV, op_size, 0, 0);
		gen_one(reg);
	}
	return true;
}
#endif
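
/* Sign- or zero-extend a sub-native value from src into dest. Falls back
   to a shift-left / shift-right pair when the target has no suitable
   extension instruction. */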
static bool attr_w gen_extend(struct codegen_context *ctx, unsigned op_size, enum extend ex, unsigned dest, unsigned src)
{
	unsigned attr_unused shift;
	if (ex == garbage)
		ex = ARCH_PREFERS_SX(op_size) ? sign_x : zero_x;
	ajla_assert_lo(ex == zero_x || ex == sign_x, (file_line, "gen_extend: invalid mode %u", (unsigned)ex));
	if (unlikely(op_size == OP_SIZE_NATIVE)) {
		g(gen_mov(ctx, op_size, dest, src));
		return true;
	}
	if (OP_SIZE_NATIVE == OP_SIZE_4) {
		shift = op_size == OP_SIZE_1 ? 24 : 16;
	} else if (OP_SIZE_NATIVE == OP_SIZE_8) {
		shift = op_size == OP_SIZE_1 ? 56 : op_size == OP_SIZE_2 ? 48 : 32;
	} else {
		internal(file_line, "gen_extend: invalid OP_SIZE_NATIVE");
	}
#if defined(ARCH_ARM) || defined(ARCH_IA64) || defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_X86)
#if defined(ARCH_ARM32)
	if (unlikely(!cpu_test_feature(CPU_FEATURE_armv6)))
		goto do_shift;
#endif
	gen_insn(ex == sign_x ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
	gen_one(dest);
	gen_one(src);
	return true;
#endif
#if defined(ARCH_POWER)
	if (ex == zero_x || op_size == OP_SIZE_2 || cpu_test_feature(CPU_FEATURE_ppc)) {
		gen_insn(ex == sign_x ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
		gen_one(dest);
		gen_one(src);
		return true;
	}
#endif
#if defined(ARCH_ALPHA)
	if (ex == zero_x) {
		g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_ZAPNOT, dest, src, op_size == OP_SIZE_1 ? 0x1 : op_size == OP_SIZE_2 ? 0x3 : 0xf, 0));
		return true;
	} else if (op_size == OP_SIZE_4 || ARCH_HAS_BWX) {
		gen_insn(INSN_MOVSX, op_size, 0, 0);
		gen_one(dest);
		gen_one(src);
		return true;
	}
#endif
#if defined(ARCH_MIPS)
	if (ex == sign_x && shift == 32) {
		/* a 32-bit shift by 0 sign-extends the low word */
		g(gen_3address_rot_imm(ctx, OP_SIZE_4, ROT_SHL, dest, src, 0, 0));
		return true;
	}
	if (ex == sign_x && MIPS_HAS_ROT) {
		gen_insn(INSN_MOVSX, op_size, 0, 0);
		gen_one(dest);
		gen_one(src);
		return true;
	}
#endif
#if defined(ARCH_S390)
	if (((op_size == OP_SIZE_1 || op_size == OP_SIZE_2) && cpu_test_feature(CPU_FEATURE_extended_imm)) || op_size == OP_SIZE_4) {
		gen_insn(ex == zero_x ? INSN_MOV : INSN_MOVSX, op_size, 0, 0);
		gen_one(dest);
		gen_one(src);
		return true;
	}
#endif
#if defined(ARCH_SPARC)
	if (shift == 32) {
		g(gen_3address_rot_imm(ctx, OP_SIZE_4, ex == sign_x ? ROT_SAR : ROT_SHR, dest, src, 0, 0));
		return true;
	}
#endif
#if defined(ARCH_RISCV64)
	if (ex == sign_x && (op_size == OP_SIZE_4 || likely(cpu_test_feature(CPU_FEATURE_zbb)))) {
		gen_insn(INSN_MOVSX, op_size, 0, 0);
		gen_one(dest);
		gen_one(src);
		return true;
	}
	if (ex == zero_x && ((op_size == OP_SIZE_1) ||
	    (op_size == OP_SIZE_2 && likely(cpu_test_feature(CPU_FEATURE_zbb))) ||
	    (op_size == OP_SIZE_4 && likely(cpu_test_feature(CPU_FEATURE_zba))))) {
		g(gen_mov(ctx, op_size, dest, src));
		return true;
	}
#endif
	if (ex == zero_x && op_size <= OP_SIZE_4) {
		int64_t cnst = (0x1ULL << (8U << op_size)) - 1;
		if (is_direct_const(cnst, IMM_PURPOSE_AND, OP_SIZE_NATIVE)) {
			g(gen_imm(ctx, cnst, IMM_PURPOSE_AND, OP_SIZE_NATIVE));
			gen_insn(INSN_ALU, OP_SIZE_NATIVE, ALU_AND, ALU_WRITES_FLAGS(ALU_AND, is_imm()));
			gen_one(dest);
			gen_one(src);
			gen_imm_offset();
			return true;
		}
	}
#if defined(ARCH_ARM32)
do_shift:
#endif
	g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, dest, src, shift, false));
	g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ex == sign_x ? ROT_SAR : ROT_SHR, dest, dest, shift, false));
	return true;
}
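
/* Overflow check: compare a value with its own sign-extension from
   sub_op_size and jump to label_ovf if they differ. */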
static bool attr_w gen_cmp_extended(struct codegen_context *ctx, unsigned cmp_op_size, unsigned sub_op_size, unsigned reg, unsigned attr_unused tmp_reg, uint32_t label_ovf)
{
	if (unlikely(sub_op_size >= cmp_op_size))
		internal(file_line, "gen_cmp_extended: invalid operand sizes: %u, %u", sub_op_size, cmp_op_size);
#if defined(ARCH_ARM64)
	gen_insn(INSN_CMP, cmp_op_size, 0, 1);
	gen_one(reg);
	gen_one(ARG_EXTENDED_REGISTER);
	gen_one(sub_op_size == OP_SIZE_1 ? ARG_EXTEND_SXTB : sub_op_size == OP_SIZE_2 ? ARG_EXTEND_SXTH : ARG_EXTEND_SXTW);
	gen_one(reg);

	gen_insn(INSN_JMP_COND, cmp_op_size, COND_NE, 0);
	gen_four(label_ovf);
#else
	g(gen_extend(ctx, sub_op_size, sign_x, tmp_reg, reg));

	g(gen_cmp_test_jmp(ctx, INSN_CMP, cmp_op_size, reg, tmp_reg, COND_NE, label_ovf));
#endif
	return true;
}
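
/* Compute dest = base + (shifted << shift) + offset, using LEA on x86 or
   a shifted-add instruction where available. */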
static bool attr_w gen_lea3(struct codegen_context *ctx, unsigned dest, unsigned base, unsigned shifted, unsigned shift, int64_t offset)
{
#if defined(ARCH_X86)
	gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
	gen_one(dest);
	gen_one(base);
	gen_one(shifted);
	gen_one(ARG_IMM);
	gen_eight(likely(imm_is_32bit(offset)) ? offset : 0);

	if (unlikely(!imm_is_32bit(offset)))
		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, dest, dest, offset, 0));

	return true;
#endif
	if (ARCH_HAS_SHIFTED_ADD(shift)) {
		gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
		gen_one(dest);
		gen_one(base);
		gen_one(ARG_SHIFTED_REGISTER);
		gen_one(ARG_SHIFT_LSL | shift);
		gen_one(shifted);

		if (offset) {
			g(gen_imm(ctx, offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
			gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
			gen_one(dest);
			gen_one(dest);
			gen_imm_offset();
		}

		return true;
	}

	g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, dest, shifted, shift, false));

	g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, dest, dest, base, 0));

	if (offset)
		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, dest, dest, offset, 0));
	return true;
}
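
/* Pointer compression support: compressed pointers are stored shifted
   right by POINTER_COMPRESSION bits. On x86 the scaled addressing modes
   decompress for free; elsewhere the base register is decompressed with
   an explicit shift (after zero-extending on targets that prefer
   sign-extension). */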
#if !defined(POINTER_COMPRESSION)
#define gen_pointer_compression(base)		do { } while (0)
#define gen_address_offset_compressed()		gen_address_offset()
#elif defined(ARCH_X86)
#define gen_pointer_compression(base)		do { } while (0)
#define gen_address_offset_compressed()				\
do {									\
	if (likely(!ctx->offset_reg)) {					\
		gen_one(ARG_ADDRESS_1 + POINTER_COMPRESSION);		\
		gen_one(ctx->base_reg);					\
		gen_eight(ctx->offset_imm);				\
	} else {							\
		gen_one(ARG_ADDRESS_2 + POINTER_COMPRESSION);		\
		gen_one(R_OFFSET_IMM);					\
		gen_one(ctx->base_reg);					\
		gen_eight(ctx->offset_imm);				\
	}								\
} while (0)
#else
#define gen_pointer_compression(base)					\
do {									\
	if (ARCH_PREFERS_SX(OP_SIZE_4)) {				\
		g(gen_extend(ctx, OP_SIZE_4, zero_x, base, base));	\
	}								\
	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, base, base, POINTER_COMPRESSION, 0));\
} while (0)
#define gen_address_offset_compressed()		gen_address_offset()
#endif