/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
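
/*
 * gen_set_1 - emit code that stores flag value 'val' into the one-bit
 * flag of frame slot slot_1; the slot index is a compile-time constant.
 * With HAVE_BITWISE_FRAME the flags are packed one bit per slot into
 * bitmap words of size OP_SIZE_BITMAP; without it every slot owns a
 * whole flag byte.
 */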
static bool attr_w gen_set_1(struct codegen_context *ctx, unsigned base, frame_t slot_1, int64_t offset, bool val)
{
#ifdef HAVE_BITWISE_FRAME
	int bit = slot_1 & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
	offset += slot_1 >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
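	/*
	 * OP_SIZE_* values are log2 of a size in bytes, so OP_SIZE_BITMAP + 3
	 * is log2 of the bits in one bitmap word: the low bits of slot_1
	 * select the bit within a word, the remaining bits select the word,
	 * whose byte offset is folded into 'offset'.
	 */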
#if defined(ARCH_X86)
	if (OP_SIZE_BITMAP == OP_SIZE_4) {
		g(gen_address(ctx, base, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		if (val) {
			g(gen_imm(ctx, (int32_t)((uint32_t)1 << bit), IMM_PURPOSE_OR, OP_SIZE_BITMAP));
			gen_insn(INSN_ALU, OP_SIZE_BITMAP, ALU_OR, ALU_WRITES_FLAGS(OP_SIZE_BITMAP, ALU_OR, false, is_imm(), ctx->const_imm));
		} else {
			g(gen_imm(ctx, ~(int32_t)((uint32_t)1 << bit), IMM_PURPOSE_AND, OP_SIZE_BITMAP));
			gen_insn(INSN_ALU, OP_SIZE_BITMAP, ALU_AND, ALU_WRITES_FLAGS(OP_SIZE_BITMAP, ALU_AND, false, is_imm(), ctx->const_imm));
		}
		gen_address_offset();
		gen_address_offset();
		gen_imm_offset();
	} else {
		g(gen_address(ctx, base, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_BITMAP));
		gen_insn(INSN_BTX, OP_SIZE_BITMAP, val ? BTX_BTS : BTX_BTR, 1);
		gen_address_offset();
		gen_address_offset();
		gen_imm_offset();
	}
#else
	g(gen_address(ctx, base, offset, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
	gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
	gen_one(R_SCRATCH_NA_1);
	gen_address_offset();

	if (!is_direct_const(!val ? ~(1ULL << bit) : 1ULL << bit, !val ? IMM_PURPOSE_AND : IMM_PURPOSE_OR, OP_SIZE_NATIVE) && ARCH_HAS_BTX(!val ? BTX_BTR : BTX_BTS, OP_SIZE_NATIVE, true)) {
		g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_NATIVE));
		gen_insn(INSN_BTX, OP_SIZE_NATIVE, !val ? BTX_BTR : BTX_BTS, 0);
		gen_one(R_SCRATCH_NA_1);
		gen_one(R_SCRATCH_NA_1);
		gen_imm_offset();
	} else if (!val && !ARCH_HAS_ANDN) {
		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), ALU_AND, R_SCRATCH_NA_1, R_SCRATCH_NA_1, ~((uintptr_t)1 << bit), 0));
	} else {
		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), val ? ALU_OR : ALU_ANDN, R_SCRATCH_NA_1, R_SCRATCH_NA_1, (uintptr_t)1 << bit, 0));
	}

	g(gen_address(ctx, base, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
	gen_insn(INSN_MOV, OP_SIZE_BITMAP, 0, 0);
	gen_address_offset();
	gen_one(R_SCRATCH_NA_1);
#endif
#else
#if defined(ARCH_ALPHA)
	if (!ARCH_HAS_BWX) {
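		/*
		 * Pre-BWX Alpha has no byte store: load the containing
		 * 64-bit word, clear the flag byte with MSKBL or set its
		 * lowest bit with OR, and store the whole word back.
		 */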
		g(gen_address(ctx, base, offset + (slot_1 & ~7), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV, OP_SIZE_8, 0, 0);
		gen_one(R_SCRATCH_NA_1);
		gen_address_offset();

		if (!val) {
			g(gen_3address_alu_imm(ctx, OP_SIZE_8, ALU_MSKBL, R_SCRATCH_NA_1, R_SCRATCH_NA_1, slot_1 & 7, 0));
		} else {
			g(gen_3address_alu_imm(ctx, OP_SIZE_8, ALU_OR, R_SCRATCH_NA_1, R_SCRATCH_NA_1, 1ULL << ((slot_1 & 7) * 8), 0));
		}

		g(gen_address(ctx, base, offset + (slot_1 & ~7), IMM_PURPOSE_STR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV, OP_SIZE_8, 0, 0);
		gen_address_offset();
		gen_one(R_SCRATCH_NA_1);

		return true;
	}
#endif
	g(gen_address(ctx, base, offset + slot_1, IMM_PURPOSE_MVI_CLI_OFFSET, OP_SIZE_1));
	g(gen_imm(ctx, val, IMM_PURPOSE_STORE_VALUE, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_address_offset();
	gen_imm_offset();
#endif
	return true;
}
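
/*
 * gen_set_1_variable - the same operation as gen_set_1, except that the
 * slot index is only known at run time, in register slot_reg.
 */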
static bool attr_w gen_set_1_variable(struct codegen_context *ctx, unsigned slot_reg, int64_t offset, bool val)
{
#ifdef HAVE_BITWISE_FRAME
#if defined(ARCH_X86)
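	/*
	 * x86 BTS/BTR with a register bit offset treats the memory operand
	 * as an arbitrarily long bit string, so the raw slot index can be
	 * used directly, without decomposing it into word and bit.
	 */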
	g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
	gen_insn(INSN_BTX, OP_SIZE_BITMAP, val ? BTX_BTS : BTX_BTR, 1);
	gen_address_offset();
	gen_address_offset();
	gen_one(slot_reg);
#else
	g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHR, R_SCRATCH_NA_1, slot_reg, OP_SIZE_BITMAP + 3, false));

	if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_BITMAP)) {
		gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
		gen_one(R_SCRATCH_NA_1);
		gen_one(R_FRAME);
		gen_one(ARG_SHIFTED_REGISTER);
		gen_one(ARG_SHIFT_LSL | OP_SIZE_BITMAP);
		gen_one(R_SCRATCH_NA_1);
	} else {
		g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_NA_1, R_SCRATCH_NA_1, OP_SIZE_BITMAP, false));

		g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SCRATCH_NA_1, R_SCRATCH_NA_1, R_FRAME, 0));
	}
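	/*
	 * R_SCRATCH_NA_1 = R_FRAME + (slot_reg >> (OP_SIZE_BITMAP + 3))
	 * scaled by the bitmap word size: the address of the bitmap word
	 * that holds the slot's flag bit.
	 */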
	if (ARCH_HAS_BTX(!val ? BTX_BTR : BTX_BTS, OP_SIZE_BITMAP, false)) {
		g(gen_address(ctx, R_SCRATCH_NA_1, offset, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_one(R_SCRATCH_NA_3);
		gen_address_offset();

		gen_insn(INSN_BTX, OP_SIZE_BITMAP, !val ? BTX_BTR : BTX_BTS, 0);
		gen_one(R_SCRATCH_NA_3);
		gen_one(R_SCRATCH_NA_3);
		gen_one(slot_reg);
	} else {
		if (ARCH_SHIFT_SIZE > OP_SIZE_BITMAP) {
			g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), ALU_AND, R_SCRATCH_NA_3, slot_reg, (1U << (OP_SIZE_BITMAP + 3)) - 1, 0));

			g(gen_load_constant(ctx, R_SCRATCH_NA_2, 1));

			g(gen_3address_rot(ctx, i_size(OP_SIZE_BITMAP), ROT_SHL, R_SCRATCH_NA_2, R_SCRATCH_NA_2, R_SCRATCH_NA_3));
		} else {
			g(gen_load_constant(ctx, R_SCRATCH_NA_2, 1));

			g(gen_3address_rot(ctx, OP_SIZE_BITMAP, ROT_SHL, R_SCRATCH_NA_2, R_SCRATCH_NA_2, slot_reg));
		}

		g(gen_address(ctx, R_SCRATCH_NA_1, offset, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_one(R_SCRATCH_NA_3);
		gen_address_offset();

		if (!val && !ARCH_HAS_ANDN) {
			g(gen_2address_alu1(ctx, i_size(OP_SIZE_BITMAP), ALU1_NOT, R_SCRATCH_NA_2, R_SCRATCH_NA_2, 0));

			g(gen_3address_alu(ctx, i_size(OP_SIZE_BITMAP), ALU_AND, R_SCRATCH_NA_3, R_SCRATCH_NA_3, R_SCRATCH_NA_2, 0));
		} else {
			g(gen_3address_alu(ctx, i_size(OP_SIZE_BITMAP), val ? ALU_OR : ALU_ANDN, R_SCRATCH_NA_3, R_SCRATCH_NA_3, R_SCRATCH_NA_2, 0));
		}
	}

	g(gen_address(ctx, R_SCRATCH_NA_1, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
	gen_insn(INSN_MOV, OP_SIZE_BITMAP, 0, 0);
	gen_address_offset();
	gen_one(R_SCRATCH_NA_3);
#endif
#else
#if defined(ARCH_X86)
	g(gen_imm(ctx, val, IMM_PURPOSE_STORE_VALUE, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_one(ARG_ADDRESS_2);
	gen_one(R_FRAME);
	gen_one(slot_reg);
	gen_eight(offset);
	gen_imm_offset();
#else
	g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SCRATCH_NA_1, R_FRAME, slot_reg, 0));

#if defined(ARCH_ALPHA)
	if (!ARCH_HAS_BWX) {
		g(gen_address(ctx, R_SCRATCH_NA_1, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV_U, OP_SIZE_8, 0, 0);
		gen_one(R_SCRATCH_NA_2);
		gen_address_offset();

		/* MSKBL/INSBL select the byte lane from the low bits of the
		   address in R_SCRATCH_NA_1 */
		g(gen_3address_alu(ctx, OP_SIZE_8, ALU_MSKBL, R_SCRATCH_NA_2, R_SCRATCH_NA_2, R_SCRATCH_NA_1, 0));
		if (val) {
			g(gen_load_constant(ctx, R_SCRATCH_NA_3, 1));

			g(gen_3address_alu(ctx, OP_SIZE_8, ALU_INSBL, R_SCRATCH_NA_3, R_SCRATCH_NA_3, R_SCRATCH_NA_1, 0));

			g(gen_3address_alu(ctx, OP_SIZE_8, ALU_OR, R_SCRATCH_NA_2, R_SCRATCH_NA_2, R_SCRATCH_NA_3, 0));
		}

		g(gen_address(ctx, R_SCRATCH_NA_1, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_8));
		gen_insn(INSN_MOV_U, OP_SIZE_8, 0, 0);
		gen_address_offset();
		gen_one(R_SCRATCH_NA_2);

		return true;
	}
#endif
	g(gen_address(ctx, R_SCRATCH_NA_1, offset, IMM_PURPOSE_MVI_CLI_OFFSET, OP_SIZE_1));
	g(gen_imm(ctx, val, IMM_PURPOSE_STORE_VALUE, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_address_offset();
	gen_imm_offset();
#endif
#endif
	return true;
}
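
/*
 * gen_test_1 - test the flag bit of slot slot_1 and jump to 'label' when
 * the bit is zero (jz set) or nonzero (jz clear).  'test' selects TEST
 * (test only), TEST_SET or TEST_CLEAR (test, then force the flag to 1 or
 * 0); the update is emitted before or after the branch depending on jz,
 * placing it on the path where the flag value actually changes.
 */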
static bool attr_w gen_test_1(struct codegen_context *ctx, unsigned base, frame_t slot_1, int64_t offset, uint32_t label, bool jz, uint8_t test)
{
#ifdef HAVE_BITWISE_FRAME
	int bit = slot_1 & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
	offset += slot_1 >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
#if defined(ARCH_X86)
	if (test == TEST) {
		if (OP_SIZE_BITMAP == OP_SIZE_4) {
			g(gen_address(ctx, base, offset, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
			g(gen_imm(ctx, (int32_t)((uint32_t)1 << bit), IMM_PURPOSE_TEST, OP_SIZE_BITMAP));
			gen_insn(INSN_TEST, OP_SIZE_BITMAP, 0, 1);
			gen_address_offset();
			gen_imm_offset();

			gen_insn(INSN_JMP_COND, OP_SIZE_BITMAP, jz ? COND_E : COND_NE, 0);
			gen_four(label);

			return true;
		}
		g(gen_address(ctx, base, offset, test == TEST ? IMM_PURPOSE_LDR_OFFSET : IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_BITMAP));
		gen_insn(INSN_BT, OP_SIZE_BITMAP, 0, 1);
		gen_address_offset();
		gen_imm_offset();
	} else {
		g(gen_address(ctx, base, offset, test == TEST ? IMM_PURPOSE_LDR_OFFSET : IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_BITMAP));
		gen_insn(INSN_BTX, OP_SIZE_BITMAP, test == TEST_CLEAR ? BTX_BTR : BTX_BTS, 1);
		gen_address_offset();
		gen_address_offset();
		gen_imm_offset();
	}

	/* BT/BTS/BTR put the tested bit into the carry flag */
	gen_insn(INSN_JMP_COND, OP_SIZE_1, jz ? COND_AE : COND_B, 0);
	gen_four(label);
#else
	g(gen_address(ctx, base, offset, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
	gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
	gen_one(R_SCRATCH_NA_1);
	gen_address_offset();

	if (jz ? test == TEST_SET : test == TEST_CLEAR) {
		if (!is_direct_const(test == TEST_CLEAR ? ~(1ULL << bit) : 1ULL << bit, test == TEST_CLEAR ? IMM_PURPOSE_AND : IMM_PURPOSE_OR, OP_SIZE_NATIVE) && ARCH_HAS_BTX(test == TEST_CLEAR ? BTX_BTR : BTX_BTS, OP_SIZE_NATIVE, true)) {
			g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_NATIVE));
			gen_insn(INSN_BTX, OP_SIZE_NATIVE, test == TEST_CLEAR ? BTX_BTR : BTX_BTS, 0);
			gen_one(R_SCRATCH_NA_2);
			gen_one(R_SCRATCH_NA_1);
			gen_imm_offset();
		} else if (test == TEST_CLEAR) {
			g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), ALU_AND, R_SCRATCH_NA_2, R_SCRATCH_NA_1, ~((uintptr_t)1 << bit), 0));
		} else {
			g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), test == TEST_SET ? ALU_OR : ALU_ANDN, R_SCRATCH_NA_2, R_SCRATCH_NA_1, (uintptr_t)1 << bit, 0));
		}

		g(gen_address(ctx, base, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_address_offset();
		gen_one(R_SCRATCH_NA_2);
	}
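	/* the updated word was stored from R_SCRATCH_NA_2; R_SCRATCH_NA_1
	   still holds the original value for the test below */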
#if defined(ARCH_ARM) || defined(ARCH_IA64) || defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390)
	g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, i_size(OP_SIZE_BITMAP), R_SCRATCH_NA_1, (uintptr_t)1 << bit, !jz ? COND_NE : COND_E, label));
#else
	/* shift the tested bit into the sign position and branch on sign */
	g(gen_3address_rot_imm(ctx, i_size(OP_SIZE_BITMAP), ROT_SHL, R_SCRATCH_NA_3, R_SCRATCH_NA_1, (1U << (i_size(OP_SIZE_BITMAP) + 3)) - 1 - bit, false));

	gen_insn(INSN_JMP_REG, i_size(OP_SIZE_BITMAP), !jz ? COND_S : COND_NS, 0);
	gen_one(R_SCRATCH_NA_3);
	gen_four(label);
#endif
	if (!jz ? test == TEST_SET : test == TEST_CLEAR) {
		if (!is_direct_const(test == TEST_CLEAR ? ~(1ULL << bit) : 1ULL << bit, test == TEST_CLEAR ? IMM_PURPOSE_XOR : IMM_PURPOSE_OR, OP_SIZE_NATIVE) && ARCH_HAS_BTX(test == TEST_CLEAR ? BTX_BTR : BTX_BTS, OP_SIZE_NATIVE, true)) {
			g(gen_imm(ctx, bit, IMM_PURPOSE_BITWISE, OP_SIZE_NATIVE));
			gen_insn(INSN_BTX, OP_SIZE_NATIVE, test == TEST_CLEAR ? BTX_BTR : BTX_BTS, 0);
			gen_one(R_SCRATCH_NA_1);
			gen_one(R_SCRATCH_NA_1);
			gen_imm_offset();
		} else {
#if defined(ARCH_S390)
			if (test == TEST_CLEAR)
				g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), ALU_AND, R_SCRATCH_NA_1, R_SCRATCH_NA_1, ~((uintptr_t)1 << bit), 0));
			else
#endif
			/* on this fall-through path the tested bit is known to be
			   set when clearing, so XOR clears it just like AND would */
			g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_BITMAP), test == TEST_SET ? ALU_OR : ALU_XOR, R_SCRATCH_NA_1, R_SCRATCH_NA_1, (uintptr_t)1 << bit, 0));
		}

		g(gen_address(ctx, base, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_address_offset();
		gen_one(R_SCRATCH_NA_1);
	}
#endif
#else
#if defined(ARCH_X86) || defined(ARCH_S390)
	g(gen_address(ctx, base, offset + slot_1, IMM_PURPOSE_MVI_CLI_OFFSET, OP_SIZE_1));
	g(gen_imm(ctx, 0, IMM_PURPOSE_CMP, OP_SIZE_1));
	gen_insn(INSN_CMP, OP_SIZE_1, 0, 2);
	gen_address_offset();
	gen_imm_offset();

	/* gen_set_1 emits only a plain byte store here, which does not
	   modify the flags computed by the comparison above */
	if (jz ? test == TEST_SET : test == TEST_CLEAR) {
		g(gen_set_1(ctx, base, slot_1, offset, test == TEST_SET));
	}

	gen_insn(INSN_JMP_COND, OP_SIZE_1, jz ? COND_E : COND_NE, 0);
	gen_four(label);

	if (!jz ? test == TEST_SET : test == TEST_CLEAR) {
		g(gen_set_1(ctx, base, slot_1, offset, test == TEST_SET));
	}
#else
#if defined(ARCH_ALPHA)
	g(gen_address(ctx, base, offset + (slot_1 & ~7), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
	gen_insn(INSN_MOV, OP_SIZE_8, 0, 0);
	gen_one(R_SCRATCH_NA_2);
	gen_address_offset();

	g(gen_3address_alu_imm(ctx, OP_SIZE_8, ALU_EXTBL, R_SCRATCH_NA_2, R_SCRATCH_NA_2, slot_1 & 7, 0));
#else
	g(gen_address(ctx, base, offset + slot_1, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_one(R_SCRATCH_NA_2);
	gen_address_offset();
#endif

	if (jz ? test == TEST_SET : test == TEST_CLEAR) {
		g(gen_set_1(ctx, base, slot_1, offset, test == TEST_SET));
	}

	g(gen_jmp_on_zero(ctx, OP_SIZE_1, R_SCRATCH_NA_2, jz ? COND_E : COND_NE, label));

	if (!jz ? test == TEST_SET : test == TEST_CLEAR) {
		g(gen_set_1(ctx, base, slot_1, offset, test == TEST_SET));
	}
#endif
#endif
	return true;
}
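
/*
 * gen_test_2 - jump to 'label' if the flag bit of slot_1 or slot_2 is
 * set.  When both bits land in the same bitmap word, a single masked
 * test covers them; otherwise fall back to two gen_test_1 calls.
 */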
static bool attr_w gen_test_2(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, uint32_t label)
{
	unsigned attr_unused bit1, bit2;
	frame_t attr_unused addr1, addr2;
	if (unlikely(slot_1 == slot_2)) {
		g(gen_test_1(ctx, R_FRAME, slot_1, 0, label, false, TEST));
		return true;
	}
#ifdef HAVE_BITWISE_FRAME
	addr1 = slot_1 >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
	addr2 = slot_2 >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
	if (likely(addr1 == addr2)) {
		bit1 = slot_1 & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
		bit2 = slot_2 & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
#if defined(ARCH_X86)
		g(gen_address(ctx, R_FRAME, addr1, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		if (OP_SIZE_BITMAP == OP_SIZE_4) {
			g(gen_imm(ctx, (int32_t)(((uintptr_t)1 << bit1) | ((uintptr_t)1 << bit2)), IMM_PURPOSE_TEST, OP_SIZE_BITMAP));
		} else {
			g(gen_imm(ctx, ((uintptr_t)1 << bit1) | ((uintptr_t)1 << bit2), IMM_PURPOSE_TEST, OP_SIZE_BITMAP));
		}
		gen_insn(INSN_TEST, OP_SIZE_BITMAP, 0, 1);
		gen_address_offset();
		gen_imm_offset();

		gen_insn(INSN_JMP_COND, OP_SIZE_BITMAP, COND_NE, 0);
		gen_four(label);
#else
		g(gen_address(ctx, R_FRAME, addr1, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_one(R_SCRATCH_NA_1);
		gen_address_offset();

		if (is_direct_const(1ULL << bit1 | 1ULL << bit2, IMM_PURPOSE_TEST, OP_SIZE_BITMAP)) {
			g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, i_size(OP_SIZE_BITMAP), R_SCRATCH_NA_1, 1ULL << bit1 | 1ULL << bit2, COND_NE, label));
			return true;
		}
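		/*
		 * The combined mask cannot be encoded as one immediate:
		 * either test the bits one at a time, or extract/shift them
		 * and branch on the combined result.
		 */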
#if defined(ARCH_ARM) || defined(ARCH_IA64) || defined(ARCH_PARISC) || defined(ARCH_S390)
		g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, i_size(OP_SIZE_BITMAP), R_SCRATCH_NA_1, (uintptr_t)1 << bit1, COND_NE, label));
		g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, i_size(OP_SIZE_BITMAP), R_SCRATCH_NA_1, (uintptr_t)1 << bit2, COND_NE, label));
#else
		if (ARCH_HAS_BTX(BTX_BTEXT, OP_SIZE_NATIVE, true)) {
			gen_insn(INSN_BTX, OP_SIZE_NATIVE, BTX_BTEXT, 0);
			gen_one(R_SCRATCH_NA_2);
			gen_one(R_SCRATCH_NA_1);
			gen_one(ARG_IMM);
			gen_eight(bit1);

			gen_insn(INSN_BTX, OP_SIZE_NATIVE, BTX_BTEXT, 0);
			gen_one(R_SCRATCH_NA_1);
			gen_one(R_SCRATCH_NA_1);
			gen_one(ARG_IMM);
			gen_eight(bit2);

			g(gen_3address_alu(ctx, i_size(OP_SIZE_NATIVE), ALU_OR, R_SCRATCH_NA_1, R_SCRATCH_NA_1, R_SCRATCH_NA_2, 0));

			gen_insn(INSN_JMP_REG, i_size(OP_SIZE_NATIVE), COND_NE, 0);
			gen_one(R_SCRATCH_NA_1);
			gen_four(label);
		} else {
			g(gen_3address_rot_imm(ctx, i_size(OP_SIZE_BITMAP), ROT_SHL, R_SCRATCH_NA_2, R_SCRATCH_NA_1, (1U << (i_size(OP_SIZE_BITMAP) + 3)) - 1 - bit1, false));
			g(gen_3address_rot_imm(ctx, i_size(OP_SIZE_BITMAP), ROT_SHL, R_SCRATCH_NA_1, R_SCRATCH_NA_1, (1U << (i_size(OP_SIZE_BITMAP) + 3)) - 1 - bit2, false));
#if defined(ARCH_POWER)
			g(gen_3address_alu(ctx, i_size(OP_SIZE_BITMAP), ALU_OR, R_SCRATCH_NA_1, R_SCRATCH_NA_1, R_SCRATCH_NA_2, 1));

			gen_insn(INSN_JMP_COND, i_size(OP_SIZE_BITMAP), COND_L, 0);
			gen_four(label);
#else
			g(gen_3address_alu(ctx, i_size(OP_SIZE_BITMAP), ALU_OR, R_SCRATCH_NA_1, R_SCRATCH_NA_1, R_SCRATCH_NA_2, 0));

			gen_insn(INSN_JMP_REG, i_size(OP_SIZE_BITMAP), COND_S, 0);
			gen_one(R_SCRATCH_NA_1);
			gen_four(label);
#endif
		}
#endif
#endif
	} else {
		g(gen_test_1(ctx, R_FRAME, slot_1, 0, label, false, TEST));
		g(gen_test_1(ctx, R_FRAME, slot_2, 0, label, false, TEST));
	}
#else
#if defined(ARCH_X86)
	g(gen_address(ctx, R_FRAME, slot_1, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_one(R_SCRATCH_1);
	gen_address_offset();

	g(gen_address(ctx, R_FRAME, slot_2, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
	gen_insn(INSN_ALU_PARTIAL, OP_SIZE_1, ALU_OR, 1);
	gen_one(R_SCRATCH_1);
	gen_one(R_SCRATCH_1);
	gen_address_offset();

	gen_insn(INSN_JMP_COND, OP_SIZE_1, COND_NE, 0);
	gen_four(label);
#else
	if (!ARCH_HAS_BWX || !ARCH_HAS_FLAGS
#if defined(ARCH_S390)
	    || true
#endif
	    ) {
#if defined(ARCH_ALPHA)
		if (!ARCH_HAS_BWX && (slot_1 & ~7) == (slot_2 & ~7)) {
			g(gen_address(ctx, R_FRAME, slot_1 & ~7, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
			gen_insn(INSN_MOV, OP_SIZE_8, 0, 0);
			gen_one(R_SCRATCH_1);
			gen_address_offset();

			/* ZAPNOT clears every byte except the two flag bytes;
			   the result is nonzero iff either flag is set */
			g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_ZAPNOT, R_SCRATCH_1, R_SCRATCH_1, (1U << (slot_1 & 7)) | (1U << (slot_2 & 7)), 0));

			g(gen_jmp_on_zero(ctx, OP_SIZE_8, R_SCRATCH_1, COND_NE, label));

			return true;
		}
#endif
		g(gen_test_1(ctx, R_FRAME, slot_1, 0, label, false, TEST));
		g(gen_test_1(ctx, R_FRAME, slot_2, 0, label, false, TEST));
	} else {
		g(gen_address(ctx, R_FRAME, slot_1, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
		gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
		gen_one(R_SCRATCH_1);
		gen_address_offset();

		g(gen_address(ctx, R_FRAME, slot_2, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
		gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
		gen_one(R_SCRATCH_2);
		gen_address_offset();
#if defined(ARCH_ARM) || defined(ARCH_SPARC)
		/* CMN sets the flags from R_SCRATCH_1 + R_SCRATCH_2, which is
		   nonzero iff either flag byte is set */
		gen_insn(INSN_CMN, OP_SIZE_NATIVE, 0, 1);
		gen_one(R_SCRATCH_1);
		gen_one(R_SCRATCH_2);
#else
		g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_OR, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 1));
#endif
		gen_insn(INSN_JMP_COND, OP_SIZE_NATIVE, COND_NE, 0);
		gen_four(label);
	}
#endif
#endif
	return true;
}
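
/* qsort comparator ordering frame_t slot numbers in ascending order */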
static int frame_t_compare(const void *p1, const void *p2)
{
	if (*(const frame_t *)p1 < *(const frame_t *)p2)
		return -1;
	if (likely(*(const frame_t *)p1 > *(const frame_t *)p2))
		return 1;
	return 0;
}
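
/*
 * gen_test_multiple - jump to 'label' if any of the n_variables slots in
 * 'variables' has its flag bit set.  The slots are sorted first so that
 * slots sharing one bitmap word (or one aligned 8-byte group on pre-BWX
 * Alpha) can be checked with a single masked test.
 */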
static bool attr_w gen_test_multiple(struct codegen_context *ctx, frame_t *variables, size_t n_variables, uint32_t label)
{
	size_t attr_unused i;
	size_t attr_unused pos;
	qsort(variables, n_variables, sizeof(frame_t), frame_t_compare);

	if (n_variables == 1) {
		g(gen_test_1(ctx, R_FRAME, variables[0], 0, label, false, TEST));
		return true;
	}
	if (n_variables == 2) {
		g(gen_test_2(ctx, variables[0], variables[1], label));
		return true;
	}
#if defined(HAVE_BITWISE_FRAME)
	pos = 0;
	while (pos < n_variables) {
		frame_t addr = variables[pos] >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
		unsigned bit = variables[pos] & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
		uintptr_t mask = (uintptr_t)1 << bit;
		unsigned n_bits = 1;
		pos++;
		while (pos < n_variables) {
			frame_t addr2 = variables[pos] >> (OP_SIZE_BITMAP + 3) << OP_SIZE_BITMAP;
			unsigned bit2 = variables[pos] & ((1 << (OP_SIZE_BITMAP + 3)) - 1);
			uintptr_t mask2 = (uintptr_t)1 << bit2;
			if (addr2 != addr)
				break;
#if defined(ARCH_S390)
			/* stop the group if the combined mask cannot be encoded
			   as one immediate */
			if (!is_direct_const(mask | mask2, IMM_PURPOSE_TEST, OP_SIZE_BITMAP))
				break;
#endif
			mask |= mask2;
			n_bits++;
			pos++;
		}
		if (n_bits == 1) {
			g(gen_test_1(ctx, R_FRAME, variables[pos - 1], 0, label, false, TEST));
			continue;
		} else if (n_bits == 2) {
			g(gen_test_2(ctx, variables[pos - 2], variables[pos - 1], label));
			continue;
		}
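		/* three or more bits fall into this bitmap word: test them
		   with one load and the combined mask */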
#if defined(ARCH_X86)
		g(gen_address(ctx, R_FRAME, addr, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		if (OP_SIZE_BITMAP == OP_SIZE_4) {
			g(gen_imm(ctx, (int32_t)mask, IMM_PURPOSE_TEST, OP_SIZE_BITMAP));
		} else {
			g(gen_imm(ctx, mask, IMM_PURPOSE_TEST, OP_SIZE_BITMAP));
		}
		gen_insn(INSN_TEST, OP_SIZE_BITMAP, 0, 1);
		gen_address_offset();
		gen_imm_offset();

		gen_insn(INSN_JMP_COND, OP_SIZE_BITMAP, COND_NE, 0);
		gen_four(label);
#else
		g(gen_address(ctx, R_FRAME, addr, ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_BITMAP));
		gen_insn(ARCH_PREFERS_SX(OP_SIZE_BITMAP) ? INSN_MOVSX : INSN_MOV, OP_SIZE_BITMAP, 0, 0);
		gen_one(R_SCRATCH_NA_1);
		gen_address_offset();

		g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, i_size(OP_SIZE_BITMAP), R_SCRATCH_NA_1, mask, COND_NE, label));
#endif
	}
#elif !defined(HAVE_BITWISE_FRAME)
#if defined(ARCH_X86)
	g(gen_address(ctx, R_FRAME, variables[0], IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
	gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
	gen_one(R_SCRATCH_1);
	gen_address_offset();

	for (i = 1; i < n_variables; i++) {
		g(gen_address(ctx, R_FRAME, variables[i], IMM_PURPOSE_LDR_OFFSET, OP_SIZE_1));
		gen_insn(INSN_ALU_PARTIAL, OP_SIZE_1, ALU_OR, 1);
		gen_one(R_SCRATCH_1);
		gen_one(R_SCRATCH_1);
		gen_address_offset();
	}

	gen_insn(INSN_JMP_COND, OP_SIZE_1, COND_NE, 0);
	gen_four(label);
#else
#if defined(ARCH_ALPHA)
	if (!ARCH_HAS_BWX) {
		pos = 0;
		while (pos < n_variables) {
			frame_t addr = variables[pos] & ~7;
			unsigned bit = variables[pos] & 7;
			unsigned mask = 1U << bit;
			pos++;
			while (pos < n_variables) {
				frame_t addr2 = variables[pos] & ~7;
				unsigned bit2 = variables[pos] & 7;
				unsigned mask2 = 1U << bit2;
				if (addr2 != addr)
					break;
				mask |= mask2;
				pos++;
			}

			g(gen_address(ctx, R_FRAME, addr, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_8));
			gen_insn(INSN_MOV, OP_SIZE_8, 0, 0);
			gen_one(R_SCRATCH_1);
			gen_address_offset();

			/* ZAPNOT clears every byte outside 'mask'; the result is
			   nonzero iff at least one of the grouped flags is set */
			g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_ZAPNOT, R_SCRATCH_1, R_SCRATCH_1, mask, 0));

			g(gen_jmp_on_zero(ctx, OP_SIZE_8, R_SCRATCH_1, COND_NE, label));
		}
		return true;
	}
#endif
	for (i = 0; i < n_variables; i++) {
		g(gen_test_1(ctx, R_FRAME, variables[i], 0, label, false, TEST));
	}
#endif
#endif
	return true;
}