/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
/* gen_flat_move_copy - emit code that copies a flat (by-value) variable
 * from slot_1 to slot_r.
 *
 * Escapes to the interpreter if slot_1's flag says the slot may hold a
 * pointer/thunk; otherwise emits a raw slot-to-slot copy and records
 * that both slots now hold flat data (flags cleared).
 *
 * NOTE(review): this chunk appears to be a lossy extraction — lines such
 * as "return false;" after alloc_escape_label() failure checks, blank
 * lines, braces and #else/#endif lines seem to be missing throughout,
 * and each line carries a stray leading number. Verify against the
 * original codegen source; comments below describe the visible code only.
 */
19 static bool attr_w gen_flat_move_copy(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r)
21 uint32_t escape_label;
23 escape_label = alloc_escape_label(ctx);
24 if (unlikely(!escape_label))
/* escape if slot_1's tag flag is set (presumably: value not flat) */
27 g(gen_test_1_cached(ctx, slot_1, escape_label));
29 g(gen_memcpy_slots(ctx, slot_r, slot_1));
/* both slots are now known (at compile time) to hold flat data */
31 flag_set(ctx, slot_1, false);
32 flag_set(ctx, slot_r, false);
/* gen_ref_move_copy - emit code that moves or copies a reference
 * (boxed pointer) from slot_1 to slot_r, depending on `code`
 * (OPCODE_REF_MOVE / OPCODE_REF_MOVE_CLEAR / OPCODE_REF_COPY).
 *
 * NOTE(review): "®" below is mojibake of "&reg" (the HTML entity
 * "&reg;" was decoded) — restore before compiling. A declaration of
 * label_id and several control-flow lines also appear to be missing
 * from this extraction.
 */
37 static bool attr_w gen_ref_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* destination must stay flat -> a pointer can never be stored there;
 * unconditionally escape to the interpreter */
40 if (flag_must_be_flat(ctx, slot_r)) {
41 uint32_t escape_label = alloc_escape_label(ctx);
42 if (unlikely(!escape_label))
44 gen_insn(INSN_JMP, 0, 0, 0);
45 gen_four(escape_label);
48 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
/* source may be a thunk but destination must be evaluated data:
 * escape if the loaded pointer is a thunk, then remember slot_r
 * is known not to be a thunk */
49 if (!da(ctx->fn,function)->local_variables_flags[slot_1].must_be_data &&
50 da(ctx->fn,function)->local_variables_flags[slot_r].must_be_data) {
51 uint32_t escape_label = alloc_escape_label(ctx);
52 if (unlikely(!escape_label))
54 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
55 ctx->flag_cache[slot_r] |= FLAG_CACHE_IS_NOT_THUNK;
/* store the pointer into slot_r and mark the slot as holding a pointer */
57 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, reg));
58 g(gen_set_1(ctx, R_FRAME, slot_r, 0, true));
59 flag_set(ctx, slot_r, true);
60 if (code == OPCODE_REF_COPY) {
/* copy: the pointer is now owned twice, so bump its reference count */
61 g(gen_upcall_start(ctx, 1));
62 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
63 g(gen_upcall_argument(ctx, 0));
64 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
65 } else if (code == OPCODE_REF_MOVE && !da(ctx->fn,function)->local_variables_flags[slot_1].may_be_borrowed) {
/* plain move of an owned pointer: just clear the source flag */
66 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
67 flag_set(ctx, slot_1, false);
/* source may be borrowed: test-and-clear its flag at runtime and
 * take an extra reference only when the pointer was borrowed
 * (NOTE(review): label_id declaration not visible here) */
70 if (unlikely(!(label_id = alloc_label(ctx))))
72 if (flag_is_set(ctx, slot_1)) {
73 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
76 if (flag_is_clear(ctx, slot_1))
78 g(gen_test_1(ctx, R_FRAME, slot_1, 0, label_id, false, TEST_CLEAR));
80 g(gen_upcall_start(ctx, 1));
81 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
82 g(gen_upcall_argument(ctx, 0));
83 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* MOVE_CLEAR additionally zeroes the source slot */
86 if (code == OPCODE_REF_MOVE_CLEAR)
87 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
88 flag_set(ctx, slot_1, false);
/* gen_box_move_copy - emit code that boxes the variable in slot_1 into a
 * heap pointer and stores that pointer into slot_r.
 *
 * The actual boxing is done by the cg_upcall_ipret_copy_variable_to_pointer
 * upcall (args: frame, source slot, deref flag); for OPCODE_BOX_MOVE_CLEAR
 * the source slot is cleared afterwards.
 */
93 static bool attr_w gen_box_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* destination must stay flat -> storing a pointer is impossible; escape */
95 if (flag_must_be_flat(ctx, slot_r)) {
96 uint32_t escape_label = alloc_escape_label(ctx);
97 if (unlikely(!escape_label))
99 gen_insn(INSN_JMP, 0, 0, 0);
100 gen_four(escape_label);
/* the upcall reads the slot from memory, so flush it from a register */
104 if (slot_is_register(ctx, slot_1))
105 g(spill(ctx, slot_1));
107 g(gen_upcall_start(ctx, 3));
109 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
110 g(gen_upcall_argument(ctx, 0));
112 g(gen_load_constant(ctx, R_ARG1, slot_1));
113 g(gen_upcall_argument(ctx, 1));
/* third argument: whether the source should be consumed (move-clear) */
115 g(gen_load_constant(ctx, R_ARG2, code == OPCODE_BOX_MOVE_CLEAR));
116 g(gen_upcall_argument(ctx, 2));
118 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer), 3));
120 if (code == OPCODE_BOX_MOVE_CLEAR) {
121 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
122 flag_set(ctx, slot_1, false);
/* store the returned pointer (R_RET0) into the destination slot */
125 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* gen_eval - emit code that forces evaluation of slot_1: if the slot
 * holds a pointer and that pointer is a thunk, escape to the interpreter
 * (which will evaluate it); otherwise fall through via skip_label.
 *
 * NOTE(review): "®" is mojibake of "&reg"; the declaration of reg and
 * some lines (returns/braces) are missing from this extraction.
 */
130 static bool attr_w gen_eval(struct codegen_context *ctx, frame_t slot_1)
132 uint32_t escape_label, skip_label;
135 escape_label = alloc_escape_label(ctx);
136 if (unlikely(!escape_label))
139 skip_label = alloc_label(ctx);
140 if (unlikely(!skip_label))
/* if the slot's flag is clear it holds flat data — nothing to evaluate */
143 g(gen_test_1_jz_cached(ctx, slot_1, skip_label));
145 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
146 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
148 gen_label(skip_label);
/* gen_jump - emit a (conditional) jump to the bytecode position at
 * jmp_offset (in bytes) relative to the current position.
 *
 * The target is represented by a per-instruction label in
 * ctx->code_labels[], allocated on first use. Dispatch:
 *   - reg1 and reg2 valid  -> compare the two registers, jump on cond
 *   - reg1 only            -> test one register against zero
 *     (IA64: predicate-register jump variant)
 *   - no registers, COND_ALWAYS -> unconditional jump
 *   - otherwise -> jump on processor flags (FP-test variant on
 *     LoongArch/MIPS/PA-RISC)
 * -1U marks an unused register argument.
 */
153 static bool attr_w gen_jump(struct codegen_context *ctx, int32_t jmp_offset, unsigned op_size, unsigned cond, unsigned reg1, unsigned reg2)
155 ip_t ip = (ctx->current_position - da(ctx->fn,function)->code) + (jmp_offset / (int)sizeof(code_t));
156 if (likely(!ctx->code_labels[ip])) {
157 ctx->code_labels[ip] = alloc_label(ctx);
158 if (unlikely(!ctx->code_labels[ip]))
161 if (reg1 != -1U && reg2 != -1U) {
162 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size_cmp(op_size), reg1, reg2, cond, ctx->code_labels[ip]));
163 #if defined(ARCH_IA64)
164 } else if (reg_is_p(reg1)) {
165 gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond, 0);
167 gen_four(ctx->code_labels[ip]);
169 } else if (reg1 != -1U) {
170 g(gen_jmp_on_zero(ctx, op_size, reg1, cond, ctx->code_labels[ip]));
171 } else if (cond == COND_ALWAYS) {
172 gen_insn(INSN_JMP, 0, 0, 0);
173 gen_four(ctx->code_labels[ip]);
174 #if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_PARISC)
175 } else if (cond & COND_FP) {
176 gen_insn(INSN_JMP_FP_TEST, 0, cond, 0);
177 gen_four(ctx->code_labels[ip]);
/* default: jump on the flags set by a preceding instruction */
180 gen_insn(COND_IS_LOGICAL(cond) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, i_size_cmp(op_size), cond, 0);
181 gen_four(ctx->code_labels[ip]);
/* gen_cond_jump - emit a jump taken when the flat-option value in `slot`
 * is zero (COND_E after comparing/testing it).
 *
 * On s390/x86, when the slot lives in memory, the value can be compared
 * directly against an immediate in memory, avoiding a load; otherwise the
 * value is loaded into a register first and gen_jump() tests it.
 * NOTE(review): "®1" is mojibake of "&reg1"; reg1's declaration and the
 * immediate operand lines are missing from this extraction.
 */
186 static bool attr_w gen_cond_jump(struct codegen_context *ctx, frame_t slot, int32_t jmp_offset)
188 unsigned size = log_2(sizeof(ajla_flat_option_t));
189 size_t attr_unused offset;
191 if (slot_is_register(ctx, slot)) {
194 #if defined(ARCH_S390) || defined(ARCH_X86)
195 offset = (size_t)slot * slot_size;
196 #if defined(ARCH_S390)
197 if (size != OP_SIZE_1)
/* memory-operand compare against an immediate, then jump on equality */
200 g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_MVI_CLI_OFFSET, size));
201 gen_insn(INSN_CMP, size, 0, 2);
202 gen_address_offset();
206 g(gen_jump(ctx, jmp_offset, size, COND_E, -1U, -1U));
/* generic path: load the option value and jump if it is zero */
211 g(gen_frame_get(ctx, size, i_size(size) == size ? garbage : native, slot, R_SCRATCH_1, ®1));
212 g(gen_jump(ctx, jmp_offset, size, COND_E, reg1, -1U));
/* gen_load_fn_or_curry - emit code that builds a function-reference data
 * object and stores a pointer to it into slot_r.
 *
 * Two modes (selected by fn_idx):
 *   - fn_idx valid     : direct reference to an entry of the current
 *                        function's local_directory,
 *   - fn_idx==NO_FRAME_T (curry): indirect reference through the function
 *                        pointer held in slot_fn.
 * The object is allocated by the data_alloc_function_reference_mayfail
 * upcall (escaping on NULL) and kept in R_SAVED_1 while its fields and
 * the ctx->args_l call arguments are filled in: flat built-in values are
 * stored inline with their type tag; everything else is stored as a
 * pointer with TYPE_TAG_unknown (flat non-builtin values are first boxed
 * via the flat_to_data upcall).
 *
 * NOTE(review): this extraction is missing lines (returns after label
 * allocation failures, #else/#endif, operand lines of several emitted
 * instructions, loop-variable declaration, the curry branch structure).
 */
216 static bool attr_w gen_load_fn_or_curry(struct codegen_context *ctx, frame_t fn_idx, frame_t slot_fn, frame_t slot_r, unsigned flags)
218 bool curry = fn_idx == NO_FRAME_T;
219 uint32_t escape_label;
222 escape_label = alloc_escape_label(ctx);
223 if (unlikely(!escape_label))
/* allocate the function_reference object with room for args_l arguments */
226 g(gen_upcall_start(ctx, 1));
228 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
229 g(gen_upcall_argument(ctx, 0));
231 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
232 g(gen_sanitize_returned_pointer(ctx, R_RET0));
233 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* keep the new object's address in a callee-saved register */
235 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* direct reference: store local_directory[fn_idx], is_indirect = 0 */
238 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
240 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.direct), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
241 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
242 gen_address_offset();
243 gen_one(R_SCRATCH_1);
245 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
246 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
247 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
248 gen_address_offset();
/* curry: take the function pointer from slot_fn, is_indirect = 1 */
251 g(gen_frame_get_pointer(ctx, slot_fn, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
253 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.indirect), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
254 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
255 gen_address_offset();
256 gen_one(R_SCRATCH_1);
258 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
259 g(gen_imm(ctx, 1, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
260 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
261 gen_address_offset();
/* copy each call argument into the reference object */
265 for (i = 0; i < ctx->args_l; i++) {
266 uintptr_t arg_offset_tag = offsetof(struct data, u_.function_reference.arguments[i].tag);
267 uintptr_t arg_offset_ptr = offsetof(struct data, u_.function_reference.arguments[i].u.ptr);
268 uintptr_t arg_offset_slot = offsetof(struct data, u_.function_reference.arguments[i].u.slot);
269 frame_t arg_slot = ctx->args[i].slot;
270 const struct type *t = get_type_of_local(ctx, arg_slot);
271 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
272 skip_flat_label = alloc_label(ctx);
273 if (unlikely(!skip_flat_label))
275 set_ptr_label = alloc_label(ctx);
276 if (unlikely(!set_ptr_label))
278 next_arg_label = alloc_label(ctx);
279 if (unlikely(!next_arg_label))
281 if (TYPE_IS_FLAT(t)) {
282 g(gen_test_1_cached(ctx, arg_slot, skip_flat_label));
/* small built-in flat value: store tag + raw value inline */
283 if (t->size <= slot_size && TYPE_TAG_IS_BUILTIN(t->tag)) {
284 unsigned copy_size = OP_SIZE_SLOT;
285 if (is_power_of_2(t->size))
286 copy_size = log_2(t->size);
288 copy_size = maximum(copy_size, OP_SIZE_4);
289 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
290 g(gen_imm(ctx, t->tag, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
291 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
292 gen_address_offset();
295 if (slot_is_register(ctx, arg_slot)) {
296 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
297 gen_insn(INSN_MOV, spill_size(t), 0, 0);
298 gen_address_offset();
299 gen_one(ctx->registers[arg_slot]);
302 #if defined(ARCH_S390)
/* s390 without long displacement cannot do a 1-byte load here */
303 if (copy_size == OP_SIZE_1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
304 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, IMM_PURPOSE_LDR_OFFSET, copy_size));
305 gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
306 gen_one(R_SCRATCH_1);
307 gen_one(R_SCRATCH_1);
308 gen_address_offset();
312 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, ARCH_PREFERS_SX(copy_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, copy_size));
313 gen_insn(ARCH_PREFERS_SX(copy_size) ? INSN_MOVSX : INSN_MOV, copy_size, 0, 0);
314 gen_one(R_SCRATCH_1);
315 gen_address_offset();
318 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
319 gen_insn(INSN_MOV, copy_size, 0, 0);
320 gen_address_offset();
321 gen_one(R_SCRATCH_1);
323 gen_insn(INSN_JMP, 0, 0, 0);
324 gen_four(next_arg_label);
/* large/non-builtin flat value: box it via the flat_to_data upcall */
326 if (slot_is_register(ctx, arg_slot))
327 g(spill(ctx, arg_slot));
329 g(gen_upcall_start(ctx, 3));
331 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
332 g(gen_upcall_argument(ctx, 0));
334 g(gen_load_constant(ctx, R_ARG1, arg_slot));
335 g(gen_upcall_argument(ctx, 1));
337 g(gen_imm(ctx, (size_t)arg_slot * slot_size, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
338 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, is_imm(), ctx->const_imm));
342 g(gen_upcall_argument(ctx, 2));
344 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
346 gen_insn(INSN_JMP, 0, 0, 0);
347 gen_four(set_ptr_label);
/* non-flat argument: fetch the pointer from the frame */
351 gen_label(skip_flat_label);
352 g(gen_frame_get_pointer(ctx, arg_slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* store the pointer (from either path) with TYPE_TAG_unknown */
354 gen_label(set_ptr_label);
355 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
356 g(gen_imm(ctx, TYPE_TAG_unknown, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
357 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
358 gen_address_offset();
361 g(gen_address(ctx, R_SAVED_1, arg_offset_ptr, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
362 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
363 gen_address_offset();
366 gen_label(next_arg_label);
/* publish the finished reference object into slot_r */
369 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
370 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* gen_call - emit the call sequence for OPCODE_CALL / OPCODE_CALL_STRICT
 * / (presumably) spark calls to the local function fn_idx.
 *
 * Steps visible in this chunk:
 *   1. check that enough stack slots remain (escape if not),
 *   2. set up the callee's frame header below the current frame
 *      (available_slots, previous_ip, timestamp, call mode) and clear
 *      its flag bitmap,
 *   3. copy/box each argument into the callee frame, handling flat
 *      values, lending/giving borrowed pointers and reference counting,
 *   4. store the callee function pointer, shift R_FRAME by
 *      new_fp_offset and jump indirectly to the callee's generated code
 *      (escaping via escape_nospill_label when the codegen pointer is
 *      still a thunk).
 *
 * NOTE(review): lossy extraction — declarations of v/i/reg, "return
 * false;" lines, #else/#endif and some closing braces are missing, and
 * "®" is mojibake of "&reg".
 */
375 static bool attr_w gen_call(struct codegen_context *ctx, code_t code, frame_t fn_idx)
377 struct data *new_fn = ctx->local_directory[fn_idx];
378 frame_t required_slots = da(new_fn,function)->frame_slots;
379 frame_t bitmap_slots = da(new_fn,function)->n_bitmap_slots;
381 uint32_t escape_label;
382 int64_t new_fp_offset;
383 uchar_efficient_t call_mode;
385 bool arch_use_flags = ARCH_HAS_FLAGS;
386 #if defined(ARCH_POWER)
387 arch_use_flags = false;
390 escape_label = alloc_escape_label(ctx);
391 if (unlikely(!escape_label))
/* spill every live register-allocated variable back to the frame */
394 for (v = MIN_USEABLE_SLOT; v < function_n_variables(ctx->fn); v++) {
395 if (slot_is_register(ctx, v)) {
/* available_slots -= required_slots; escape on underflow */
400 g(gen_frame_load_raw(ctx, log_2(sizeof(stack_size_t)), native, 0, frame_offs(available_slots), false, R_SCRATCH_1));
401 g(gen_imm(ctx, required_slots, IMM_PURPOSE_SUB, i_size(log_2(sizeof(stack_size_t)))));
402 gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(i_size(log_2(sizeof(stack_size_t)))), i_size(log_2(sizeof(stack_size_t))), ALU_SUB, arch_use_flags);
403 gen_one(R_SCRATCH_1);
404 gen_one(R_SCRATCH_1);
407 if (arch_use_flags) {
408 gen_insn(COND_IS_LOGICAL(COND_B) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, log_2(sizeof(stack_size_t)), COND_B, 0);
409 gen_four(escape_label);
411 g(gen_cmp_test_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, R_SCRATCH_1, COND_S, escape_label));
/* the callee frame lives new_fp_offset bytes below the current frame */
414 new_fp_offset = -(ssize_t)(required_slots * slot_size);
416 g(gen_frame_store_raw(ctx, log_2(sizeof(stack_size_t)), 0, new_fp_offset + frame_offs(available_slots), R_SCRATCH_1));
417 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(ip_t)), 0, new_fp_offset + frame_offs(previous_ip), ctx->return_values - da(ctx->fn,function)->code));
418 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
419 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
420 call_mode = code == OPCODE_CALL ? CALL_MODE_NORMAL : code == OPCODE_CALL_STRICT ? CALL_MODE_STRICT : CALL_MODE_SPARK;
421 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(uchar_efficient_t)), 0, new_fp_offset + frame_offs(mode), call_mode));
423 g(gen_clear_bitmap(ctx, frame_offset, R_FRAME, new_fp_offset, bitmap_slots));
/* copy each argument into the callee frame */
425 for (i = 0; i < ctx->args_l; i++) {
426 const struct code_arg *src_arg = &ctx->args[i];
427 const struct local_arg *dest_arg = &da(new_fn,function)->args[i];
428 const struct type *t = get_type_of_local(ctx, src_arg->slot);
429 uint32_t non_flat_label, thunk_label, incr_ref_label, next_arg_label;
431 non_flat_label = alloc_label(ctx);
432 if (unlikely(!non_flat_label))
434 thunk_label = alloc_label(ctx);
435 if (unlikely(!thunk_label))
437 incr_ref_label = alloc_label(ctx);
438 if (unlikely(!incr_ref_label))
440 next_arg_label = alloc_label(ctx);
441 if (unlikely(!next_arg_label))
443 if (TYPE_IS_FLAT(t)) {
444 g(gen_test_1_cached(ctx, src_arg->slot, non_flat_label));
/* callee accepts flat data: plain memcpy into its slot */
445 if (dest_arg->may_be_flat) {
446 g(gen_memcpy_from_slot(ctx, R_FRAME, new_fp_offset + (size_t)dest_arg->slot * slot_size, src_arg->slot));
/* otherwise box the flat value first (flat_to_data upcall) */
448 g(gen_upcall_start(ctx, 3));
450 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
451 g(gen_upcall_argument(ctx, 0));
453 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
454 g(gen_upcall_argument(ctx, 1));
456 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
457 g(gen_upcall_argument(ctx, 2));
459 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
461 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_RET0));
463 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
466 if (flag_is_clear(ctx, src_arg->slot))
467 goto skip_ref_argument;
469 gen_insn(INSN_JMP, 0, 0, 0);
470 gen_four(next_arg_label);
/* pointer argument paths */
472 gen_label(non_flat_label);
/* lend: pass the pointer without transferring ownership */
474 if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_LEND) {
475 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
476 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
477 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
478 gen_insn(INSN_JMP, 0, 0, 0);
479 gen_four(next_arg_label);
/* give: transfer the pointer and clear the source slot */
480 } else if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_GIVE) {
481 g(gen_test_1_cached(ctx, src_arg->slot, thunk_label));
482 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
483 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
484 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
485 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
486 gen_insn(INSN_JMP, 0, 0, 0);
487 gen_four(next_arg_label);
/* general path: store the pointer owned, managing the refcount */
490 gen_label(thunk_label);
491 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
492 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
493 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
494 if (src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT) {
495 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
496 if (flag_is_set(ctx, src_arg->slot)) {
/* ownership moved to the callee; no refcount change needed */
497 g(gen_set_1(ctx, R_FRAME, src_arg->slot, 0, false));
498 flag_set(ctx, src_arg->slot, false);
499 goto skip_ref_argument;
501 if (flag_is_clear(ctx, src_arg->slot))
503 g(gen_test_1(ctx, R_FRAME, src_arg->slot, 0, incr_ref_label, true, TEST_CLEAR));
504 gen_insn(INSN_JMP, 0, 0, 0);
505 gen_four(next_arg_label);
/* pointer stays referenced in both frames: bump its refcount */
508 gen_label(incr_ref_label);
510 g(gen_upcall_start(ctx, 1));
512 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
513 g(gen_upcall_argument(ctx, 0));
515 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
518 gen_label(next_arg_label);
/* install the callee's function pointer into the new frame */
521 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
523 g(gen_address(ctx, R_SCRATCH_1, 0, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
524 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
525 gen_one(R_SCRATCH_1);
526 gen_address_offset();
528 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
530 g(gen_frame_store_raw(ctx, OP_SIZE_ADDRESS, 0, frame_offs(function) + new_fp_offset, R_SCRATCH_1));
/* shift the frame pointer to the callee frame (SUB of the negated
 * offset on most arches — presumably an immediate-encoding aid) */
532 #if !defined(ARCH_X86) && !defined(ARCH_PARISC)
533 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_SUB, R_FRAME, R_FRAME, -new_fp_offset, 0));
535 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* load the callee's codegen pointer; escape if it is still a thunk
 * (code not generated yet) */
538 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
539 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
540 gen_one(R_SCRATCH_1);
541 gen_address_offset();
543 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, ctx->escape_nospill_label));
546 gen_pointer_compression(R_SCRATCH_1);
/* tail-jump into the callee's generated code */
547 #if (defined(ARCH_X86) && !defined(ARCH_X86_X32)) || defined(ARCH_ARM32)
548 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
549 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
550 gen_address_offset_compressed();
552 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
553 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
554 gen_one(R_SCRATCH_1);
555 gen_address_offset_compressed();
557 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
558 gen_one(R_SCRATCH_1);
/* compile-time flag cache is invalid after the call */
560 g(clear_flag_cache(ctx));
/* gen_return - emit the return sequence: write the function's return
 * values into the caller's frame and jump back into the caller's
 * generated code.
 *
 * Steps visible in this chunk:
 *   1. load the caller's function pointer from the parent frame
 *      (escape if NULL — presumably the bottom frame — or if the
 *      caller's codegen pointer is a thunk),
 *   2. propagate the timestamp, read previous_ip and compute R_SAVED_1 =
 *      address of the caller's return-value code words,
 *   3. for each return value, decode its destination slot from the code
 *      words into R_SAVED_2 and either memcpy a flat value or store an
 *      (owned) pointer into caller-frame slot R_SAVED_2, setting its
 *      flag; flat values are boxed via flat_to_data when the call site
 *      does not accept flat returns (OPCODE_MAY_RETURN_FLAT test),
 *   4. restore R_FRAME to the caller frame and jump indirectly through
 *      the caller codegen's unoptimized_code table indexed by the code
 *      word after previous_ip.
 *
 * NOTE(review): lossy extraction — declarations of i/reg, "return
 * false;" lines, #else/#endif, some operand lines and the
 * scaled_store_done/scaled_jmp_done label definitions are missing, and
 * "®"/"®1" are mojibake of "&reg"/"&reg1".
 */
565 static bool attr_w gen_return(struct codegen_context *ctx)
567 int64_t new_fp_offset;
568 uint32_t escape_label;
570 int64_t retval_offset;
571 unsigned attr_unused reg1;
573 escape_label = alloc_escape_label(ctx);
574 if (unlikely(!escape_label))
/* the caller frame sits frame_slots*slot_size bytes above ours */
577 new_fp_offset = (size_t)da(ctx->fn,function)->frame_slots * slot_size;
579 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_2));
581 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_SCRATCH_2, COND_E, escape_label));
583 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
584 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
585 gen_one(R_SCRATCH_1);
586 gen_address_offset();
588 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
/* copy the timestamp up to the caller frame */
591 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
592 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
/* R_SAVED_1 = caller code + previous_ip = the return-value code words */
594 g(gen_frame_load_raw(ctx, log_2(sizeof(ip_t)), native, 0, frame_offs(previous_ip), false, R_SCRATCH_1));
596 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
597 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
598 gen_one(R_SCRATCH_2);
599 gen_address_offset();
601 g(gen_lea3(ctx, R_SAVED_1, R_SCRATCH_2, R_SCRATCH_1, log_2(sizeof(code_t)), 0));
/* write each return value into the caller frame */
604 for (i = 0; i < ctx->args_l; i++) {
605 const struct code_arg *src_arg = &ctx->args[i];
606 const struct type *t = get_type_of_local(ctx, src_arg->slot);
607 uint32_t copy_ptr_label, load_write_ptr_label, write_ptr_label, next_arg_label;
609 copy_ptr_label = alloc_label(ctx);
610 if (unlikely(!copy_ptr_label))
613 load_write_ptr_label = alloc_label(ctx);
614 if (unlikely(!load_write_ptr_label))
617 write_ptr_label = alloc_label(ctx);
618 if (unlikely(!write_ptr_label))
621 next_arg_label = alloc_label(ctx);
622 if (unlikely(!next_arg_label))
/* R_SAVED_2 = destination slot index, decoded from the code word */
625 g(gen_load_code_32(ctx, R_SAVED_2, R_SAVED_1, retval_offset));
627 if (TYPE_IS_FLAT(t)) {
628 uint32_t flat_to_data_label;
629 g(gen_test_1_cached(ctx, src_arg->slot, copy_ptr_label));
631 flat_to_data_label = alloc_label(ctx);
632 if (unlikely(!flat_to_data_label))
/* does the call site accept a flat return? test the
 * OPCODE_MAY_RETURN_FLAT bit in the return-value code word */
635 #if defined(ARCH_X86)
636 g(gen_address(ctx, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(code_t))));
637 g(gen_imm(ctx, OPCODE_MAY_RETURN_FLAT, IMM_PURPOSE_TEST, log_2(sizeof(code_t))));
638 gen_insn(INSN_TEST, log_2(sizeof(code_t)), 0, 1);
639 gen_address_offset();
642 gen_insn(INSN_JMP_COND, log_2(sizeof(code_t)), COND_E, 0);
643 gen_four(flat_to_data_label);
645 g(gen_load_two(ctx, R_SCRATCH_1, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3)));
647 g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, OPCODE_MAY_RETURN_FLAT, COND_E, flat_to_data_label));
/* x86 can store straight to [frame + slot*slot_size + off] */
649 #if defined(ARCH_X86)
650 if (is_power_of_2(t->size) && t->size <= 2U << OP_SIZE_NATIVE) {
651 if (t->size == 2U << OP_SIZE_NATIVE) {
652 g(gen_frame_load_2(ctx, OP_SIZE_NATIVE, src_arg->slot, 0, R_SCRATCH_1, R_SCRATCH_2));
654 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
655 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
658 gen_eight(new_fp_offset + lo_word(OP_SIZE_NATIVE));
659 gen_one(R_SCRATCH_1);
661 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
662 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
665 gen_eight(new_fp_offset + hi_word(OP_SIZE_NATIVE));
666 gen_one(R_SCRATCH_2);
668 g(gen_frame_get(ctx, log_2(t->size), garbage, src_arg->slot, R_SCRATCH_1, ®1));
670 gen_insn(INSN_MOV, log_2(t->size), 0, 0);
671 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
674 gen_eight(new_fp_offset);
/* generic path: compute the destination address and memcpy */
680 g(gen_lea3(ctx, R_SCRATCH_2, R_FRAME, R_SAVED_2, OP_SIZE_SLOT, new_fp_offset));
682 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, 0, src_arg->slot));
685 gen_insn(INSN_JMP, 0, 0, 0);
686 gen_four(next_arg_label);
/* flat value, but caller needs a pointer: box it via upcall */
688 gen_label(flat_to_data_label);
690 if (slot_is_register(ctx, src_arg->slot))
691 g(spill(ctx, src_arg->slot));
693 g(gen_upcall_start(ctx, 3));
695 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
696 g(gen_upcall_argument(ctx, 0));
698 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
699 g(gen_upcall_argument(ctx, 1));
701 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
702 g(gen_upcall_argument(ctx, 2));
704 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
706 if (flag_is_clear(ctx, src_arg->slot))
707 goto skip_ref_argument;
709 gen_insn(INSN_JMP, 0, 0, 0);
710 gen_four(write_ptr_label);
/* pointer return value: ensure we own a reference before handing
 * it to the caller */
713 gen_label(copy_ptr_label);
715 if (unlikely(!(src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT))) {
716 g(gen_upcall_start(ctx, 1));
717 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
718 g(gen_upcall_argument(ctx, 0));
719 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
720 } else if (da(ctx->fn,function)->local_variables_flags[src_arg->slot].may_be_borrowed) {
721 g(gen_test_1_cached(ctx, src_arg->slot, load_write_ptr_label));
722 g(gen_upcall_start(ctx, 1));
723 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
724 g(gen_upcall_argument(ctx, 0));
725 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
728 gen_label(load_write_ptr_label);
730 g(gen_frame_load_slot(ctx, src_arg->slot, R_RET0));
/* store R_RET0 into caller-frame slot R_SAVED_2 (scaled addressing) */
733 gen_label(write_ptr_label);
735 #if defined(ARCH_X86)
736 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
737 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
740 gen_eight(new_fp_offset);
742 goto scaled_store_done;
744 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
745 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
746 gen_one(R_SCRATCH_3);
748 gen_one(ARG_SHIFTED_REGISTER);
749 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
752 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
753 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
754 gen_address_offset();
756 goto scaled_store_done;
/* no shifted-add: shift and add manually */
759 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_3, R_SAVED_2, OP_SIZE_SLOT, false));
761 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_3, R_SCRATCH_3, R_FRAME, 0));
763 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
764 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
765 gen_address_offset();
/* mark the caller's slot as holding a pointer */
769 g(gen_set_1_variable(ctx, R_SAVED_2, new_fp_offset, true));
771 gen_label(next_arg_label);
/* each return value uses 4 (or 6 with ARG_MODE_N>=3) code bytes */
773 retval_offset += 4 + 2 * (ARG_MODE_N >= 3);
/* reload the caller's codegen pointer and jump back into its code,
 * indexed by the code word following the return values */
776 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_1));
778 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
779 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
780 gen_one(R_SCRATCH_1);
781 gen_address_offset();
783 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
785 g(gen_load_code_32(ctx, R_SCRATCH_2, R_SAVED_1, retval_offset + 2));
/* restore the frame pointer to the caller frame */
787 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* per-arch scaled indirect jump through unoptimized_code[R_SCRATCH_2] */
789 #if defined(ARCH_X86) && !defined(ARCH_X86_X32)
790 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
791 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
792 gen_one(R_SCRATCH_1);
793 gen_one(R_SCRATCH_2);
794 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
796 goto scaled_jmp_done;
798 #if defined(ARCH_X86)
799 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
800 gen_one(R_SCRATCH_1);
801 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
802 gen_one(R_SCRATCH_1);
803 gen_one(R_SCRATCH_2);
804 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
806 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
807 gen_one(R_SCRATCH_1);
809 goto scaled_jmp_done;
811 #if defined(ARCH_ARM32)
812 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
813 gen_one(R_SCRATCH_1);
814 gen_one(R_SCRATCH_1);
815 gen_one(ARG_SHIFTED_REGISTER);
816 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
817 gen_one(R_SCRATCH_2);
819 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
820 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
821 gen_address_offset();
823 goto scaled_jmp_done;
825 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_ADDRESS)) {
826 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
827 gen_one(R_SCRATCH_1);
828 gen_one(R_SCRATCH_1);
829 gen_one(ARG_SHIFTED_REGISTER);
830 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
831 gen_one(R_SCRATCH_2);
833 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
834 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
835 gen_one(R_SCRATCH_1);
836 gen_address_offset();
838 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
839 gen_one(R_SCRATCH_1);
841 goto scaled_jmp_done;
/* fallback: explicit shift + add, then load and jump */
844 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_2, R_SCRATCH_2, OP_SIZE_ADDRESS, false));
846 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 0));
848 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
849 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
850 gen_one(R_SCRATCH_1);
851 gen_address_offset();
853 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
854 gen_one(R_SCRATCH_1);
856 goto scaled_jmp_done;
861 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src);
862 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label);
864 static bool attr_w gen_structured(struct codegen_context *ctx, frame_t slot_struct, frame_t slot_elem)
866 uint32_t escape_label;
867 const struct type *struct_type, *elem_type;
871 escape_label = alloc_escape_label(ctx);
872 if (unlikely(!escape_label))
875 struct_type = get_type_of_local(ctx, slot_struct);
876 elem_type = get_type_of_local(ctx, slot_elem);
878 if (TYPE_IS_FLAT(struct_type) && struct_type->tag != TYPE_TAG_flat_option) {
879 if (!TYPE_IS_FLAT(elem_type)) {
882 g(gen_test_1_cached(ctx, slot_struct, escape_label));
883 flag_set(ctx, slot_struct, false);
887 g(gen_test_1_jz_cached(ctx, slot_struct, escape_label));
891 if (slot_is_register(ctx, slot_struct))
892 g(spill(ctx, slot_struct));
893 g(gen_frame_address(ctx, slot_struct, 0, R_SAVED_1));
895 for (i = 0; i < ctx->args_l; i++) {
896 frame_t param_slot = ctx->args[i].slot;
898 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
899 case OPCODE_STRUCTURED_RECORD: {
900 struct flat_record_definition_entry *e;
901 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_record, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_record));
902 e = &type_def(struct_type,flat_record)->entries[param_slot];
904 g(gen_imm(ctx, e->flat_offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
905 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, is_imm(), ctx->const_imm));
910 struct_type = e->subtype;
913 case OPCODE_STRUCTURED_ARRAY: {
914 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_array, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_array));
915 g(gen_test_1_cached(ctx, param_slot, escape_label));
916 flag_set(ctx, param_slot, false);
917 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_1, ®1));
919 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg1, type_def(struct_type,flat_array)->n_elements, COND_AE, escape_label));
921 g(gen_scaled_array_address(ctx, type_def(struct_type,flat_array)->base->size, R_SAVED_1, R_SAVED_1, reg1, 0));
923 struct_type = type_def(struct_type,flat_array)->base;
927 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
930 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
931 gen_one(R_SCRATCH_1);
932 gen_one(ARG_ADDRESS_1);
936 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
939 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
941 g(gen_compare_refcount(ctx, R_SCRATCH_1, REFCOUNT_STEP, COND_AE, escape_label));
943 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
944 case OPCODE_STRUCTURED_RECORD: {
945 const struct type *rec_type, *e_type;
946 rec_type = da_type(ctx->fn, ctx->args[i].type);
947 TYPE_TAG_VALIDATE(rec_type->tag);
948 if (unlikely(rec_type->tag == TYPE_TAG_flat_record))
949 rec_type = type_def(rec_type,flat_record)->base;
950 e_type = type_def(rec_type,record)->types[param_slot];
951 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
952 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, true, TEST));
954 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, false, TEST));
955 struct_type = e_type;
957 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, data_record_offset + (size_t)param_slot * slot_size, 0));
960 case OPCODE_STRUCTURED_OPTION: {
961 unsigned op_size = log_2(sizeof(ajla_option_t));
962 #if defined(ARCH_X86)
963 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
964 g(gen_imm(ctx, param_slot, IMM_PURPOSE_CMP, op_size));
965 gen_insn(INSN_CMP, op_size, 0, 1);
966 gen_address_offset();
969 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
970 gen_four(escape_label);
972 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
973 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
974 gen_one(R_SCRATCH_2);
975 gen_address_offset();
977 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
979 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
982 case OPCODE_STRUCTURED_ARRAY: {
983 const struct type *e_type = da_type(ctx->fn, ctx->args[i].type);
985 g(gen_test_1_cached(ctx, param_slot, escape_label));
986 flag_set(ctx, param_slot, false);
988 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_2, ®2));
990 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
992 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
993 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
995 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
996 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
997 gen_one(R_SCRATCH_1);
998 gen_address_offset();
1000 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_1, R_SCRATCH_1, reg2, 0));
1002 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_flat, COND_NE, escape_label, R_SCRATCH_3));
1004 g(gen_scaled_array_address(ctx, e_type->size, R_SAVED_1, R_SCRATCH_1, reg2, data_array_offset));
1006 struct_type = e_type;
1011 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
1018 g(gen_test_1_cached(ctx, slot_elem, escape_label));
1019 flag_set(ctx, slot_elem, false);
1020 g(gen_memcpy_from_slot(ctx, R_SAVED_1, 0, slot_elem));
1022 uint32_t skip_deref_label;
1023 skip_deref_label = alloc_label(ctx);
1024 if (unlikely(!skip_deref_label))
1027 if (TYPE_IS_FLAT(elem_type))
1028 g(gen_test_1_jz_cached(ctx, slot_elem, escape_label));
1030 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1031 gen_one(R_SCRATCH_1);
1032 gen_one(ARG_ADDRESS_1);
1036 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_SCRATCH_1, COND_E, skip_deref_label));
1038 g(gen_upcall_start(ctx, 1));
1039 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1040 g(gen_upcall_argument(ctx, 0));
1041 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1043 gen_label(skip_deref_label);
1045 g(gen_frame_get_pointer(ctx, slot_elem, (ctx->args[i - 1].flags & OPCODE_STRUCTURED_FREE_VARIABLE) != 0, R_SCRATCH_1));
1047 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1048 gen_one(ARG_ADDRESS_1);
1051 gen_one(R_SCRATCH_1);
/* Build a record value in slot_r from the argument slots in ctx->args.
   Flat record: each member is copied directly into the destination frame
   slot at its flat_offset.  Boxed record: the record is allocated via an
   upcall; each member is either flat-copied, boxed via flat_to_data, or
   stored as a pointer with its flag bit set in the record's bitmap.
   Returns false only on label-allocation failure (escape-label pattern).
   NOTE(review): this listing has elided lines — error-return statements,
   some braces and #else branches are not visible here. */
1057 static bool attr_w gen_record_create(struct codegen_context *ctx, frame_t slot_r)
1059 const struct type *t;
1060 const struct record_definition *def;
1061 uint32_t escape_label;
1064 escape_label = alloc_escape_label(ctx);
1065 if (unlikely(!escape_label))
1068 t = get_type_of_local(ctx, slot_r);
/* Flat-record path: all arguments must be flat (escape otherwise),
   then each member is memcpy'd into slot_r at its flat offset. */
1069 if (t->tag == TYPE_TAG_flat_record) {
1070 const struct flat_record_definition *flat_def;
1071 const struct type *flat_type = t;
1072 t = type_def(t,flat_record)->base;
1073 def = type_def(t,record);
1074 flat_def = type_def(flat_type,flat_record);
1075 for (i = 0; i < ctx->args_l; i++) {
1076 frame_t var_slot = ctx->args[i].slot;
1077 g(gen_test_1_cached(ctx, var_slot, escape_label));
1078 flag_set(ctx, var_slot, false);
1080 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1081 frame_t var_slot, flat_offset, record_slot;
/* ii walks record-definition entries; elided entries are skipped. */
1082 while (unlikely(record_definition_is_elided(def, ii)))
1084 var_slot = ctx->args[i].slot;
1085 record_slot = record_definition_slot(def, ii);
1086 flat_offset = flat_def->entries[record_slot].flat_offset;
1087 g(gen_memcpy_from_slot(ctx, R_FRAME, (size_t)slot_r * slot_size + flat_offset, var_slot));
1092 def = type_def(t,record);
/* Boxed-record path: upcall data_alloc_record_mayfail(frame, slot_r). */
1094 g(gen_upcall_start(ctx, 2));
1096 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1097 g(gen_upcall_argument(ctx, 0));
1099 g(gen_load_constant(ctx, R_ARG1, slot_r));
1100 g(gen_upcall_argument(ctx, 1));
1102 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1103 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL result means allocation failure -> escape to the interpreter. */
1104 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the new record in R_SAVED_1 across subsequent upcalls. */
1106 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Clear the record's flag bitmap before filling the members in. */
1108 g(gen_clear_bitmap(ctx, 0, R_SAVED_1, data_record_offset, bitmap_slots(def->n_slots)));
1110 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1111 frame_t var_slot, var_flags, record_slot;
1112 const struct type *var_type, *record_type;
1113 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
1115 skip_flat_label = alloc_label(ctx);
1116 if (unlikely(!skip_flat_label))
1118 set_ptr_label = alloc_label(ctx);
1119 if (unlikely(!set_ptr_label))
1121 next_arg_label = alloc_label(ctx);
1122 if (unlikely(!next_arg_label))
1125 while (unlikely(record_definition_is_elided(def, ii)))
1127 var_slot = ctx->args[i].slot;
1128 var_type = get_type_of_local(ctx, var_slot);
1129 var_flags = ctx->args[i].flags;
1130 record_slot = record_definition_slot(def, ii);
1131 record_type = def->types[record_slot];
/* Flat source whose member is also flat: a plain memcpy suffices. */
1132 if (TYPE_IS_FLAT(var_type)) {
1133 g(gen_test_1_cached(ctx, var_slot, skip_flat_label));
1134 if (TYPE_IS_FLAT(record_type)) {
1135 g(gen_memcpy_from_slot(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, var_slot));
1137 gen_insn(INSN_JMP, 0, 0, 0);
1138 gen_four(next_arg_label);
/* Flat source, boxed member: box the value with flat_to_data. */
1140 if (slot_is_register(ctx, var_slot))
1141 g(spill(ctx, var_slot));
1143 g(gen_upcall_start(ctx, 3));
1145 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1146 g(gen_upcall_argument(ctx, 0));
1148 g(gen_load_constant(ctx, R_ARG1, var_slot));
1149 g(gen_upcall_argument(ctx, 1));
1151 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)var_slot * slot_size, 0));
1152 g(gen_upcall_argument(ctx, 2));
1154 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1156 gen_insn(INSN_JMP, 0, 0, 0);
1157 gen_four(set_ptr_label);
/* Pointer source: fetch the slot's pointer, consuming it when
   OPCODE_FLAG_FREE_ARGUMENT is set. */
1161 gen_label(skip_flat_label);
1162 g(gen_frame_get_pointer(ctx, var_slot, (var_flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1164 gen_label(set_ptr_label);
1165 g(gen_address(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1166 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1167 gen_address_offset();
/* Mark this member as a pointer in the record's flag bitmap. */
1170 g(gen_set_1(ctx, R_SAVED_1, record_slot, data_record_offset, true));
1172 gen_label(next_arg_label);
1175 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1176 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Load member rec_slot of the record in slot_1 into slot_r.
   Flat records are copied straight out of the frame slot; boxed records
   are dereferenced and the member is either flat-copied or loaded as a
   pointer (via gen_frame_set_pointer_2).
   NOTE(review): this listing has elided lines (error returns, braces). */
1181 static bool attr_w gen_record_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, frame_t rec_slot, frame_t flags)
1183 const struct type *rec_type, *entry_type;
1184 uint32_t escape_label;
1187 rec_type = get_type_of_local(ctx, slot_1);
/* A record without a definition is only legal in the unnamed function. */
1188 if (unlikely(rec_type->tag == TYPE_TAG_unknown)) {
1189 ajla_assert_lo(!*da(ctx->fn,function)->function_name, (file_line, "gen_record_load: function %s has record without definition", da(ctx->fn,function)->function_name));
1193 escape_label = alloc_escape_label(ctx);
1194 if (unlikely(!escape_label))
1197 /*debug("gen_record_load: %s: %u, %u", da(ctx->fn,function)->function_name, TYPE_TAG_unknown, rec_type->tag);*/
/* Flat record: copy the member bytes from inside slot_1's frame slot. */
1198 if (TYPE_IS_FLAT(rec_type)) {
1199 const struct flat_record_definition_entry *ft = &type_def(rec_type,flat_record)->entries[rec_slot];
1200 g(gen_test_1_cached(ctx, slot_1, escape_label));
1201 g(gen_memcpy_to_slot(ctx, slot_r, R_FRAME, (size_t)slot_1 * slot_size + ft->flat_offset));
1202 flag_set(ctx, slot_1, false);
1203 flag_set(ctx, slot_r, false);
1206 entry_type = type_def(rec_type,record)->types[rec_slot];
/* Boxed record: obtain a decompressed pointer to the record data. */
1208 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_2, ®, escape_label));
1210 if (TYPE_IS_FLAT(entry_type)) {
/* Member must currently be flat (flag clear); escape otherwise. */
1211 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, false, TEST));
1212 g(gen_memcpy_to_slot(ctx, slot_r, reg, (size_t)rec_slot * slot_size + data_record_offset));
1213 flag_set(ctx, slot_r, false);
/* Destination is required flat but would receive a pointer: escape. */
1217 if (flag_must_be_flat(ctx, slot_r)) {
1218 gen_insn(INSN_JMP, 0, 0, 0);
1219 gen_four(escape_label);
/* Member must be a pointer (flag set); escape otherwise. */
1223 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, true, TEST));
1225 g(gen_address(ctx, reg, (size_t)rec_slot * slot_size + data_record_offset, ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1226 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1227 gen_one(R_SCRATCH_1);
1228 gen_address_offset();
1230 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/* Create a payload-less option with ordinal 'opt' in flat
   representation: store the constant into slot_r and clear its flag. */
1235 static bool attr_w gen_option_create_empty_flat(struct codegen_context *ctx, ajla_flat_option_t opt, frame_t slot_r)
1237 g(gen_frame_store_imm(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, opt));
1238 flag_set(ctx, slot_r, false);
/* Create a payload-less boxed option with ordinal 'opt' in slot_r:
   allocate the option via an upcall, store the ordinal and zero the
   payload pointer.  NOTE(review): listing has elided lines. */
1242 static bool attr_w gen_option_create_empty(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_r)
1244 unsigned option_size = log_2(sizeof(ajla_option_t));
1245 uint32_t escape_label;
1247 escape_label = alloc_escape_label(ctx);
1248 if (unlikely(!escape_label))
/* Destination must stay flat -> cannot hold a boxed option; escape. */
1251 if (flag_must_be_flat(ctx, slot_r)) {
1252 gen_insn(INSN_JMP, 0, 0, 0);
1253 gen_four(escape_label);
1257 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1258 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL result means allocation failure -> escape. */
1259 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Store the option ordinal. */
1261 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1262 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1263 gen_insn(INSN_MOV, option_size, 0, 0);
1264 gen_address_offset();
/* No payload: zero the pointer field. */
1267 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1268 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, OP_SIZE_SLOT));
1269 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1270 gen_address_offset();
1273 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1274 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Create a boxed option with ordinal 'opt' whose payload comes from
   slot_1.  A flat payload is boxed through the flat_to_data upcall; a
   pointer payload is fetched from the slot (consumed when
   OPCODE_FLAG_FREE_ARGUMENT is set in 'flags').
   NOTE(review): listing has elided lines. */
1279 static bool attr_w gen_option_create(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_1, frame_t slot_r, frame_t flags)
1281 unsigned option_size = log_2(sizeof(ajla_option_t));
1282 const struct type *type;
1283 uint32_t escape_label, get_pointer_label, got_pointer_label;
1285 escape_label = alloc_escape_label(ctx);
1286 if (unlikely(!escape_label))
/* Destination must stay flat -> cannot hold a boxed option; escape. */
1289 if (flag_must_be_flat(ctx, slot_r)) {
1290 gen_insn(INSN_JMP, 0, 0, 0);
1291 gen_four(escape_label);
1295 get_pointer_label = alloc_label(ctx);
1296 if (unlikely(!get_pointer_label))
1299 got_pointer_label = alloc_label(ctx);
1300 if (unlikely(!got_pointer_label))
1303 type = get_type_of_local(ctx, slot_1);
/* Allocate the option first; keep it in R_SAVED_1 across the later
   upcalls. */
1305 g(gen_upcall_start(ctx, 0));
1306 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1307 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1308 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1310 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Store the option ordinal. */
1312 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1313 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1314 gen_insn(INSN_MOV, option_size, 0, 0);
1315 gen_address_offset();
/* Flat payload: box slot_1 with flat_to_data(frame, slot, &slot). */
1318 if (TYPE_IS_FLAT(type)) {
1319 g(gen_test_1_cached(ctx, slot_1, get_pointer_label));
1321 if (slot_is_register(ctx, slot_1))
1322 g(spill(ctx, slot_1));
1324 g(gen_upcall_start(ctx, 3));
1326 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1327 g(gen_upcall_argument(ctx, 0));
1329 g(gen_load_constant(ctx, R_ARG1, slot_1));
1330 g(gen_upcall_argument(ctx, 1));
1332 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1333 g(gen_upcall_argument(ctx, 2));
1335 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* If the flag is known clear, the pointer path is unreachable and its
   label need not be emitted. */
1337 if (flag_is_clear(ctx, slot_1))
1338 goto skip_get_pointer_label;
1340 gen_insn(INSN_JMP, 0, 0, 0);
1341 gen_four(got_pointer_label);
/* Pointer payload: fetch (and possibly consume) the slot's pointer. */
1344 gen_label(get_pointer_label);
1345 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1347 skip_get_pointer_label:
1348 gen_label(got_pointer_label);
/* Store the payload pointer into the option and publish it. */
1349 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1350 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1351 gen_address_offset();
1354 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1355 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Compare the ordinal of the boxed option pointed to by 'reg' against
   'opt'.  If 'label' is nonzero, jump to it when the ordinals differ;
   otherwise store the boolean comparison result into slot_r.
   x86 can compare memory against an immediate directly; other targets
   load the ordinal into R_SCRATCH_2 first.
   NOTE(review): listing has elided lines — the #else/#endif structure
   between the architecture variants is not fully visible here. */
1360 static bool attr_w gen_option_cmp(struct codegen_context *ctx, unsigned reg, frame_t opt, uint32_t label, frame_t slot_r)
1362 unsigned op_size = log_2(sizeof(ajla_option_t));
1364 #if defined(ARCH_X86)
/* x86: CMP [reg + option_offset], opt */
1365 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
1366 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1367 gen_insn(INSN_CMP, op_size, 0, 1);
1368 gen_address_offset();
/* Generic: load the ordinal, then compare against the immediate. */
1371 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1372 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1373 gen_one(R_SCRATCH_2);
1374 gen_address_offset();
1376 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1377 gen_insn(INSN_CMP, op_size, 0, 1);
1378 gen_one(R_SCRATCH_2);
/* With a label: branch when the ordinals are not equal. */
1382 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
/* Without a label: materialize (ordinal == opt) into slot_r. */
1385 g(gen_frame_set_cond(ctx, op_size, false, COND_E, slot_r));
/* Variant for targets using cmp-into-register instead of flags. */
1389 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1390 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1391 gen_one(R_SCRATCH_2);
1392 gen_address_offset();
/* Result register depends on whether we branch or store the bool. */
1394 g(gen_cmp_dest_reg(ctx, op_size, R_SCRATCH_2, (unsigned)-1, label ? R_CMP_RESULT : R_SCRATCH_2, opt, COND_E));
1397 gen_insn(INSN_JMP_REG, i_size(op_size), COND_E, 0);
1398 gen_one(R_CMP_RESULT);
1401 g(gen_frame_store(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, R_SCRATCH_2));
/* Extract the payload of the option in slot_1 into slot_r, escaping
   unless the option's ordinal equals 'opt'.  A flat option has no
   payload, so the flat path always escapes.
   NOTE(review): listing has elided lines. */
1407 static bool attr_w gen_option_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, ajla_option_t opt, frame_t flags)
1409 const struct type *type;
1410 uint32_t escape_label;
1413 escape_label = alloc_escape_label(ctx);
1414 if (unlikely(!escape_label))
/* Destination is required flat but would receive a pointer: escape. */
1417 if (flag_must_be_flat(ctx, slot_r)) {
1418 gen_insn(INSN_JMP, 0, 0, 0);
1419 gen_four(escape_label);
1423 type = get_type_of_local(ctx, slot_1);
1424 if (TYPE_IS_FLAT(type)) {
1425 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
/* Boxed option: check the ordinal, then load the payload pointer. */
1428 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1429 g(gen_option_cmp(ctx, reg, opt, escape_label, 0));
1431 g(gen_address(ctx, reg, offsetof(struct data, u_.option.pointer), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1432 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1433 gen_one(R_SCRATCH_1);
1434 gen_address_offset();
1436 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/* Store (flat option in slot_1 == opt) as a flat boolean into slot_r.
   If 'opt' does not fit in ajla_flat_option_t the comparison can never
   succeed, so slot_r is simply cleared.
   NOTE(review): listing has elided lines. */
1441 static bool attr_w gen_option_test_flat(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1443 unsigned op_size = log_2(sizeof(ajla_flat_option_t));
1444 uint32_t escape_label;
1446 escape_label = alloc_escape_label(ctx);
1447 if (unlikely(!escape_label))
1450 g(gen_test_1_cached(ctx, slot_1, escape_label));
1452 flag_set(ctx, slot_1, false);
1453 flag_set(ctx, slot_r, false);
/* Ordinal out of flat-option range: the result is always false. */
1455 if (unlikely(opt != (ajla_flat_option_t)opt)) {
1456 g(gen_frame_clear(ctx, op_size, slot_r));
1460 g(gen_frame_load_cmp_imm_set_cond(ctx, op_size, zero_x, slot_1, opt, COND_E, slot_r));
/* Store (boxed option in slot_1 has ordinal == opt) as a flat boolean
   into slot_r.  If 'opt' does not fit in ajla_option_t the result is
   always false.  NOTE(review): listing has elided lines. */
1465 static bool attr_w gen_option_test(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1467 uint32_t escape_label;
1470 escape_label = alloc_escape_label(ctx);
1471 if (unlikely(!escape_label))
1474 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1476 flag_set(ctx, slot_r, false);
/* Ordinal out of range: the result is always false. */
1478 if (unlikely(opt != (ajla_option_t)opt)) {
1479 g(gen_frame_clear(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r));
/* label == 0 -> gen_option_cmp stores the boolean into slot_r. */
1483 g(gen_option_cmp(ctx, reg, opt, 0, slot_r));
/* Load the ordinal of the option in slot_1 and store it as an integer
   into slot_r.  Handles both the flat representation (value read
   directly from the frame) and the boxed one (ordinal read from the
   allocated option).  NOTE(review): listing has elided lines; the use
   of the 'flat' parameter is not visible here — presumably it guards
   the flat fast path; confirm against the full source. */
1488 static bool attr_w gen_option_ord(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, bool flat)
1490 unsigned op_size = log_2(sizeof(ajla_option_t));
1491 unsigned op_size_flat = log_2(sizeof(ajla_flat_option_t));
1492 uint32_t escape_label, ptr_label, store_label;
1493 unsigned reg, target;
1495 escape_label = alloc_escape_label(ctx);
1496 if (unlikely(!escape_label))
1499 ptr_label = alloc_label(ctx);
1500 if (unlikely(!ptr_label))
1503 store_label = alloc_label(ctx);
1504 if (unlikely(!store_label))
1507 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
/* Flat path: read the flat ordinal straight from slot_1. */
1510 g(gen_test_1_cached(ctx, slot_1, ptr_label));
1512 g(gen_frame_load(ctx, op_size_flat, zero_x, slot_1, 0, false, target));
/* If the flag is known clear, the pointer path is unreachable. */
1514 if (flag_is_clear(ctx, slot_1))
1515 goto skip_ptr_label;
1517 gen_insn(INSN_JMP, 0, 0, 0);
1518 gen_four(store_label);
/* Boxed path: dereference the option and load its ordinal field. */
1521 gen_label(ptr_label);
1522 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1524 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1525 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1527 gen_address_offset();
1530 gen_label(store_label);
1531 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
1532 flag_set(ctx, slot_r, false);
/* Build an array in slot_r from the entries in ctx->args.  All entries
   must have the same type.  Flat element type: allocate a flat array
   via upcall and memcpy the elements in; otherwise allocate a pointer
   array and store each entry's pointer.
   NOTE(review): listing has elided lines — error returns, braces and
   the load of R_SAVED_2 (pointer-array data base, presumably filled
   from u_.array_pointers.pointer; confirm) are not visible here. */
1537 static bool attr_w gen_array_create(struct codegen_context *ctx, frame_t slot_r)
1540 const struct type *type;
1541 uint32_t escape_label;
1543 escape_label = alloc_escape_label(ctx);
1544 if (unlikely(!escape_label))
1547 ajla_assert_lo(ctx->args_l != 0, (file_line, "gen_array_create: zero entries"));
/* Too many entries for the default integer type: punt to interpreter. */
1549 if (unlikely(ctx->args_l >= sign_bit(uint_default_t))) {
1550 gen_insn(INSN_JMP, 0, 0, 0);
1551 gen_four(escape_label);
/* All entries must share one element type. */
1555 type = get_type_of_local(ctx, ctx->args[0].slot);
1556 for (i = 1; i < ctx->args_l; i++) {
1557 const struct type *t = get_type_of_local(ctx, ctx->args[i].slot);
1558 if (unlikely(t != type))
1559 internal(file_line, "gen_array_create: types do not match: %u != %u", type->tag, t->tag);
/* Flat elements: alloc flat array, then memcpy every element. */
1562 if (TYPE_IS_FLAT(type)) {
1564 for (i = 0; i < ctx->args_l; i++) {
1565 g(gen_test_1_cached(ctx, ctx->args[i].slot, escape_label));
1566 flag_set(ctx, ctx->args[i].slot, false);
1569 g(gen_upcall_start(ctx, 3));
1571 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1572 g(gen_upcall_argument(ctx, 0));
1574 g(gen_load_constant(ctx, R_ARG1, ctx->args[0].slot));
1575 g(gen_upcall_argument(ctx, 1));
1577 g(gen_load_constant(ctx, R_ARG2, ctx->args_l));
1578 g(gen_upcall_argument(ctx, 2));
1580 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1581 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1582 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1584 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1586 offset = data_array_offset;
1587 for (i = 0; i < ctx->args_l; i++) {
1588 g(gen_memcpy_from_slot(ctx, R_SAVED_1, offset, ctx->args[i].slot));
1589 offset += type->size;
/* Pointer elements: alloc pointer array (size == length here). */
1593 g(gen_upcall_start(ctx, 2));
1595 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
1596 g(gen_upcall_argument(ctx, 0));
1598 g(gen_load_constant(ctx, R_ARG1, ctx->args_l));
1599 g(gen_upcall_argument(ctx, 1));
1601 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1602 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1603 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1605 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1607 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
1608 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
1610 gen_address_offset();
/* Store each entry's pointer, consuming it if FREE_ARGUMENT. */
1613 for (i = 0; i < ctx->args_l; i++) {
1614 g(gen_frame_get_pointer(ctx, ctx->args[i].slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1615 g(gen_address(ctx, R_SAVED_2, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1616 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1617 gen_address_offset();
1618 gen_one(R_SCRATCH_1);
1619 offset += sizeof(pointer_t);
1622 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1623 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Create an empty flat array (0 used elements, ARRAY_PREALLOC_SIZE
   preallocated) of element type 'local_type' in slot_r.
   NOTE(review): listing has elided lines. */
1627 static bool attr_w gen_array_create_empty_flat(struct codegen_context *ctx, frame_t slot_r, frame_t local_type)
1629 uint32_t escape_label;
1631 escape_label = alloc_escape_label(ctx);
1632 if (unlikely(!escape_label))
/* Upcall: data_alloc_array_flat_types_ptr_mayfail(frame, local_type,
   ARRAY_PREALLOC_SIZE /+ capacity +/, 0 /+ used length +/). */
1635 g(gen_upcall_start(ctx, 4));
1637 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1638 g(gen_upcall_argument(ctx, 0));
1640 g(gen_load_constant(ctx, R_ARG1, local_type));
1641 g(gen_upcall_argument(ctx, 1));
1643 g(gen_load_constant(ctx, R_ARG2, ARRAY_PREALLOC_SIZE));
1644 g(gen_upcall_argument(ctx, 2));
1646 g(gen_load_constant(ctx, R_ARG3, 0));
1647 g(gen_upcall_argument(ctx, 3));
1649 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 4));
1650 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL result means allocation failure -> escape. */
1651 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1653 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1654 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Create an empty pointer array (0 used elements, ARRAY_PREALLOC_SIZE
   preallocated) in slot_r.  NOTE(review): listing has elided lines. */
1659 static bool attr_w gen_array_create_empty(struct codegen_context *ctx, frame_t slot_r)
1661 uint32_t escape_label;
1663 escape_label = alloc_escape_label(ctx);
1664 if (unlikely(!escape_label))
/* Upcall: data_alloc_array_pointers_mayfail(capacity, used = 0). */
1667 g(gen_upcall_start(ctx, 2));
1669 g(gen_load_constant(ctx, R_ARG0, ARRAY_PREALLOC_SIZE));
1670 g(gen_upcall_argument(ctx, 0));
1672 g(gen_load_constant(ctx, R_ARG1, 0));
1673 g(gen_upcall_argument(ctx, 1));
1675 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1676 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL result means allocation failure -> escape. */
1677 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1679 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1680 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Create an array in slot_r consisting of copies of slot_1, with the
   element count taken from slot_2.  Four strategies:
   - flat destination array: unrolled memcpy into the frame slot,
   - sparse fill: box the element if needed, then array_create_sparse,
   - flat content: array_create_flat upcall,
   - pointer content: array_create_pointers upcall.
   NOTE(review): listing has elided lines — error returns, braces and
   some #else branches are not visible here. */
1685 static bool attr_w gen_array_fill(struct codegen_context *ctx, frame_t slot_1, frame_t flags, frame_t slot_2, frame_t slot_r)
1687 const struct type *content_type, *array_type;
1688 uint32_t escape_label;
1689 unsigned reg1, reg4;
1691 escape_label = alloc_escape_label(ctx);
1692 if (unlikely(!escape_label))
/* The length in slot_2 must be a flat value; escape otherwise. */
1695 g(gen_test_1_cached(ctx, slot_2, escape_label));
1697 content_type = get_type_of_local(ctx, slot_1);
1698 array_type = get_type_of_local(ctx, slot_r);
/* Destination is a flat fixed-size array: copy the element
   n_elements times directly into the destination frame slot. */
1700 if (TYPE_IS_FLAT(array_type)) {
1701 int64_t dest_offset;
1703 const struct flat_array_definition *def = type_def(array_type,flat_array);
1705 ajla_assert_lo(TYPE_IS_FLAT(content_type), (file_line, "gen_array_fill: array is flat but content is not"));
1707 g(gen_test_1_cached(ctx, slot_1, escape_label));
1709 dest_offset = (size_t)slot_r * slot_size;
1710 for (i = 0; i < def->n_elements; i++) {
1711 g(gen_memcpy_from_slot(ctx, R_FRAME, dest_offset, slot_1));
1712 dest_offset += def->base->size;
1714 flag_set(ctx, slot_1, false);
1715 flag_set(ctx, slot_r, false);
1720 if (slot_is_register(ctx, slot_1))
1721 g(spill(ctx, slot_1));
/* Sparse array: obtain a boxed element pointer in R_SCRATCH_4 first. */
1723 if (unlikely((flags & OPCODE_ARRAY_FILL_FLAG_SPARSE) != 0)) {
1724 uint32_t get_ptr_label, got_ptr_label;
1726 get_ptr_label = alloc_label(ctx);
1727 if (unlikely(!get_ptr_label))
1730 got_ptr_label = alloc_label(ctx);
1731 if (unlikely(!got_ptr_label))
/* Flat element: box it with the flat_to_data upcall. */
1734 if (TYPE_IS_FLAT(content_type)) {
1735 g(gen_test_1_cached(ctx, slot_1, get_ptr_label));
1737 g(gen_upcall_start(ctx, 3));
1739 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1740 g(gen_upcall_argument(ctx, 0));
1742 g(gen_load_constant(ctx, R_ARG1, slot_1));
1743 g(gen_upcall_argument(ctx, 1));
1745 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1746 g(gen_upcall_argument(ctx, 2));
1748 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1750 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_SCRATCH_4, R_RET0));
1752 gen_insn(INSN_JMP, 0, 0, 0);
1753 gen_four(got_ptr_label);
/* Pointer element: take it from the slot directly. */
1756 gen_label(get_ptr_label);
1758 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_4));
1760 gen_label(got_ptr_label);
/* Negative length is invalid -> escape. */
1762 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_1, ®1));
1763 g(gen_jmp_if_negative(ctx, reg1, escape_label));
1765 g(gen_upcall_start(ctx, 2));
1766 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg1));
1767 g(gen_upcall_argument(ctx, 0));
1769 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SCRATCH_4));
1770 g(gen_upcall_argument(ctx, 1));
1772 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
/* Dense flat content: array_create_flat(frame, length, slot). */
1773 } else if (TYPE_IS_FLAT(content_type)) {
1774 g(gen_test_1_cached(ctx, slot_1, escape_label));
1775 flag_set(ctx, slot_1, false);
1777 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_4, ®4));
1778 g(gen_jmp_if_negative(ctx, reg4, escape_label));
1780 g(gen_upcall_start(ctx, 3));
1781 g(gen_mov(ctx, i_size(OP_SIZE_INT), R_ARG1, reg4));
1782 g(gen_upcall_argument(ctx, 1));
1784 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1785 g(gen_upcall_argument(ctx, 0));
1787 g(gen_load_constant(ctx, R_ARG2, slot_1));
1788 g(gen_upcall_argument(ctx, 2));
1790 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_flat), 3));
/* Dense pointer content: array_create_pointers(frame, ip, slot_2, ptr);
   the instruction offset lets the upcall re-read the length. */
1792 if (slot_is_register(ctx, slot_2))
1793 g(spill(ctx, slot_2));
1795 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1797 g(gen_upcall_start(ctx, 4));
1799 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG3, R_SCRATCH_1));
1800 g(gen_upcall_argument(ctx, 3));
1802 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1803 g(gen_upcall_argument(ctx, 0));
1805 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
1806 g(gen_upcall_argument(ctx, 1));
1808 g(gen_load_constant(ctx, R_ARG2, slot_2));
1809 g(gen_upcall_argument(ctx, 2));
1811 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
/* The upcall returns the finished array (may be an exception pointer,
   hence no sanitize/zero check here — TODO confirm). */
1813 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* Create a flat array of 'len' elements of type 'tag' in slot_r and
   fill it by copying 'string', which lives inside the function's own
   code area (hence the copy is addressed relative to the code base).
   NOTE(review): listing has elided lines. */
1818 static bool attr_w gen_array_string(struct codegen_context *ctx, type_tag_t tag, uint8_t *string, frame_t len, frame_t slot_r)
1820 uint32_t escape_label;
1822 const struct type *type;
1824 escape_label = alloc_escape_label(ctx);
1825 if (unlikely(!escape_label))
/* Upcall: data_alloc_array_flat_tag_mayfail(tag, len). */
1828 g(gen_upcall_start(ctx, 2));
1830 g(gen_load_constant(ctx, R_ARG0, tag));
1831 g(gen_upcall_argument(ctx, 0));
1833 g(gen_load_constant(ctx, R_ARG1, len));
1834 g(gen_upcall_argument(ctx, 1));
1836 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1837 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1838 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the array in R_SAVED_1; publish the compressed pointer now. */
1840 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1842 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1843 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Copy the literal from the function code area into the array data. */
1845 g(load_function_offset(ctx, R_SCRATCH_3, offsetof(struct data, u_.function.code)));
1847 offset = string - cast_ptr(uint8_t *, da(ctx->fn,function)->code);
1848 type = type_get_from_tag(tag);
/* Alignment is capped by code_t because the source is in code memory. */
1849 g(gen_memcpy_raw(ctx, R_SAVED_1, data_array_offset, R_SCRATCH_3, offset, (size_t)len * type->size, minimum(type->align, align_of(code_t))));
/* Emit code computing
       reg_dst = reg_src + reg_index * element_size + offset_src.
   Power-of-two sizes use x86 LEA / shifted-add / shift; other sizes use
   a multiply when available, otherwise a shift-and-add decomposition of
   element_size accumulated via R_CONST_IMM.
   NOTE(review): listing has elided lines — loop headers, operand
   emissions and #else/#endif branches are not fully visible here. */
1854 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src)
1856 if (is_power_of_2(element_size)) {
1857 unsigned shift = log_2(element_size);
1858 #if defined(ARCH_X86)
/* x86: a single LEA with scale 1/2/4/8 and a 32-bit displacement. */
1859 if (shift <= 3 && imm_is_32bit(offset_src)) {
1860 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
1865 gen_eight(offset_src);
/* Targets with add-with-shifted-operand (e.g. shifted register ALU). */
1869 if (ARCH_HAS_SHIFTED_ADD(shift)) {
1870 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
1873 gen_one(ARG_SHIFTED_REGISTER);
1874 gen_one(ARG_SHIFT_LSL | shift);
/* Fallback: shift the index into reg_dst, then add below. */
1881 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_dst, reg_index, shift, 0));
1882 reg_index = reg_dst;
/* Non-power-of-two with multiply support: reg_dst = index * size. */
1886 g(gen_imm(ctx, element_size, IMM_PURPOSE_MUL, i_size(OP_SIZE_ADDRESS)));
1887 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_MUL, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_MUL, false, is_imm(), ctx->const_imm));
1891 reg_index = reg_dst;
/* No multiply: decompose element_size into a sum of shifted copies of
   the original index (kept in R_CONST_IMM). */
1893 size_t e_size = element_size;
1895 bool first_match = true;
1897 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_CONST_IMM, reg_index));
1898 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), reg_dst, reg_index));
1899 reg_index = reg_dst;
1902 g(gen_load_constant(ctx, reg_index, 0));
/* First set bit: shift the accumulator in place. */
1908 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_index, reg_index, sh, false));
1909 first_match = false;
1910 } else if (ARCH_HAS_SHIFTED_ADD(sh)) {
1911 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
1914 gen_one(ARG_SHIFTED_REGISTER);
1915 gen_one(ARG_SHIFT_LSL | sh);
1916 gen_one(R_CONST_IMM);
/* Otherwise shift the saved index and add it explicitly. */
1919 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_CONST_IMM, R_CONST_IMM, sh, false));
1922 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_index, reg_index, R_CONST_IMM, 0));
1930 #if defined(ARCH_S390)
/* s390: fold base + index + displacement into one LEA when the
   displacement fits the addressing mode. */
1931 if (offset_src && s390_inline_address(offset_src)) {
1932 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), 0, 0);
1937 gen_eight(offset_src);
/* Generic: add the base, then add the displacement if nonzero. */
1941 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_dst, reg_index, reg_src, 0));
1946 g(gen_imm(ctx, offset_src, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
1947 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, is_imm(), ctx->const_imm));
/*
 * gen_scaled_array_load - emit code that loads one flat-array element,
 * addressed as reg_src + reg_idx * t->size + offset_src, into frame slot
 * slot_r.  Returns false on code-generation failure (via the g() macro).
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
1955 static bool attr_w gen_scaled_array_load(struct codegen_context *ctx, unsigned reg_src, unsigned reg_idx, int64_t offset_src, frame_t slot_r)
1957 const struct type *t = get_type_of_local(ctx, slot_r);
1958 #if defined(ARCH_X86)
/* x86 fast path: for power-of-2 element sizes up to 8 bytes a single MOV
 * with scaled-index addressing reads the element directly. */
1959 if (is_power_of_2(t->size)) {
1960 unsigned shift = log_2(t->size);
1961 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
/* If the slot lives in a register, load straight into it; otherwise go
 * through R_SCRATCH_2 and store to the frame below. */
1962 short reg = ctx->registers[slot_r];
1963 gen_insn(INSN_MOV, shift, 0, 0);
1964 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1965 gen_one(ARG_ADDRESS_2 + shift);
1968 gen_eight(offset_src);
/* Spill the loaded value from R_SCRATCH_2 into the frame slot. */
1971 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, shift));
1972 gen_insn(INSN_MOV, shift, 0, 0);
1973 gen_address_offset();
1974 gen_one(R_SCRATCH_2);
1981 #if defined(ARCH_S390)
/* s390 fast path: 1-byte elements via sign-extending load, when the
 * displacement is encodable and the needed CPU facilities exist. */
1982 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
1983 short reg = ctx->registers[slot_r];
1984 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
1985 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1986 gen_one(ARG_ADDRESS_2);
1989 gen_eight(offset_src);
1992 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_1));
1993 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
1994 gen_address_offset();
1995 gen_one(R_SCRATCH_2);
/* Generic path: compute the element address into R_SCRATCH_2, then copy
 * t->size bytes into the slot. */
2001 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, reg_idx, 0));
2003 g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, offset_src));
/*
 * gen_scaled_array_store - emit code that stores the value of frame slot
 * slot_1 into a flat-array element addressed relative to reg_src with
 * displacement offset_src.  Mirror image of gen_scaled_array_load.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2008 static bool attr_w gen_scaled_array_store(struct codegen_context *ctx, unsigned reg_src, int64_t offset_src, frame_t slot_1)
2010 const struct type *t = get_type_of_local(ctx, slot_1);
2011 #if defined(ARCH_X86)
/* x86 fast path: power-of-2 element sizes up to 8 bytes can be stored with
 * one MOV using scaled-index addressing. */
2012 if (is_power_of_2(t->size)) {
2013 unsigned shift = log_2(t->size);
2014 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
2015 short reg = ctx->registers[slot_1];
/* Load the slot value from the frame into R_SCRATCH_3 first. */
2017 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_OFFSET, shift));
2018 gen_insn(INSN_MOV, shift, 0, 0);
2019 gen_one(R_SCRATCH_3);
2020 gen_address_offset();
/* Store into the array element at [base + index*size + offset_src]. */
2024 gen_insn(INSN_MOV, shift, 0, 0);
2025 gen_one(ARG_ADDRESS_2 + shift);
2027 gen_one(R_SCRATCH_2);
2028 gen_eight(offset_src);
2035 #if defined(ARCH_S390)
/* s390 fast path for 1-byte elements, guarded by the same facility checks
 * as the load side. */
2036 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
2037 short reg = ctx->registers[slot_1];
2039 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_1));
2040 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
2041 gen_one(R_SCRATCH_3);
2042 gen_address_offset();
2046 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
2047 gen_one(ARG_ADDRESS_2);
2049 gen_one(R_SCRATCH_2);
2050 gen_eight(offset_src);
/* Generic path: compute element address in R_SCRATCH_2, then copy the
 * slot's bytes out to memory. */
2056 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, R_SCRATCH_2, 0));
2058 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, offset_src, slot_1));
/*
 * gen_check_array_len - emit a bounds/capacity check: compare reg_len
 * against the array's entry count and jump to escape_label when `cond`
 * holds.  `allocated` selects n_allocated_entries (capacity) instead of
 * n_used_entries (length) as the field to compare against.
 */
2063 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label)
2065 size_t offset = !allocated ? offsetof(struct data, u_.array_flat.n_used_entries) : offsetof(struct data, u_.array_flat.n_allocated_entries);
2066 #if defined(ARCH_X86)
/* x86 can compare a register against memory directly. */
2067 g(gen_address(ctx, reg_array, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2068 gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1);
2070 gen_address_offset();
2072 gen_insn(INSN_JMP_COND, OP_SIZE_INT, cond, 0);
2073 gen_four(escape_label);
/* Generic path: load the count into R_SCRATCH_3 (sign-extended where the
 * architecture prefers it), then compare-and-branch. */
2075 g(gen_address(ctx, reg_array, offset, ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2076 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2077 gen_one(R_SCRATCH_3);
2078 gen_address_offset();
2080 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size(OP_SIZE_INT), reg_len, R_SCRATCH_3, cond, escape_label));
/*
 * gen_array_load - emit code for the array-indexing opcode: read element
 * slot_idx of the array in slot_1 into slot_r.  Handles three layouts:
 * compile-time flat arrays (TYPE_TAG_flat_array), heap flat/slice arrays
 * holding flat elements, and pointer arrays.  Anything unexpected (thunks,
 * out-of-range index, refcounted corner cases) escapes to the interpreter
 * through escape_label.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2085 static bool attr_w gen_array_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_idx, frame_t slot_r, frame_t flags)
2087 const struct type *t = get_type_of_local(ctx, slot_1);
2088 const struct type *tr = get_type_of_local(ctx, slot_r);
2089 uint32_t escape_label;
2090 unsigned reg1, reg2;
2092 escape_label = alloc_escape_label(ctx);
2093 if (unlikely(!escape_label))
/* Case 1: the array is a statically-typed flat array stored directly in
 * the frame - index straight off R_FRAME. */
2096 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2097 const struct flat_array_definition *def = type_def(t,flat_array);
2099 g(gen_test_2_cached(ctx, slot_1, slot_idx, escape_label));
2101 flag_set(ctx, slot_1, false);
2102 flag_set(ctx, slot_idx, false);
/* NOTE(review): '®2' below looks like mojibake for '&reg2' — verify
 * against the upstream source. */
2104 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, ®2));
/* Bounds check unless the compiler proved the index in range. */
2106 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2107 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg2, def->n_elements, COND_AE, escape_label));
2109 g(gen_scaled_array_load(ctx, R_FRAME, reg2, (size_t)slot_1 * slot_size, slot_r));
/* Case 2: heap array - decompress the pointer and fetch the index. */
2113 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®1, escape_label));
2115 g(gen_test_1_cached(ctx, slot_idx, escape_label));
2116 flag_set(ctx, slot_idx, false);
2117 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, ®2));
2119 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2120 g(gen_check_array_len(ctx, reg1, false, reg2, COND_AE, escape_label));
/* Case 2a: the result type is flat.  The array may be DATA_TAG_array_flat
 * or DATA_TAG_array_slice; for a slice, the flat-data base pointer must be
 * adjusted.  Each architecture picks its cheapest conditional-move /
 * branch sequence for that adjustment. */
2122 if (TYPE_IS_FLAT(tr)) {
2124 g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_slice, COND_A, escape_label, R_SCRATCH_4));
2125 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, reg1));
2126 #if defined(ARCH_X86) || defined(ARCH_S390)
2127 #if defined(ARCH_X86)
/* Without CMOV (or the s390 equivalent facility) fall back further. */
2128 if (unlikely(!cpu_test_feature(CPU_FEATURE_cmov)))
2130 if (unlikely(!cpu_test_feature(CPU_FEATURE_misc_45)))
2133 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2134 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2135 gen_one(R_SCRATCH_3);
2136 gen_address_offset();
/* Conditionally replace the base with the slice's adjusted flat-data
 * pointer when the tag compare set the E condition. */
2139 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2140 gen_insn(INSN_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2141 gen_one(R_SCRATCH_1);
2142 gen_one(R_SCRATCH_1);
2143 gen_address_offset();
2144 #elif defined(ARCH_PARISC)
2145 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2146 gen_insn(INSN_CMP_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2147 gen_one(R_SCRATCH_1);
2148 gen_one(R_SCRATCH_1);
2149 gen_address_offset();
2150 gen_one(R_SCRATCH_4);
2152 gen_eight(DATA_TAG_array_slice);
2153 #elif defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
/* No CMOV: test the tag and branch around the slice-pointer load. */
2154 g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_SCRATCH_4, R_SCRATCH_4, DATA_TAG_array_slice, 0));
2156 label = alloc_label(ctx);
2157 if (unlikely(!label))
2160 gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
2161 gen_one(R_SCRATCH_4);
2164 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2165 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2166 gen_one(R_SCRATCH_1);
2167 gen_address_offset();
2171 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2172 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2173 gen_one(R_SCRATCH_3);
2174 gen_address_offset();
2176 #if defined(ARCH_POWER)
2177 if (!cpu_test_feature(CPU_FEATURE_v203))
2180 #if defined(ARCH_SPARC)
2184 gen_insn(INSN_CMOV, i_size(OP_SIZE_ADDRESS), COND_E, 0);
2185 gen_one(R_SCRATCH_1);
2186 gen_one(R_SCRATCH_1);
2187 gen_one(R_SCRATCH_3);
/* Compare-into-register + MOVR variant (e.g. SPARC). */
2189 g(gen_imm(ctx, DATA_TAG_array_slice, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
2190 gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, COND_E, 0);
2191 gen_one(R_CMP_RESULT);
2192 gen_one(R_SCRATCH_4);
2195 gen_insn(INSN_MOVR, OP_SIZE_NATIVE, COND_NE, 0);
2196 gen_one(R_SCRATCH_1);
2197 gen_one(R_SCRATCH_1);
2198 gen_one(R_CMP_RESULT);
2199 gen_one(R_SCRATCH_3);
/* Last-resort variant: explicit conditional jump over a plain MOV. */
2205 label = alloc_label(ctx);
2206 if (unlikely(!label))
2208 gen_insn(INSN_JMP_COND, OP_SIZE_4, COND_NE, 0);
2211 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, R_SCRATCH_3));
/* Base pointer resolved - load the flat element into the slot. */
2215 g(gen_scaled_array_load(ctx, R_SCRATCH_1, reg2, data_array_offset, slot_r));
2216 flag_set(ctx, slot_r, false);
/* Case 2b: result is a pointer.  If the target slot must stay flat we
 * cannot satisfy it here - escape unconditionally. */
2219 if (flag_must_be_flat(ctx, slot_r)) {
2220 gen_insn(INSN_JMP, 0, 0, 0);
2221 gen_four(escape_label);
2225 g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
/* Load the pointer-array's element base. */
2227 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2228 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2229 gen_one(R_SCRATCH_1);
2230 gen_address_offset();
/* Arch-specific scaled loads of pointer[idx]; each variant ends by jumping
 * to scaled_load_done. */
2232 #if defined(ARCH_X86) || defined(ARCH_ARM)
2233 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2234 gen_one(R_SCRATCH_1);
2235 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
2236 gen_one(R_SCRATCH_1);
2240 goto scaled_load_done;
2242 #if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390) || defined(ARCH_SPARC)
2243 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2245 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2246 gen_one(R_SCRATCH_1);
2247 gen_one(ARG_ADDRESS_2);
2248 gen_one(R_SCRATCH_1);
2249 gen_one(R_SCRATCH_2);
2252 goto scaled_load_done;
2254 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
2255 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
2256 gen_one(R_SCRATCH_2);
2257 gen_one(ARG_SHIFTED_REGISTER);
2258 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
2260 gen_one(R_SCRATCH_1);
2262 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2263 gen_one(R_SCRATCH_1);
2264 gen_one(ARG_ADDRESS_1);
2265 gen_one(R_SCRATCH_2);
2268 goto scaled_load_done;
/* Fallback: explicit shift + add to form the address, then load. */
2271 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2273 g(gen_3address_alu(ctx, OP_SIZE_ADDRESS, ALU_ADD, R_SCRATCH_2, R_SCRATCH_2, R_SCRATCH_1, 0));
2275 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2276 gen_one(R_SCRATCH_1);
2277 gen_one(ARG_ADDRESS_1);
2278 gen_one(R_SCRATCH_2);
/* Store the fetched pointer into slot_r (reference handling inside). */
2281 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/*
 * gen_array_len - emit code for the array-length opcode.  When slot_2 is
 * NO_FRAME_T the length is stored into slot_r; otherwise the length is
 * compared against slot_2 (producing a condition in slot_r, or - when
 * `fused` - a direct conditional jump by offs_false).
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2286 static bool attr_w gen_array_len(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, bool fused, int32_t offs_false)
2288 const struct type *t = get_type_of_local(ctx, slot_1);
2289 uint32_t escape_label;
2290 unsigned reg, target;
2292 escape_label = alloc_escape_label(ctx);
2293 if (unlikely(!escape_label))
2296 if (slot_2 != NO_FRAME_T) {
2297 g(gen_test_1_cached(ctx, slot_2, escape_label));
2298 flag_set(ctx, slot_2, false);
/* Statically-sized flat array: the length is a compile-time constant. */
2301 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2303 target = R_SCRATCH_1;
2304 g(gen_load_constant(ctx, target, type_def(t,flat_array)->n_elements));
2306 } else if (slot_2 == NO_FRAME_T) {
2307 g(gen_frame_store_imm(ctx, OP_SIZE_INT, slot_r, 0, (unsigned)type_def(t,flat_array)->n_elements));
2309 g(gen_frame_load_cmp_imm_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, type_def(t,flat_array)->n_elements, COND_G, slot_r));
2311 flag_set(ctx, slot_r, false);
/* Heap array: decompress the pointer and read the entry count. */
2313 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
/* The fast path reads n_used_entries at one fixed offset, so all three
 * array variants must place their count at the same offset... */
2315 if (offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_slice.n_entries) ||
2316 offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_pointers.n_used_entries)) {
/* ...and the tags must be consecutive so one unsigned compare filters out
 * the non-simple variants (same/btree/incomplete escape). */
2320 if (DATA_TAG_array_flat != DATA_TAG_array_slice - 1 ||
2321 DATA_TAG_array_slice != DATA_TAG_array_pointers - 1 ||
2322 DATA_TAG_array_same < DATA_TAG_array_flat ||
2323 DATA_TAG_array_btree < DATA_TAG_array_flat ||
2324 DATA_TAG_array_incomplete < DATA_TAG_array_flat) {
2329 g(gen_compare_ptr_tag(ctx, reg, DATA_TAG_array_pointers, COND_A, escape_label, R_SCRATCH_2));
2331 if (slot_2 == NO_FRAME_T) {
2332 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
2334 target = R_SCRATCH_1;
2337 #if defined(ARCH_X86) || defined(ARCH_S390)
/* Fused compare: compare slot_2's register directly against the in-memory
 * count and branch, without materializing the length. */
2338 if (fused && slot_is_register(ctx, slot_2)) {
2339 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2340 gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1 + COND_IS_LOGICAL(COND_GE));
2341 gen_one(ctx->registers[slot_2]);
2342 gen_address_offset();
2344 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_GE, -1U, -1U));
/* Generic: load the count into `target`. */
2348 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2349 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2351 gen_address_offset();
2355 enum extend ex = OP_SIZE_INT == i_size_cmp(OP_SIZE_INT) + (unsigned)zero ? garbage : sign_x;
/* NOTE(review): '®2' below looks like mojibake for '&reg2' — verify
 * against the upstream source. */
2357 if (ARCH_HAS_JMP_2REGS(COND_LE)) {
2358 g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, ®2));
2359 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, target, reg2));
2363 g(gen_frame_load_cmp(ctx, OP_SIZE_INT, COND_IS_LOGICAL(COND_LE), ex, false, slot_2, 0, false, target));
2364 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, -1U, -1U));
2366 g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, ®2));
2367 g(gen_cmp_dest_reg(ctx, i_size_cmp(OP_SIZE_INT), target, reg2, R_CMP_RESULT, 0, COND_G));
2368 g(gen_jump(ctx, offs_false, OP_SIZE_NATIVE, COND_E, R_CMP_RESULT, -1U));
/* Non-fused results: store length, or the comparison condition, into
 * slot_r. */
2370 } else if (slot_2 == NO_FRAME_T) {
2371 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
2373 g(gen_frame_load_cmp_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, R_SCRATCH_1, COND_G, slot_r));
2375 flag_set(ctx, slot_r, false);
/*
 * gen_array_sub - emit code for the array-subrange opcode: call the
 * cg_upcall_array_sub runtime upcall with (array, from, to, free_flag) and
 * store the resulting pointer into slot_r.  Escapes to the interpreter on
 * flat arrays, set flags, or a NULL upcall result.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2380 static bool attr_w gen_array_sub(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_to, frame_t slot_r, frame_t flags)
2382 const struct type *t = get_type_of_local(ctx, slot_array);
2383 uint32_t escape_label, upcall_label;
2385 escape_label = alloc_escape_label(ctx);
2386 if (unlikely(!escape_label))
2389 upcall_label = alloc_label(ctx);
2390 if (unlikely(!upcall_label))
2393 if (unlikely(TYPE_IS_FLAT(t))) {
2394 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2397 g(gen_test_2_cached(ctx, slot_from, slot_to, escape_label));
/* The upcall reads the slots from memory, so spill any that currently
 * live in registers. */
2399 if (slot_is_register(ctx, slot_array))
2400 g(spill(ctx, slot_array));
2401 if (slot_is_register(ctx, slot_from))
2402 g(spill(ctx, slot_from));
2403 if (slot_is_register(ctx, slot_to))
2404 g(spill(ctx, slot_to));
2406 g(gen_upcall_start(ctx, 4));
2408 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2409 g(gen_upcall_argument(ctx, 0));
2411 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2412 g(gen_upcall_argument(ctx, 1));
2414 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_to, 0, false, R_ARG2));
2415 g(gen_upcall_argument(ctx, 2));
/* 4th argument: whether the runtime may consume (free) the source array. */
2417 g(gen_load_constant(ctx, R_ARG3, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2418 g(gen_upcall_argument(ctx, 3));
/* If the source's flag is set (borrowed/flat), override the free flag to
 * 0 before making the call. */
2420 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2421 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2422 g(gen_load_constant(ctx, R_ARG3, 0));
2423 g(gen_upcall_argument(ctx, 3));
2426 gen_label(upcall_label);
2427 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
/* NULL result means the runtime could not do it here - escape. */
2429 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
/* If the argument was consumed and lives in a different slot than the
 * result, clear the source slot. */
2431 if (slot_array != slot_r) {
2432 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2433 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2434 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2435 flag_set(ctx, slot_array, false);
2439 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_skip - emit code for the array-skip opcode: call the
 * cg_upcall_array_skip runtime upcall with (array, from, free_flag) and
 * store the result into slot_r.  Same structure as gen_array_sub, with one
 * fewer argument.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2444 static bool attr_w gen_array_skip(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_r, frame_t flags)
2446 const struct type *t = get_type_of_local(ctx, slot_array);
2447 uint32_t escape_label, upcall_label;
2449 escape_label = alloc_escape_label(ctx);
2450 if (unlikely(!escape_label))
2453 upcall_label = alloc_label(ctx);
2454 if (unlikely(!upcall_label))
2457 if (unlikely(TYPE_IS_FLAT(t))) {
2458 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2461 g(gen_test_1_cached(ctx, slot_from, escape_label));
/* Spill register-resident slots so the upcall can read them from memory. */
2463 if (slot_is_register(ctx, slot_array))
2464 g(spill(ctx, slot_array));
2465 if (slot_is_register(ctx, slot_from))
2466 g(spill(ctx, slot_from));
2468 g(gen_upcall_start(ctx, 3));
2470 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2471 g(gen_upcall_argument(ctx, 0));
2473 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2474 g(gen_upcall_argument(ctx, 1));
/* 3rd argument: whether the runtime may consume (free) the source array;
 * forced to 0 below when the source's flag is set. */
2476 g(gen_load_constant(ctx, R_ARG2, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2477 g(gen_upcall_argument(ctx, 2));
2479 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2480 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2481 g(gen_load_constant(ctx, R_ARG2, 0));
2482 g(gen_upcall_argument(ctx, 2));
2485 gen_label(upcall_label);
2486 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
/* NULL result - escape to the interpreter. */
2488 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
2490 if (slot_array != slot_r) {
2491 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2492 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2493 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2494 flag_set(ctx, slot_array, false);
2498 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_append - emit code that concatenates the arrays in slot_1 and
 * slot_2 via the cg_upcall_array_join upcall and stores the result into
 * slot_r.  Escapes when either operand is flat, a thunk, or an incomplete
 * array (the interpreter handles those cases).
 */
2503 static bool attr_w gen_array_append(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2505 uint32_t escape_label;
2507 escape_label = alloc_escape_label(ctx);
2508 if (unlikely(!escape_label))
2511 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_1))))
2512 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2513 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_2))))
2514 g(gen_test_1_jz_cached(ctx, slot_2, escape_label));
/* Both operands must be evaluated pointers (not thunks)... */
2516 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, false, R_SCRATCH_1));
2517 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, slot_1, escape_label));
2518 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_2, 0, false, R_SCRATCH_2));
2519 g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, slot_2, escape_label));
/* ...and neither may be an incomplete array. */
2521 g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_1));
2522 g(gen_compare_da_tag(ctx, R_SCRATCH_2, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_2));
/* Take (possibly owning, per the FREE_ARGUMENT flags) references to both
 * operands, then call array_join(ptr1, ptr2). */
2524 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SAVED_1));
2525 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
2526 g(gen_upcall_start(ctx, 2));
2527 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG0, R_SCRATCH_1));
2528 g(gen_upcall_argument(ctx, 0));
2529 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SAVED_1));
2530 g(gen_upcall_argument(ctx, 1));
2531 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2532 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_append_one_flat - emit the in-place fast path for appending
 * one flat element (slot_2) to the flat array in slot_1, storing the
 * resulting array pointer in slot_r.  Requires OPCODE_FLAG_FREE_ARGUMENT
 * (the array must be consumable), a uniquely-referenced DATA_TAG_array_flat
 * object, and spare capacity; otherwise it escapes.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2536 static bool attr_w gen_array_append_one_flat(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2538 uint32_t escape_label;
2541 escape_label = alloc_escape_label(ctx);
2542 if (unlikely(!escape_label))
/* Without the free-argument flag the in-place update is not allowed. */
2545 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2546 gen_insn(INSN_JMP, 0, 0, 0);
2547 gen_four(escape_label);
2551 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2552 g(gen_test_1_cached(ctx, slot_2, escape_label));
2553 flag_set(ctx, slot_2, false);
/* NOTE(review): '®' below looks like mojibake for '&reg' — verify against
 * the upstream source. */
2555 g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, ®, escape_label));
/* Must be a flat array with refcount 1 (exclusively owned). */
2557 g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_flat, escape_label, R_SCRATCH_1));
/* Load n_used_entries and escape if it already equals the capacity. */
2559 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2560 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2561 gen_one(R_SCRATCH_2);
2562 gen_address_offset();
2564 g(gen_check_array_len(ctx, reg, true, R_SCRATCH_2, COND_E, escape_label));
/* Bump the count and write it back. */
2566 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SCRATCH_2, 1, 0));
2568 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2569 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2570 gen_address_offset();
2571 gen_one(R_SCRATCH_1);
/* Store the new element at the old count's position. */
2573 g(gen_scaled_array_store(ctx, reg, data_array_offset, slot_2));
/* If the result goes to a different slot, move the pointer there
 * (compressing it when POINTER_COMPRESSION is enabled) and clear the
 * consumed source slot. */
2575 if (slot_1 != slot_r) {
2576 #if !defined(POINTER_COMPRESSION)
2577 g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2579 unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2580 g(gen_compress_pointer(ctx, target, reg));
2581 g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2583 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2584 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2585 flag_set(ctx, slot_1, false);
/*
 * gen_array_append_one - emit the in-place fast path for appending one
 * pointer element (slot_2) to the pointer array in slot_1, result in
 * slot_r.  Analogous to gen_array_append_one_flat, but targets
 * DATA_TAG_array_pointers and writes the element through the array's
 * separate `pointer` element base.
 *
 * NOTE(review): this extract omits some original lines (the embedded line
 * numbers jump), so the comments describe only the code that is visible.
 */
2591 static bool attr_w gen_array_append_one(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2593 uint32_t escape_label;
2596 escape_label = alloc_escape_label(ctx);
2597 if (unlikely(!escape_label))
/* In-place append requires the free-argument flag. */
2600 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2601 gen_insn(INSN_JMP, 0, 0, 0);
2602 gen_four(escape_label);
2606 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
/* NOTE(review): '®' below looks like mojibake for '&reg' — verify against
 * the upstream source. */
2608 g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, ®, escape_label));
/* Must be a pointer array with refcount 1. */
2610 g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_pointers, escape_label, R_SCRATCH_1));
/* Load n_used_entries; escape if it equals the allocated capacity. */
2612 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2613 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2615 gen_address_offset();
2617 g(gen_check_array_len(ctx, reg, true, R_SAVED_2, COND_E, escape_label));
/* Take a (possibly owning) reference to the element being appended. */
2619 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SCRATCH_2));
/* Increment the count and write it back. */
2621 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SAVED_2, 1, 0));
2623 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2624 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2625 gen_address_offset();
2626 gen_one(R_SCRATCH_1);
/* Load the element base pointer and store the new element at index
 * old_count. */
2628 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2629 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2630 gen_one(R_SCRATCH_3);
2631 gen_address_offset();
2633 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_2, R_SCRATCH_3, R_SAVED_2, 0));
2635 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2636 gen_one(ARG_ADDRESS_1);
2639 gen_one(R_SCRATCH_2);
/* Move the array pointer to slot_r (compressed if configured) and clear
 * the consumed source slot when they differ. */
2641 if (slot_1 != slot_r) {
2642 #if !defined(POINTER_COMPRESSION)
2643 g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2645 unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2646 g(gen_compress_pointer(ctx, target, reg));
2647 g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2649 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2650 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2651 flag_set(ctx, slot_1, false);
2657 static bool attr_w gen_io(struct codegen_context *ctx, frame_t code, frame_t slot_1, frame_t slot_2, frame_t slot_3)
2659 uint32_t reload_label, escape_label;
2662 reload_label = alloc_reload_label(ctx);
2663 if (unlikely(!reload_label))
2667 mem_free(ctx->var_aux);
2668 ctx->var_aux = NULL;
2670 ctx->var_aux = mem_alloc_array_mayfail(mem_alloc_mayfail, frame_t *, 0, 0, slot_1 + slot_2, sizeof(frame_t), &ctx->err);
2671 if (unlikely(!ctx->var_aux))
2674 for (i = 0; i < slot_1 + slot_2; i++)
2675 ctx->var_aux[i] = get_uint32(ctx);
2676 for (i = 0; i < slot_3; i++)
2679 for (i = 0; i < slot_2; i++) {
2680 frame_t input_slot = ctx->var_aux[slot_1 + i];
2681 if (slot_is_register(ctx, input_slot))
2682 g(spill(ctx, input_slot));
2685 /*gen_insn(INSN_JMP, 0, 0, 0); gen_four(alloc_escape_label(ctx));*/
2687 g(gen_upcall_start(ctx, 3));
2688 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
2689 g(gen_upcall_argument(ctx, 0));
2691 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
2692 g(gen_upcall_argument(ctx, 1));
2694 g(gen_load_constant(ctx, R_ARG2, ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3));
2695 g(gen_upcall_argument(ctx, 2));
2696 /*debug("arg2: %08x", ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3);*/
2698 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2699 g(gen_sanitize_returned_pointer(ctx, R_RET0));
2700 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_ADDRESS, R_RET0, ptr_to_num(POINTER_FOLLOW_THUNK_GO), COND_NE, reload_label));
2702 for (i = 0; i < slot_1; i++) {
2703 frame_t output_slot = ctx->var_aux[i];
2704 flag_set_unknown(ctx, output_slot);
2707 escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2708 if (unlikely(!escape_label))
2711 g(gen_test_variables(ctx, ctx->var_aux, slot_1, escape_label));