2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
/*
 * gen_flat_move_copy - emit machine code that copies the flat (by-value)
 * contents of frame slot_1 into slot_r.
 *
 * If slot_1's "holds a pointer" flag is set, the generated code escapes to
 * the interpreter (escape_label) instead — presumably because a pointer
 * slot cannot be copied flat; confirm gen_test_1_cached's branch polarity.
 * On the fast path both slots are recorded as flat (flag clear) in the
 * compile-time flag cache.
 *
 * NOTE(review): this extract is sampled — some original lines (braces,
 * early returns after alloc_escape_label failure) are not visible here.
 */
19 static bool attr_w gen_flat_move_copy(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r)
21 uint32_t escape_label;
23 escape_label = alloc_escape_label(ctx);
24 if (unlikely(!escape_label))
/* escape at run time if slot_1's flag indicates non-flat contents */
27 g(gen_test_1_cached(ctx, slot_1, escape_label));
/* byte-wise copy of one slot's flat payload into the other */
29 g(gen_memcpy_slots(ctx, slot_r, slot_1));
/* both slots now known flat — update the compile-time flag cache */
31 flag_set(ctx, slot_1, false);
32 flag_set(ctx, slot_r, false);
/*
 * gen_ref_move_copy - emit code that moves or copies a reference (pointer)
 * from slot_1 to slot_r, keeping reference counts consistent:
 *  - OPCODE_REF_COPY: both slots keep the pointer, so its reference count
 *    is incremented via the pointer_reference_owned upcall.
 *  - OPCODE_REF_MOVE when slot_1 can never hold a borrowed pointer:
 *    ownership transfers wholesale; only the source flag is cleared.
 *  - otherwise: a run-time flag test distinguishes an owned pointer (flag
 *    set -> clear it, no refcount change) from a borrowed one (refcount
 *    must be bumped so the destination owns a reference).
 *
 * NOTE(review): sampled extract — the else-branch opening, label_id
 * declaration, early returns and some jumps are not visible here.
 */
37 static bool attr_w gen_ref_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* load the pointer from the source slot and store it into the destination */
39 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot_1, 0, R_SCRATCH_1));
40 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
/* destination now owns a pointer: set its flag in the frame and the cache */
41 g(gen_set_1(ctx, R_FRAME, slot_r, 0, true));
42 flag_set(ctx, slot_r, true);
43 if (code == OPCODE_REF_COPY) {
44 g(gen_upcall_start(ctx, 1));
45 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
46 g(gen_upcall_argument(ctx, 0));
/* copy keeps two live references -> increment the reference count */
47 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
48 } else if (code == OPCODE_REF_MOVE && !da(ctx->fn,function)->local_variables_flags[slot_1].may_be_borrowed) {
/* move of a never-borrowed pointer: just clear the source's flag */
49 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
50 flag_set(ctx, slot_1, false);
53 if (unlikely(!(label_id = alloc_label(ctx))))
55 if (flag_is_set(ctx, slot_1)) {
/* compile-time known owned: clear the frame flag, skip the refcount bump */
56 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
59 if (flag_is_clear(ctx, slot_1))
/* run-time test-and-clear of the source flag; branch polarity decides
 * whether the refcount upcall below runs — TODO confirm gen_test_1 semantics */
61 g(gen_test_1(ctx, R_FRAME, slot_1, 0, label_id, false, TEST_CLEAR));
63 g(gen_upcall_start(ctx, 1));
64 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
65 g(gen_upcall_argument(ctx, 0));
66 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* a MOVE_CLEAR additionally wipes the source slot itself */
69 if (code == OPCODE_REF_MOVE_CLEAR)
70 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
71 flag_set(ctx, slot_1, false);
/*
 * gen_box_move_copy - emit code that boxes the value in slot_1 into a
 * heap pointer (via the ipret_copy_variable_to_pointer upcall) and stores
 * the resulting pointer into slot_r.  OPCODE_BOX_MOVE_CLEAR additionally
 * clears the source slot after boxing.
 *
 * NOTE(review): sampled extract — some braces/returns are not visible.
 */
76 static bool attr_w gen_box_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
78 if (flag_must_be_flat(ctx, slot_r)) {
/* destination can never hold a pointer -> unconditionally escape */
79 uint32_t escape_label = alloc_escape_label(ctx);
80 if (unlikely(!escape_label))
82 gen_insn(INSN_JMP, 0, 0, 0);
83 gen_four(escape_label);
/* the upcall reads the slot from frame memory, so flush any cached register */
87 if (ctx->registers[slot_1] >= 0)
88 g(spill(ctx, slot_1));
90 g(gen_upcall_start(ctx, 3));
92 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
93 g(gen_upcall_argument(ctx, 0));
95 g(gen_load_constant(ctx, R_ARG1, slot_1));
96 g(gen_upcall_argument(ctx, 1));
/* third argument: nonzero when the source should be consumed (MOVE_CLEAR) */
98 g(gen_load_constant(ctx, R_ARG2, code == OPCODE_BOX_MOVE_CLEAR));
99 g(gen_upcall_argument(ctx, 2));
101 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer), 3));
103 if (code == OPCODE_BOX_MOVE_CLEAR) {
104 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
105 flag_set(ctx, slot_1, false);
/* the upcall's result (the new pointer) is returned in R_RET0 */
108 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * gen_eval - emit code that ensures slot_1 is evaluated: if the slot holds
 * a pointer (flag set) and that pointer is a thunk, escape to the
 * interpreter to force it; flat values and evaluated pointers fall through
 * to skip_label.
 *
 * NOTE(review): sampled extract — failure returns after the label
 * allocations are not visible here.
 */
113 static bool attr_w gen_eval(struct codegen_context *ctx, frame_t slot_1)
115 uint32_t escape_label, skip_label;
117 escape_label = alloc_escape_label(ctx);
118 if (unlikely(!escape_label))
121 skip_label = alloc_label(ctx);
122 if (unlikely(!skip_label))
/* flag clear -> flat data, nothing to evaluate */
125 g(gen_test_1_jz_cached(ctx, slot_1, skip_label));
127 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot_1, 0, R_SCRATCH_1));
/* thunks cannot be handled in generated code -> escape */
128 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
130 gen_label(skip_label);
/*
 * gen_jump - emit a jump to the bytecode instruction jmp_offset bytes away
 * from ctx->current_position, allocating the per-instruction label lazily
 * in ctx->code_labels[].  reg1/reg2 == -1U mark absent operands:
 *  - both registers present: compare reg1 with reg2 and jump on `cond`
 *  - reg1 only:              jump based on a zero test of reg1
 *  - neither, COND_ALWAYS:   unconditional jump
 *  - neither, otherwise:     conditional jump on previously set flags
 *    (with a dedicated FP-test form on LoongArch/MIPS/PA-RISC)
 *
 * NOTE(review): sampled extract — some braces/#else/#endif lines are
 * not visible here.
 */
135 static bool attr_w gen_jump(struct codegen_context *ctx, int32_t jmp_offset, unsigned op_size, unsigned cond, unsigned reg1, unsigned reg2)
/* translate the byte offset into a code_t-granular instruction index */
137 ip_t ip = (ctx->current_position - da(ctx->fn,function)->code) + (jmp_offset / (int)sizeof(code_t));
138 if (likely(!ctx->code_labels[ip])) {
139 ctx->code_labels[ip] = alloc_label(ctx);
140 if (unlikely(!ctx->code_labels[ip]))
143 if (reg1 != -1U && reg2 != -1U) {
144 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size_cmp(op_size), reg1, reg2, cond, ctx->code_labels[ip]));
145 } else if (reg1 != -1U) {
146 g(gen_jmp_on_zero(ctx, op_size, reg1, cond, ctx->code_labels[ip]));
147 } else if (cond == COND_ALWAYS) {
148 gen_insn(INSN_JMP, 0, 0, 0);
149 gen_four(ctx->code_labels[ip]);
150 #if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_PARISC)
/* these targets need a separate instruction to branch on FP conditions */
151 } else if (cond & COND_FP) {
152 gen_insn(INSN_JMP_FP_TEST, 0, cond, 0);
153 gen_four(ctx->code_labels[ip]);
156 gen_insn(COND_IS_LOGICAL(cond) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, i_size_cmp(op_size), cond, 0);
157 gen_four(ctx->code_labels[ip]);
/*
 * gen_cond_jump - emit a jump taken when the ajla_flat_option_t stored in
 * `slot` compares equal (COND_E) — presumably "jump if false".  On
 * x86/s390 the comparison can address frame memory directly; other
 * architectures load the value into a register first via gen_frame_get.
 *
 * NOTE(review): sampled extract — the register-cached path under the
 * first `if`, the immediate operand of the memory compare, and several
 * #else/#endif lines are not visible here.
 */
162 static bool attr_w gen_cond_jump(struct codegen_context *ctx, frame_t slot, int32_t jmp_offset)
164 unsigned size = log_2(sizeof(ajla_flat_option_t));
165 size_t attr_unused offset;
167 if (ctx->registers[slot] >= 0) {
170 #if defined(ARCH_S390) || defined(ARCH_X86)
171 offset = (size_t)slot * slot_size;
172 #if defined(ARCH_S390)
/* s390's MVI/CLI form only handles byte-sized operands */
173 if (size != OP_SIZE_1)
/* compare the option value directly in frame memory */
176 g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_MVI_CLI_OFFSET, size));
177 gen_insn(INSN_CMP, size, 0, 2);
178 gen_address_offset();
182 g(gen_jump(ctx, jmp_offset, size, COND_E, -1U, -1U));
/* NOTE(review): "®1" below looks like mojibake for "&reg1" — confirm
 * against the upstream source before building */
187 g(gen_frame_get(ctx, size, i_size(size) == size ? garbage : native, slot, 0, R_SCRATCH_1, ®1));
188 g(gen_jump(ctx, jmp_offset, size, COND_E, reg1, -1U));
/*
 * gen_load_fn_or_curry - emit code that allocates a function_reference
 * data object and stores a pointer to it into slot_r.
 *
 * Two modes, distinguished by fn_idx:
 *  - fn_idx != NO_FRAME_T: direct reference to a function from this
 *    function's local directory (is_indirect = 0).
 *  - fn_idx == NO_FRAME_T ("curry"): the callee comes from slot_fn and is
 *    stored as an indirect reference (is_indirect = 1).
 * Afterwards each of the ctx->args_l call arguments is copied into the
 * reference: small built-in flat values are stored inline with their type
 * tag; other flat values are boxed through the flat_to_data upcall; and
 * non-flat values are stored as pointers with TYPE_TAG_unknown.
 *
 * NOTE(review): sampled extract — the if/else structure selecting the
 * curry branch, several returns, braces and #else/#endif lines are not
 * visible here; comments below describe only what the visible lines show.
 */
192 static bool attr_w gen_load_fn_or_curry(struct codegen_context *ctx, frame_t fn_idx, frame_t slot_fn, frame_t slot_r, unsigned flags)
194 bool curry = fn_idx == NO_FRAME_T;
195 uint32_t escape_label;
198 escape_label = alloc_escape_label(ctx);
199 if (unlikely(!escape_label))
/* allocate a function_reference sized for args_l arguments */
202 g(gen_upcall_start(ctx, 1));
204 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
205 g(gen_upcall_argument(ctx, 0));
207 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
208 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* allocation failure (NULL) -> escape to the interpreter */
209 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* keep the new object's address in a callee-saved register for the loop */
211 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* --- direct branch: reference a function from the local directory --- */
214 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
216 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.direct), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
217 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
218 gen_address_offset();
219 gen_one(R_SCRATCH_1);
/* is_indirect = 0 */
221 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
222 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
223 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
224 gen_address_offset();
/* --- curry branch: take the callee pointer from slot_fn --- */
227 g(gen_frame_get_pointer(ctx, slot_fn, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
229 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.indirect), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
230 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
231 gen_address_offset();
232 gen_one(R_SCRATCH_1);
/* is_indirect = 1 */
234 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
235 g(gen_imm(ctx, 1, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
236 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
237 gen_address_offset();
/* --- copy each argument into the function_reference --- */
241 for (i = 0; i < ctx->args_l; i++) {
242 uintptr_t arg_offset_tag = offsetof(struct data, u_.function_reference.arguments[i].tag);
243 uintptr_t arg_offset_ptr = offsetof(struct data, u_.function_reference.arguments[i].u.ptr);
244 uintptr_t arg_offset_slot = offsetof(struct data, u_.function_reference.arguments[i].u.slot);
245 frame_t arg_slot = ctx->args[i].slot;
246 const struct type *t = get_type_of_local(ctx, arg_slot);
247 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
248 skip_flat_label = alloc_label(ctx);
249 if (unlikely(!skip_flat_label))
251 set_ptr_label = alloc_label(ctx);
252 if (unlikely(!set_ptr_label))
254 next_arg_label = alloc_label(ctx);
255 if (unlikely(!next_arg_label))
257 if (TYPE_IS_FLAT(t)) {
/* a set flag means the slot holds a pointer after all -> pointer path */
258 g(gen_test_1_cached(ctx, arg_slot, skip_flat_label));
259 if (t->size <= slot_size && TYPE_TAG_IS_BUILTIN(t->tag)) {
/* small built-in flat value: store it inline together with its tag */
260 unsigned copy_size = OP_SIZE_SLOT;
261 if (is_power_of_2(t->size))
262 copy_size = log_2(t->size);
264 copy_size = maximum(copy_size, OP_SIZE_4);
265 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
266 g(gen_imm(ctx, t->tag, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
267 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
268 gen_address_offset();
271 if (ctx->registers[arg_slot] >= 0) {
/* value lives in a register: store it straight into the object */
272 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
273 gen_insn(INSN_MOV, spill_size(t), 0, 0);
274 gen_address_offset();
275 gen_one(ctx->registers[arg_slot]);
278 #if defined(ARCH_S390)
/* s390 without long displacement: byte loads need a masked-move form */
279 if (copy_size == OP_SIZE_1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
280 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, IMM_PURPOSE_LDR_OFFSET, copy_size));
281 gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
282 gen_one(R_SCRATCH_1);
283 gen_one(R_SCRATCH_1);
284 gen_address_offset();
288 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, ARCH_PREFERS_SX(copy_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, copy_size));
289 gen_insn(ARCH_PREFERS_SX(copy_size) ? INSN_MOVSX : INSN_MOV, copy_size, 0, 0);
290 gen_one(R_SCRATCH_1);
291 gen_address_offset();
294 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
295 gen_insn(INSN_MOV, copy_size, 0, 0);
296 gen_address_offset();
297 gen_one(R_SCRATCH_1);
299 gen_insn(INSN_JMP, 0, 0, 0);
300 gen_four(next_arg_label);
/* large or non-built-in flat value: box it via the flat_to_data upcall */
302 if (ctx->registers[arg_slot] >= 0)
303 g(spill(ctx, arg_slot));
305 g(gen_upcall_start(ctx, 3));
307 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
308 g(gen_upcall_argument(ctx, 0));
310 g(gen_load_constant(ctx, R_ARG1, arg_slot));
311 g(gen_upcall_argument(ctx, 1));
/* R_ARG2 = address of the slot within the frame */
313 g(gen_imm(ctx, (size_t)arg_slot * slot_size, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
314 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
318 g(gen_upcall_argument(ctx, 2));
320 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
322 gen_insn(INSN_JMP, 0, 0, 0);
323 gen_four(set_ptr_label);
/* pointer path: fetch (and possibly consume) the pointer from the slot */
327 gen_label(skip_flat_label);
328 g(gen_frame_get_pointer(ctx, arg_slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
330 gen_label(set_ptr_label);
/* pointer arguments are tagged TYPE_TAG_unknown */
331 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
332 g(gen_imm(ctx, TYPE_TAG_unknown, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
333 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
334 gen_address_offset();
337 g(gen_address(ctx, R_SAVED_1, arg_offset_ptr, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
338 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
339 gen_address_offset();
342 gen_label(next_arg_label);
/* publish the finished reference into the result slot */
345 g(gen_compress_pointer(ctx, R_SAVED_1));
346 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
/*
 * gen_call - emit code that calls the local function fn_idx directly:
 * checks stack space, builds the callee's frame header below the current
 * one (new_fp_offset is negative), clears the callee's flag bitmap,
 * copies/transfers every argument, stores the callee's data pointer into
 * the new frame, shifts R_FRAME and tail-jumps into the callee's
 * unoptimized generated code.
 *
 * NOTE(review): sampled extract — spills inside the register loop, early
 * returns, several braces and #else/#endif lines are not visible here;
 * comments describe only what the visible lines show.
 */
351 static bool attr_w gen_call(struct codegen_context *ctx, code_t code, frame_t fn_idx)
353 struct data *new_fn = ctx->local_directory[fn_idx];
354 frame_t required_slots = da(new_fn,function)->frame_slots;
355 frame_t bitmap_slots = da(new_fn,function)->n_bitmap_slots;
357 uint32_t escape_label;
358 int64_t new_fp_offset;
359 uchar_efficient_t call_mode;
361 bool arch_use_flags = ARCH_HAS_FLAGS;
362 #if defined(ARCH_POWER)
/* POWER: avoid the flags-based form even though flags exist */
363 arch_use_flags = false;
366 escape_label = alloc_escape_label(ctx);
367 if (unlikely(!escape_label))
/* flush register-cached variables back to the frame before the call */
370 for (v = MIN_USEABLE_SLOT; v < function_n_variables(ctx->fn); v++) {
371 if (ctx->registers[v] >= 0) {
/* stack check: available_slots -= required_slots; escape on underflow */
376 g(gen_frame_load_raw(ctx, log_2(sizeof(stack_size_t)), native, 0, frame_offs(available_slots), R_SCRATCH_1));
377 g(gen_imm(ctx, required_slots, IMM_PURPOSE_SUB, i_size(log_2(sizeof(stack_size_t)))));
378 gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(i_size(log_2(sizeof(stack_size_t)))), i_size(log_2(sizeof(stack_size_t))), ALU_SUB, arch_use_flags);
379 gen_one(R_SCRATCH_1);
380 gen_one(R_SCRATCH_1);
383 if (arch_use_flags) {
384 gen_insn(COND_IS_LOGICAL(COND_B) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, log_2(sizeof(stack_size_t)), COND_B, 0);
385 gen_four(escape_label);
/* no usable flags: re-test the result for a negative value */
387 g(gen_cmp_test_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, R_SCRATCH_1, COND_S, escape_label));
/* the callee's frame sits below the current one */
390 new_fp_offset = -(ssize_t)(required_slots * slot_size);
/* fill the new frame header: available_slots, previous_ip, timestamp, mode */
392 g(gen_frame_store_raw(ctx, log_2(sizeof(stack_size_t)), 0, new_fp_offset + frame_offs(available_slots), R_SCRATCH_1));
393 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(ip_t)), 0, new_fp_offset + frame_offs(previous_ip), ctx->return_values - da(ctx->fn,function)->code));
394 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), R_SCRATCH_1));
395 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
396 call_mode = code == OPCODE_CALL ? CALL_MODE_NORMAL : code == OPCODE_CALL_STRICT ? CALL_MODE_STRICT : CALL_MODE_SPARK;
397 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(uchar_efficient_t)), 0, new_fp_offset + frame_offs(mode), call_mode));
/* start the callee with an all-clear pointer-flag bitmap */
399 g(gen_clear_bitmap(ctx, frame_offset, R_FRAME, new_fp_offset, bitmap_slots));
/* --- copy/transfer every argument into the callee's frame --- */
401 for (i = 0; i < ctx->args_l; i++) {
402 const struct code_arg *src_arg = &ctx->args[i];
403 const struct local_arg *dest_arg = &da(new_fn,function)->args[i];
404 const struct type *t = get_type_of_local(ctx, src_arg->slot);
405 uint32_t non_flat_label, thunk_label, incr_ref_label, next_arg_label;
406 non_flat_label = alloc_label(ctx);
407 if (unlikely(!non_flat_label))
409 thunk_label = alloc_label(ctx);
410 if (unlikely(!thunk_label))
412 incr_ref_label = alloc_label(ctx);
413 if (unlikely(!incr_ref_label))
415 next_arg_label = alloc_label(ctx);
416 if (unlikely(!next_arg_label))
418 if (TYPE_IS_FLAT(t)) {
419 g(gen_test_1_cached(ctx, src_arg->slot, non_flat_label));
420 if (dest_arg->may_be_flat) {
/* fast path: callee accepts flat data, plain memcpy into its slot */
421 g(gen_memcpy_from_slot(ctx, R_FRAME, new_fp_offset + (size_t)dest_arg->slot * slot_size, src_arg->slot));
/* callee requires a pointer: box the flat value via flat_to_data */
423 g(gen_upcall_start(ctx, 3));
425 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
426 g(gen_upcall_argument(ctx, 0));
428 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
429 g(gen_upcall_argument(ctx, 1));
431 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
432 g(gen_upcall_argument(ctx, 2));
434 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
436 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_RET0));
438 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
441 if (flag_is_clear(ctx, src_arg->slot))
442 goto skip_ref_argument;
444 gen_insn(INSN_JMP, 0, 0, 0);
445 gen_four(next_arg_label);
/* --- non-flat (pointer) argument --- */
447 gen_label(non_flat_label);
449 if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_LEND) {
/* lend: callee borrows the pointer, no flag or refcount change */
450 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_SCRATCH_1));
451 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, thunk_label));
452 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_SCRATCH_1));
453 gen_insn(INSN_JMP, 0, 0, 0);
454 gen_four(next_arg_label);
455 } else if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_GIVE) {
/* give: transfer the borrowed pointer and wipe the source slot */
456 g(gen_test_1_cached(ctx, src_arg->slot, thunk_label));
457 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_SCRATCH_1));
458 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, thunk_label));
459 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_SCRATCH_1));
460 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
461 gen_insn(INSN_JMP, 0, 0, 0);
462 gen_four(next_arg_label);
/* owned-pointer path: callee gets its own reference (flag set) */
465 gen_label(thunk_label);
466 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
467 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_SCRATCH_1));
468 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_SCRATCH_1));
469 if (src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT) {
470 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
471 if (flag_is_set(ctx, src_arg->slot)) {
/* source owned the reference: ownership moves, no refcount change */
472 g(gen_set_1(ctx, R_FRAME, src_arg->slot, 0, false));
473 flag_set(ctx, src_arg->slot, false);
474 goto skip_ref_argument;
476 if (flag_is_clear(ctx, src_arg->slot))
/* run-time test-and-clear decides whether a refcount bump is needed */
478 g(gen_test_1(ctx, R_FRAME, src_arg->slot, 0, incr_ref_label, true, TEST_CLEAR));
479 gen_insn(INSN_JMP, 0, 0, 0);
480 gen_four(next_arg_label);
483 gen_label(incr_ref_label);
485 g(gen_upcall_start(ctx, 1));
487 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
488 g(gen_upcall_argument(ctx, 0));
490 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
493 gen_label(next_arg_label);
/* --- load the callee's data pointer and store it in the new frame --- */
496 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
498 g(gen_address(ctx, R_SCRATCH_1, 0, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
499 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
500 gen_one(R_SCRATCH_1);
501 gen_address_offset();
503 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, 0));
505 g(gen_frame_store_raw(ctx, OP_SIZE_ADDRESS, 0, frame_offs(function) + new_fp_offset, R_SCRATCH_1));
/* shift the frame pointer down to the callee's frame (SUB of the negated
 * offset where ADD-of-negative immediates encode poorly) */
507 #if !defined(ARCH_X86) && !defined(ARCH_PARISC)
508 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_SUB, R_FRAME, R_FRAME, -new_fp_offset, 0));
510 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* fetch the callee's codegen object; escape if it is still a thunk */
513 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
514 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
515 gen_one(R_SCRATCH_1);
516 gen_address_offset();
518 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, ctx->escape_nospill_label));
521 gen_pointer_compression(R_SCRATCH_1);
/* tail-jump to the callee's unoptimized code (memory-indirect where the
 * architecture supports it, otherwise load-then-jump) */
522 #if (defined(ARCH_X86) && !defined(ARCH_X86_X32)) || defined(ARCH_ARM32)
523 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
524 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
525 gen_address_offset_compressed();
527 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
528 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
529 gen_one(R_SCRATCH_1);
530 gen_address_offset_compressed();
532 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
533 gen_one(R_SCRATCH_1);
/* compile-time flag cache is invalid across the call */
535 g(clear_flag_cache(ctx));
/*
 * gen_return - emit the function epilogue: copy each return value into the
 * caller's frame (located new_fp_offset bytes above the current one),
 * restore the caller's frame pointer and tail-jump back into the caller's
 * generated code at the instruction after the call.
 *
 * Per return value the generated code distinguishes:
 *  - flat value where the caller accepts flat data (tested via the
 *    OPCODE_MAY_RETURN_FLAT bit read from the caller's code stream):
 *    copied directly into the caller's slot;
 *  - flat value where the caller wants a pointer: boxed via flat_to_data;
 *  - pointer: copied, with a reference-count bump when this frame does
 *    not own (or does not free) the reference.
 *
 * NOTE(review): sampled extract — early returns, some braces,
 * #else/#endif lines and the scaled_store_done/scaled_jmp_done labels are
 * not visible here; comments describe only what the visible lines show.
 */
540 static bool attr_w gen_return(struct codegen_context *ctx)
542 int64_t new_fp_offset;
543 uint32_t escape_label;
545 int64_t retval_offset;
546 unsigned attr_unused reg1;
548 escape_label = alloc_escape_label(ctx);
549 if (unlikely(!escape_label))
/* the caller's frame lies above ours by our own frame size */
552 new_fp_offset = (size_t)da(ctx->fn,function)->frame_slots * slot_size;
/* caller's function pointer; NULL means top frame -> escape */
554 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), R_SCRATCH_2));
556 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_SCRATCH_2, COND_E, escape_label));
/* escape as well if the caller's codegen object is still a thunk */
558 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
559 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
560 gen_one(R_SCRATCH_1);
561 gen_address_offset();
563 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
/* propagate our timestamp into the caller's frame */
566 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), R_SCRATCH_1));
567 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
/* R_SAVED_1 = address of the caller's return-site instruction
 * (caller code base + previous_ip, scaled by sizeof(code_t)) */
569 g(gen_frame_load_raw(ctx, log_2(sizeof(ip_t)), native, 0, frame_offs(previous_ip), R_SCRATCH_1));
571 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
572 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
573 gen_one(R_SCRATCH_2);
574 gen_address_offset();
576 g(gen_lea3(ctx, R_SAVED_1, R_SCRATCH_2, R_SCRATCH_1, log_2(sizeof(code_t)), 0));
/* --- deliver each return value --- */
579 for (i = 0; i < ctx->args_l; i++) {
580 const struct code_arg *src_arg = &ctx->args[i];
581 const struct type *t = get_type_of_local(ctx, src_arg->slot);
582 uint32_t copy_ptr_label, load_write_ptr_label, write_ptr_label, next_arg_label;
584 copy_ptr_label = alloc_label(ctx);
585 if (unlikely(!copy_ptr_label))
588 load_write_ptr_label = alloc_label(ctx);
589 if (unlikely(!load_write_ptr_label))
592 write_ptr_label = alloc_label(ctx);
593 if (unlikely(!write_ptr_label))
596 next_arg_label = alloc_label(ctx);
597 if (unlikely(!next_arg_label))
/* R_SAVED_2 = destination slot index, read from the caller's code stream */
600 g(gen_load_code_32(ctx, R_SAVED_2, R_SAVED_1, retval_offset));
602 if (TYPE_IS_FLAT(t)) {
603 uint32_t flat_to_data_label;
604 g(gen_test_1_cached(ctx, src_arg->slot, copy_ptr_label));
606 flat_to_data_label = alloc_label(ctx);
607 if (unlikely(!flat_to_data_label))
/* test the caller's OPCODE_MAY_RETURN_FLAT bit; if clear, box instead */
610 #if defined(ARCH_X86)
611 g(gen_address(ctx, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(code_t))));
612 g(gen_imm(ctx, OPCODE_MAY_RETURN_FLAT, IMM_PURPOSE_TEST, log_2(sizeof(code_t))));
613 gen_insn(INSN_TEST, log_2(sizeof(code_t)), 0, 1);
614 gen_address_offset();
617 gen_insn(INSN_JMP_COND, log_2(sizeof(code_t)), COND_E, 0);
618 gen_four(flat_to_data_label);
620 g(gen_load_two(ctx, R_SCRATCH_1, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3)));
622 g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, OPCODE_MAY_RETURN_FLAT, COND_E, flat_to_data_label));
/* x86 can store small flat values straight into the caller's slot */
624 #if defined(ARCH_X86)
625 if (is_power_of_2(t->size) && t->size <= 2U << OP_SIZE_NATIVE) {
626 if (t->size == 2U << OP_SIZE_NATIVE) {
/* double-word value: store low and high halves separately */
627 g(gen_frame_load_2(ctx, OP_SIZE_NATIVE, src_arg->slot, 0, R_SCRATCH_1, R_SCRATCH_2));
629 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
630 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
633 gen_eight(new_fp_offset + lo_word(OP_SIZE_NATIVE));
634 gen_one(R_SCRATCH_1);
636 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
637 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
640 gen_eight(new_fp_offset + hi_word(OP_SIZE_NATIVE));
641 gen_one(R_SCRATCH_2);
/* NOTE(review): "®1" below looks like mojibake for "&reg1" — confirm
 * against the upstream source before building */
643 g(gen_frame_get(ctx, log_2(t->size), garbage, src_arg->slot, 0, R_SCRATCH_1, ®1));
645 gen_insn(INSN_MOV, log_2(t->size), 0, 0);
646 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
649 gen_eight(new_fp_offset);
/* generic flat copy: compute the caller slot's address, then memcpy */
655 g(gen_lea3(ctx, R_SCRATCH_2, R_FRAME, R_SAVED_2, OP_SIZE_SLOT, new_fp_offset));
657 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, 0, src_arg->slot));
660 gen_insn(INSN_JMP, 0, 0, 0);
661 gen_four(next_arg_label);
/* caller will not take flat data: box the value via flat_to_data */
663 gen_label(flat_to_data_label);
665 if (ctx->registers[src_arg->slot] >= 0)
666 g(spill(ctx, src_arg->slot));
668 g(gen_upcall_start(ctx, 3));
670 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
671 g(gen_upcall_argument(ctx, 0));
673 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
674 g(gen_upcall_argument(ctx, 1));
676 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
677 g(gen_upcall_argument(ctx, 2));
679 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
681 if (flag_is_clear(ctx, src_arg->slot))
682 goto skip_ref_argument;
684 gen_insn(INSN_JMP, 0, 0, 0);
685 gen_four(write_ptr_label);
/* --- pointer return value: bump the refcount unless ownership moves --- */
688 gen_label(copy_ptr_label);
690 if (unlikely(!(src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT))) {
/* slot is not freed here, so the caller needs its own reference */
691 g(gen_upcall_start(ctx, 1));
692 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_ARG0));
693 g(gen_upcall_argument(ctx, 0));
694 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
695 } else if (da(ctx->fn,function)->local_variables_flags[src_arg->slot].may_be_borrowed) {
/* borrowed pointer (flag clear at run time) also needs a refcount bump */
696 g(gen_test_1_cached(ctx, src_arg->slot, load_write_ptr_label));
697 g(gen_upcall_start(ctx, 1));
698 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_ARG0));
699 g(gen_upcall_argument(ctx, 0));
700 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
703 gen_label(load_write_ptr_label);
705 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, src_arg->slot, 0, R_RET0));
/* store R_RET0 into caller slot R_SAVED_2 — per-arch scaled addressing */
708 gen_label(write_ptr_label);
710 #if defined(ARCH_X86)
711 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
712 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
715 gen_eight(new_fp_offset);
717 goto scaled_store_done;
719 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
/* address = R_FRAME + (slot << OP_SIZE_SLOT), via shifted-add */
720 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
721 gen_one(R_SCRATCH_3);
723 gen_one(ARG_SHIFTED_REGISTER);
724 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
727 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
728 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
729 gen_address_offset();
731 goto scaled_store_done;
/* fallback: explicit shift then add */
734 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_3, R_SAVED_2, OP_SIZE_SLOT, false));
736 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_3, R_SCRATCH_3, R_FRAME, 0));
738 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
739 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
740 gen_address_offset();
/* mark the caller's destination slot as holding an owned pointer */
744 g(gen_set_1_variable(ctx, R_SAVED_2, new_fp_offset, true));
746 gen_label(next_arg_label);
/* advance to the next return-value descriptor in the caller's code */
748 retval_offset += 4 + 2 * (ARG_MODE_N >= 3);
/* --- jump back into the caller's generated code --- */
751 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), R_SCRATCH_1));
753 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
754 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
755 gen_one(R_SCRATCH_1);
756 gen_address_offset();
758 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, 0));
/* R_SCRATCH_2 = index into the caller's unoptimized_code table */
760 g(gen_load_code_32(ctx, R_SCRATCH_2, R_SAVED_1, retval_offset + 2));
/* restore the caller's frame pointer */
762 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* per-arch indirect jump through unoptimized_code[R_SCRATCH_2] */
764 #if defined(ARCH_X86) && !defined(ARCH_X86_X32)
765 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
766 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
767 gen_one(R_SCRATCH_1);
768 gen_one(R_SCRATCH_2);
769 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
771 goto scaled_jmp_done;
773 #if defined(ARCH_X86)
774 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
775 gen_one(R_SCRATCH_1);
776 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
777 gen_one(R_SCRATCH_1);
778 gen_one(R_SCRATCH_2);
779 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
781 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
782 gen_one(R_SCRATCH_1);
784 goto scaled_jmp_done;
786 #if defined(ARCH_ARM32)
787 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
788 gen_one(R_SCRATCH_1);
789 gen_one(R_SCRATCH_1);
790 gen_one(ARG_SHIFTED_REGISTER);
791 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
792 gen_one(R_SCRATCH_2);
794 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
795 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
796 gen_address_offset();
798 goto scaled_jmp_done;
800 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_ADDRESS)) {
801 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
802 gen_one(R_SCRATCH_1);
803 gen_one(R_SCRATCH_1);
804 gen_one(ARG_SHIFTED_REGISTER);
805 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
806 gen_one(R_SCRATCH_2);
808 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
809 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
810 gen_one(R_SCRATCH_1);
811 gen_address_offset();
813 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
814 gen_one(R_SCRATCH_1);
816 goto scaled_jmp_done;
/* generic fallback: shift, add, load, jump */
819 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_2, R_SCRATCH_2, OP_SIZE_ADDRESS, false));
821 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 0));
823 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
824 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
825 gen_one(R_SCRATCH_1);
826 gen_address_offset();
828 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
829 gen_one(R_SCRATCH_1);
831 goto scaled_jmp_done;
/* Forward declarations for array helpers used by gen_structured below and
 * defined later in the file. */
836 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src);
837 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label);
839 static bool attr_w gen_structured(struct codegen_context *ctx, frame_t slot_struct, frame_t slot_elem)
841 uint32_t escape_label;
842 const struct type *struct_type, *elem_type;
846 escape_label = alloc_escape_label(ctx);
847 if (unlikely(!escape_label))
850 struct_type = get_type_of_local(ctx, slot_struct);
851 elem_type = get_type_of_local(ctx, slot_elem);
853 if (TYPE_IS_FLAT(struct_type) && struct_type->tag != TYPE_TAG_flat_option) {
854 if (!TYPE_IS_FLAT(elem_type)) {
857 g(gen_test_1_cached(ctx, slot_struct, escape_label));
858 flag_set(ctx, slot_struct, false);
862 g(gen_test_1_jz_cached(ctx, slot_struct, escape_label));
866 g(gen_frame_address(ctx, slot_struct, 0, R_SAVED_1));
868 for (i = 0; i < ctx->args_l; i++) {
869 frame_t param_slot = ctx->args[i].slot;
871 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
872 case OPCODE_STRUCTURED_RECORD: {
873 struct flat_record_definition_entry *e;
874 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_record, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_record));
875 e = &type_def(struct_type,flat_record)->entries[param_slot];
877 g(gen_imm(ctx, e->flat_offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
878 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
883 struct_type = e->subtype;
886 case OPCODE_STRUCTURED_ARRAY: {
887 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_array, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_array));
888 g(gen_test_1_cached(ctx, param_slot, escape_label));
889 flag_set(ctx, param_slot, false);
890 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, 0, R_SCRATCH_1, ®1));
892 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg1, type_def(struct_type,flat_array)->n_elements, COND_AE, escape_label));
894 g(gen_scaled_array_address(ctx, type_def(struct_type,flat_array)->base->size, R_SAVED_1, R_SAVED_1, reg1, 0));
896 struct_type = type_def(struct_type,flat_array)->base;
900 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
903 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
904 gen_one(R_SCRATCH_1);
905 gen_one(ARG_ADDRESS_1);
909 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
912 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, 0));
914 g(gen_compare_refcount(ctx, R_SCRATCH_1, REFCOUNT_STEP, COND_AE, escape_label));
916 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
917 case OPCODE_STRUCTURED_RECORD: {
918 const struct type *rec_type, *e_type;
919 rec_type = da_type(ctx->fn, ctx->args[i].type);
920 TYPE_TAG_VALIDATE(rec_type->tag);
921 if (unlikely(rec_type->tag == TYPE_TAG_flat_record))
922 rec_type = type_def(rec_type,flat_record)->base;
923 e_type = type_def(rec_type,record)->types[param_slot];
924 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
925 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, true, TEST));
927 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, false, TEST));
928 struct_type = e_type;
930 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, data_record_offset + (size_t)param_slot * slot_size, 0));
933 case OPCODE_STRUCTURED_OPTION: {
934 unsigned op_size = log_2(sizeof(ajla_option_t));
935 #if defined(ARCH_X86)
936 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
937 g(gen_imm(ctx, param_slot, IMM_PURPOSE_CMP, op_size));
938 gen_insn(INSN_CMP, op_size, 0, 1);
939 gen_address_offset();
942 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
943 gen_four(escape_label);
945 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
946 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
947 gen_one(R_SCRATCH_2);
948 gen_address_offset();
950 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
952 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
955 case OPCODE_STRUCTURED_ARRAY: {
956 const struct type *e_type = da_type(ctx->fn, ctx->args[i].type);
958 g(gen_test_1_cached(ctx, param_slot, escape_label));
959 flag_set(ctx, param_slot, false);
961 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, 0, R_SCRATCH_2, ®2));
963 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
965 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
966 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
968 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
969 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
970 gen_one(R_SCRATCH_1);
971 gen_address_offset();
973 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_1, R_SCRATCH_1, reg2, 0));
975 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_flat, COND_NE, escape_label, R_SCRATCH_3));
977 g(gen_scaled_array_address(ctx, e_type->size, R_SAVED_1, R_SCRATCH_1, reg2, data_array_offset));
979 struct_type = e_type;
984 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
991 g(gen_test_1_cached(ctx, slot_elem, escape_label));
992 flag_set(ctx, slot_elem, false);
993 g(gen_memcpy_from_slot(ctx, R_SAVED_1, 0, slot_elem));
995 uint32_t skip_deref_label;
996 skip_deref_label = alloc_label(ctx);
997 if (unlikely(!skip_deref_label))
1000 if (TYPE_IS_FLAT(elem_type))
1001 g(gen_test_1_jz_cached(ctx, slot_elem, escape_label));
1003 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1004 gen_one(R_SCRATCH_1);
1005 gen_one(ARG_ADDRESS_1);
1009 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_SCRATCH_1, COND_E, skip_deref_label));
1011 g(gen_upcall_start(ctx, 1));
1012 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1013 g(gen_upcall_argument(ctx, 0));
1014 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1016 gen_label(skip_deref_label);
1018 g(gen_frame_get_pointer(ctx, slot_elem, (ctx->args[i - 1].flags & OPCODE_STRUCTURED_FREE_VARIABLE) != 0, R_SCRATCH_1));
1020 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1021 gen_one(ARG_ADDRESS_1);
1024 gen_one(R_SCRATCH_1);
/*
 * Emit machine code that constructs a record value in frame slot slot_r
 * from the arguments in ctx->args[0 .. ctx->args_l - 1].
 *
 * Two code paths are visible:
 *  - flat-record destination: each argument slot is tested for flatness
 *    (escaping to the interpreter via escape_label otherwise) and its bytes
 *    are copied directly to the argument's flat_offset inside slot_r;
 *  - heap record: an upcall to cg_upcall_data_alloc_record_mayfail
 *    allocates the record, the per-slot flag bitmap is cleared, and every
 *    argument is stored by flat memcpy, by a cg_upcall_flat_to_data
 *    conversion, or as a pointer taken from the frame.
 *
 * NOTE(review): the embedded original line numbers are discontinuous, so
 * some statements (returns after failed label allocation, closing braces,
 * gen_one() operands) are not visible in this extract; comments below
 * describe only the visible lines.
 */
1030 static bool attr_w gen_record_create(struct codegen_context *ctx, frame_t slot_r)
1032 const struct type *t;
1033 const struct record_definition *def;
1034 uint32_t escape_label;
1037 escape_label = alloc_escape_label(ctx);
1038 if (unlikely(!escape_label))
1041 t = get_type_of_local(ctx, slot_r);
1042 if (t->tag == TYPE_TAG_flat_record) {
/* Flat-record fast path: build the record directly in the frame slot. */
1043 const struct flat_record_definition *flat_def;
1044 const struct type *flat_type = t;
1045 t = type_def(t,flat_record)->base;
1046 def = type_def(t,record);
1047 flat_def = type_def(flat_type,flat_record);
/* First pass: every argument must hold flat data, otherwise escape. */
1048 for (i = 0; i < ctx->args_l; i++) {
1049 frame_t var_slot = ctx->args[i].slot;
1050 g(gen_test_1_cached(ctx, var_slot, escape_label));
1051 flag_set(ctx, var_slot, false);
/* Second pass: copy each argument to its flat offset inside slot_r.
 * ii tracks the record-definition slot index, skipping elided slots. */
1053 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1054 frame_t var_slot, flat_offset, record_slot;
1055 while (unlikely(record_definition_is_elided(def, ii)))
1057 var_slot = ctx->args[i].slot;
1058 record_slot = record_definition_slot(def, ii);
1059 flat_offset = flat_def->entries[record_slot].flat_offset;
1060 g(gen_memcpy_from_slot(ctx, R_FRAME, (size_t)slot_r * slot_size + flat_offset, var_slot));
/* Heap-record path. */
1065 def = type_def(t,record);
/* Upcall: data_alloc_record_mayfail(frame, slot_r). */
1067 g(gen_upcall_start(ctx, 2));
1069 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1070 g(gen_upcall_argument(ctx, 0));
1072 g(gen_load_constant(ctx, R_ARG1, slot_r));
1073 g(gen_upcall_argument(ctx, 1));
1075 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1076 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL result -> allocation failed -> escape to the interpreter. */
1077 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the new record in R_SAVED_1 across the per-argument upcalls. */
1079 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1081 g(gen_clear_bitmap(ctx, 0, R_SAVED_1, data_record_offset, bitmap_slots(def->n_slots)));
1083 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1084 frame_t var_slot, var_flags, record_slot;
1085 const struct type *var_type, *record_type;
1086 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
1088 skip_flat_label = alloc_label(ctx);
1089 if (unlikely(!skip_flat_label))
1091 set_ptr_label = alloc_label(ctx);
1092 if (unlikely(!set_ptr_label))
1094 next_arg_label = alloc_label(ctx);
1095 if (unlikely(!next_arg_label))
1098 while (unlikely(record_definition_is_elided(def, ii)))
1100 var_slot = ctx->args[i].slot;
1101 var_type = get_type_of_local(ctx, var_slot);
1102 var_flags = ctx->args[i].flags;
1103 record_slot = record_definition_slot(def, ii);
1104 record_type = def->types[record_slot];
1105 if (TYPE_IS_FLAT(var_type)) {
/* If the slot's flag says "not flat", fall through to the pointer path. */
1106 g(gen_test_1_cached(ctx, var_slot, skip_flat_label));
1107 if (TYPE_IS_FLAT(record_type)) {
/* Flat argument into a flat record member: plain byte copy. */
1108 g(gen_memcpy_from_slot(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, var_slot));
1110 gen_insn(INSN_JMP, 0, 0, 0);
1111 gen_four(next_arg_label);
/* Flat argument into a pointer member: box it via flat_to_data. */
1113 if (ctx->registers[var_slot] >= 0)
1114 g(spill(ctx, var_slot));
1116 g(gen_upcall_start(ctx, 3));
1118 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1119 g(gen_upcall_argument(ctx, 0));
1121 g(gen_load_constant(ctx, R_ARG1, var_slot));
1122 g(gen_upcall_argument(ctx, 1));
1124 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)var_slot * slot_size, 0));
1125 g(gen_upcall_argument(ctx, 2));
1127 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1129 gen_insn(INSN_JMP, 0, 0, 0);
1130 gen_four(set_ptr_label);
/* Pointer path: fetch the pointer (optionally consuming the argument). */
1134 gen_label(skip_flat_label);
1135 g(gen_frame_get_pointer(ctx, var_slot, (var_flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1137 gen_label(set_ptr_label);
/* Store the pointer into the record member and mark its flag bit. */
1138 g(gen_address(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1139 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1140 gen_address_offset();
1143 g(gen_set_1(ctx, R_SAVED_1, record_slot, data_record_offset, true));
1145 gen_label(next_arg_label);
/* Publish the finished record into slot_r as a compressed pointer. */
1148 g(gen_compress_pointer(ctx, R_SAVED_1));
1149 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
/*
 * Emit code that loads member rec_slot of the record held in slot_1 into
 * slot_r.  Paths visible here:
 *  - flat record in the frame: byte-copy the member from slot_1's flat
 *    offset;
 *  - heap record: load and decompress the pointer (escaping on thunks),
 *    then either copy a flat member or fetch a pointer member, honoring
 *    OPCODE_STRUCT_MAY_BORROW (store without taking a reference) versus
 *    taking an owned reference via cg_upcall_pointer_reference_owned.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * (returns, braces, gen_one() operands) are not shown in this extract.
 */
1154 static bool attr_w gen_record_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, frame_t rec_slot, frame_t flags)
1156 const struct type *rec_type, *entry_type;
1157 uint32_t escape_label;
1159 rec_type = get_type_of_local(ctx, slot_1);
1160 if (unlikely(rec_type->tag == TYPE_TAG_unknown)) {
/* A record without a definition may only occur in the nameless root function. */
1161 ajla_assert_lo(!*da(ctx->fn,function)->function_name, (file_line, "gen_record_load: function %s has record without definition", da(ctx->fn,function)->function_name));
1165 escape_label = alloc_escape_label(ctx);
1166 if (unlikely(!escape_label))
1169 /*debug("gen_record_load: %s: %u, %u", da(ctx->fn,function)->function_name, TYPE_TAG_unknown, rec_type->tag);*/
1170 if (TYPE_IS_FLAT(rec_type)) {
/* Flat record stored directly in the frame: memcpy the member out. */
1171 const struct flat_record_definition_entry *ft = &type_def(rec_type,flat_record)->entries[rec_slot];
1172 g(gen_test_1_cached(ctx, slot_1, escape_label));
1173 g(gen_memcpy_to_slot(ctx, slot_r, R_FRAME, (size_t)slot_1 * slot_size + ft->flat_offset));
1174 flag_set(ctx, slot_1, false);
1175 flag_set(ctx, slot_r, false);
1178 entry_type = type_def(rec_type,record)->types[rec_slot];
/* Heap record: load the pointer; thunks are handled by the interpreter. */
1180 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_2));
1181 g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, true, escape_label));
1182 g(gen_barrier(ctx));
1184 g(gen_decompress_pointer(ctx, false, R_SCRATCH_2, 0));
1186 if (TYPE_IS_FLAT(entry_type)) {
/* Member flag must be clear (flat data present), then copy bytes. */
1187 g(gen_test_1(ctx, R_SCRATCH_2, rec_slot, data_record_offset, escape_label, false, TEST));
1188 g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, (size_t)rec_slot * slot_size + data_record_offset));
1189 flag_set(ctx, slot_r, false);
1193 if (flag_must_be_flat(ctx, slot_r)) {
/* Destination cannot hold a pointer: punt to the interpreter. */
1194 gen_insn(INSN_JMP, 0, 0, 0);
1195 gen_four(escape_label);
/* Member flag must be set (pointer present), then load the pointer. */
1199 g(gen_test_1(ctx, R_SCRATCH_2, rec_slot, data_record_offset, escape_label, true, TEST));
1201 g(gen_address(ctx, R_SCRATCH_2, (size_t)rec_slot * slot_size + data_record_offset, ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1202 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1203 gen_one(R_SCRATCH_1);
1204 gen_address_offset();
1206 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
1208 if (flags & OPCODE_STRUCT_MAY_BORROW) {
/* Borrow: store the pointer without taking a reference. */
1209 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
1210 flag_set(ctx, slot_r, false);
1212 g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1));
/* Owning copy: bump the refcount through an upcall. */
1214 g(gen_upcall_start(ctx, 1));
1215 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1216 g(gen_upcall_argument(ctx, 0));
1217 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/*
 * Emit code that stores the flat-option ordinal `opt` directly into frame
 * slot slot_r as an immediate and marks the slot's flag clear (flat data).
 * No heap allocation is involved.
 */
1222 static bool attr_w gen_option_create_empty_flat(struct codegen_context *ctx, ajla_flat_option_t opt, frame_t slot_r)
1224 g(gen_frame_store_imm(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, opt));
1225 flag_set(ctx, slot_r, false);
/*
 * Emit code that allocates a heap option value with ordinal `opt` and a
 * NULL payload pointer, storing the compressed pointer into slot_r.
 * Escapes to the interpreter when slot_r is required to stay flat or when
 * allocation fails.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * are not shown in this extract.
 */
1229 static bool attr_w gen_option_create_empty(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_r)
1231 unsigned option_size = log_2(sizeof(ajla_option_t));
1232 uint32_t escape_label;
1234 escape_label = alloc_escape_label(ctx);
1235 if (unlikely(!escape_label))
1238 if (flag_must_be_flat(ctx, slot_r)) {
/* Destination cannot hold a pointer: punt to the interpreter. */
1239 gen_insn(INSN_JMP, 0, 0, 0);
1240 gen_four(escape_label);
/* Upcall: data_alloc_option_mayfail(); NULL result escapes. */
1244 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1245 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1246 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Store the ordinal into u_.option.option. */
1248 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1249 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1250 gen_insn(INSN_MOV, option_size, 0, 0);
1251 gen_address_offset();
/* Zero the payload pointer: this option carries no value. */
1254 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1255 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, OP_SIZE_SLOT));
1256 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1257 gen_address_offset();
1260 g(gen_compress_pointer(ctx, R_RET0));
1261 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code that builds a heap option with ordinal `opt` whose payload is
 * taken from slot_1, storing the result into slot_r.
 *
 * Payload acquisition (visible paths):
 *  - flat slot_1: box it via cg_upcall_flat_to_data (after spilling a
 *    register-cached slot), unless the cached flag says it already holds
 *    a pointer;
 *  - pointer slot_1: gen_frame_get_pointer, optionally consuming the
 *    argument when OPCODE_FLAG_FREE_ARGUMENT is set.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * (returns, gen_one() operands, braces) are not shown in this extract.
 */
1266 static bool attr_w gen_option_create(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_1, frame_t slot_r, frame_t flags)
1268 unsigned option_size = log_2(sizeof(ajla_option_t));
1269 const struct type *type;
1270 uint32_t escape_label, get_pointer_label, got_pointer_label;
1272 escape_label = alloc_escape_label(ctx);
1273 if (unlikely(!escape_label))
1276 if (flag_must_be_flat(ctx, slot_r)) {
/* Destination cannot hold a pointer: punt to the interpreter. */
1277 gen_insn(INSN_JMP, 0, 0, 0);
1278 gen_four(escape_label);
1282 get_pointer_label = alloc_label(ctx);
1283 if (unlikely(!get_pointer_label))
1286 got_pointer_label = alloc_label(ctx);
1287 if (unlikely(!got_pointer_label))
1290 type = get_type_of_local(ctx, slot_1);
/* Allocate the option first; keep it in R_SAVED_1 across later upcalls. */
1292 g(gen_upcall_start(ctx, 0));
1293 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1294 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1295 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1297 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Store the ordinal into u_.option.option. */
1299 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1300 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1301 gen_insn(INSN_MOV, option_size, 0, 0);
1302 gen_address_offset();
1305 if (TYPE_IS_FLAT(type)) {
/* Flat payload: flag set means "actually a pointer" -> pointer path. */
1306 g(gen_test_1_cached(ctx, slot_1, get_pointer_label));
1308 if (ctx->registers[slot_1] >= 0)
1309 g(spill(ctx, slot_1));
/* Upcall: flat_to_data(frame, slot_1, &frame[slot_1]). */
1311 g(gen_upcall_start(ctx, 3));
1313 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1314 g(gen_upcall_argument(ctx, 0));
1316 g(gen_load_constant(ctx, R_ARG1, slot_1));
1317 g(gen_upcall_argument(ctx, 1));
1319 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1320 g(gen_upcall_argument(ctx, 2));
1322 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* If the compile-time flag cache proves slot_1 is flat, the pointer
 * path is unreachable and its label need not be emitted. */
1324 if (flag_is_clear(ctx, slot_1))
1325 goto skip_get_pointer_label;
1327 gen_insn(INSN_JMP, 0, 0, 0);
1328 gen_four(got_pointer_label);
1331 gen_label(get_pointer_label);
1332 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1334 skip_get_pointer_label:
1335 gen_label(got_pointer_label);
/* Store the payload pointer and publish the option into slot_r. */
1336 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1337 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1338 gen_address_offset();
1341 g(gen_compress_pointer(ctx, R_SAVED_1));
1342 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
/*
 * Emit code that compares the ordinal of the option data pointed to by
 * `reg` against the constant `opt`.  Two usage modes are visible:
 *  - label != 0: jump to `label` when the ordinals differ (used as an
 *    escape/verification check);
 *  - label == 0: materialize the boolean (ordinal == opt) into slot_r.
 * The x86 path compares against memory directly; the generic path loads
 * the ordinal into R_SCRATCH_2 first (clobbering it).
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * (#else/#endif, gen_four operands, returns) are not shown here.
 */
1347 static bool attr_w gen_option_cmp(struct codegen_context *ctx, unsigned reg, frame_t opt, uint32_t label, frame_t slot_r)
1349 unsigned op_size = log_2(sizeof(ajla_option_t));
1351 #if defined(ARCH_X86)
/* x86: compare the ordinal in memory against the immediate directly. */
1352 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
1353 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1354 gen_insn(INSN_CMP, op_size, 0, 1);
1355 gen_address_offset();
1358 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1359 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1360 gen_one(R_SCRATCH_2);
1361 gen_address_offset();
1363 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1364 gen_insn(INSN_CMP, op_size, 0, 1);
1365 gen_one(R_SCRATCH_2);
/* label mode: branch away on mismatch; otherwise set slot_r from flags. */
1369 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
1372 g(gen_frame_set_cond(ctx, op_size, false, COND_E, slot_r));
/* Architectures without usable condition flags: load the ordinal and
 * compute the comparison result into a register. */
1376 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1377 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1378 gen_one(R_SCRATCH_2);
1379 gen_address_offset();
1381 g(gen_cmp_dest_reg(ctx, op_size, R_SCRATCH_2, (unsigned)-1, label ? R_CMP_RESULT : R_SCRATCH_2, opt, COND_E));
1384 gen_insn(INSN_JMP_REG, i_size(op_size), COND_E, 0);
1385 gen_one(R_CMP_RESULT);
1388 g(gen_frame_store(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, R_SCRATCH_2));
/*
 * Emit code that extracts the payload of the option held in slot_1 into
 * slot_r, after verifying the ordinal equals `opt` (escaping otherwise).
 * Escapes also on thunks, on flat slot_1 whose flag is not set, and when
 * slot_r must stay flat.  OPCODE_STRUCT_MAY_BORROW selects a borrowed
 * store; otherwise an owned reference is taken via upcall.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * are not shown in this extract.
 */
1394 static bool attr_w gen_option_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, ajla_option_t opt, frame_t flags)
1396 const struct type *type;
1397 uint32_t escape_label;
1399 escape_label = alloc_escape_label(ctx);
1400 if (unlikely(!escape_label))
1403 if (flag_must_be_flat(ctx, slot_r)) {
/* Destination cannot hold a pointer: punt to the interpreter. */
1404 gen_insn(INSN_JMP, 0, 0, 0);
1405 gen_four(escape_label);
1409 type = get_type_of_local(ctx, slot_1);
1410 if (TYPE_IS_FLAT(type)) {
/* A flat option slot must actually hold a pointer here; escape if not. */
1411 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
1414 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
1415 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
1416 g(gen_barrier(ctx));
1417 g(gen_decompress_pointer(ctx, false, R_SCRATCH_1, 0));
/* Verify the ordinal; mismatch escapes to the interpreter. */
1418 g(gen_option_cmp(ctx, R_SCRATCH_1, opt, escape_label, 0));
/* Load the payload pointer from u_.option.pointer. */
1420 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1421 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1422 gen_one(R_SCRATCH_1);
1423 gen_address_offset();
1425 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
1427 if (flags & OPCODE_STRUCT_MAY_BORROW) {
/* Borrow: store the pointer without taking a reference. */
1428 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
1429 flag_set(ctx, slot_r, false);
1431 g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1));
/* Owning copy: bump the refcount through an upcall. */
1433 g(gen_upcall_start(ctx, 1));
1434 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1435 g(gen_upcall_argument(ctx, 0));
1436 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/*
 * Emit code computing slot_r := (flat option in slot_1 == opt).
 * Escapes when slot_1's flag indicates a non-flat value.  If `opt` does
 * not fit in ajla_flat_option_t the comparison can never be true, so
 * slot_r is simply cleared.
 */
1442 static bool attr_w gen_option_test_flat(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1444 unsigned op_size = log_2(sizeof(ajla_flat_option_t));
1445 uint32_t escape_label;
1447 escape_label = alloc_escape_label(ctx);
1448 if (unlikely(!escape_label))
1451 g(gen_test_1_cached(ctx, slot_1, escape_label));
1453 flag_set(ctx, slot_1, false);
1454 flag_set(ctx, slot_r, false);
1456 if (unlikely(opt != (ajla_flat_option_t)opt)) {
/* opt is out of range for the flat type: the answer is always false. */
1457 g(gen_frame_clear(ctx, op_size, slot_r));
1461 g(gen_frame_load_cmp_imm_set_cond(ctx, op_size, zero_x, slot_1, 0, opt, COND_E, slot_r));
/*
 * Emit code computing slot_r := (ordinal of heap option in slot_1 == opt).
 * Escapes on thunks.  If `opt` does not fit in ajla_option_t the result
 * is always false and slot_r is cleared without dereferencing.
 */
1466 static bool attr_w gen_option_test(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1468 uint32_t escape_label;
1470 escape_label = alloc_escape_label(ctx);
1471 if (unlikely(!escape_label))
1474 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
1475 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
1476 g(gen_barrier(ctx));
1478 flag_set(ctx, slot_r, false);
1480 if (unlikely(opt != (ajla_option_t)opt)) {
/* opt is out of range for the option type: the answer is always false. */
1481 g(gen_frame_clear(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r));
1485 g(gen_decompress_pointer(ctx, false, R_SCRATCH_1, 0));
/* label == 0 selects gen_option_cmp's "store boolean into slot_r" mode. */
1486 g(gen_option_cmp(ctx, R_SCRATCH_1, opt, 0, slot_r));
/*
 * Emit code that stores the ordinal of the option in slot_1 into slot_r
 * as an integer.  When `flat` is set, a flat-option fast path loads the
 * ordinal directly from the frame; a heap option has its ordinal loaded
 * from u_.option.option (escaping on thunks).  Both paths converge on
 * store_label, which writes `target` into slot_r.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * (the use of `flat`, gen_one() operands, returns) are not shown here.
 */
1491 static bool attr_w gen_option_ord(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, bool flat)
1493 unsigned op_size = log_2(sizeof(ajla_option_t));
1494 unsigned op_size_flat = log_2(sizeof(ajla_flat_option_t));
1495 uint32_t escape_label, ptr_label, store_label;
1498 escape_label = alloc_escape_label(ctx);
1499 if (unlikely(!escape_label))
1502 ptr_label = alloc_label(ctx);
1503 if (unlikely(!ptr_label))
1506 store_label = alloc_label(ctx);
1507 if (unlikely(!store_label))
1510 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
/* Flat path: a set flag means "pointer, not flat" -> take ptr_label. */
1513 g(gen_test_1_cached(ctx, slot_1, ptr_label));
1515 g(gen_frame_load(ctx, op_size_flat, zero_x, slot_1, 0, target));
/* If the flag cache proves slot_1 is flat, the pointer path is dead. */
1517 if (flag_is_clear(ctx, slot_1))
1518 goto skip_ptr_label;
1520 gen_insn(INSN_JMP, 0, 0, 0);
1521 gen_four(store_label);
/* Heap path: dereference the option and read its ordinal. */
1524 gen_label(ptr_label);
1525 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
1526 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
1527 g(gen_barrier(ctx));
1529 g(gen_decompress_pointer(ctx, false, R_SCRATCH_1, 0));
1531 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1532 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1534 gen_address_offset();
1537 gen_label(store_label);
1538 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
1539 flag_set(ctx, slot_r, false);
/*
 * Emit code that builds an array in slot_r from the ctx->args_l argument
 * slots (all of which must share one type — enforced by internal()).
 *
 * Visible paths:
 *  - flat element type: verify every argument is flat (escape otherwise),
 *    upcall data_alloc_array_flat_slot_mayfail, then memcpy each element
 *    into the new array at increasing offsets;
 *  - pointer elements: upcall data_alloc_array_pointers_mayfail, then
 *    store each argument's pointer (optionally consuming it) into the
 *    pointer vector read from u_.array_pointers.pointer.
 * Arrays with too many entries escape to the interpreter.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * (returns, gen_one() operands, braces) are not shown in this extract.
 */
1544 static bool attr_w gen_array_create(struct codegen_context *ctx, frame_t slot_r)
1547 const struct type *type;
1548 uint32_t escape_label;
1550 escape_label = alloc_escape_label(ctx);
1551 if (unlikely(!escape_label))
1554 ajla_assert_lo(ctx->args_l != 0, (file_line, "gen_array_create: zero entries"));
1556 if (unlikely(ctx->args_l >= sign_bit(uint_default_t))) {
/* Length would not fit in the default integer type: punt. */
1557 gen_insn(INSN_JMP, 0, 0, 0);
1558 gen_four(escape_label);
/* All elements must have the same compile-time type. */
1562 type = get_type_of_local(ctx, ctx->args[0].slot);
1563 for (i = 1; i < ctx->args_l; i++) {
1564 const struct type *t = get_type_of_local(ctx, ctx->args[i].slot);
1565 if (unlikely(t != type))
1566 internal(file_line, "gen_array_create: types do not match: %u != %u", type->tag, t->tag);
1569 if (TYPE_IS_FLAT(type)) {
/* Flat path: every argument must actually be flat, otherwise escape. */
1571 for (i = 0; i < ctx->args_l; i++) {
1572 g(gen_test_1_cached(ctx, ctx->args[i].slot, escape_label));
1573 flag_set(ctx, ctx->args[i].slot, false);
/* Upcall: data_alloc_array_flat_slot_mayfail(frame, slot, count). */
1576 g(gen_upcall_start(ctx, 3));
1578 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1579 g(gen_upcall_argument(ctx, 0));
1581 g(gen_load_constant(ctx, R_ARG1, ctx->args[0].slot));
1582 g(gen_upcall_argument(ctx, 1));
1584 g(gen_load_constant(ctx, R_ARG2, ctx->args_l));
1585 g(gen_upcall_argument(ctx, 2));
1587 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1588 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1589 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1591 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Copy each flat element into the array body. */
1593 offset = data_array_offset;
1594 for (i = 0; i < ctx->args_l; i++) {
1595 g(gen_memcpy_from_slot(ctx, R_SAVED_1, offset, ctx->args[i].slot));
1596 offset += type->size;
/* Pointer path: data_alloc_array_pointers_mayfail(count, count). */
1600 g(gen_upcall_start(ctx, 2));
1602 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
1603 g(gen_upcall_argument(ctx, 0));
1605 g(gen_load_constant(ctx, R_ARG1, ctx->args_l));
1606 g(gen_upcall_argument(ctx, 1));
1608 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1609 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1610 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1612 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Load the pointer-vector base from u_.array_pointers.pointer. */
1614 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
1615 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
1617 gen_address_offset();
/* Store each argument's pointer into the vector (base in R_SAVED_2). */
1620 for (i = 0; i < ctx->args_l; i++) {
1621 g(gen_frame_get_pointer(ctx, ctx->args[i].slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1622 g(gen_address(ctx, R_SAVED_2, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1623 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1624 gen_address_offset();
1625 gen_one(R_SCRATCH_1);
1626 offset += sizeof(pointer_t);
/* Publish the array into slot_r as a compressed pointer. */
1629 g(gen_compress_pointer(ctx, R_SAVED_1));
1630 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
/*
 * Emit code that allocates an empty flat array of element type
 * `local_type` (length 0) via cg_upcall_data_alloc_array_flat_types_ptr_mayfail
 * and stores the compressed pointer into slot_r.  Allocation failure
 * escapes to the interpreter.
 */
1634 static bool attr_w gen_array_create_empty_flat(struct codegen_context *ctx, frame_t slot_r, frame_t local_type)
1636 uint32_t escape_label;
1638 escape_label = alloc_escape_label(ctx);
1639 if (unlikely(!escape_label))
/* Upcall: (frame, local_type, length = 0). */
1642 g(gen_upcall_start(ctx, 3));
1644 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1645 g(gen_upcall_argument(ctx, 0));
1647 g(gen_load_constant(ctx, R_ARG1, local_type));
1648 g(gen_upcall_argument(ctx, 1));
1650 g(gen_load_constant(ctx, R_ARG2, 0));
1651 g(gen_upcall_argument(ctx, 2));
1653 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 3));
1654 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1655 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1657 g(gen_compress_pointer(ctx, R_RET0));
1658 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code that allocates an empty pointer array (length 0, capacity 0)
 * via cg_upcall_data_alloc_array_pointers_mayfail and stores the
 * compressed pointer into slot_r.  Allocation failure escapes.
 */
1663 static bool attr_w gen_array_create_empty(struct codegen_context *ctx, frame_t slot_r)
1665 uint32_t escape_label;
1667 escape_label = alloc_escape_label(ctx);
1668 if (unlikely(!escape_label))
/* Upcall: (n_entries = 0, n_allocated = 0). */
1671 g(gen_upcall_start(ctx, 2));
1673 g(gen_load_constant(ctx, R_ARG0, 0));
1674 g(gen_upcall_argument(ctx, 0));
1676 g(gen_load_constant(ctx, R_ARG1, 0));
1677 g(gen_upcall_argument(ctx, 1));
1679 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1680 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1681 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1683 g(gen_compress_pointer(ctx, R_RET0));
1684 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code for "array fill": create in slot_r an array of slot_2 copies
 * of the value in slot_1.
 *
 * Visible paths:
 *  - fixed-size flat destination (flat_array type): copy the flat element
 *    def->n_elements times directly into the frame slot;
 *  - OPCODE_ARRAY_FILL_FLAG_SPARSE: obtain an owned pointer to the
 *    element (boxing flat data via flat_to_data if needed) and upcall
 *    cg_upcall_array_create_sparse(length, pointer);
 *  - flat element, dense: upcall cg_upcall_array_create_flat;
 *  - pointer element, dense: upcall cg_upcall_array_create_pointers with
 *    the current instruction offset and slot_2.
 * A negative length escapes to the interpreter.
 *
 * NOTE(review): original line numbering is discontinuous; elided lines
 * are not shown.  Also, `®1` / `®4` in the gen_frame_get() calls
 * below look like mis-encoded `&reg1` / `&reg4` (the `&reg;` HTML entity
 * was decoded during extraction) — restore from upstream, do not retype.
 */
1689 static bool attr_w gen_array_fill(struct codegen_context *ctx, frame_t slot_1, frame_t flags, frame_t slot_2, frame_t slot_r)
1691 const struct type *content_type, *array_type;
1692 uint32_t escape_label;
1693 unsigned reg1, reg4;
1695 escape_label = alloc_escape_label(ctx);
1696 if (unlikely(!escape_label))
/* The length slot must hold flat data. */
1699 g(gen_test_1_cached(ctx, slot_2, escape_label));
1701 content_type = get_type_of_local(ctx, slot_1);
1702 array_type = get_type_of_local(ctx, slot_r);
1704 if (TYPE_IS_FLAT(array_type)) {
/* Fixed-size flat array in the frame: replicate the element in place. */
1705 int64_t dest_offset;
1707 const struct flat_array_definition *def = type_def(array_type,flat_array);
1709 ajla_assert_lo(TYPE_IS_FLAT(content_type), (file_line, "gen_array_fill: array is flat but content is not"));
1711 g(gen_test_1_cached(ctx, slot_1, escape_label));
1713 dest_offset = (size_t)slot_r * slot_size;
1714 for (i = 0; i < def->n_elements; i++) {
1715 g(gen_memcpy_from_slot(ctx, R_FRAME, dest_offset, slot_1));
1716 dest_offset += def->base->size;
1718 flag_set(ctx, slot_1, false);
1719 flag_set(ctx, slot_r, false);
1724 if (ctx->registers[slot_1] >= 0)
1725 g(spill(ctx, slot_1));
1727 if (unlikely((flags & OPCODE_ARRAY_FILL_FLAG_SPARSE) != 0)) {
1728 uint32_t get_ptr_label, got_ptr_label;
1730 get_ptr_label = alloc_label(ctx);
1731 if (unlikely(!get_ptr_label))
1734 got_ptr_label = alloc_label(ctx);
1735 if (unlikely(!got_ptr_label))
1738 if (TYPE_IS_FLAT(content_type)) {
/* Flag set means slot_1 already holds a pointer -> skip boxing. */
1739 g(gen_test_1_cached(ctx, slot_1, get_ptr_label));
/* Box the flat element: flat_to_data(frame, slot_1, &frame[slot_1]). */
1741 g(gen_upcall_start(ctx, 3));
1743 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1744 g(gen_upcall_argument(ctx, 0));
1746 g(gen_load_constant(ctx, R_ARG1, slot_1));
1747 g(gen_upcall_argument(ctx, 1));
1749 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1750 g(gen_upcall_argument(ctx, 2));
1752 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1754 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_SCRATCH_4, R_RET0));
1756 gen_insn(INSN_JMP, 0, 0, 0);
1757 gen_four(got_ptr_label);
1760 gen_label(get_ptr_label);
1762 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_4));
1764 gen_label(got_ptr_label);
/* Negative length -> escape; then array_create_sparse(len, ptr). */
1766 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, 0, R_SCRATCH_1, ®1));
1767 g(gen_jmp_if_negative(ctx, reg1, escape_label));
1769 g(gen_upcall_start(ctx, 2));
1770 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg1));
1771 g(gen_upcall_argument(ctx, 0));
1773 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SCRATCH_4));
1774 g(gen_upcall_argument(ctx, 1));
1776 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
1777 } else if (TYPE_IS_FLAT(content_type)) {
/* Dense flat fill: array_create_flat(frame, len, slot_1). */
1778 g(gen_test_1_cached(ctx, slot_1, escape_label));
1779 flag_set(ctx, slot_1, false);
1781 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, 0, R_SCRATCH_4, ®4));
1782 g(gen_jmp_if_negative(ctx, reg4, escape_label));
1784 g(gen_upcall_start(ctx, 3));
1785 g(gen_mov(ctx, i_size(OP_SIZE_INT), R_ARG1, reg4));
1786 g(gen_upcall_argument(ctx, 1));
1788 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1789 g(gen_upcall_argument(ctx, 0));
1791 g(gen_load_constant(ctx, R_ARG2, slot_1));
1792 g(gen_upcall_argument(ctx, 2));
1794 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_flat), 3));
/* Dense pointer fill: array_create_pointers(frame, ip, slot_2, ptr). */
1796 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1798 g(gen_upcall_start(ctx, 4));
1800 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG3, R_SCRATCH_1));
1801 g(gen_upcall_argument(ctx, 3));
1803 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1804 g(gen_upcall_argument(ctx, 0));
/* Pass the bytecode offset of the current instruction. */
1806 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
1807 g(gen_upcall_argument(ctx, 1));
1809 g(gen_load_constant(ctx, R_ARG2, slot_2));
1810 g(gen_upcall_argument(ctx, 2));
1812 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
1814 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code that materializes a string/array literal into slot_r:
 * allocate a flat array of `len` elements of type `tag` via
 * cg_upcall_data_alloc_array_flat_tag_mayfail, store the compressed
 * pointer into slot_r, then copy the literal bytes out of the function's
 * own code area (the literal lives inside da(ctx->fn,function)->code) at
 * offset `string - code`, using the element type's size and alignment.
 */
1819 static bool attr_w gen_array_string(struct codegen_context *ctx, type_tag_t tag, uint8_t *string, frame_t len, frame_t slot_r)
1821 uint32_t escape_label;
1823 const struct type *type;
1825 escape_label = alloc_escape_label(ctx);
1826 if (unlikely(!escape_label))
/* Upcall: data_alloc_array_flat_tag_mayfail(tag, len). */
1829 g(gen_upcall_start(ctx, 2));
1831 g(gen_load_constant(ctx, R_ARG0, tag));
1832 g(gen_upcall_argument(ctx, 0));
1834 g(gen_load_constant(ctx, R_ARG1, len));
1835 g(gen_upcall_argument(ctx, 1));
1837 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1838 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1839 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the array base across the frame store / copy below. */
1841 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1843 g(gen_compress_pointer(ctx, R_RET0));
1844 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/* Source address: function code base + literal offset. */
1846 g(load_function_offset(ctx, R_SCRATCH_3, offsetof(struct data, u_.function.code)));
1848 offset = string - cast_ptr(uint8_t *, da(ctx->fn,function)->code);
1849 type = type_get_from_tag(tag);
/* Alignment is capped at the code granularity (align_of(code_t)). */
1850 g(gen_memcpy_raw(ctx, R_SAVED_1, data_array_offset, R_SCRATCH_3, offset, (size_t)len * type->size, minimum(type->align, align_of(code_t))));
/*
 * Emit code computing
 *     reg_dst = reg_src + reg_index * element_size + offset_src
 * (scaled array-element address).  Strategies, in the order tried:
 *  - power-of-two size on x86 with a small shift: single LEA;
 *  - architectures with shifted-add: ADD with an LSL-shifted operand;
 *  - otherwise shift reg_index into reg_dst, or multiply by an
 *    immediate, or (non-power-of-two without multiply) decompose the
 *    size into shift/add steps using R_CONST_IMM as a temporary
 *    (clobbering R_CONST_IMM);
 * finally add reg_src and offset_src (s390 can fold the offset into an
 * LEA-style address).
 *
 * NOTE(review): original line numbering is discontinuous; the loop over
 * the bits of e_size, gen_one() operands and #else/#endif lines are not
 * visible in this extract — comments describe only the visible lines.
 */
1855 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src)
1857 if (is_power_of_2(element_size)) {
1858 unsigned shift = log_2(element_size);
1859 #if defined(ARCH_X86)
/* x86 fast path: LEA can scale by 1/2/4/8 and add a 32-bit offset. */
1860 if (shift <= 3 && imm_is_32bit(offset_src)) {
1861 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
1866 gen_eight(offset_src);
1870 if (ARCH_HAS_SHIFTED_ADD(shift)) {
/* ADD with a shifted register operand (e.g. ARM's LSL form). */
1871 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1874 gen_one(ARG_SHIFTED_REGISTER);
1875 gen_one(ARG_SHIFT_LSL | shift);
/* Fallback: shift the index into reg_dst, then continue below. */
1882 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_dst, reg_index, shift, 0));
1883 reg_index = reg_dst;
/* Non-power-of-two with multiply support: index *= element_size. */
1887 g(gen_imm(ctx, element_size, IMM_PURPOSE_MUL, i_size(OP_SIZE_ADDRESS)));
1888 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_MUL, ALU_WRITES_FLAGS(ALU_MUL, is_imm()));
1892 reg_index = reg_dst;
/* No multiply: decompose element_size into shift/add steps.
 * R_CONST_IMM holds a running shifted copy of the original index. */
1894 size_t e_size = element_size;
1896 bool first_match = true;
1898 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_CONST_IMM, reg_index));
1899 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), reg_dst, reg_index));
1900 reg_index = reg_dst;
1903 g(gen_load_constant(ctx, reg_index, 0));
/* First set bit: establish the accumulator with a plain shift. */
1909 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_index, reg_index, sh, false));
1910 first_match = false;
1911 } else if (ARCH_HAS_SHIFTED_ADD(sh)) {
/* Subsequent bits: accumulate via shifted add when available. */
1912 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1915 gen_one(ARG_SHIFTED_REGISTER);
1916 gen_one(ARG_SHIFT_LSL | sh);
1917 gen_one(R_CONST_IMM);
/* Otherwise shift the saved copy and add it explicitly. */
1920 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_CONST_IMM, R_CONST_IMM, sh, false));
1923 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_index, reg_index, R_CONST_IMM, 0));
1931 #if defined(ARCH_S390)
/* s390: fold base + index + displacement into one address generation. */
1932 if (offset_src && s390_inline_address(offset_src)) {
1933 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), 0, 0);
1938 gen_eight(offset_src);
/* Generic tail: reg_dst = index + base, then add the displacement. */
1942 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_dst, reg_index, reg_src, 0));
1947 g(gen_imm(ctx, offset_src, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
1948 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
/*
 * Emit code loading one flat array element (at reg_src + reg_idx*size + offset_src)
 * into the frame slot slot_r.  On x86 and s390 a single scaled/displaced memory
 * move is emitted when the element size permits; otherwise the address is
 * materialized via gen_scaled_array_address() and the element copied with
 * gen_memcpy_to_slot().
 */
1956 static bool attr_w gen_scaled_array_load(struct codegen_context *ctx, unsigned reg_src, unsigned reg_idx, int64_t offset_src, frame_t slot_r)
1958 const struct type *t = get_type_of_local(ctx, slot_r);
1959 #if defined(ARCH_X86)
1960 if (is_power_of_2(t->size)) {
1961 unsigned shift = log_2(t->size);
/* x86 scaled addressing covers scale factors up to 8 with 32-bit displacement */
1962 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
/* negative register means the slot lives in memory -> stage through R_SCRATCH_2 */
1963 short reg = ctx->registers[slot_r];
1964 gen_insn(INSN_MOV, shift, 0, 0);
1965 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1966 gen_one(ARG_ADDRESS_2 + shift);
1969 gen_eight(offset_src);
/* spill the staged value into the frame slot when it has no register */
1972 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, shift));
1973 gen_insn(INSN_MOV, shift, 0, 0);
1974 gen_address_offset();
1975 gen_one(R_SCRATCH_2);
1982 #if defined(ARCH_S390)
/* s390: single-byte elements can use a sign-extending load with inline offset */
1983 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
1984 short reg = ctx->registers[slot_r];
1985 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
1986 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1987 gen_one(ARG_ADDRESS_2);
1990 gen_eight(offset_src);
1993 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_1));
1994 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
1995 gen_address_offset();
1996 gen_one(R_SCRATCH_2);
/* generic path: compute the element address, then block-copy into the slot.
 * NOTE(review): offset_src is passed to gen_memcpy_to_slot rather than to
 * gen_scaled_array_address (which gets 0) -- presumably the copy applies the
 * displacement itself; confirm against the elided lines. */
2002 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, reg_idx, 0));
2004 g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, offset_src));
/*
 * Emit code storing frame slot slot_1 into a flat array element at
 * reg_src + index*size + offset_src.  Mirror image of gen_scaled_array_load():
 * x86/s390 fast paths use a single scaled/displaced store; the generic path
 * computes the address and uses gen_memcpy_from_slot().
 */
2009 static bool attr_w gen_scaled_array_store(struct codegen_context *ctx, unsigned reg_src, int64_t offset_src, frame_t slot_1)
2011 const struct type *t = get_type_of_local(ctx, slot_1);
2012 #if defined(ARCH_X86)
2013 if (is_power_of_2(t->size)) {
2014 unsigned shift = log_2(t->size);
2015 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
2016 short reg = ctx->registers[slot_1];
/* slot not in a register: load the value from the frame into R_SCRATCH_3 first */
2018 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_OFFSET, shift));
2019 gen_insn(INSN_MOV, shift, 0, 0);
2020 gen_one(R_SCRATCH_3);
2021 gen_address_offset();
/* scaled store into the array element */
2025 gen_insn(INSN_MOV, shift, 0, 0);
2026 gen_one(ARG_ADDRESS_2 + shift);
2028 gen_one(R_SCRATCH_2);
2029 gen_eight(offset_src);
2036 #if defined(ARCH_S390)
2037 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
2038 short reg = ctx->registers[slot_1];
2040 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_1));
2041 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
2042 gen_one(R_SCRATCH_3);
2043 gen_address_offset();
2047 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
2048 gen_one(ARG_ADDRESS_2);
2050 gen_one(R_SCRATCH_2);
2051 gen_eight(offset_src);
/* generic path; the index is expected in R_SCRATCH_2 (used as both index and
 * destination of the address computation) */
2057 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, R_SCRATCH_2, 0));
2059 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, offset_src, slot_1));
/*
 * Emit a bounds/capacity check: compare reg_len against the array's
 * n_used_entries (allocated == false) or n_allocated_entries (allocated == true)
 * and jump to escape_label when `cond` holds.  x86 compares against memory
 * directly; other targets load the count into R_SCRATCH_3 first.
 */
2064 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label)
2066 size_t offset = !allocated ? offsetof(struct data, u_.array_flat.n_used_entries) : offsetof(struct data, u_.array_flat.n_allocated_entries);
2067 #if defined(ARCH_X86)
/* x86: cmp reg_len, [reg_array + offset] without a separate load */
2068 g(gen_address(ctx, reg_array, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2069 gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1);
2071 gen_address_offset();
2073 gen_insn(INSN_JMP_COND, OP_SIZE_INT, cond, 0);
2074 gen_four(escape_label);
/* generic: load the count (sign-extended where the arch prefers it), then compare */
2076 g(gen_address(ctx, reg_array, offset, ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2077 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2078 gen_one(R_SCRATCH_3);
2079 gen_address_offset();
2081 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size(OP_SIZE_INT), reg_len, R_SCRATCH_3, cond, escape_label));
/*
 * Emit code for an array-indexing opcode: load element slot_1[slot_idx] into
 * slot_r.  Handles three representations:
 *   - flat arrays stored directly in the frame (TYPE_TAG_flat_array),
 *   - heap DATA_TAG_array_flat / array_slice with a flat result type,
 *   - heap DATA_TAG_array_pointers with a pointer result.
 * Anything else (thunks, out-of-range indices, other tags) escapes to the
 * interpreter via escape_label.  OPCODE_ARRAY_INDEX_IN_RANGE skips the bounds
 * check; OPCODE_STRUCT_MAY_BORROW stores a borrowed pointer without
 * incrementing the reference count.
 */
2086 static bool attr_w gen_array_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_idx, frame_t slot_r, frame_t flags)
2088 const struct type *t = get_type_of_local(ctx, slot_1);
2089 const struct type *tr = get_type_of_local(ctx, slot_r);
2090 uint32_t escape_label;
2093 escape_label = alloc_escape_label(ctx);
2094 if (unlikely(!escape_label))
/* --- case 1: the array is flat and lives directly in the frame --- */
2097 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2098 const struct flat_array_definition *def = type_def(t,flat_array);
2100 g(gen_test_2_cached(ctx, slot_1, slot_idx, escape_label));
2102 flag_set(ctx, slot_1, false);
2103 flag_set(ctx, slot_idx, false);
/* NOTE(review): "®2" below looks like mojibake of "&reg2" (address-of the
 * output register variable) -- restore from upstream source. */
2105 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, 0, R_SCRATCH_2, ®2));
2107 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2108 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg2, def->n_elements, COND_AE, escape_label));
2110 g(gen_scaled_array_load(ctx, R_FRAME, reg2, (size_t)slot_1 * slot_size, slot_r));
/* --- case 2/3: the array is a heap pointer; escape if it is still a thunk --- */
2114 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
2115 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
2116 g(gen_barrier(ctx));
2117 g(gen_decompress_pointer(ctx, false, R_SCRATCH_1, 0));
2119 g(gen_test_1_cached(ctx, slot_idx, escape_label));
2120 flag_set(ctx, slot_idx, false);
/* NOTE(review): same "®2" mojibake as above */
2121 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, 0, R_SCRATCH_2, ®2));
2123 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2124 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
2126 if (TYPE_IS_FLAT(tr)) {
/* flat result: accept array_flat or array_slice (tag > slice escapes);
 * for a slice, redirect R_SCRATCH_1 to its flat data via a per-arch
 * conditional move / branch sequence */
2128 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_slice, COND_A, escape_label, R_SCRATCH_4));
2129 #if defined(ARCH_X86) || defined(ARCH_S390)
2130 #if defined(ARCH_X86)
/* without cmov (or s390 misc_45) fall back to the label-based path below */
2131 if (unlikely(!cpu_test_feature(CPU_FEATURE_cmov)))
2133 if (unlikely(!cpu_test_feature(CPU_FEATURE_misc_45)))
2136 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2137 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2138 gen_one(R_SCRATCH_3);
2139 gen_address_offset();
2142 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2143 gen_insn(INSN_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2144 gen_one(R_SCRATCH_1);
2145 gen_one(R_SCRATCH_1);
2146 gen_address_offset();
2147 #elif defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
/* no flags: XOR the tag and branch on the result register */
2148 g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_SCRATCH_4, R_SCRATCH_4, DATA_TAG_array_slice, 0));
2150 label = alloc_label(ctx);
2151 if (unlikely(!label))
2154 gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
2155 gen_one(R_SCRATCH_4);
2158 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2159 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2160 gen_one(R_SCRATCH_1);
2161 gen_address_offset();
2165 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2166 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2167 gen_one(R_SCRATCH_3);
2168 gen_address_offset();
2170 #if defined(ARCH_POWER)
2171 if (!cpu_test_feature(CPU_FEATURE_v203))
2174 #if defined(ARCH_SPARC)
2178 gen_insn(INSN_CMOV, i_size(OP_SIZE_ADDRESS), COND_E, 0);
2179 gen_one(R_SCRATCH_1);
2180 gen_one(R_SCRATCH_1);
2181 gen_one(R_SCRATCH_3);
/* SPARC: materialize the tag comparison into R_CMP_RESULT, then MOVR */
2183 g(gen_imm(ctx, DATA_TAG_array_slice, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
2184 gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, COND_E, 0);
2185 gen_one(R_CMP_RESULT);
2186 gen_one(R_SCRATCH_4);
2189 gen_insn(INSN_MOVR, OP_SIZE_NATIVE, COND_NE, 0);
2190 gen_one(R_SCRATCH_1);
2191 gen_one(R_SCRATCH_1);
2192 gen_one(R_CMP_RESULT);
2193 gen_one(R_SCRATCH_3);
/* generic fallback: skip the mov over with a conditional branch */
2199 label = alloc_label(ctx);
2200 if (unlikely(!label))
2202 gen_insn(INSN_JMP_COND, OP_SIZE_4, COND_NE, 0);
2205 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, R_SCRATCH_3));
/* R_SCRATCH_1 now points at flat element data; load the element */
2209 g(gen_scaled_array_load(ctx, R_SCRATCH_1, reg2, data_array_offset, slot_r));
2210 flag_set(ctx, slot_r, false);
/* result slot must be flat but the array holds pointers: always escape */
2213 if (flag_must_be_flat(ctx, slot_r)) {
2214 gen_insn(INSN_JMP, 0, 0, 0);
2215 gen_four(escape_label);
/* --- pointer-element path: only DATA_TAG_array_pointers is handled inline --- */
2219 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
2221 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2222 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2223 gen_one(R_SCRATCH_1);
2224 gen_address_offset();
2226 #if defined(ARCH_X86) || defined(ARCH_ARM)
/* scaled load of the slot-sized pointer element */
2227 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2228 gen_one(R_SCRATCH_1);
2229 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
2230 gen_one(R_SCRATCH_1);
2234 goto scaled_load_done;
2236 #if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390) || defined(ARCH_SPARC)
/* base+index addressing: shift the index by OP_SIZE_SLOT first */
2237 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2239 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2240 gen_one(R_SCRATCH_1);
2241 gen_one(ARG_ADDRESS_2);
2242 gen_one(R_SCRATCH_1);
2243 gen_one(R_SCRATCH_2);
2246 goto scaled_load_done;
2248 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
2249 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
2250 gen_one(R_SCRATCH_2);
2251 gen_one(ARG_SHIFTED_REGISTER);
2252 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
2254 gen_one(R_SCRATCH_1);
2256 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2257 gen_one(R_SCRATCH_1);
2258 gen_one(ARG_ADDRESS_1);
2259 gen_one(R_SCRATCH_2);
2262 goto scaled_load_done;
/* last-resort: explicit shift + add + load */
2265 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2267 g(gen_3address_alu(ctx, OP_SIZE_ADDRESS, ALU_ADD, R_SCRATCH_2, R_SCRATCH_2, R_SCRATCH_1, 0));
2269 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2270 gen_one(R_SCRATCH_1);
2271 gen_one(ARG_ADDRESS_1);
2272 gen_one(R_SCRATCH_2);
/* loaded pointer may itself be a thunk -> escape */
2275 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
2277 if (flags & OPCODE_STRUCT_MAY_BORROW) {
/* borrow: store without taking a reference; flag cleared = borrowed */
2278 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
2279 flag_set(ctx, slot_r, false);
2281 g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1));
/* owned copy: upcall to increment the reference count */
2283 g(gen_upcall_start(ctx, 1));
2284 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
2286 g(gen_upcall_argument(ctx, 0));
2287 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/*
 * Emit code computing the length of the array in slot_1 into slot_r.
 * When slot_2 != NO_FRAME_T the opcode instead produces a comparison result
 * (length > slot_2 value, COND_G) rather than the raw length.  Flat arrays
 * have a compile-time constant length; heap arrays of tag flat/slice/pointers
 * read their entry count from a common offset, anything else escapes.
 */
2293 static bool attr_w gen_array_len(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r)
2295 const struct type *t = get_type_of_local(ctx, slot_1);
2296 uint32_t escape_label;
2299 escape_label = alloc_escape_label(ctx);
2300 if (unlikely(!escape_label))
2303 if (slot_2 != NO_FRAME_T) {
2304 g(gen_test_1_cached(ctx, slot_2, escape_label));
2305 flag_set(ctx, slot_2, false);
/* flat array: the length is a constant from the type definition */
2308 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2309 if (slot_2 == NO_FRAME_T) {
2310 g(gen_frame_store_imm(ctx, OP_SIZE_INT, slot_r, 0, (unsigned)type_def(t,flat_array)->n_elements));
2312 g(gen_frame_load_cmp_imm_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, 0, type_def(t,flat_array)->n_elements, COND_G, slot_r));
2314 flag_set(ctx, slot_r, false);
2316 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
2317 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
2318 g(gen_barrier(ctx));
/* compile-time layout checks: the single n_used_entries load below is only
 * valid if all three array variants share the field offset... */
2320 if (offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_slice.n_entries) ||
2321 offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_pointers.n_used_entries)) {
/* ...and if the tag comparison below covers exactly flat/slice/pointers */
2325 if (DATA_TAG_array_flat != DATA_TAG_array_slice - 1 ||
2326 DATA_TAG_array_slice != DATA_TAG_array_pointers - 1 ||
2327 DATA_TAG_array_same < DATA_TAG_array_flat ||
2328 DATA_TAG_array_btree < DATA_TAG_array_flat ||
2329 DATA_TAG_array_incomplete < DATA_TAG_array_flat) {
2334 g(gen_mov(ctx, OP_SIZE_NATIVE, R_SCRATCH_2, R_SCRATCH_1));
/* tag > array_pointers (same/btree/incomplete) escapes to the interpreter */
2336 g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_A, escape_label, R_SCRATCH_1));
2338 if (slot_2 == NO_FRAME_T) {
2339 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
2341 target = R_SCRATCH_1;
2344 gen_pointer_compression(R_SCRATCH_2);
2345 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2346 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2348 gen_address_offset_compressed();
2350 if (slot_2 == NO_FRAME_T) {
2351 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
2353 g(gen_frame_load_cmp_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, 0, R_SCRATCH_1, COND_G, slot_r));
2355 flag_set(ctx, slot_r, false);
/*
 * Emit code for array-subrange: slot_r = slot_array[slot_from .. slot_to],
 * implemented entirely as an upcall to cg_upcall_array_sub.  Inline code only
 * validates the operands, spills register-resident slots so the upcall sees
 * current frame memory, marshals arguments, and handles the
 * OPCODE_FLAG_FREE_ARGUMENT ownership transfer.
 */
2360 static bool attr_w gen_array_sub(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_to, frame_t slot_r, frame_t flags)
2362 const struct type *t = get_type_of_local(ctx, slot_array);
2363 uint32_t escape_label, upcall_label;
2365 escape_label = alloc_escape_label(ctx);
2366 if (unlikely(!escape_label))
2369 upcall_label = alloc_label(ctx);
2370 if (unlikely(!upcall_label))
/* flat arrays are not handled by the upcall path: escape */
2373 if (unlikely(TYPE_IS_FLAT(t))) {
2374 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2377 g(gen_test_2_cached(ctx, slot_from, slot_to, escape_label));
/* the upcall reads slots from frame memory, so spill anything held in registers */
2379 if (ctx->registers[slot_array] >= 0)
2380 g(spill(ctx, slot_array));
2381 if (ctx->registers[slot_from] >= 0)
2382 g(spill(ctx, slot_from));
2383 if (ctx->registers[slot_to] >= 0)
2384 g(spill(ctx, slot_to));
2386 g(gen_upcall_start(ctx, 4));
2388 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, R_ARG0));
2389 g(gen_upcall_argument(ctx, 0));
2391 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, R_ARG1));
2392 g(gen_upcall_argument(ctx, 1));
2394 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_to, 0, R_ARG2));
2395 g(gen_upcall_argument(ctx, 2));
/* 4th argument: whether the upcall may consume (free) the source array */
2397 g(gen_load_constant(ctx, R_ARG3, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2398 g(gen_upcall_argument(ctx, 3));
/* if the slot's flag is set at runtime the array is borrowed here -> pass 0 */
2400 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2401 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2402 g(gen_load_constant(ctx, R_ARG3, 0));
2403 g(gen_upcall_argument(ctx, 3));
2406 gen_label(upcall_label);
2407 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
/* NULL result means the upcall could not handle it -> escape */
2409 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
/* ownership was transferred: clear the (now dangling) source slot */
2411 if (slot_array != slot_r) {
2412 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2413 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2414 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2415 flag_set(ctx, slot_array, false);
2419 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code for array-skip: slot_r = slot_array[slot_from ..], as an upcall
 * to cg_upcall_array_skip.  Structure parallels gen_array_sub() with one less
 * index argument: validate, spill, marshal 3 arguments, handle
 * OPCODE_FLAG_FREE_ARGUMENT, escape on NULL result.
 */
2424 static bool attr_w gen_array_skip(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_r, frame_t flags)
2426 const struct type *t = get_type_of_local(ctx, slot_array);
2427 uint32_t escape_label, upcall_label;
2429 escape_label = alloc_escape_label(ctx);
2430 if (unlikely(!escape_label))
2433 upcall_label = alloc_label(ctx);
2434 if (unlikely(!upcall_label))
2437 if (unlikely(TYPE_IS_FLAT(t))) {
2438 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2441 g(gen_test_1_cached(ctx, slot_from, escape_label));
/* spill register-resident operands so the upcall reads current frame memory */
2443 if (ctx->registers[slot_array] >= 0)
2444 g(spill(ctx, slot_array));
2445 if (ctx->registers[slot_from] >= 0)
2446 g(spill(ctx, slot_from));
2448 g(gen_upcall_start(ctx, 3));
2450 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, R_ARG0));
2451 g(gen_upcall_argument(ctx, 0));
2453 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, R_ARG1));
2454 g(gen_upcall_argument(ctx, 1));
/* 3rd argument: whether the upcall may consume (free) the source array */
2456 g(gen_load_constant(ctx, R_ARG2, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2457 g(gen_upcall_argument(ctx, 2));
/* runtime-borrowed array overrides the free flag with 0 */
2459 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2460 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2461 g(gen_load_constant(ctx, R_ARG2, 0));
2462 g(gen_upcall_argument(ctx, 2));
2465 gen_label(upcall_label);
2466 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
2468 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
2470 if (slot_array != slot_r) {
2471 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2472 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2473 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2474 flag_set(ctx, slot_array, false);
2478 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code for array concatenation: slot_r = slot_1 ++ slot_2, via the
 * cg_upcall_array_join upcall.  Inline code escapes for flat arrays still in
 * the frame, thunks, and incomplete arrays; otherwise takes (possibly freeing)
 * references to both operands and passes them to the upcall.
 */
2483 static bool attr_w gen_array_append(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2485 uint32_t escape_label;
2487 escape_label = alloc_escape_label(ctx);
2488 if (unlikely(!escape_label))
/* flat operands must already be boxed (flag set); otherwise escape */
2491 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_1))))
2492 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2493 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_2))))
2494 g(gen_test_1_jz_cached(ctx, slot_2, escape_label));
2496 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
2497 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, true, escape_label));
2498 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_2, 0, R_SCRATCH_2));
2499 g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, true, escape_label));
2500 g(gen_barrier(ctx));
/* incomplete (lazily produced) arrays are joined by the interpreter */
2502 g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_1));
2503 g(gen_compare_da_tag(ctx, R_SCRATCH_2, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_2));
/* take references (consuming the slots when the FREE flags say so);
 * slot_2's pointer is parked in callee-saved R_SAVED_1 across the first call */
2505 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SAVED_1));
2506 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
2507 g(gen_upcall_start(ctx, 2));
2508 gen_insn(ARCH_PREFERS_SX(i_size(OP_SIZE_SLOT)) ? INSN_MOVSX : INSN_MOV, i_size(OP_SIZE_SLOT), 0, 0);
2510 gen_one(R_SCRATCH_1);
2511 g(gen_upcall_argument(ctx, 0));
2512 gen_insn(ARCH_PREFERS_SX(i_size(OP_SIZE_SLOT)) ? INSN_MOVSX : INSN_MOV, i_size(OP_SIZE_SLOT), 0, 0);
2515 g(gen_upcall_argument(ctx, 1));
2516 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2517 g(gen_frame_set_pointer(ctx, slot_r, R_RET0));
/*
 * Emit code appending one flat element (slot_2) to an in-place-mutable flat
 * array (slot_1), result in slot_r.  Only valid when the opcode consumes the
 * array (OPCODE_FLAG_FREE_ARGUMENT); requires tag DATA_TAG_array_flat with
 * refcount 1 and spare capacity -- anything else escapes.  On success bumps
 * n_used_entries and stores the element at the old count's index.
 */
2521 static bool attr_w gen_array_append_one_flat(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2523 uint32_t escape_label;
2525 escape_label = alloc_escape_label(ctx);
2526 if (unlikely(!escape_label))
/* without ownership of the array we cannot mutate in place: always escape */
2529 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2530 gen_insn(INSN_JMP, 0, 0, 0);
2531 gen_four(escape_label);
2535 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2536 g(gen_test_1_cached(ctx, slot_2, escape_label));
2537 flag_set(ctx, slot_2, false);
/* array pointer survives the helpers below in callee-saved R_SAVED_1 */
2539 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SAVED_1));
2540 g(gen_ptr_is_thunk(ctx, R_SAVED_1, true, escape_label));
2541 g(gen_barrier(ctx));
2543 g(gen_decompress_pointer(ctx, false, R_SAVED_1, 0));
/* must be a uniquely-referenced flat array to mutate it */
2545 g(gen_compare_tag_and_refcount(ctx, R_SAVED_1, DATA_TAG_array_flat, escape_label, R_SCRATCH_1));
2547 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2548 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2549 gen_one(R_SCRATCH_2);
2550 gen_address_offset();
/* used == allocated means the array is full: escape to grow it */
2552 g(gen_check_array_len(ctx, R_SAVED_1, true, R_SCRATCH_2, COND_E, escape_label));
2554 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SCRATCH_2, 1, 0));
2556 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.array_flat.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2557 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2558 gen_address_offset();
2559 gen_one(R_SCRATCH_1);
/* store the new element at index = old count (still in R_SCRATCH_2) */
2561 g(gen_scaled_array_store(ctx, R_SAVED_1, data_array_offset, slot_2));
/* move ownership from slot_1 to slot_r when they differ */
2563 if (slot_1 != slot_r) {
2564 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2565 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2566 flag_set(ctx, slot_1, false);
2567 g(gen_compress_pointer(ctx, R_SAVED_1));
2568 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
/*
 * Emit code appending one pointer element (slot_2) to an in-place-mutable
 * pointer array (slot_1), result in slot_r.  Counterpart of
 * gen_array_append_one_flat() for DATA_TAG_array_pointers: requires ownership
 * (OPCODE_FLAG_FREE_ARGUMENT), unique refcount, and spare capacity; bumps
 * n_used_entries and stores the element pointer at the old count's index.
 */
2574 static bool attr_w gen_array_append_one(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2576 uint32_t escape_label;
2578 escape_label = alloc_escape_label(ctx);
2579 if (unlikely(!escape_label))
/* cannot mutate a borrowed array in place: always escape */
2582 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2583 gen_insn(INSN_JMP, 0, 0, 0);
2584 gen_four(escape_label);
2588 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2590 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SAVED_1));
2591 g(gen_ptr_is_thunk(ctx, R_SAVED_1, true, escape_label));
2592 g(gen_barrier(ctx));
2594 g(gen_decompress_pointer(ctx, false, R_SAVED_1, 0));
2596 g(gen_compare_tag_and_refcount(ctx, R_SAVED_1, DATA_TAG_array_pointers, escape_label, R_SCRATCH_1));
/* old element count kept in callee-saved R_SAVED_2 across the pointer fetch */
2598 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.array_pointers.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2599 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2601 gen_address_offset();
2603 g(gen_check_array_len(ctx, R_SAVED_1, true, R_SAVED_2, COND_E, escape_label));
/* take (possibly consuming) reference to the element being appended */
2605 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SCRATCH_2));
2607 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SAVED_2, 1, 0));
2609 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.array_pointers.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2610 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2611 gen_address_offset();
2612 gen_one(R_SCRATCH_1);
/* load the element-pointer base and address slot [old count] */
2614 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2615 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2616 gen_one(R_SCRATCH_3);
2617 gen_address_offset();
2619 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_2, R_SCRATCH_3, R_SAVED_2, 0));
2621 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2622 gen_one(ARG_ADDRESS_1);
2625 gen_one(R_SCRATCH_2);
/* transfer ownership from slot_1 to slot_r when they differ */
2627 if (slot_1 != slot_r) {
2628 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2629 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2630 flag_set(ctx, slot_1, false);
2631 g(gen_compress_pointer(ctx, R_SAVED_1));
2632 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1));
2638 static bool attr_w gen_io(struct codegen_context *ctx, frame_t code, frame_t slot_1, frame_t slot_2, frame_t slot_3)
2640 uint32_t reload_label;
2643 reload_label = alloc_reload_label(ctx);
2644 if (unlikely(!reload_label))
2648 mem_free(ctx->var_aux);
2649 ctx->var_aux = NULL;
2651 ctx->var_aux = mem_alloc_array_mayfail(mem_alloc_mayfail, frame_t *, 0, 0, slot_1 + slot_2, sizeof(frame_t), &ctx->err);
2652 if (unlikely(!ctx->var_aux))
2655 for (i = 0; i < slot_1 + slot_2; i++)
2656 ctx->var_aux[i] = get_uint32(ctx);
2657 for (i = 0; i < slot_3; i++)
2660 for (i = 0; i < slot_2; i++) {
2661 frame_t input_slot = ctx->var_aux[slot_1 + i];
2662 if (ctx->registers[input_slot] >= 0)
2663 g(spill(ctx, input_slot));
2666 /*gen_insn(INSN_JMP, 0, 0, 0); gen_four(alloc_escape_label(ctx));*/
2668 g(gen_upcall_start(ctx, 3));
2669 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
2670 g(gen_upcall_argument(ctx, 0));
2672 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
2673 g(gen_upcall_argument(ctx, 1));
2675 g(gen_load_constant(ctx, R_ARG2, ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3));
2676 g(gen_upcall_argument(ctx, 2));
2677 /*debug("arg2: %08x", ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3);*/
2679 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2680 g(gen_sanitize_returned_pointer(ctx, R_RET0));
2681 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_ADDRESS, R_RET0, ptr_to_num(POINTER_FOLLOW_THUNK_GO), COND_NE, reload_label));
2683 for (i = 0; i < slot_1; i++) {
2684 frame_t output_slot = ctx->var_aux[i];
2685 if (ctx->registers[output_slot] >= 0)
2686 g(unspill(ctx, output_slot));
2688 for (i = 0; i < slot_1; i++) {
2689 frame_t output_slot = ctx->var_aux[i];
2690 flag_set_unknown(ctx, output_slot);
2691 if (da(ctx->fn,function)->local_variables_flags[output_slot].must_be_flat) {
2692 uint32_t escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2693 if (unlikely(!escape_label)) {
2694 mem_free(ctx->var_aux);
2697 g(gen_test_1(ctx, R_FRAME, output_slot, 0, escape_label, false, TEST));