2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
/*
 * gen_flat_move_copy - emit code that copies a flat (by-value) slot
 * slot_1 into slot_r.
 *
 * Escapes to the interpreter if slot_1's flag test fires (the slot does
 * not hold flat data), otherwise memcpys the slot contents and records
 * both slots' flags as clear in the compile-time flag cache.
 *
 * NOTE(review): this chunk has lines elided by extraction (the embedded
 * original line numbers skip); braces and the failure-path returns are
 * missing.  Code lines below are kept byte-identical.
 */
19 static bool attr_w gen_flat_move_copy(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r)
21 uint32_t escape_label;
23 escape_label = alloc_escape_label(ctx);
/* label allocation can fail; the (elided) statement presumably returns false */
24 if (unlikely(!escape_label))
/* escape if slot_1 is flagged (i.e. holds a pointer, not flat data) */
27 g(gen_test_1_cached(ctx, slot_1, escape_label));
29 g(gen_memcpy_slots(ctx, slot_r, slot_1));
/* both slots now known flag-clear at compile time */
31 flag_set(ctx, slot_1, false);
32 flag_set(ctx, slot_r, false);
/*
 * gen_ref_move_copy - emit code that moves or copies a reference
 * (pointer) from slot_1 to slot_r, for OPCODE_REF_COPY /
 * OPCODE_REF_MOVE / OPCODE_REF_MOVE_CLEAR.
 *
 * If the destination must be flat, the operation cannot be compiled and
 * an unconditional jump to the escape label is emitted instead.
 * Otherwise the pointer is loaded, optionally thunk-checked (when the
 * destination requires evaluated data but the source may be a thunk),
 * stored into slot_r with its flag set, and the reference count is
 * adjusted: REF_COPY always takes a reference; REF_MOVE takes one only
 * if the source was borrowed (tested at run time via TEST_CLEAR when
 * not known at compile time); REF_MOVE_CLEAR additionally clears the
 * source slot.
 *
 * NOTE(review): lines elided by extraction (numbers skip); `®` below
 * is a mis-encoded `&reg` left untouched.
 */
37 static bool attr_w gen_ref_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
40 if (flag_must_be_flat(ctx, slot_r)) {
41 uint32_t escape_label = alloc_escape_label(ctx);
42 if (unlikely(!escape_label))
/* destination must be flat but we have a reference: always escape */
44 gen_insn(INSN_JMP, 0, 0, 0);
45 gen_four(escape_label);
48 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
/* source may be a thunk but destination must be evaluated data:
   escape on thunk and remember slot_r is not a thunk afterwards */
49 if (!da(ctx->fn,function)->local_variables_flags[slot_1].must_be_data &&
50 da(ctx->fn,function)->local_variables_flags[slot_r].must_be_data) {
51 uint32_t escape_label = alloc_escape_label(ctx);
52 if (unlikely(!escape_label))
54 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
55 ctx->flag_cache[slot_r] |= FLAG_CACHE_IS_NOT_THUNK;
/* store the pointer into the destination slot and set its flag */
57 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, reg));
58 g(gen_set_1(ctx, R_FRAME, slot_r, 0, true));
59 flag_set(ctx, slot_r, true);
60 if (code == OPCODE_REF_COPY) {
/* a copy always needs an extra reference */
61 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
62 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
63 g(gen_upcall_argument(ctx, 0));
64 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* a move of a never-borrowed pointer just transfers ownership */
65 } else if (code == OPCODE_REF_MOVE && !da(ctx->fn,function)->local_variables_flags[slot_1].may_be_borrowed) {
66 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
67 flag_set(ctx, slot_1, false);
/* general case: reference the pointer only if the source flag was set
   (owned); label_id skips the upcall otherwise */
70 if (unlikely(!(label_id = alloc_label(ctx))))
72 if (flag_is_set(ctx, slot_1)) {
73 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
76 if (flag_is_clear(ctx, slot_1))
78 g(gen_test_1(ctx, R_FRAME, slot_1, 0, label_id, false, TEST_CLEAR));
80 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
81 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
82 g(gen_upcall_argument(ctx, 0));
83 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* MOVE_CLEAR also wipes the source slot contents */
86 if (code == OPCODE_REF_MOVE_CLEAR)
87 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
88 flag_set(ctx, slot_1, false);
/*
 * gen_box_move_copy - emit code that boxes the flat value in slot_1
 * into a heap pointer stored in slot_r (OPCODE_BOX_* family).
 *
 * If the destination must stay flat, the operation escapes
 * unconditionally.  Otherwise the slot is spilled to memory if it lives
 * in a register, and one of two upcalls copies the variable to a fresh
 * pointer: the _noderef variant for plain copy, the _deref variant for
 * BOX_MOVE_CLEAR (which consumes the source).  The returned pointer
 * (R_RET0) is stored into slot_r.
 *
 * NOTE(review): lines elided by extraction; code kept byte-identical.
 */
95 static bool attr_w gen_box_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
96 if (flag_must_be_flat(ctx, slot_r)) {
97 uint32_t escape_label = alloc_escape_label(ctx);
98 if (unlikely(!escape_label))
99 gen_insn(INSN_JMP, 0, 0, 0);
100 gen_four(escape_label);
/* the upcall reads the slot from the frame, so spill first */
104 if (slot_is_register(ctx, slot_1))
105 g(spill(ctx, slot_1));
107 if (code != OPCODE_BOX_MOVE_CLEAR) {
108 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer_noderef), 2));
110 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
111 g(gen_upcall_argument(ctx, 0));
113 g(gen_load_constant(ctx, R_ARG1, slot_1));
114 g(gen_upcall_argument(ctx, 1));
116 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer_noderef), 2));
/* BOX_MOVE_CLEAR: the _deref variant consumes the source value */
118 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer_deref), 2));
120 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
121 g(gen_upcall_argument(ctx, 0));
123 g(gen_load_constant(ctx, R_ARG1, slot_1));
124 g(gen_upcall_argument(ctx, 1));
126 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer_deref), 2));
128 flag_set(ctx, slot_1, false);
/* store the freshly boxed pointer into the destination slot */
131 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_eval - emit code that forces slot_1 to be evaluated: if the slot
 * holds a pointer (flag set) and that pointer is a thunk, escape to the
 * interpreter; flat values skip the check entirely via skip_label.
 *
 * NOTE(review): lines elided by extraction; `®` is a mis-encoded
 * `&reg` left untouched.
 */
138 static bool attr_w gen_eval(struct codegen_context *ctx, frame_t slot_1)
138 uint32_t escape_label, skip_label;
141 escape_label = alloc_escape_label(ctx);
142 if (unlikely(!escape_label))
145 skip_label = alloc_label(ctx);
146 if (unlikely(!skip_label))
/* flat value (flag clear) cannot be a thunk: jump over the check */
149 g(gen_test_1_jz_cached(ctx, slot_1, skip_label));
151 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
152 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
154 gen_label(skip_label);
/*
 * gen_jump - emit a (possibly conditional) jump to the bytecode
 * position current_position + jmp_offset, allocating a code label for
 * that ip on first use.
 *
 * Forms, in order: compare two registers and jump; IA64 jump on a
 * predicate register; jump-on-zero test of a single register;
 * unconditional jump; FP-condition jump on LoongArch/MIPS/PA-RISC;
 * and finally a plain condition-flag jump.
 *
 * NOTE(review): lines elided by extraction (#else/#endif and closing
 * braces are missing from view); code kept byte-identical.
 */
159 static bool attr_w gen_jump(struct codegen_context *ctx, int32_t jmp_offset, unsigned op_size, unsigned cond, unsigned reg1, unsigned reg2)
/* jmp_offset is in bytes; convert to a code_t-granular instruction index */
161 ip_t ip = (ctx->current_position - da(ctx->fn,function)->code) + (jmp_offset / (int)sizeof(code_t));
162 if (likely(!ctx->code_labels[ip])) {
163 ctx->code_labels[ip] = alloc_label(ctx);
164 if (unlikely(!ctx->code_labels[ip]))
/* -1U marks "no register supplied" for reg1/reg2 */
167 if (reg1 != -1U && reg2 != -1U) {
168 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size_cmp(op_size), reg1, reg2, cond, ctx->code_labels[ip]));
169 #if defined(ARCH_IA64)
170 } else if (reg_is_p(reg1)) {
171 gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, cond, 0);
173 gen_four(ctx->code_labels[ip]);
175 } else if (reg1 != -1U) {
176 g(gen_jmp_on_zero(ctx, op_size, reg1, cond, ctx->code_labels[ip]));
177 } else if (cond == COND_ALWAYS) {
178 gen_insn(INSN_JMP, 0, 0, 0);
179 gen_four(ctx->code_labels[ip]);
180 #if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_PARISC)
181 } else if (cond & COND_FP) {
182 gen_insn(INSN_JMP_FP_TEST, 0, cond, 0);
183 gen_four(ctx->code_labels[ip]);
/* fallback: jump on the CPU condition flags set by a prior instruction */
186 gen_insn(COND_IS_LOGICAL(cond) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, i_size_cmp(op_size), cond, 0);
187 gen_four(ctx->code_labels[ip]);
/*
 * gen_cond_jump - emit a jump taken when the ajla_flat_option_t value
 * in `slot` equals zero (COND_E after a compare against 0).
 *
 * On x86/s390 the comparison can address frame memory directly (s390
 * only for byte-sized operands); otherwise the value is loaded into a
 * register first and gen_jump() tests it there.
 *
 * NOTE(review): lines elided by extraction; `®1` is a mis-encoded
 * `&reg1` left untouched.
 */
194 static bool attr_w gen_cond_jump(struct codegen_context *ctx, frame_t slot, int32_t jmp_offset)
194 unsigned size = log_2(sizeof(ajla_flat_option_t));
195 size_t attr_unused offset;
/* register-resident slot: cannot use the memory-operand fast path */
197 if (slot_is_register(ctx, slot)) {
200 #if defined(ARCH_S390) || defined(ARCH_X86)
201 offset = (size_t)slot * slot_size;
202 #if defined(ARCH_S390)
203 if (size != OP_SIZE_1)
/* compare the option value in frame memory against immediate 0 */
206 g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_MVI_CLI_OFFSET, size));
207 gen_insn(INSN_CMP, size, 0, 2);
208 gen_address_offset();
212 g(gen_jump(ctx, jmp_offset, size, COND_E, -1U, -1U));
/* generic path: load the slot into a register and test it */
217 g(gen_frame_get(ctx, size, i_size(size) == size ? garbage : native, slot, R_SCRATCH_1, ®1));
218 g(gen_jump(ctx, jmp_offset, size, COND_E, reg1, -1U));
/*
 * gen_load_fn_or_curry - emit code that builds a function_reference
 * data object holding ctx->args_l curried arguments and stores the
 * resulting pointer into slot_r.
 *
 * fn_idx == NO_FRAME_T means "curry" (the callee pointer comes from
 * slot_fn, stored indirect); otherwise the callee is looked up in the
 * local directory and stored direct.  Each argument is either copied
 * inline as a flat built-in value, converted via the flat_to_data
 * upcall, or taken as a pointer from the frame.
 *
 * NOTE(review): many lines elided by extraction (else branches,
 * returns, braces); code lines kept byte-identical.
 */
224 static bool attr_w gen_load_fn_or_curry(struct codegen_context *ctx, frame_t fn_idx, frame_t slot_fn, frame_t slot_r, unsigned flags)
224 bool curry = fn_idx == NO_FRAME_T;
225 uint32_t escape_label;
228 escape_label = alloc_escape_label(ctx);
229 if (unlikely(!escape_label))
/* allocate the function_reference object; escape on allocation failure */
232 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
234 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
235 g(gen_upcall_argument(ctx, 0));
237 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
238 g(gen_sanitize_returned_pointer(ctx, R_RET0));
239 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* keep the new object in the callee-saved R_SAVED_1 across upcalls */
241 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* direct reference: function pointer from the local directory */
244 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
246 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.direct), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
247 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
248 gen_address_offset();
249 gen_one(R_SCRATCH_1);
251 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
252 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
253 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
254 gen_address_offset();
/* curry: take the callee pointer from slot_fn, store it indirect */
257 g(gen_frame_get_pointer(ctx, slot_fn, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
259 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.indirect), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
260 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
261 gen_address_offset();
262 gen_one(R_SCRATCH_1);
264 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
265 g(gen_imm(ctx, 1, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
266 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
267 gen_address_offset();
/* copy each curried argument into the function_reference */
271 for (i = 0; i < ctx->args_l; i++) {
272 uintptr_t arg_offset_tag = offsetof(struct data, u_.function_reference.arguments[i].tag);
273 uintptr_t arg_offset_ptr = offsetof(struct data, u_.function_reference.arguments[i].u.ptr);
274 uintptr_t arg_offset_slot = offsetof(struct data, u_.function_reference.arguments[i].u.slot);
275 frame_t arg_slot = ctx->args[i].slot;
276 const struct type *t = get_type_of_local(ctx, arg_slot);
277 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
278 skip_flat_label = alloc_label(ctx);
279 if (unlikely(!skip_flat_label))
281 set_ptr_label = alloc_label(ctx);
282 if (unlikely(!set_ptr_label))
284 next_arg_label = alloc_label(ctx);
285 if (unlikely(!next_arg_label))
287 if (TYPE_IS_FLAT(t)) {
288 g(gen_test_1_cached(ctx, arg_slot, skip_flat_label));
/* small built-in flat value: store tag + raw bits inline */
289 if (t->size <= slot_size && TYPE_TAG_IS_BUILTIN(t->tag)) {
290 unsigned copy_size = OP_SIZE_SLOT;
291 if (is_power_of_2(t->size))
292 copy_size = log_2(t->size);
294 copy_size = maximum(copy_size, OP_SIZE_4);
295 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
296 g(gen_imm(ctx, t->tag, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
297 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
298 gen_address_offset();
301 if (slot_is_register(ctx, arg_slot)) {
302 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
303 gen_insn(INSN_MOV, spill_size(t), 0, 0);
304 gen_address_offset();
305 gen_one(ctx->registers[arg_slot]);
/* s390 without long displacement cannot load a byte directly here */
308 #if defined(ARCH_S390)
309 if (copy_size == OP_SIZE_1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
310 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, IMM_PURPOSE_LDR_OFFSET, copy_size));
311 gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
312 gen_one(R_SCRATCH_1);
313 gen_one(R_SCRATCH_1);
314 gen_address_offset();
318 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, ARCH_PREFERS_SX(copy_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, copy_size));
319 gen_insn(ARCH_PREFERS_SX(copy_size) ? INSN_MOVSX : INSN_MOV, copy_size, 0, 0);
320 gen_one(R_SCRATCH_1);
321 gen_address_offset();
324 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
325 gen_insn(INSN_MOV, copy_size, 0, 0);
326 gen_address_offset();
327 gen_one(R_SCRATCH_1);
329 gen_insn(INSN_JMP, 0, 0, 0);
330 gen_four(next_arg_label);
/* large or non-builtin flat value: box it via the flat_to_data upcall */
332 if (slot_is_register(ctx, arg_slot))
333 g(spill(ctx, arg_slot));
335 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
337 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
338 g(gen_upcall_argument(ctx, 0));
340 g(gen_load_constant(ctx, R_ARG1, arg_slot));
341 g(gen_upcall_argument(ctx, 1));
343 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
345 gen_insn(INSN_JMP, 0, 0, 0);
346 gen_four(set_ptr_label);
/* pointer path: fetch the pointer from the frame into R_RET0 */
350 gen_label(skip_flat_label);
351 g(gen_frame_get_pointer(ctx, arg_slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* common tail: tag = unknown, store the pointer */
353 gen_label(set_ptr_label);
354 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
355 g(gen_imm(ctx, TYPE_TAG_unknown, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
356 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
357 gen_address_offset();
360 g(gen_address(ctx, R_SAVED_1, arg_offset_ptr, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
361 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
362 gen_address_offset();
365 gen_label(next_arg_label);
/* publish the finished function_reference into slot_r */
368 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
369 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_call - emit code for OPCODE_CALL / OPCODE_CALL_STRICT /
 * OPCODE_CALL_SPARK: build the callee's stack frame below the current
 * one and tail-jump into the callee's unoptimized generated code.
 *
 * Steps visible here: check that enough stack slots remain (escape if
 * not), initialize the new frame header (available_slots, previous_ip,
 * timestamp, call mode), clear the callee flag bitmap, copy or move
 * each argument (flat copy, flat_to_data boxing, borrow/give pointer
 * transfer with refcount fixups), store the callee function pointer,
 * rebase R_FRAME, and jump indirect through the callee's codegen
 * structure.
 *
 * NOTE(review): many lines elided by extraction (loop bodies, #else /
 * #endif, returns); code lines kept byte-identical.
 */
374 static bool attr_w gen_call(struct codegen_context *ctx, code_t code, frame_t fn_idx)
376 struct data *new_fn = ctx->local_directory[fn_idx];
377 frame_t required_slots = da(new_fn,function)->frame_slots;
378 frame_t bitmap_slots = da(new_fn,function)->n_bitmap_slots;
380 uint32_t escape_label;
381 int64_t new_fp_offset;
382 uchar_efficient_t call_mode;
/* POWER avoids condition flags here even though it has them */
384 bool arch_use_flags = ARCH_HAS_FLAGS;
385 #if defined(ARCH_POWER)
386 arch_use_flags = false;
389 escape_label = alloc_escape_label(ctx);
390 if (unlikely(!escape_label))
/* (elided body) — presumably spills register-resident variables */
393 for (v = MIN_USEABLE_SLOT; v < function_n_variables(ctx->fn); v++) {
394 if (slot_is_register(ctx, v)) {
/* stack check: available_slots -= required_slots, escape on underflow */
399 g(gen_frame_load_raw(ctx, log_2(sizeof(stack_size_t)), native, 0, frame_offs(available_slots), false, R_SCRATCH_1));
400 g(gen_imm(ctx, required_slots, IMM_PURPOSE_SUB, i_size(log_2(sizeof(stack_size_t)))));
401 gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(i_size(log_2(sizeof(stack_size_t)))), i_size(log_2(sizeof(stack_size_t))), ALU_SUB, arch_use_flags);
402 gen_one(R_SCRATCH_1);
403 gen_one(R_SCRATCH_1);
406 if (arch_use_flags) {
407 gen_insn(COND_IS_LOGICAL(COND_B) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, log_2(sizeof(stack_size_t)), COND_B, 0);
408 gen_four(escape_label);
410 g(gen_cmp_test_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, R_SCRATCH_1, COND_S, escape_label));
/* the callee frame sits required_slots below the current frame */
413 new_fp_offset = -(ssize_t)(required_slots * slot_size);
415 g(gen_frame_store_raw(ctx, log_2(sizeof(stack_size_t)), 0, new_fp_offset + frame_offs(available_slots), R_SCRATCH_1));
416 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(ip_t)), 0, new_fp_offset + frame_offs(previous_ip), ctx->return_values - da(ctx->fn,function)->code));
417 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
418 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
419 call_mode = code == OPCODE_CALL ? CALL_MODE_NORMAL : code == OPCODE_CALL_STRICT ? CALL_MODE_STRICT : CALL_MODE_SPARK;
420 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(uchar_efficient_t)), 0, new_fp_offset + frame_offs(mode), call_mode));
422 g(gen_clear_bitmap(ctx, frame_offset, R_FRAME, new_fp_offset, bitmap_slots));
/* transfer each argument into the callee frame */
424 for (i = 0; i < ctx->args_l; i++) {
425 const struct code_arg *src_arg = &ctx->args[i];
426 const struct local_arg *dest_arg = &da(new_fn,function)->args[i];
427 const struct type *t = get_type_of_local(ctx, src_arg->slot);
428 uint32_t non_flat_label, thunk_label, incr_ref_label, next_arg_label;
430 non_flat_label = alloc_label(ctx);
431 if (unlikely(!non_flat_label))
433 thunk_label = alloc_label(ctx);
434 if (unlikely(!thunk_label))
436 incr_ref_label = alloc_label(ctx);
437 if (unlikely(!incr_ref_label))
439 next_arg_label = alloc_label(ctx);
440 if (unlikely(!next_arg_label))
442 if (TYPE_IS_FLAT(t)) {
443 g(gen_test_1_cached(ctx, src_arg->slot, non_flat_label));
/* flat value accepted as flat: plain memcpy into the callee slot */
444 if (dest_arg->may_be_flat) {
445 g(gen_memcpy_from_slot(ctx, R_FRAME, new_fp_offset + (size_t)dest_arg->slot * slot_size, src_arg->slot));
/* callee wants a pointer: box the flat value first */
447 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
449 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
450 g(gen_upcall_argument(ctx, 0));
452 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
453 g(gen_upcall_argument(ctx, 1));
455 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
457 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_RET0));
459 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
462 if (flag_is_clear(ctx, src_arg->slot))
463 goto skip_ref_argument;
465 gen_insn(INSN_JMP, 0, 0, 0);
466 gen_four(next_arg_label);
/* pointer argument paths */
468 gen_label(non_flat_label);
/* lend: pass the pointer without refcount change (not for thunks) */
470 if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_LEND) {
471 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
472 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
473 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
474 gen_insn(INSN_JMP, 0, 0, 0);
475 gen_four(next_arg_label);
/* give: transfer ownership and clear the source slot */
476 } else if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_GIVE) {
477 g(gen_test_1_cached(ctx, src_arg->slot, thunk_label));
478 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
479 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
480 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
481 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
482 gen_insn(INSN_JMP, 0, 0, 0);
483 gen_four(next_arg_label);
/* generic path: store pointer, set callee flag, fix refcounts */
486 gen_label(thunk_label);
487 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
488 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
489 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
490 if (src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT) {
491 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
492 if (flag_is_set(ctx, src_arg->slot)) {
493 g(gen_set_1(ctx, R_FRAME, src_arg->slot, 0, false));
494 flag_set(ctx, src_arg->slot, false);
495 goto skip_ref_argument;
497 if (flag_is_clear(ctx, src_arg->slot))
/* runtime test: take a reference only for borrowed pointers */
499 g(gen_test_1(ctx, R_FRAME, src_arg->slot, 0, incr_ref_label, true, TEST_CLEAR));
500 gen_insn(INSN_JMP, 0, 0, 0);
501 gen_four(next_arg_label);
504 gen_label(incr_ref_label);
506 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
508 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
509 g(gen_upcall_argument(ctx, 0));
511 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
514 gen_label(next_arg_label);
/* load the callee's function pointer and store it in the new frame */
517 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
519 g(gen_address(ctx, R_SCRATCH_1, 0, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
520 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
521 gen_one(R_SCRATCH_1);
522 gen_address_offset();
524 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
526 g(gen_frame_store_raw(ctx, OP_SIZE_ADDRESS, 0, frame_offs(function) + new_fp_offset, R_SCRATCH_1));
/* rebase the frame pointer to the callee frame (SUB of the negated
   offset where ADD of a negative immediate is awkward) */
528 #if !defined(ARCH_X86) && !defined(ARCH_PARISC)
529 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_SUB, R_FRAME, R_FRAME, -new_fp_offset, 0));
531 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* fetch the callee's codegen pointer; escape (nospill) if it is a thunk */
534 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
535 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
536 gen_one(R_SCRATCH_1);
537 gen_address_offset();
539 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, ctx->escape_nospill_label));
/* tail-jump into the callee's unoptimized generated code */
542 gen_pointer_compression(R_SCRATCH_1);
543 #if (defined(ARCH_X86) && !defined(ARCH_X86_X32)) || defined(ARCH_ARM32)
544 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
545 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
546 gen_address_offset_compressed();
548 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
549 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
550 gen_one(R_SCRATCH_1);
551 gen_address_offset_compressed();
553 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
554 gen_one(R_SCRATCH_1);
/* flag cache is invalid after a call boundary */
556 g(clear_flag_cache(ctx));
/*
 * gen_return - emit code for returning from the current function back
 * to the caller's generated code.
 *
 * Visible steps: load the caller frame's function pointer (escape if
 * NULL, i.e. no compiled caller) and its codegen pointer (escape if it
 * is a thunk); propagate the timestamp; compute the return address in
 * the caller's bytecode from previous_ip; for each return value, either
 * copy the flat bits directly into the caller's slot (when the caller's
 * return opcode allows flat returns — tested via OPCODE_MAY_RETURN_FLAT
 * at the return site) or box / hand over a pointer with the proper
 * refcount adjustment; finally rebase R_FRAME and indirect-jump into
 * the caller's unoptimized code at the return ip.
 *
 * NOTE(review): many lines elided by extraction (#else/#endif, labels
 * `skip_ref_argument`, `scaled_store_done`, `scaled_jmp_done`, loop
 * braces, returns); code lines kept byte-identical.  `®`/`®1` are
 * mis-encoded `&reg`/`&reg1` left untouched.
 */
563 static bool attr_w gen_return(struct codegen_context *ctx)
563 int64_t new_fp_offset;
564 uint32_t escape_label;
566 int64_t retval_offset;
567 unsigned attr_unused reg1;
569 escape_label = alloc_escape_label(ctx);
570 if (unlikely(!escape_label))
/* the caller frame sits frame_slots above the current frame */
573 new_fp_offset = (size_t)da(ctx->fn,function)->frame_slots * slot_size;
575 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_2));
/* NULL caller function: cannot return through compiled code — escape */
577 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_SCRATCH_2, COND_E, escape_label));
579 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
580 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
581 gen_one(R_SCRATCH_1);
582 gen_address_offset();
584 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
/* copy the timestamp up to the caller frame */
587 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
588 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
/* R_SAVED_1 = address of the caller's return-site bytecode */
590 g(gen_frame_load_raw(ctx, log_2(sizeof(ip_t)), native, 0, frame_offs(previous_ip), false, R_SCRATCH_1));
592 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
593 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
594 gen_one(R_SCRATCH_2);
595 gen_address_offset();
597 g(gen_lea3(ctx, R_SAVED_1, R_SCRATCH_2, R_SCRATCH_1, log_2(sizeof(code_t)), 0));
/* write each return value into the caller's destination slot */
600 for (i = 0; i < ctx->args_l; i++) {
601 const struct code_arg *src_arg = &ctx->args[i];
602 const struct type *t = get_type_of_local(ctx, src_arg->slot);
603 uint32_t copy_ptr_label, load_write_ptr_label, write_ptr_label, next_arg_label;
605 copy_ptr_label = alloc_label(ctx);
606 if (unlikely(!copy_ptr_label))
609 load_write_ptr_label = alloc_label(ctx);
610 if (unlikely(!load_write_ptr_label))
613 write_ptr_label = alloc_label(ctx);
614 if (unlikely(!write_ptr_label))
617 next_arg_label = alloc_label(ctx);
618 if (unlikely(!next_arg_label))
/* R_SAVED_2 = destination slot index, read from the return-site code */
621 g(gen_load_code_32(ctx, R_SAVED_2, R_SAVED_1, retval_offset));
623 if (TYPE_IS_FLAT(t)) {
624 uint32_t flat_to_data_label;
625 g(gen_test_1_cached(ctx, src_arg->slot, copy_ptr_label));
627 flat_to_data_label = alloc_label(ctx);
628 if (unlikely(!flat_to_data_label))
/* test OPCODE_MAY_RETURN_FLAT in the return-site opcode word */
631 #if defined(ARCH_X86)
632 g(gen_address(ctx, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(code_t))));
633 g(gen_imm(ctx, OPCODE_MAY_RETURN_FLAT, IMM_PURPOSE_TEST, log_2(sizeof(code_t))));
634 gen_insn(INSN_TEST, log_2(sizeof(code_t)), 0, 1);
635 gen_address_offset();
638 gen_insn(INSN_JMP_COND, log_2(sizeof(code_t)), COND_E, 0);
639 gen_four(flat_to_data_label);
641 g(gen_load_two(ctx, R_SCRATCH_1, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3)));
643 g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, OPCODE_MAY_RETURN_FLAT, COND_E, flat_to_data_label));
/* x86 can store straight to a scaled [frame + slot*size] address */
645 #if defined(ARCH_X86)
646 if (is_power_of_2(t->size) && t->size <= 2U << OP_SIZE_NATIVE) {
647 if (t->size == 2U << OP_SIZE_NATIVE) {
648 g(gen_frame_load_2(ctx, OP_SIZE_NATIVE, src_arg->slot, 0, R_SCRATCH_1, R_SCRATCH_2));
650 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
651 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
654 gen_eight(new_fp_offset + lo_word(OP_SIZE_NATIVE));
655 gen_one(R_SCRATCH_1);
657 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
658 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
661 gen_eight(new_fp_offset + hi_word(OP_SIZE_NATIVE));
662 gen_one(R_SCRATCH_2);
664 g(gen_frame_get(ctx, log_2(t->size), garbage, src_arg->slot, R_SCRATCH_1, ®1));
666 gen_insn(INSN_MOV, log_2(t->size), 0, 0);
667 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
670 gen_eight(new_fp_offset);
/* generic path: compute the destination address, then memcpy */
676 g(gen_lea3(ctx, R_SCRATCH_2, R_FRAME, R_SAVED_2, OP_SIZE_SLOT, new_fp_offset));
678 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, 0, src_arg->slot));
681 gen_insn(INSN_JMP, 0, 0, 0);
682 gen_four(next_arg_label);
/* caller cannot take a flat return: box the value */
684 gen_label(flat_to_data_label);
686 if (slot_is_register(ctx, src_arg->slot))
687 g(spill(ctx, src_arg->slot));
689 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
691 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
692 g(gen_upcall_argument(ctx, 0));
694 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
695 g(gen_upcall_argument(ctx, 1));
697 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
699 if (flag_is_clear(ctx, src_arg->slot))
700 goto skip_ref_argument;
702 gen_insn(INSN_JMP, 0, 0, 0);
703 gen_four(write_ptr_label);
/* pointer return: take a reference unless ownership is transferred */
706 gen_label(copy_ptr_label);
708 if (unlikely(!(src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT))) {
709 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
710 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
711 g(gen_upcall_argument(ctx, 0));
712 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
713 } else if (da(ctx->fn,function)->local_variables_flags[src_arg->slot].may_be_borrowed) {
714 g(gen_test_1_cached(ctx, src_arg->slot, load_write_ptr_label));
715 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
716 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
717 g(gen_upcall_argument(ctx, 0));
718 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
721 gen_label(load_write_ptr_label);
723 g(gen_frame_load_slot(ctx, src_arg->slot, R_RET0));
/* store the pointer (in R_RET0) into caller slot R_SAVED_2 */
726 gen_label(write_ptr_label);
728 #if defined(ARCH_X86)
729 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
730 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
733 gen_eight(new_fp_offset);
735 goto scaled_store_done;
737 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
738 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
739 gen_one(R_SCRATCH_3);
741 gen_one(ARG_SHIFTED_REGISTER);
742 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
745 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
746 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
747 gen_address_offset();
749 goto scaled_store_done;
/* no shifted-add: shift and add manually to form the address */
752 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_3, R_SAVED_2, OP_SIZE_SLOT, false));
754 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_3, R_SCRATCH_3, R_FRAME, 0));
756 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
757 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
758 gen_address_offset();
/* mark the caller's destination slot as holding a pointer */
762 g(gen_set_1_variable(ctx, R_SAVED_2, new_fp_offset, true));
764 gen_label(next_arg_label);
/* each return-value record is 4 (or 6) code_t bytes long */
766 retval_offset += 4 + 2 * (ARG_MODE_N >= 3);
/* reload caller function + codegen and jump back at the return ip */
769 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_1));
771 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
772 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
773 gen_one(R_SCRATCH_1);
774 gen_address_offset();
776 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
778 g(gen_load_code_32(ctx, R_SCRATCH_2, R_SAVED_1, retval_offset + 2));
/* rebase R_FRAME to the caller frame before jumping */
780 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* per-architecture scaled indirect jump through
   u_.codegen.unoptimized_code[R_SCRATCH_2] */
782 #if defined(ARCH_X86) && !defined(ARCH_X86_X32)
783 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
784 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
785 gen_one(R_SCRATCH_1);
786 gen_one(R_SCRATCH_2);
787 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
789 goto scaled_jmp_done;
791 #if defined(ARCH_X86)
792 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
793 gen_one(R_SCRATCH_1);
794 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
795 gen_one(R_SCRATCH_1);
796 gen_one(R_SCRATCH_2);
797 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
799 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
800 gen_one(R_SCRATCH_1);
802 goto scaled_jmp_done;
804 #if defined(ARCH_ARM32)
805 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
806 gen_one(R_SCRATCH_1);
807 gen_one(R_SCRATCH_1);
808 gen_one(ARG_SHIFTED_REGISTER);
809 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
810 gen_one(R_SCRATCH_2);
812 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
813 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
814 gen_address_offset();
816 goto scaled_jmp_done;
818 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_ADDRESS)) {
819 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
820 gen_one(R_SCRATCH_1);
821 gen_one(R_SCRATCH_1);
822 gen_one(ARG_SHIFTED_REGISTER);
823 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
824 gen_one(R_SCRATCH_2);
826 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
827 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
828 gen_one(R_SCRATCH_1);
829 gen_address_offset();
831 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
832 gen_one(R_SCRATCH_1);
834 goto scaled_jmp_done;
/* fully generic fallback: shift, add, load, jump */
837 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_2, R_SCRATCH_2, OP_SIZE_ADDRESS, false));
839 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 0));
841 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
842 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
843 gen_one(R_SCRATCH_1);
844 gen_address_offset();
846 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
847 gen_one(R_SCRATCH_1);
849 goto scaled_jmp_done;
/* Forward declarations for helpers used by the structured-access
 * code generation that follows. */
854 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src);
855 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label);
/* Generate machine code for a "structured write": walk the accessor chain in
 * ctx->args[] (record field / array index / option variant steps) starting
 * from slot_struct, keeping the address of the current element in R_SAVED_1,
 * and finally store the value from slot_elem into the addressed location.
 * On any condition the generated fast path cannot handle (thunks, shared
 * refcounts, out-of-range index, wrong option tag) it jumps to escape_label
 * to fall back to the interpreter.
 * NOTE(review): this chunk is an extraction with lines elided (error returns,
 * gen_one() operand lines, break statements); comments describe only what the
 * visible lines establish. */
857 static bool attr_w gen_structured(struct codegen_context *ctx, frame_t slot_struct, frame_t slot_elem)
859 uint32_t escape_label;
860 const struct type *struct_type, *elem_type;
864 escape_label = alloc_escape_label(ctx);
865 if (unlikely(!escape_label))
868 struct_type = get_type_of_local(ctx, slot_struct);
869 elem_type = get_type_of_local(ctx, slot_elem);
/* Flat (non-pointer) structure: the data lives directly in the frame slot. */
871 if (TYPE_IS_FLAT(struct_type) && struct_type->tag != TYPE_TAG_flat_option) {
872 if (!TYPE_IS_FLAT(elem_type)) {
875 g(gen_test_1_cached(ctx, slot_struct, escape_label));
876 flag_set(ctx, slot_struct, false);
880 g(gen_test_1_jz_cached(ctx, slot_struct, escape_label));
/* R_SAVED_1 <- address of the structure inside the frame; the slot must be
 * spilled first so the in-memory copy is current. */
884 if (slot_is_register(ctx, slot_struct))
885 g(spill(ctx, slot_struct));
886 g(gen_frame_address(ctx, slot_struct, 0, R_SAVED_1));
/* Flat accessor chain: advance R_SAVED_1 by field offset / scaled index. */
888 for (i = 0; i < ctx->args_l; i++) {
889 frame_t param_slot = ctx->args[i].slot;
891 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
892 case OPCODE_STRUCTURED_RECORD: {
893 struct flat_record_definition_entry *e;
894 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_record, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_record));
895 e = &type_def(struct_type,flat_record)->entries[param_slot];
897 g(gen_imm(ctx, e->flat_offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
898 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, is_imm(), ctx->const_imm));
903 struct_type = e->subtype;
906 case OPCODE_STRUCTURED_ARRAY: {
907 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_array, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_array));
908 g(gen_test_1_cached(ctx, param_slot, escape_label));
909 flag_set(ctx, param_slot, false);
910 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_1, ®1));
/* Bounds check: escape if index >= n_elements (unsigned compare). */
912 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg1, type_def(struct_type,flat_array)->n_elements, COND_AE, escape_label));
914 g(gen_scaled_array_address(ctx, type_def(struct_type,flat_array)->base->size, R_SAVED_1, R_SAVED_1, reg1, 0));
916 struct_type = type_def(struct_type,flat_array)->base;
920 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
/* Pointer-based structure: load the pointer, refuse thunks and shared data. */
923 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
924 gen_one(R_SCRATCH_1);
925 gen_one(ARG_ADDRESS_1);
929 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
932 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
/* Only uniquely-owned data may be mutated in place; shared -> escape. */
934 g(gen_compare_refcount(ctx, R_SCRATCH_1, REFCOUNT_STEP, COND_AE, escape_label));
936 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
937 case OPCODE_STRUCTURED_RECORD: {
938 const struct type *rec_type, *e_type;
939 rec_type = da_type(ctx->fn, ctx->args[i].type);
940 TYPE_TAG_VALIDATE(rec_type->tag);
941 if (unlikely(rec_type->tag == TYPE_TAG_flat_record))
942 rec_type = type_def(rec_type,flat_record)->base;
943 e_type = type_def(rec_type,record)->types[param_slot];
944 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
945 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, true, TEST));
947 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, false, TEST));
948 struct_type = e_type;
950 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, data_record_offset + (size_t)param_slot * slot_size, 0));
953 case OPCODE_STRUCTURED_OPTION: {
954 unsigned op_size = log_2(sizeof(ajla_option_t));
955 #if defined(ARCH_X86)
/* x86 can compare the option tag against an immediate directly in memory. */
956 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
957 g(gen_imm(ctx, param_slot, IMM_PURPOSE_CMP, op_size));
958 gen_insn(INSN_CMP, op_size, 0, 1);
959 gen_address_offset();
962 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
963 gen_four(escape_label);
/* Other architectures: load the tag into a register, then compare. */
965 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
966 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
967 gen_one(R_SCRATCH_2);
968 gen_address_offset();
970 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
972 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
975 case OPCODE_STRUCTURED_ARRAY: {
976 const struct type *e_type = da_type(ctx->fn, ctx->args[i].type);
978 g(gen_test_1_cached(ctx, param_slot, escape_label));
979 flag_set(ctx, param_slot, false);
981 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_2, ®2));
983 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
985 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
/* Pointer array element: require DATA_TAG_array_pointers and index into
 * the pointer vector. */
986 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
988 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
989 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
990 gen_one(R_SCRATCH_1);
991 gen_address_offset();
993 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_1, R_SCRATCH_1, reg2, 0));
/* Flat array element: require DATA_TAG_array_flat and scale by element size. */
995 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_flat, COND_NE, escape_label, R_SCRATCH_3));
997 g(gen_scaled_array_address(ctx, e_type->size, R_SAVED_1, R_SCRATCH_1, reg2, data_array_offset));
999 struct_type = e_type;
1004 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
/* Final store: either memcpy a flat value, or install a pointer (after
 * dereferencing whatever pointer currently occupies the destination). */
1011 g(gen_test_1_cached(ctx, slot_elem, escape_label));
1012 flag_set(ctx, slot_elem, false);
1013 g(gen_memcpy_from_slot(ctx, R_SAVED_1, 0, slot_elem));
1015 uint32_t skip_deref_label;
1016 skip_deref_label = alloc_label(ctx);
1017 if (unlikely(!skip_deref_label))
1020 if (TYPE_IS_FLAT(elem_type))
1021 g(gen_test_1_jz_cached(ctx, slot_elem, escape_label));
1023 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1024 gen_one(R_SCRATCH_1);
1025 gen_one(ARG_ADDRESS_1);
/* Zero pointer at destination -> nothing to dereference. */
1029 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_SCRATCH_1, COND_E, skip_deref_label));
1031 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1032 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1033 g(gen_upcall_argument(ctx, 0));
1034 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1036 gen_label(skip_deref_label);
1038 g(gen_frame_get_pointer(ctx, slot_elem, (ctx->args[i - 1].flags & OPCODE_STRUCTURED_FREE_VARIABLE) != 0, R_SCRATCH_1));
1040 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1041 gen_one(ARG_ADDRESS_1);
1044 gen_one(R_SCRATCH_1);
/* Generate code that builds a record in slot_r from the argument slots in
 * ctx->args[]. Flat records are assembled directly in the frame by memcpy;
 * heap records are allocated via the data_alloc_record_mayfail upcall and
 * each member is stored either flat (with its flag bit clear) or as a
 * pointer (flag bit set). Escapes to the interpreter on allocation failure
 * or when a member is an exception/thunk.
 * NOTE(review): extraction elided some lines here (declarations of i/ii,
 * error returns, braces); comments reflect only the visible code. */
1050 static bool attr_w gen_record_create(struct codegen_context *ctx, frame_t slot_r)
1052 const struct type *t;
1053 const struct record_definition *def;
1054 uint32_t escape_label;
1057 escape_label = alloc_escape_label(ctx);
1058 if (unlikely(!escape_label))
1061 t = get_type_of_local(ctx, slot_r);
/* Flat record: all members are flat; copy each argument straight into the
 * destination slot at its flat_offset. */
1062 if (t->tag == TYPE_TAG_flat_record) {
1063 const struct flat_record_definition *flat_def;
1064 const struct type *flat_type = t;
1065 t = type_def(t,flat_record)->base;
1066 def = type_def(t,record);
1067 flat_def = type_def(flat_type,flat_record);
/* First pass: make sure no argument is an exception/thunk. */
1068 for (i = 0; i < ctx->args_l; i++) {
1069 frame_t var_slot = ctx->args[i].slot;
1070 g(gen_test_1_cached(ctx, var_slot, escape_label));
1071 flag_set(ctx, var_slot, false);
/* Second pass: copy values; ii tracks the record slot, skipping elided ones. */
1073 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1074 frame_t var_slot, flat_offset, record_slot;
1075 while (unlikely(record_definition_is_elided(def, ii)))
1077 var_slot = ctx->args[i].slot;
1078 record_slot = record_definition_slot(def, ii);
1079 flat_offset = flat_def->entries[record_slot].flat_offset;
1080 g(gen_memcpy_from_slot(ctx, R_FRAME, (size_t)slot_r * slot_size + flat_offset, var_slot));
/* Heap record: allocate, then fill member by member. */
1085 def = type_def(t,record);
1087 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1089 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1090 g(gen_upcall_argument(ctx, 0));
1092 g(gen_load_constant(ctx, R_ARG1, slot_r));
1093 g(gen_upcall_argument(ctx, 1));
1095 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1096 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1097 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* R_SAVED_1 holds the new record across the member loop and its upcalls. */
1099 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1101 g(gen_clear_bitmap(ctx, 0, R_SAVED_1, data_record_offset, bitmap_slots(def->n_slots)));
1103 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1104 frame_t var_slot, var_flags, record_slot;
1105 const struct type *var_type, *record_type;
1106 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
1108 skip_flat_label = alloc_label(ctx);
1109 if (unlikely(!skip_flat_label))
1111 set_ptr_label = alloc_label(ctx);
1112 if (unlikely(!set_ptr_label))
1114 next_arg_label = alloc_label(ctx);
1115 if (unlikely(!next_arg_label))
1118 while (unlikely(record_definition_is_elided(def, ii)))
1120 var_slot = ctx->args[i].slot;
1121 var_type = get_type_of_local(ctx, var_slot);
1122 var_flags = ctx->args[i].flags;
1123 record_slot = record_definition_slot(def, ii);
1124 record_type = def->types[record_slot];
1125 if (TYPE_IS_FLAT(var_type)) {
/* Flag set on the source slot means it holds a pointer -> skip the flat path. */
1126 g(gen_test_1_cached(ctx, var_slot, skip_flat_label));
1127 if (TYPE_IS_FLAT(record_type)) {
1128 g(gen_memcpy_from_slot(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, var_slot));
1130 gen_insn(INSN_JMP, 0, 0, 0);
1131 gen_four(next_arg_label);
/* Flat value but pointer member: box it via the flat_to_data upcall. */
1133 if (slot_is_register(ctx, var_slot))
1134 g(spill(ctx, var_slot));
1136 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
1138 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1139 g(gen_upcall_argument(ctx, 0));
1141 g(gen_load_constant(ctx, R_ARG1, var_slot));
1142 g(gen_upcall_argument(ctx, 1));
1144 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
1146 gen_insn(INSN_JMP, 0, 0, 0);
1147 gen_four(set_ptr_label);
1151 gen_label(skip_flat_label);
1152 g(gen_frame_get_pointer(ctx, var_slot, (var_flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* Store the pointer member and mark its flag bit in the record bitmap. */
1154 gen_label(set_ptr_label);
1155 g(gen_address(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1156 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1157 gen_address_offset();
1160 g(gen_set_1(ctx, R_SAVED_1, record_slot, data_record_offset, true));
1162 gen_label(next_arg_label);
1165 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1166 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Generate code that loads member rec_slot of the record in slot_1 into
 * slot_r. Handles three layouts: flat record in the frame (memcpy), flat
 * member of a heap record (memcpy from the data block), and pointer member
 * (load the pointer and install it with reference handling per flags).
 * Escapes on thunks, exceptions, or when slot_r must stay flat but the
 * member is a pointer. */
1171 static bool attr_w gen_record_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, frame_t rec_slot, frame_t flags)
1173 const struct type *rec_type, *entry_type;
1174 uint32_t escape_label;
1177 rec_type = get_type_of_local(ctx, slot_1);
/* A record without definition can only appear in the top-level (unnamed)
 * function; bail out (the elided line presumably escapes/returns). */
1178 if (unlikely(rec_type->tag == TYPE_TAG_unknown)) {
1179 ajla_assert_lo(!*da(ctx->fn,function)->function_name, (file_line, "gen_record_load: function %s has record without definition", da(ctx->fn,function)->function_name));
1183 escape_label = alloc_escape_label(ctx);
1184 if (unlikely(!escape_label))
1187 /*debug("gen_record_load: %s: %u, %u", da(ctx->fn,function)->function_name, TYPE_TAG_unknown, rec_type->tag);*/
1188 if (TYPE_IS_FLAT(rec_type)) {
1189 const struct flat_record_definition_entry *ft = &type_def(rec_type,flat_record)->entries[rec_slot];
1190 g(gen_test_1_cached(ctx, slot_1, escape_label));
1191 g(gen_memcpy_to_slot(ctx, slot_r, R_FRAME, (size_t)slot_1 * slot_size + ft->flat_offset));
1192 flag_set(ctx, slot_1, false);
1193 flag_set(ctx, slot_r, false);
1196 entry_type = type_def(rec_type,record)->types[rec_slot];
/* Heap record: reg <- decompressed record pointer (escapes on thunk). */
1198 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_2, ®, escape_label));
1200 if (TYPE_IS_FLAT(entry_type)) {
/* Flag bit clear -> member stored flat inside the record data block. */
1201 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, false, TEST));
1202 g(gen_memcpy_to_slot(ctx, slot_r, reg, (size_t)rec_slot * slot_size + data_record_offset));
1203 flag_set(ctx, slot_r, false);
1207 if (flag_must_be_flat(ctx, slot_r)) {
1208 gen_insn(INSN_JMP, 0, 0, 0);
1209 gen_four(escape_label);
/* Flag bit must be set -> member stored as a pointer. */
1213 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, true, TEST));
1215 g(gen_address(ctx, reg, (size_t)rec_slot * slot_size + data_record_offset, ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1216 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1217 gen_one(R_SCRATCH_1);
1218 gen_address_offset();
1220 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/* Generate code that stores the flat option constant `opt` (an option with
 * no payload) directly into frame slot_r and marks the slot as flat. */
1225 static bool attr_w gen_option_create_empty_flat(struct codegen_context *ctx, ajla_flat_option_t opt, frame_t slot_r)
1227 g(gen_frame_store_imm(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, opt));
1228 flag_set(ctx, slot_r, false);
/* Generate code that heap-allocates an option object with tag `opt` and a
 * NULL payload pointer, and stores the (compressed) pointer into slot_r.
 * Escapes if slot_r must remain flat or the allocation upcall fails. */
1232 static bool attr_w gen_option_create_empty(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_r)
1234 unsigned option_size = log_2(sizeof(ajla_option_t));
1235 uint32_t escape_label;
1237 escape_label = alloc_escape_label(ctx);
1238 if (unlikely(!escape_label))
/* Destination must stay flat -> let the interpreter handle it. */
1241 if (flag_must_be_flat(ctx, slot_r)) {
1242 gen_insn(INSN_JMP, 0, 0, 0);
1243 gen_four(escape_label);
1247 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1248 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1249 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1250 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* data->u_.option.option = opt */
1252 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1253 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1254 gen_insn(INSN_MOV, option_size, 0, 0);
1255 gen_address_offset();
/* data->u_.option.pointer = 0 (no payload) */
1258 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1259 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, OP_SIZE_SLOT));
1260 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1261 gen_address_offset();
1264 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1265 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Generate code that heap-allocates an option object with tag `opt` whose
 * payload is taken from slot_1 (boxing a flat value via flat_to_data when
 * needed, or transferring the existing pointer per OPCODE_FLAG_FREE_ARGUMENT),
 * and stores the result into slot_r. Escapes if slot_r must stay flat or
 * allocation fails. */
1270 static bool attr_w gen_option_create(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_1, frame_t slot_r, frame_t flags)
1272 unsigned option_size = log_2(sizeof(ajla_option_t));
1273 const struct type *type;
1274 uint32_t escape_label, get_pointer_label, got_pointer_label;
1276 escape_label = alloc_escape_label(ctx);
1277 if (unlikely(!escape_label))
1280 if (flag_must_be_flat(ctx, slot_r)) {
1281 gen_insn(INSN_JMP, 0, 0, 0);
1282 gen_four(escape_label);
1286 get_pointer_label = alloc_label(ctx);
1287 if (unlikely(!get_pointer_label))
1290 got_pointer_label = alloc_label(ctx);
1291 if (unlikely(!got_pointer_label))
1294 type = get_type_of_local(ctx, slot_1);
/* Allocate the option object first; keep it in R_SAVED_1 across upcalls. */
1296 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1297 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1298 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1299 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1301 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1303 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1304 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1305 gen_insn(INSN_MOV, option_size, 0, 0);
1306 gen_address_offset();
/* Flat payload: box it with flat_to_data (result in R_RET0). */
1309 if (TYPE_IS_FLAT(type)) {
1310 g(gen_test_1_cached(ctx, slot_1, get_pointer_label));
1312 if (slot_is_register(ctx, slot_1))
1313 g(spill(ctx, slot_1));
1315 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
1317 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1318 g(gen_upcall_argument(ctx, 0));
1320 g(gen_load_constant(ctx, R_ARG1, slot_1));
1321 g(gen_upcall_argument(ctx, 1));
1323 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
/* If the flag is known-clear, the pointer path is unreachable: skip
 * emitting the jump and the get_pointer code entirely. */
1325 if (flag_is_clear(ctx, slot_1))
1326 goto skip_get_pointer_label;
1328 gen_insn(INSN_JMP, 0, 0, 0);
1329 gen_four(got_pointer_label);
1332 gen_label(get_pointer_label);
1333 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1335 skip_get_pointer_label:
1336 gen_label(got_pointer_label);
/* option->pointer = payload (R_RET0 from either path) */
1337 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1338 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1339 gen_address_offset();
1342 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1343 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Generate code that compares the tag of the option object pointed to by
 * `reg` against the constant `opt`. Two modes (judging from the visible
 * branches): if `label` is nonzero, jump to it when the tags differ;
 * otherwise store the boolean comparison result into slot_r.
 * Per-architecture variants: x86 compares memory with an immediate,
 * others load the tag into R_SCRATCH_2 first. */
1348 static bool attr_w gen_option_cmp(struct codegen_context *ctx, unsigned reg, frame_t opt, uint32_t label, frame_t slot_r)
1350 unsigned op_size = log_2(sizeof(ajla_option_t));
1352 #if defined(ARCH_X86)
1353 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
1354 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1355 gen_insn(INSN_CMP, op_size, 0, 1);
1356 gen_address_offset();
1359 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
1360 gen_four(escape_label);
/* Non-x86 with flags register: load tag, compare, then branch or setcc. */
1365 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1366 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1367 gen_one(R_SCRATCH_2);
1368 gen_address_offset();
1370 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1371 gen_insn(INSN_CMP, op_size, 0, 1);
1372 gen_one(R_SCRATCH_2);
1376 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
1379 g(gen_frame_set_cond(ctx, op_size, false, COND_E, slot_r));
/* Architectures without a flags register: compute the comparison result
 * into a destination register, then branch on it or store it. */
1383 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1384 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1385 gen_one(R_SCRATCH_2);
1386 gen_address_offset();
1388 g(gen_cmp_dest_reg(ctx, op_size, R_SCRATCH_2, (unsigned)-1, label ? R_CMP_RESULT : R_SCRATCH_2, opt, COND_E));
1391 gen_insn(INSN_JMP_REG, i_size(op_size), COND_E, 0);
1392 gen_one(R_CMP_RESULT);
1395 g(gen_frame_store(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, R_SCRATCH_2));
/* Generate code that extracts the payload of the option in slot_1 into
 * slot_r, escaping unless the option's tag equals `opt`. Flat options carry
 * no payload, so a set flag (pointer form required) escapes directly. */
1395 static bool attr_w gen_option_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, ajla_option_t opt, frame_t flags)
1397 const struct type *type;
1398 uint32_t escape_label;
1401 escape_label = alloc_escape_label(ctx);
1402 if (unlikely(!escape_label))
1405 if (flag_must_be_flat(ctx, slot_r)) {
1406 gen_insn(INSN_JMP, 0, 0, 0);
1407 gen_four(escape_label);
1411 type = get_type_of_local(ctx, slot_1);
1412 if (TYPE_IS_FLAT(type)) {
/* Flat option has no payload to load; only the pointer form is usable. */
1413 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
1416 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
/* Wrong tag -> escape (label mode of gen_option_cmp). */
1417 g(gen_option_cmp(ctx, reg, opt, escape_label, 0));
1419 g(gen_address(ctx, reg, offsetof(struct data, u_.option.pointer), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1420 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1421 gen_one(R_SCRATCH_1);
1422 gen_address_offset();
1424 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/* Generate code that tests whether the flat option in slot_1 has tag `opt`
 * and stores the boolean result into slot_r. If `opt` does not fit in
 * ajla_flat_option_t the answer is statically false (store zero). */
1429 static bool attr_w gen_option_test_flat(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1431 unsigned op_size = log_2(sizeof(ajla_flat_option_t));
1432 uint32_t escape_label;
1434 escape_label = alloc_escape_label(ctx);
1435 if (unlikely(!escape_label))
1438 g(gen_test_1_cached(ctx, slot_1, escape_label));
1440 flag_set(ctx, slot_1, false);
1441 flag_set(ctx, slot_r, false);
/* opt out of flat-option range: can never match a flat option. */
1443 if (unlikely(opt != (ajla_flat_option_t)opt)) {
1444 g(gen_frame_clear(ctx, op_size, slot_r));
1448 g(gen_frame_load_cmp_imm_set_cond(ctx, op_size, zero_x, slot_1, opt, COND_E, slot_r));
/* Generate code that tests whether the heap option in slot_1 has tag `opt`
 * and stores the boolean result into slot_r (store-to-slot mode of
 * gen_option_cmp). Out-of-range `opt` is statically false. */
1453 static bool attr_w gen_option_test(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1455 uint32_t escape_label;
1458 escape_label = alloc_escape_label(ctx);
1459 if (unlikely(!escape_label))
1462 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1464 flag_set(ctx, slot_r, false);
1466 if (unlikely(opt != (ajla_option_t)opt)) {
1467 g(gen_frame_clear(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r));
1471 g(gen_option_cmp(ctx, reg, opt, 0, slot_r));
/* Generate code that loads the ordinal (tag number) of the option in slot_1
 * into slot_r as an integer. Handles both representations: the flat form is
 * read straight from the frame, the pointer form is dereferenced and its
 * u_.option.option field loaded. When flag-cache knowledge proves the slot
 * flat, the pointer branch is omitted entirely.
 * NOTE(review): the `flat` parameter's use is not visible in this chunk. */
1476 static bool attr_w gen_option_ord(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, bool flat)
1478 unsigned op_size = log_2(sizeof(ajla_option_t));
1479 unsigned op_size_flat = log_2(sizeof(ajla_flat_option_t));
1480 uint32_t escape_label, ptr_label, store_label;
1481 unsigned reg, target;
1483 escape_label = alloc_escape_label(ctx);
1484 if (unlikely(!escape_label))
1487 ptr_label = alloc_label(ctx);
1488 if (unlikely(!ptr_label))
1491 store_label = alloc_label(ctx);
1492 if (unlikely(!store_label))
1495 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
/* Flat path: tag is the slot value itself, zero-extended. */
1498 g(gen_test_1_cached(ctx, slot_1, ptr_label));
1500 g(gen_frame_load(ctx, op_size_flat, zero_x, slot_1, 0, false, target));
1502 if (flag_is_clear(ctx, slot_1))
1503 goto skip_ptr_label;
1505 gen_insn(INSN_JMP, 0, 0, 0);
1506 gen_four(store_label);
/* Pointer path: dereference and read the option tag from the data block. */
1509 gen_label(ptr_label);
1510 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1512 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1513 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1515 gen_address_offset();
1518 gen_label(store_label);
1519 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
1520 flag_set(ctx, slot_r, false);
/* Generate code that builds an array in slot_r from the ctx->args[] slots.
 * All elements must have the same type (asserted). Flat element type ->
 * allocate a flat array and memcpy each element; otherwise allocate a
 * pointer array and move each element's pointer in. Escapes on allocation
 * failure, thunk elements, or an element count too large for uint_default_t.
 * NOTE(review): declarations of i/offset and some operand/brace lines are
 * elided in this extraction. */
1525 static bool attr_w gen_array_create(struct codegen_context *ctx, frame_t slot_r)
1528 const struct type *type;
1529 uint32_t escape_label;
1531 escape_label = alloc_escape_label(ctx);
1532 if (unlikely(!escape_label))
1535 ajla_assert_lo(ctx->args_l != 0, (file_line, "gen_array_create: zero entries"));
/* Too many elements to represent as a signed default integer: escape. */
1537 if (unlikely(ctx->args_l >= sign_bit(uint_default_t))) {
1538 gen_insn(INSN_JMP, 0, 0, 0);
1539 gen_four(escape_label);
1543 type = get_type_of_local(ctx, ctx->args[0].slot);
1544 for (i = 1; i < ctx->args_l; i++) {
1545 const struct type *t = get_type_of_local(ctx, ctx->args[i].slot);
1546 if (unlikely(t != type))
1547 internal(file_line, "gen_array_create: types do not match: %u != %u", type->tag, t->tag);
/* Flat elements: alloc flat array sized by slot type, then memcpy each. */
1550 if (TYPE_IS_FLAT(type)) {
1552 for (i = 0; i < ctx->args_l; i++) {
1553 g(gen_test_1_cached(ctx, ctx->args[i].slot, escape_label));
1554 flag_set(ctx, ctx->args[i].slot, false);
1557 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1559 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1560 g(gen_upcall_argument(ctx, 0));
1562 g(gen_load_constant(ctx, R_ARG1, ctx->args[0].slot));
1563 g(gen_upcall_argument(ctx, 1));
1565 g(gen_load_constant(ctx, R_ARG2, ctx->args_l));
1566 g(gen_upcall_argument(ctx, 2));
1568 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1569 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1570 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1572 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1574 offset = data_array_offset;
1575 for (i = 0; i < ctx->args_l; i++) {
1576 g(gen_memcpy_from_slot(ctx, R_SAVED_1, offset, ctx->args[i].slot));
1577 offset += type->size;
/* Pointer elements: alloc pointer array, then store each pointer. */
1581 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1583 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
1584 g(gen_upcall_argument(ctx, 0));
1586 g(gen_load_constant(ctx, R_ARG1, ctx->args_l));
1587 g(gen_upcall_argument(ctx, 1));
1589 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1590 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1591 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1593 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Load the base of the pointer vector (target of the elided gen_one is
 * presumably R_SAVED_2, which the store loop below indexes). */
1595 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
1596 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
1598 gen_address_offset();
1601 for (i = 0; i < ctx->args_l; i++) {
1602 g(gen_frame_get_pointer(ctx, ctx->args[i].slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1603 g(gen_address(ctx, R_SAVED_2, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1604 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1605 gen_address_offset();
1606 gen_one(R_SCRATCH_1);
1607 offset += sizeof(pointer_t);
1610 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1611 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/* Generate code that allocates an empty flat array (length 0, capacity
 * ARRAY_PREALLOC_SIZE) of element type `local_type` and stores the compressed
 * pointer into slot_r. Escapes if the allocation upcall fails. */
1615 static bool attr_w gen_array_create_empty_flat(struct codegen_context *ctx, frame_t slot_r, frame_t local_type)
1617 uint32_t escape_label;
1619 escape_label = alloc_escape_label(ctx);
1620 if (unlikely(!escape_label))
1623 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 4));
1625 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1626 g(gen_upcall_argument(ctx, 0));
1628 g(gen_load_constant(ctx, R_ARG1, local_type));
1629 g(gen_upcall_argument(ctx, 1));
/* args: capacity = ARRAY_PREALLOC_SIZE, initial length = 0 */
1631 g(gen_load_constant(ctx, R_ARG2, ARRAY_PREALLOC_SIZE));
1632 g(gen_upcall_argument(ctx, 2));
1634 g(gen_load_constant(ctx, R_ARG3, 0));
1635 g(gen_upcall_argument(ctx, 3));
1637 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 4));
1638 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1639 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1641 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1642 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Generate code that allocates an empty pointer array (length 0, capacity
 * ARRAY_PREALLOC_SIZE) and stores the compressed pointer into slot_r.
 * Escapes if the allocation upcall fails. */
1647 static bool attr_w gen_array_create_empty(struct codegen_context *ctx, frame_t slot_r)
1649 uint32_t escape_label;
1651 escape_label = alloc_escape_label(ctx);
1652 if (unlikely(!escape_label))
1655 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1657 g(gen_load_constant(ctx, R_ARG0, ARRAY_PREALLOC_SIZE));
1658 g(gen_upcall_argument(ctx, 0));
1660 g(gen_load_constant(ctx, R_ARG1, 0));
1661 g(gen_upcall_argument(ctx, 1));
1663 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1664 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1665 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1667 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1668 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Generate code for "array fill": create an array in slot_r containing
 * slot_2 copies of the value in slot_1. Four strategies, chosen statically:
 *   - flat destination array type: unroll memcpy into the frame slot;
 *   - OPCODE_ARRAY_FILL_FLAG_SPARSE: box the fill value if flat, then call
 *     the array_create_sparse upcall;
 *   - flat content: call array_create_flat with the frame slot;
 *   - otherwise: call array_create_pointers with the value's pointer.
 * Escapes on thunks or a negative length. */
1673 static bool attr_w gen_array_fill(struct codegen_context *ctx, frame_t slot_1, frame_t flags, frame_t slot_2, frame_t slot_r)
1675 const struct type *content_type, *array_type;
1676 uint32_t escape_label;
1677 unsigned reg1, reg4;
1679 escape_label = alloc_escape_label(ctx);
1680 if (unlikely(!escape_label))
1683 g(gen_test_1_cached(ctx, slot_2, escape_label));
1685 content_type = get_type_of_local(ctx, slot_1);
1686 array_type = get_type_of_local(ctx, slot_r);
/* Fixed-size flat array destination: the length is implied by the type,
 * so just replicate the value def->n_elements times in the frame. */
1688 if (TYPE_IS_FLAT(array_type)) {
1689 int64_t dest_offset;
1691 const struct flat_array_definition *def = type_def(array_type,flat_array);
1693 ajla_assert_lo(TYPE_IS_FLAT(content_type), (file_line, "gen_array_fill: array is flat but content is not"));
1695 g(gen_test_1_cached(ctx, slot_1, escape_label));
1697 dest_offset = (size_t)slot_r * slot_size;
1698 for (i = 0; i < def->n_elements; i++) {
1699 g(gen_memcpy_from_slot(ctx, R_FRAME, dest_offset, slot_1));
1700 dest_offset += def->base->size;
1702 flag_set(ctx, slot_1, false);
1703 flag_set(ctx, slot_r, false);
1708 if (slot_is_register(ctx, slot_1))
1709 g(spill(ctx, slot_1));
/* Sparse fill: obtain the fill value as a pointer in R_SCRATCH_4 (boxing a
 * flat value via flat_to_data if necessary), then upcall. */
1711 if (unlikely((flags & OPCODE_ARRAY_FILL_FLAG_SPARSE) != 0)) {
1712 uint32_t get_ptr_label, got_ptr_label;
1714 get_ptr_label = alloc_label(ctx);
1715 if (unlikely(!get_ptr_label))
1718 got_ptr_label = alloc_label(ctx);
1719 if (unlikely(!got_ptr_label))
1722 if (TYPE_IS_FLAT(content_type)) {
1723 g(gen_test_1_cached(ctx, slot_1, get_ptr_label));
1725 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
1727 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1728 g(gen_upcall_argument(ctx, 0));
1730 g(gen_load_constant(ctx, R_ARG1, slot_1));
1731 g(gen_upcall_argument(ctx, 1));
1733 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 2));
1735 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_SCRATCH_4, R_RET0));
1737 gen_insn(INSN_JMP, 0, 0, 0);
1738 gen_four(got_ptr_label);
1741 gen_label(get_ptr_label);
1743 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_4));
1745 gen_label(got_ptr_label);
/* Negative length is invalid: escape to the interpreter. */
1747 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_1, ®1));
1748 g(gen_jmp_if_negative(ctx, reg1, escape_label));
1750 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
1751 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg1));
1752 g(gen_upcall_argument(ctx, 0));
1754 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SCRATCH_4));
1755 g(gen_upcall_argument(ctx, 1));
1757 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
1758 } else if (TYPE_IS_FLAT(content_type)) {
1759 g(gen_test_1_cached(ctx, slot_1, escape_label));
1760 flag_set(ctx, slot_1, false);
1762 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_4, ®4));
1763 g(gen_jmp_if_negative(ctx, reg4, escape_label));
1765 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_flat), 3));
1766 g(gen_mov(ctx, i_size(OP_SIZE_INT), R_ARG1, reg4));
1767 g(gen_upcall_argument(ctx, 1));
1769 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1770 g(gen_upcall_argument(ctx, 0));
1772 g(gen_load_constant(ctx, R_ARG2, slot_1));
1773 g(gen_upcall_argument(ctx, 2));
1775 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_flat), 3));
/* Pointer content: the upcall reads the length from the (spilled) frame
 * slot, identified by instruction offset and slot number. */
1777 if (slot_is_register(ctx, slot_2))
1778 g(spill(ctx, slot_2));
1780 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1782 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
1784 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG3, R_SCRATCH_1));
1785 g(gen_upcall_argument(ctx, 3));
1787 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1788 g(gen_upcall_argument(ctx, 0));
1790 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
1791 g(gen_upcall_argument(ctx, 1));
1793 g(gen_load_constant(ctx, R_ARG2, slot_2));
1794 g(gen_upcall_argument(ctx, 2));
1796 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
/* may_be_borrowed=false, may_be_flat=false: result is always a pointer. */
1798 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* Generate code that materializes a constant string literal (embedded in the
 * function's bytecode at `string`, `len` elements of the type given by `tag`)
 * as a flat array in slot_r: allocate the array, then memcpy the bytes from
 * the code area (addressed relative to the function data) into it. */
1803 static bool attr_w gen_array_string(struct codegen_context *ctx, type_tag_t tag, uint8_t *string, frame_t len, frame_t slot_r)
1805 uint32_t escape_label;
1807 const struct type *type;
1809 escape_label = alloc_escape_label(ctx);
1810 if (unlikely(!escape_label))
1813 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1815 g(gen_load_constant(ctx, R_ARG0, tag));
1816 g(gen_upcall_argument(ctx, 0));
1818 g(gen_load_constant(ctx, R_ARG1, len));
1819 g(gen_upcall_argument(ctx, 1));
1821 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1822 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1823 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the uncompressed pointer for the memcpy; the compressed copy is
 * installed in the frame first. */
1825 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1827 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1828 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Source address = function code base + literal's offset within the code. */
1830 g(load_function_offset(ctx, R_SCRATCH_3, offsetof(struct data, u_.function.code)));
1832 offset = string - cast_ptr(uint8_t *, da(ctx->fn,function)->code);
1833 type = type_get_from_tag(tag);
1834 g(gen_memcpy_raw(ctx, R_SAVED_1, data_array_offset, R_SCRATCH_3, offset, (size_t)len * type->size, minimum(type->align, align_of(code_t))));
/* Emit code computing reg_dst = reg_src + reg_index * element_size +
 * offset_src. Strategy depends on element_size and architecture:
 *   - power of two: x86 LEA with scale (shift <= 3), shifted-add where the
 *     architecture supports it, or an explicit shift;
 *   - arch with hardware multiply: multiply by an immediate;
 *   - otherwise: shift-and-add decomposition of element_size using
 *     R_CONST_IMM as the running copy of the original index.
 * Finally the base register and offset are added (S390 can fold the offset
 * into a single LEA-style instruction).
 * NOTE(review): several gen_one() operand lines and #if/#else lines are
 * elided in this extraction; comments describe the visible structure only. */
1839 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src)
1841 if (is_power_of_2(element_size)) {
1842 unsigned shift = log_2(element_size);
1843 #if defined(ARCH_X86)
/* x86 scaled addressing handles scales 1/2/4/8 with a 32-bit displacement. */
1844 if (shift <= 3 && imm_is_32bit(offset_src)) {
1845 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
1850 gen_eight(offset_src);
1854 if (ARCH_HAS_SHIFTED_ADD(shift)) {
1855 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
1858 gen_one(ARG_SHIFTED_REGISTER);
1859 gen_one(ARG_SHIFT_LSL | shift);
/* Fallback: shift the index into reg_dst, then fall through to the add. */
1866 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_dst, reg_index, shift, 0));
1867 reg_index = reg_dst;
/* Non-power-of-two with hardware multiply: index *= element_size. */
1871 g(gen_imm(ctx, element_size, IMM_PURPOSE_MUL, i_size(OP_SIZE_ADDRESS)));
1872 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_MUL, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_MUL, false, is_imm(), ctx->const_imm));
1876 reg_index = reg_dst;
/* No multiplier: decompose element_size into shifts and adds, keeping the
 * original index in R_CONST_IMM and accumulating into reg_index. */
1878 size_t e_size = element_size;
1880 bool first_match = true;
1882 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_CONST_IMM, reg_index));
1883 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), reg_dst, reg_index));
1884 reg_index = reg_dst;
1887 g(gen_load_constant(ctx, reg_index, 0));
1893 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_index, reg_index, sh, false));
1894 first_match = false;
1895 } else if (ARCH_HAS_SHIFTED_ADD(sh)) {
1896 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
1899 gen_one(ARG_SHIFTED_REGISTER);
1900 gen_one(ARG_SHIFT_LSL | sh);
1901 gen_one(R_CONST_IMM);
1904 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_CONST_IMM, R_CONST_IMM, sh, false));
1907 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_index, reg_index, R_CONST_IMM, 0));
/* Add the base register (and offset, folded on S390 when encodable). */
1915 #if defined(ARCH_S390)
1916 if (offset_src && s390_inline_address(offset_src)) {
1917 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), 0, 0);
1922 gen_eight(offset_src);
1926 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_dst, reg_index, reg_src, 0));
1931 g(gen_imm(ctx, offset_src, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
1932 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, is_imm(), ctx->const_imm));
/* Load the flat array element at reg_src + reg_idx * t->size + offset_src
 * into frame slot slot_r, where t is the flat type of slot_r.
 * x86 and s390 have fast paths that use scaled / long-displacement
 * addressing directly; the generic path computes the element address
 * with gen_scaled_array_address and then copies the element into the
 * slot with gen_memcpy_to_slot. */
1940 static bool attr_w gen_scaled_array_load(struct codegen_context *ctx, unsigned reg_src, unsigned reg_idx, int64_t offset_src, frame_t slot_r)
1942 const struct type *t = get_type_of_local(ctx, slot_r);
1943 #if defined(ARCH_X86)
	/* x86: element sizes 1/2/4/8 load in one scaled-index MOV;
	 * result goes to the slot's register, or via R_SCRATCH_2
	 * to the frame when the slot is spilled */
1944 if (is_power_of_2(t->size)) {
1945 unsigned shift = log_2(t->size);
1946 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
1947 short reg = ctx->registers[slot_r];
1948 gen_insn(INSN_MOV, shift, 0, 0);
1949 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1950 gen_one(ARG_ADDRESS_2 + shift);
1953 gen_eight(offset_src);
	/* slot not in a register: store the scratch value into the frame */
1956 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, shift));
1957 gen_insn(INSN_MOV, shift, 0, 0);
1958 gen_address_offset();
1959 gen_one(R_SCRATCH_2);
1966 #if defined(ARCH_S390)
	/* s390: one-byte elements with an inline-encodable displacement,
	 * loaded sign-extended (requires long-displacement + extended-imm) */
1967 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
1968 short reg = ctx->registers[slot_r];
1969 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
1970 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1971 gen_one(ARG_ADDRESS_2);
1974 gen_eight(offset_src);
1977 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_1));
1978 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
1979 gen_address_offset();
1980 gen_one(R_SCRATCH_2);
	/* generic path: compute the element address, then memcpy it
	 * into the slot (handles arbitrary element sizes) */
1986 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, reg_idx, 0));
1988 g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, offset_src));
/* Store frame slot slot_1 into the flat array element at
 * reg_src + index * t->size + offset_src — the mirror image of
 * gen_scaled_array_load. x86 and s390 again have direct addressing
 * fast paths; the generic path computes the element address and
 * copies the slot with gen_memcpy_from_slot.
 * NOTE(review): the index register is not a parameter here; the
 * generic path passes R_SCRATCH_2 as both index and destination of
 * gen_scaled_array_address — presumably the caller preloads the
 * index into R_SCRATCH_2 (see gen_array_append_one_flat). */
1993 static bool attr_w gen_scaled_array_store(struct codegen_context *ctx, unsigned reg_src, int64_t offset_src, frame_t slot_1)
1995 const struct type *t = get_type_of_local(ctx, slot_1);
1996 #if defined(ARCH_X86)
	/* x86: load the slot value (via R_SCRATCH_3 when spilled), then
	 * store with one scaled-index MOV */
1997 if (is_power_of_2(t->size)) {
1998 unsigned shift = log_2(t->size);
1999 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
2000 short reg = ctx->registers[slot_1];
2002 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_OFFSET, shift));
2003 gen_insn(INSN_MOV, shift, 0, 0);
2004 gen_one(R_SCRATCH_3);
2005 gen_address_offset();
2009 gen_insn(INSN_MOV, shift, 0, 0);
2010 gen_one(ARG_ADDRESS_2 + shift);
2012 gen_one(R_SCRATCH_2);
2013 gen_eight(offset_src);
2020 #if defined(ARCH_S390)
	/* s390: one-byte elements with an inline-encodable displacement */
2021 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
2022 short reg = ctx->registers[slot_1];
2024 g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_1));
2025 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
2026 gen_one(R_SCRATCH_3);
2027 gen_address_offset();
2031 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
2032 gen_one(ARG_ADDRESS_2);
2034 gen_one(R_SCRATCH_2);
2035 gen_eight(offset_src);
	/* generic path: compute the destination address into R_SCRATCH_2,
	 * then copy the whole flat slot into it */
2041 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, R_SCRATCH_2, 0));
2043 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, offset_src, slot_1));
/* Compare reg_len against the array length field of the data object in
 * reg_array and jump to escape_label when the condition cond holds.
 * allocated selects which field is checked: n_allocated_entries (the
 * capacity) when true, n_used_entries (the current length) when false.
 * x86 can compare against memory directly; other architectures load
 * the field into R_SCRATCH_3 first (clobbering it). */
2048 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label)
2050 size_t offset = !allocated ? offsetof(struct data, u_.array_flat.n_used_entries) : offsetof(struct data, u_.array_flat.n_allocated_entries);
2051 #if defined(ARCH_X86)
	/* x86: compare register with the in-memory length field directly */
2052 g(gen_address(ctx, reg_array, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2053 gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1);
2055 gen_address_offset();
2057 gen_insn(INSN_JMP_COND, OP_SIZE_INT, cond, 0);
2058 gen_four(escape_label);
	/* generic: load the length field (sign-extended if the architecture
	 * prefers it), then compare-and-jump */
2060 g(gen_address(ctx, reg_array, offset, ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2061 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2062 gen_one(R_SCRATCH_3);
2063 gen_address_offset();
2065 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size(OP_SIZE_INT), reg_len, R_SCRATCH_3, cond, escape_label));
/* Generate code for an array indexing operation: slot_r = slot_1[slot_idx].
 * Any case the generated code cannot handle inline (thunks, wrong data
 * tags, out-of-range index unless OPCODE_ARRAY_INDEX_IN_RANGE says the
 * bound check may be skipped) branches to escape_label and falls back to
 * the interpreter. Three main shapes:
 *   1. slot_1 has a statically flat array type — index directly into the
 *      frame slot itself;
 *   2. the result type is flat — accept DATA_TAG_array_flat or
 *      DATA_TAG_array_slice, conditionally redirecting the base pointer
 *      to the slice's flat data (per-architecture conditional-move code);
 *   3. the result is a pointer — require DATA_TAG_array_pointers and load
 *      the slot-sized pointer element. */
2070 static bool attr_w gen_array_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_idx, frame_t slot_r, frame_t flags)
2072 const struct type *t = get_type_of_local(ctx, slot_1);
2073 const struct type *tr = get_type_of_local(ctx, slot_r);
2074 uint32_t escape_label;
2075 unsigned reg1, reg2;
2077 escape_label = alloc_escape_label(ctx);
2078 if (unlikely(!escape_label))
	/* case 1: statically typed flat array stored directly in the frame */
2081 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2082 const struct flat_array_definition *def = type_def(t,flat_array);
2084 g(gen_test_2_cached(ctx, slot_1, slot_idx, escape_label));
2086 flag_set(ctx, slot_1, false);
2087 flag_set(ctx, slot_idx, false);
2089 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, &reg2));
	/* bounds check against the static element count, unless the
	 * compiler proved the index in range */
2091 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2092 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg2, def->n_elements, COND_AE, escape_label));
2094 g(gen_scaled_array_load(ctx, R_FRAME, reg2, (size_t)slot_1 * slot_size, slot_r));
	/* cases 2 and 3: slot_1 holds a pointer to an array data object */
2098 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg1, escape_label));
2100 g(gen_test_1_cached(ctx, slot_idx, escape_label));
2101 flag_set(ctx, slot_idx, false);
2102 g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, &reg2));
2104 if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2105 g(gen_check_array_len(ctx, reg1, false, reg2, COND_AE, escape_label));
	/* case 2: flat result — tag must be array_flat or array_slice
	 * (escape on anything above array_slice); for a slice, redirect
	 * the base pointer to its flat data without branching */
2107 if (TYPE_IS_FLAT(tr)) {
2109 g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_slice, COND_A, escape_label, R_SCRATCH_4));
2110 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, reg1));
2111 #if defined(ARCH_X86) || defined(ARCH_S390)
2112 #if defined(ARCH_X86)
	/* without cmov (i486 etc.) this path cannot be used */
2113 if (unlikely(!cpu_test_feature(CPU_FEATURE_cmov)))
2115 if (unlikely(!cpu_test_feature(CPU_FEATURE_misc_45)))
2118 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2119 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2120 gen_one(R_SCRATCH_3);
2121 gen_address_offset();
	/* conditional move: if tag == array_slice, replace the base with
	 * the slice's flat-data pointer (pre-biased by -data_array_offset
	 * so the same element offset works for both layouts) */
2124 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2125 gen_insn(INSN_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2126 gen_one(R_SCRATCH_1);
2127 gen_one(R_SCRATCH_1);
2128 gen_address_offset();
2129 #elif defined(ARCH_PARISC)
	/* pa-risc: fused compare-and-conditional-move from memory */
2130 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2131 gen_insn(INSN_CMP_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2132 gen_one(R_SCRATCH_1);
2133 gen_one(R_SCRATCH_1);
2134 gen_address_offset();
2135 gen_one(R_SCRATCH_4);
2137 gen_eight(DATA_TAG_array_slice);
2138 #elif defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
	/* no cmov: xor the tag and branch around the pointer reload */
2139 g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_SCRATCH_4, R_SCRATCH_4, DATA_TAG_array_slice, 0));
2141 label = alloc_label(ctx);
2142 if (unlikely(!label))
2145 gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
2146 gen_one(R_SCRATCH_4);
2149 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2150 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2151 gen_one(R_SCRATCH_1);
2152 gen_address_offset();
	/* remaining architectures: load the slice pointer into a scratch
	 * register, then select it with the available conditional-move
	 * primitive */
2156 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2157 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2158 gen_one(R_SCRATCH_3);
2159 gen_address_offset();
2161 #if defined(ARCH_POWER)
2162 if (!cpu_test_feature(CPU_FEATURE_v203))
2165 #if defined(ARCH_SPARC)
2169 gen_insn(INSN_CMOV, i_size(OP_SIZE_ADDRESS), COND_E, 0);
2170 gen_one(R_SCRATCH_1);
2171 gen_one(R_SCRATCH_1);
2172 gen_one(R_SCRATCH_3);
	/* sparc: compute compare result into a register, then MOVR */
2174 g(gen_imm(ctx, DATA_TAG_array_slice, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
2175 gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, COND_E, 0);
2176 gen_one(R_CMP_RESULT);
2177 gen_one(R_SCRATCH_4);
2180 gen_insn(INSN_MOVR, OP_SIZE_NATIVE, COND_NE, 0);
2181 gen_one(R_SCRATCH_1);
2182 gen_one(R_SCRATCH_1);
2183 gen_one(R_CMP_RESULT);
2184 gen_one(R_SCRATCH_3);
	/* fallback: branch over a plain move when no conditional move exists */
2190 label = alloc_label(ctx);
2191 if (unlikely(!label))
2193 gen_insn(INSN_JMP_COND, OP_SIZE_4, COND_NE, 0);
2196 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, R_SCRATCH_3));
	/* base pointer now points at flat data for both tags; load element */
2200 g(gen_scaled_array_load(ctx, R_SCRATCH_1, reg2, data_array_offset, slot_r));
2201 flag_set(ctx, slot_r, false);
	/* case 3: pointer result */
2204 if (flag_must_be_flat(ctx, slot_r)) {
2205 gen_insn(INSN_JMP, 0, 0, 0);
2206 gen_four(escape_label);
2210 g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
2212 g(gen_address(ctx, reg1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2213 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2214 gen_one(R_SCRATCH_1);
2215 gen_address_offset();
2217 #if defined(ARCH_X86) || defined(ARCH_ARM)
	/* scaled addressing available: load the pointer element directly */
2218 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2219 gen_one(R_SCRATCH_1);
2220 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
2221 gen_one(R_SCRATCH_1);
2225 goto scaled_load_done;
2227 #if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390) || defined(ARCH_SPARC)
	/* base+index addressing: shift the index by the slot size first */
2228 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2230 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2231 gen_one(R_SCRATCH_1);
2232 gen_one(ARG_ADDRESS_2);
2233 gen_one(R_SCRATCH_1);
2234 gen_one(R_SCRATCH_2);
2237 goto scaled_load_done;
2239 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
2240 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(i_size(OP_SIZE_ADDRESS), ALU_ADD, false, false, 0));
2241 gen_one(R_SCRATCH_2);
2242 gen_one(ARG_SHIFTED_REGISTER);
2243 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
2245 gen_one(R_SCRATCH_1);
2247 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2248 gen_one(R_SCRATCH_1);
2249 gen_one(ARG_ADDRESS_1);
2250 gen_one(R_SCRATCH_2);
2253 goto scaled_load_done;
	/* last resort: explicit shift + add to form the element address */
2256 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2258 g(gen_3address_alu(ctx, OP_SIZE_ADDRESS, ALU_ADD, R_SCRATCH_2, R_SCRATCH_2, R_SCRATCH_1, 0));
2260 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2261 gen_one(R_SCRATCH_1);
2262 gen_one(ARG_ADDRESS_1);
2263 gen_one(R_SCRATCH_2);
	/* store the loaded pointer into the result slot, adjusting
	 * reference counts / flags as needed */
2266 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/* Generate code computing the length of the array in slot_1.
 * Two modes:
 *   - slot_2 == NO_FRAME_T: store the length into slot_r as an int;
 *   - slot_2 given: compare length > slot_2, producing a boolean in
 *     slot_r, or (fused) branching directly to offs_false when the
 *     comparison fails.
 * Flat-array types have a compile-time constant length; otherwise the
 * length is read from the data object's n_used_entries field, which the
 * static checks below verify sits at the same offset for flat, slice
 * and pointers variants (only tags up to array_pointers are accepted
 * inline; anything else escapes). */
2271 static bool attr_w gen_array_len(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, bool fused, int32_t offs_false)
2273 const struct type *t = get_type_of_local(ctx, slot_1);
2274 uint32_t escape_label;
2275 unsigned reg, target;
2277 escape_label = alloc_escape_label(ctx);
2278 if (unlikely(!escape_label))
2281 if (slot_2 != NO_FRAME_T) {
2282 g(gen_test_1_cached(ctx, slot_2, escape_label));
2283 flag_set(ctx, slot_2, false);
	/* flat array type: length is the static n_elements constant */
2286 if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2288 target = R_SCRATCH_1;
2289 g(gen_load_constant(ctx, target, type_def(t,flat_array)->n_elements));
2291 } else if (slot_2 == NO_FRAME_T) {
2292 g(gen_frame_store_imm(ctx, OP_SIZE_INT, slot_r, 0, (unsigned)type_def(t,flat_array)->n_elements));
2294 g(gen_frame_load_cmp_imm_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, type_def(t,flat_array)->n_elements, COND_G, slot_r));
2296 flag_set(ctx, slot_r, false);
	/* dynamic array: load the data pointer and read its length field */
2298 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
	/* compile-time layout invariants this fast path depends on */
2300 if (offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_slice.n_entries) ||
2301 offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_pointers.n_used_entries)) {
2305 if (DATA_TAG_array_flat != DATA_TAG_array_slice - 1 ||
2306 DATA_TAG_array_slice != DATA_TAG_array_pointers - 1 ||
2307 DATA_TAG_array_same < DATA_TAG_array_flat ||
2308 DATA_TAG_array_btree < DATA_TAG_array_flat ||
2309 DATA_TAG_array_incomplete < DATA_TAG_array_flat) {
	/* one range check covers flat/slice/pointers; other tags escape */
2314 g(gen_compare_ptr_tag(ctx, reg, DATA_TAG_array_pointers, COND_A, escape_label, R_SCRATCH_2));
2316 if (slot_2 == NO_FRAME_T) {
2317 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
2319 target = R_SCRATCH_1;
2322 #if defined(ARCH_X86) || defined(ARCH_S390)
	/* fused compare-and-branch against the in-memory length field */
2323 if (fused && slot_is_register(ctx, slot_2)) {
2324 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2325 gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1 + COND_IS_LOGICAL(COND_GE));
2326 gen_one(ctx->registers[slot_2]);
2327 gen_address_offset();
2329 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_GE, -1U, -1U));
	/* load the length into the chosen target register */
2333 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2334 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2336 gen_address_offset();
2340 enum extend ex = OP_SIZE_INT == i_size_cmp(OP_SIZE_INT) + (unsigned)zero ? garbage : sign_x;
2342 if (ARCH_HAS_JMP_2REGS(COND_LE)) {
2343 g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, &reg2));
2344 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, target, reg2));
2348 g(gen_frame_load_cmp(ctx, OP_SIZE_INT, COND_IS_LOGICAL(COND_LE), ex, false, slot_2, 0, false, target));
2349 g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, -1U, -1U));
	/* no two-register branch: materialize the comparison result */
2351 g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, &reg2));
2352 g(gen_cmp_dest_reg(ctx, i_size_cmp(OP_SIZE_INT), target, reg2, R_CMP_RESULT, 0, COND_G));
2353 g(gen_jump(ctx, offs_false, OP_SIZE_NATIVE, COND_E, R_CMP_RESULT, -1U));
2355 } else if (slot_2 == NO_FRAME_T) {
2356 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
2358 g(gen_frame_load_cmp_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, R_SCRATCH_1, COND_G, slot_r));
2360 flag_set(ctx, slot_r, false);
/* Generate code for taking a sub-array [slot_from, slot_to) of
 * slot_array into slot_r. The operation itself is done by the
 * cg_upcall_array_sub runtime upcall; this function marshals the
 * arguments (spilling any register-allocated slots so the callee sees
 * them in the frame), handles the OPCODE_FLAG_FREE_ARGUMENT ownership
 * transfer, and escapes to the interpreter on flat input or failure
 * (NULL return). */
2365 static bool attr_w gen_array_sub(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_to, frame_t slot_r, frame_t flags)
2367 const struct type *t = get_type_of_local(ctx, slot_array);
2368 uint32_t escape_label, upcall_label;
2370 escape_label = alloc_escape_label(ctx);
2371 if (unlikely(!escape_label))
2374 upcall_label = alloc_label(ctx);
2375 if (unlikely(!upcall_label))
	/* flat arrays are not handled here — escape */
2378 if (unlikely(TYPE_IS_FLAT(t))) {
2379 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2382 g(gen_test_2_cached(ctx, slot_from, slot_to, escape_label));
	/* the upcall reads slots from the frame, so spill register copies */
2384 if (slot_is_register(ctx, slot_array))
2385 g(spill(ctx, slot_array));
2386 if (slot_is_register(ctx, slot_from))
2387 g(spill(ctx, slot_from));
2388 if (slot_is_register(ctx, slot_to))
2389 g(spill(ctx, slot_to));
2391 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
2393 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2394 g(gen_upcall_argument(ctx, 0));
2396 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2397 g(gen_upcall_argument(ctx, 1));
2399 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_to, 0, false, R_ARG2));
2400 g(gen_upcall_argument(ctx, 2));
	/* 4th argument: whether the callee may consume the array reference */
2402 g(gen_load_constant(ctx, R_ARG3, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2403 g(gen_upcall_argument(ctx, 3));
	/* if the slot's flag says the value may not be owned here, force
	 * the "do not free" variant at run time */
2405 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2406 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2407 g(gen_load_constant(ctx, R_ARG3, 0));
2408 g(gen_upcall_argument(ctx, 3));
2411 gen_label(upcall_label);
2412 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
	/* NULL result (e.g. allocation failure / unsupported case): escape */
2414 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
	/* if the argument was consumed, clear its slot before storing the
	 * result (unless the result overwrites the same slot anyway) */
2416 if (slot_array != slot_r) {
2417 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2418 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2419 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2420 flag_set(ctx, slot_array, false);
2424 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* Generate code for skipping the first slot_from elements of slot_array,
 * storing the resulting array in slot_r. Structure mirrors
 * gen_array_sub, but calls the 3-argument cg_upcall_array_skip upcall:
 * spill register slots, marshal (array, from, may-free) arguments,
 * downgrade the free flag at run time when the slot flag is set, escape
 * on flat input or a NULL result, then transfer ownership of the result
 * pointer into slot_r. */
2429 static bool attr_w gen_array_skip(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_r, frame_t flags)
2431 const struct type *t = get_type_of_local(ctx, slot_array);
2432 uint32_t escape_label, upcall_label;
2434 escape_label = alloc_escape_label(ctx);
2435 if (unlikely(!escape_label))
2438 upcall_label = alloc_label(ctx);
2439 if (unlikely(!upcall_label))
	/* flat arrays are not handled here — escape */
2442 if (unlikely(TYPE_IS_FLAT(t))) {
2443 g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2446 g(gen_test_1_cached(ctx, slot_from, escape_label));
	/* the upcall reads slots from the frame, so spill register copies */
2448 if (slot_is_register(ctx, slot_array))
2449 g(spill(ctx, slot_array));
2450 if (slot_is_register(ctx, slot_from))
2451 g(spill(ctx, slot_from));
2453 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
2455 g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2456 g(gen_upcall_argument(ctx, 0));
2458 g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2459 g(gen_upcall_argument(ctx, 1));
	/* 3rd argument: whether the callee may consume the array reference */
2461 g(gen_load_constant(ctx, R_ARG2, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2462 g(gen_upcall_argument(ctx, 2));
	/* if the slot flag is set the value may not be owned — pass 0 */
2464 if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2465 g(gen_test_1_cached(ctx, slot_array, upcall_label));
2466 g(gen_load_constant(ctx, R_ARG2, 0));
2467 g(gen_upcall_argument(ctx, 2));
2470 gen_label(upcall_label);
2471 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
	/* NULL result: escape to the interpreter */
2473 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
	/* clear the consumed source slot, then store the result pointer */
2475 if (slot_array != slot_r) {
2476 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2477 g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2478 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2479 flag_set(ctx, slot_array, false);
2483 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* Generate code that concatenates the arrays in slot_1 and slot_2 into
 * slot_r via the cg_upcall_array_join runtime upcall. Inline code only
 * validates the fast-path preconditions: both operands must be
 * non-thunk pointers that are not incomplete arrays, otherwise escape.
 * The flags' FREE_ARGUMENT bits decide whether each operand's reference
 * is handed over to the join (via gen_frame_get_pointer). */
2488 static bool attr_w gen_array_append(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2490 uint32_t escape_label;
2492 escape_label = alloc_escape_label(ctx);
2493 if (unlikely(!escape_label))
	/* flat operands must carry a pointer at run time, else escape */
2496 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_1))))
2497 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2498 if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_2))))
2499 g(gen_test_1_jz_cached(ctx, slot_2, escape_label));
	/* both operands must be evaluated (non-thunk) pointers */
2501 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, false, R_SCRATCH_1));
2502 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, slot_1, escape_label));
2503 g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_2, 0, false, R_SCRATCH_2));
2504 g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, slot_2, escape_label));
	/* incomplete (lazily produced) arrays are handled by the interpreter */
2506 g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_1));
2507 g(gen_compare_da_tag(ctx, R_SCRATCH_2, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_2));
	/* take (or copy) both references; slot_2's survives the next call
	 * in the callee-saved R_SAVED_1 */
2509 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SAVED_1));
2510 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
2511 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2512 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG0, R_SCRATCH_1));
2513 g(gen_upcall_argument(ctx, 0));
2514 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SAVED_1));
2515 g(gen_upcall_argument(ctx, 1));
2516 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2517 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/* Generate code appending one flat element (slot_2) in place to the
 * array in slot_1, storing the result in slot_r. The in-place fast
 * path is only valid when we own the array (OPCODE_FLAG_FREE_ARGUMENT;
 * otherwise jump straight to escape), the data object is a uniquely
 * referenced DATA_TAG_array_flat, and n_used_entries has not reached
 * n_allocated_entries — any other case escapes to the interpreter. */
2521 static bool attr_w gen_array_append_one_flat(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2523 uint32_t escape_label;
2526 escape_label = alloc_escape_label(ctx);
2527 if (unlikely(!escape_label))
	/* without ownership of the array we cannot mutate it in place */
2530 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2531 gen_insn(INSN_JMP, 0, 0, 0);
2532 gen_four(escape_label);
2536 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2537 g(gen_test_1_cached(ctx, slot_2, escape_label));
2538 flag_set(ctx, slot_2, false);
2540 g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
	/* must be a flat array with reference count 1 to mutate in place */
2542 g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_flat, escape_label, R_SCRATCH_1));
	/* load current length */
2544 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2545 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2546 gen_one(R_SCRATCH_2);
2547 gen_address_offset();
	/* escape when length == capacity (no room to append) */
2549 g(gen_check_array_len(ctx, reg, true, R_SCRATCH_2, COND_E, escape_label));
	/* store length + 1 back */
2551 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SCRATCH_2, 1, 0));
2553 g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2554 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2555 gen_address_offset();
2556 gen_one(R_SCRATCH_1);
	/* write the new element at the old length index
	 * NOTE(review): gen_scaled_array_store takes its index implicitly;
	 * the old length is in R_SCRATCH_2 here — confirm that contract */
2558 g(gen_scaled_array_store(ctx, reg, data_array_offset, slot_2));
	/* move the (possibly recompressed) array pointer to slot_r and
	 * clear the source slot, since its reference was consumed */
2560 if (slot_1 != slot_r) {
2561 #if !defined(POINTER_COMPRESSION)
2562 g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2564 unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2565 g(gen_compress_pointer(ctx, target, reg));
2566 g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2568 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2569 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2570 flag_set(ctx, slot_1, false);
/* Generate code appending one pointer element (slot_2) in place to the
 * pointer array in slot_1, result in slot_r. Parallel to
 * gen_array_append_one_flat, but for DATA_TAG_array_pointers: requires
 * ownership (OPCODE_FLAG_FREE_ARGUMENT), unique reference, and spare
 * capacity; the element pointer is taken with gen_frame_get_pointer
 * (consuming slot_2's reference when FREE_ARGUMENT_2 is set) and stored
 * through the array's external pointer block. */
2576 static bool attr_w gen_array_append_one(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2578 uint32_t escape_label;
2581 escape_label = alloc_escape_label(ctx);
2582 if (unlikely(!escape_label))
	/* without ownership of the array we cannot mutate it in place */
2585 if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2586 gen_insn(INSN_JMP, 0, 0, 0);
2587 gen_four(escape_label);
2591 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2593 g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
	/* must be a pointers array with reference count 1 */
2595 g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_pointers, escape_label, R_SCRATCH_1));
	/* load current length (kept in callee-saved R_SAVED_2 across the
	 * pointer-fetch below) */
2597 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2598 gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2600 gen_address_offset();
	/* escape when length == capacity */
2602 g(gen_check_array_len(ctx, reg, true, R_SAVED_2, COND_E, escape_label));
	/* take (or copy) the element's reference into R_SCRATCH_2 */
2604 g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SCRATCH_2));
	/* store length + 1 back */
2606 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SAVED_2, 1, 0));
2608 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2609 gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2610 gen_address_offset();
2611 gen_one(R_SCRATCH_1);
	/* fetch the array's pointer block and address element [old length] */
2613 g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2614 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2615 gen_one(R_SCRATCH_3);
2616 gen_address_offset();
2618 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_2, R_SCRATCH_3, R_SAVED_2, 0));
	/* write the new element pointer */
2620 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2621 gen_one(ARG_ADDRESS_1);
2624 gen_one(R_SCRATCH_2);
	/* move the (possibly recompressed) array pointer to slot_r and
	 * clear the source slot, since its reference was consumed */
2626 if (slot_1 != slot_r) {
2627 #if !defined(POINTER_COMPRESSION)
2628 g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2630 unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2631 g(gen_compress_pointer(ctx, target, reg));
2632 g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2634 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2635 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2636 flag_set(ctx, slot_1, false);
2642 static bool attr_w gen_io(struct codegen_context *ctx, frame_t code, frame_t slot_1, frame_t slot_2, frame_t slot_3)
2644 uint32_t reload_label, escape_label;
2647 reload_label = alloc_reload_label(ctx);
2648 if (unlikely(!reload_label))
2652 mem_free(ctx->var_aux);
2653 ctx->var_aux = NULL;
2655 ctx->var_aux = mem_alloc_array_mayfail(mem_alloc_mayfail, frame_t *, 0, 0, slot_1 + slot_2, sizeof(frame_t), &ctx->err);
2656 if (unlikely(!ctx->var_aux))
2659 for (i = 0; i < slot_1 + slot_2; i++)
2660 ctx->var_aux[i] = get_uint32(ctx);
2661 for (i = 0; i < slot_3; i++)
2664 for (i = 0; i < slot_2; i++) {
2665 frame_t input_slot = ctx->var_aux[slot_1 + i];
2666 if (slot_is_register(ctx, input_slot))
2667 g(spill(ctx, input_slot));
2670 /*gen_insn(INSN_JMP, 0, 0, 0); gen_four(alloc_escape_label(ctx));*/
2672 g(gen_upcall_start(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2673 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
2674 g(gen_upcall_argument(ctx, 0));
2676 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
2677 g(gen_upcall_argument(ctx, 1));
2679 g(gen_load_constant(ctx, R_ARG2, ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3));
2680 g(gen_upcall_argument(ctx, 2));
2681 /*debug("arg2: %08x", ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3);*/
2683 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2684 g(gen_sanitize_returned_pointer(ctx, R_RET0));
2685 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_ADDRESS, R_RET0, ptr_to_num(POINTER_FOLLOW_THUNK_GO), COND_NE, reload_label));
2687 for (i = 0; i < slot_1; i++) {
2688 frame_t output_slot = ctx->var_aux[i];
2689 flag_set_unknown(ctx, output_slot);
2692 escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2693 if (unlikely(!escape_label))
2696 g(gen_test_variables(ctx, ctx->var_aux, slot_1, escape_label));