2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
/*
 * gen_flat_move_copy - emit machine code that copies a flat (by-value)
 * slot slot_1 into slot_r.
 *
 * Emits an escape to the interpreter if slot_1's pointer flag is set
 * (i.e. it does not actually hold flat data), then a raw slot-to-slot
 * memcpy, and finally records in the flag cache that both slots are
 * flat (flags clear).
 *
 * NOTE(review): this listing has elided lines (e.g. the "return false;"
 * after a failed alloc_escape_label and the closing braces) — verify
 * against the complete source before modifying.
 */
19 static bool attr_w gen_flat_move_copy(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r)
21 uint32_t escape_label;
23 escape_label = alloc_escape_label(ctx);
24 if (unlikely(!escape_label))
/* escape if slot_1 currently holds a pointer rather than flat data */
27 g(gen_test_1_cached(ctx, slot_1, escape_label));
29 g(gen_memcpy_slots(ctx, slot_r, slot_1));
/* both slots now hold flat data: cache their flags as clear */
31 flag_set(ctx, slot_1, false);
32 flag_set(ctx, slot_r, false);
/*
 * gen_ref_move_copy - emit code that moves or copies a pointer from
 * slot_1 to slot_r for OPCODE_REF_MOVE / OPCODE_REF_COPY /
 * OPCODE_REF_MOVE_CLEAR.
 *
 * For a copy, or for a move of a possibly-borrowed pointer, the
 * reference count is bumped via the cg_upcall_pointer_reference_owned
 * upcall; a plain move of an owned pointer just clears the source flag.
 *
 * NOTE(review): '®' in this listing appears to be a mis-encoded '&reg'
 * (HTML-entity mangling), and several lines (returns, braces, the
 * label_id declaration) are elided — restore from the full source.
 */
37 static bool attr_w gen_ref_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* destination must stay flat -> we cannot store a pointer; always escape */
40 if (flag_must_be_flat(ctx, slot_r)) {
41 uint32_t escape_label = alloc_escape_label(ctx);
42 if (unlikely(!escape_label))
44 gen_insn(INSN_JMP, 0, 0, 0);
45 gen_four(escape_label);
48 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
/* source may be a thunk but destination must be data: escape on thunk */
49 if (!da(ctx->fn,function)->local_variables_flags[slot_1].must_be_data &&
50 da(ctx->fn,function)->local_variables_flags[slot_r].must_be_data) {
51 uint32_t escape_label = alloc_escape_label(ctx);
52 if (unlikely(!escape_label))
54 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
55 ctx->flag_cache[slot_r] |= FLAG_CACHE_IS_NOT_THUNK;
/* store the pointer into the destination slot and mark it as a pointer */
57 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, reg));
58 g(gen_set_1(ctx, R_FRAME, slot_r, 0, true));
59 flag_set(ctx, slot_r, true);
60 if (code == OPCODE_REF_COPY) {
/* copy: unconditionally take an extra reference */
61 g(gen_upcall_start(ctx, 1));
62 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
63 g(gen_upcall_argument(ctx, 0));
64 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
65 } else if (code == OPCODE_REF_MOVE && !da(ctx->fn,function)->local_variables_flags[slot_1].may_be_borrowed) {
/* move of an owned pointer: ownership transfers, just clear the source flag */
66 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
67 flag_set(ctx, slot_1, false);
/* possibly-borrowed move: reference only if the source actually owned it */
70 if (unlikely(!(label_id = alloc_label(ctx))))
72 if (flag_is_set(ctx, slot_1)) {
73 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
76 if (flag_is_clear(ctx, slot_1))
78 g(gen_test_1(ctx, R_FRAME, slot_1, 0, label_id, false, TEST_CLEAR));
80 g(gen_upcall_start(ctx, 1));
81 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
82 g(gen_upcall_argument(ctx, 0));
83 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* MOVE_CLEAR additionally zeroes the source slot itself */
86 if (code == OPCODE_REF_MOVE_CLEAR)
87 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
88 flag_set(ctx, slot_1, false);
/*
 * gen_box_move_copy - emit code that boxes the variable in slot_1 into
 * a heap pointer and stores that pointer in slot_r
 * (OPCODE_BOX_MOVE / OPCODE_BOX_MOVE_CLEAR).
 *
 * The actual boxing is delegated to the
 * cg_upcall_ipret_copy_variable_to_pointer upcall; the third argument
 * tells the upcall whether to consume (clear) the source.
 *
 * NOTE(review): listing has elided lines (returns, braces); verify
 * against the full source before modifying.
 */
93 static bool attr_w gen_box_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* destination must stay flat -> boxing is impossible here; always escape */
95 if (flag_must_be_flat(ctx, slot_r)) {
96 uint32_t escape_label = alloc_escape_label(ctx);
97 if (unlikely(!escape_label))
99 gen_insn(INSN_JMP, 0, 0, 0);
100 gen_four(escape_label);
/* the upcall reads the slot from memory: spill it if register-allocated */
104 if (ctx->registers[slot_1] >= 0)
105 g(spill(ctx, slot_1));
107 g(gen_upcall_start(ctx, 3));
109 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
110 g(gen_upcall_argument(ctx, 0));
112 g(gen_load_constant(ctx, R_ARG1, slot_1));
113 g(gen_upcall_argument(ctx, 1));
/* third argument: nonzero means "deref/consume the source" */
115 g(gen_load_constant(ctx, R_ARG2, code == OPCODE_BOX_MOVE_CLEAR));
116 g(gen_upcall_argument(ctx, 2));
118 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer), 3));
120 if (code == OPCODE_BOX_MOVE_CLEAR) {
121 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
122 flag_set(ctx, slot_1, false);
/* store the returned pointer into the destination slot */
125 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_eval - emit code that forces evaluation of slot_1: if the slot
 * holds a pointer and that pointer is a thunk, escape to the
 * interpreter (which will evaluate it); otherwise fall through.
 *
 * NOTE(review): '®' is a mis-encoded '&reg'; returns/braces are elided
 * in this listing.
 */
130 static bool attr_w gen_eval(struct codegen_context *ctx, frame_t slot_1)
132 uint32_t escape_label, skip_label;
135 escape_label = alloc_escape_label(ctx);
136 if (unlikely(!escape_label))
139 skip_label = alloc_label(ctx);
140 if (unlikely(!skip_label))
/* flat slot (flag clear): nothing to evaluate, skip the thunk test */
143 g(gen_test_1_jz_cached(ctx, slot_1, skip_label));
145 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
146 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
148 gen_label(skip_label);
/*
 * gen_jump - emit a jump to a bytecode-relative target.
 *
 * jmp_offset is in bytes relative to the current bytecode position;
 * the target's label is allocated on demand in ctx->code_labels[].
 * Depending on which of reg1/reg2 are valid (-1U means "not used"),
 * this emits: a compare-and-jump (both regs), a jump-on-zero (one reg),
 * an unconditional jump (COND_ALWAYS), or a conditional jump on the
 * current flags (including FP flags on LoongArch/MIPS/PA-RISC).
 *
 * NOTE(review): listing elides returns/braces and an #else/#endif
 * around the COND_FP branch; verify against the full source.
 */
155 static bool attr_w gen_jump(struct codegen_context *ctx, int32_t jmp_offset, unsigned op_size, unsigned cond, unsigned reg1, unsigned reg2)
/* convert the byte offset into a code_t-indexed instruction position */
155 ip_t ip = (ctx->current_position - da(ctx->fn,function)->code) + (jmp_offset / (int)sizeof(code_t));
156 if (likely(!ctx->code_labels[ip])) {
157 ctx->code_labels[ip] = alloc_label(ctx);
158 if (unlikely(!ctx->code_labels[ip]))
161 if (reg1 != -1U && reg2 != -1U) {
162 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size_cmp(op_size), reg1, reg2, cond, ctx->code_labels[ip]));
163 } else if (reg1 != -1U) {
164 g(gen_jmp_on_zero(ctx, op_size, reg1, cond, ctx->code_labels[ip]));
165 } else if (cond == COND_ALWAYS) {
166 gen_insn(INSN_JMP, 0, 0, 0);
167 gen_four(ctx->code_labels[ip]);
168 #if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_PARISC)
169 } else if (cond & COND_FP) {
170 gen_insn(INSN_JMP_FP_TEST, 0, cond, 0);
171 gen_four(ctx->code_labels[ip]);
/* jump conditionally on already-set integer flags */
174 gen_insn(COND_IS_LOGICAL(cond) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, i_size_cmp(op_size), cond, 0);
175 gen_four(ctx->code_labels[ip]);
/*
 * gen_cond_jump - emit a conditional jump taken when the flat option
 * value in `slot` is zero (COND_E after testing it).
 *
 * On s390/x86 a register-resident slot can be compared directly against
 * memory-style operands; otherwise the value is loaded first and tested
 * via gen_jump().
 *
 * NOTE(review): '®1' is a mis-encoded '&reg1'; the listing also elides
 * several lines in the register-resident path.
 */
180 static bool attr_w gen_cond_jump(struct codegen_context *ctx, frame_t slot, int32_t jmp_offset)
182 unsigned size = log_2(sizeof(ajla_flat_option_t));
183 size_t attr_unused offset;
185 if (ctx->registers[slot] >= 0) {
188 #if defined(ARCH_S390) || defined(ARCH_X86)
189 offset = (size_t)slot * slot_size;
190 #if defined(ARCH_S390)
191 if (size != OP_SIZE_1)
/* compare the in-frame copy of the slot against an immediate */
194 g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_MVI_CLI_OFFSET, size));
195 gen_insn(INSN_CMP, size, 0, 2);
196 gen_address_offset();
200 g(gen_jump(ctx, jmp_offset, size, COND_E, -1U, -1U));
/* generic path: load the option value, then jump if it equals zero */
205 g(gen_frame_get(ctx, size, i_size(size) == size ? garbage : native, slot, 0, R_SCRATCH_1, ®1));
206 g(gen_jump(ctx, jmp_offset, size, COND_E, reg1, -1U));
/*
 * gen_load_fn_or_curry - emit code that allocates a function-reference
 * data object and stores a pointer to it in slot_r.
 *
 * If fn_idx != NO_FRAME_T the reference points directly at an entry of
 * the current function's local directory ("load fn"); otherwise
 * (curry == true) it indirects through the function pointer held in
 * slot_fn.  The ctx->args[] arguments are then packed into the object:
 * small builtin flat values are stored inline with their type tag,
 * larger/non-builtin flat values are boxed via cg_upcall_flat_to_data,
 * and pointers are stored with TYPE_TAG_unknown.
 *
 * NOTE(review): this listing elides many lines (returns, braces,
 * else-branches, several operand emissions); treat the control flow
 * described above as a reconstruction to be confirmed against the full
 * source.
 */
212 static bool attr_w gen_load_fn_or_curry(struct codegen_context *ctx, frame_t fn_idx, frame_t slot_fn, frame_t slot_r, unsigned flags)
212 bool curry = fn_idx == NO_FRAME_T;
213 uint32_t escape_label;
216 escape_label = alloc_escape_label(ctx);
217 if (unlikely(!escape_label))
/* allocate the reference object sized for args_l arguments */
220 g(gen_upcall_start(ctx, 1));
222 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
223 g(gen_upcall_argument(ctx, 0));
225 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
226 g(gen_sanitize_returned_pointer(ctx, R_RET0));
227 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* keep the new object in R_SAVED_1 across the following upcalls */
229 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* direct case: point u.direct at local_directory[fn_idx], is_indirect = 0 */
232 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
234 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.direct), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
235 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
236 gen_address_offset();
237 gen_one(R_SCRATCH_1);
239 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
240 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
241 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
242 gen_address_offset();
/* curry case: store the pointer from slot_fn into u.indirect, is_indirect = 1 */
245 g(gen_frame_get_pointer(ctx, slot_fn, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
247 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.indirect), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
248 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
249 gen_address_offset();
250 gen_one(R_SCRATCH_1);
252 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
253 g(gen_imm(ctx, 1, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
254 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
255 gen_address_offset();
/* pack each argument into the reference object */
259 for (i = 0; i < ctx->args_l; i++) {
260 uintptr_t arg_offset_tag = offsetof(struct data, u_.function_reference.arguments[i].tag);
261 uintptr_t arg_offset_ptr = offsetof(struct data, u_.function_reference.arguments[i].u.ptr);
262 uintptr_t arg_offset_slot = offsetof(struct data, u_.function_reference.arguments[i].u.slot);
263 frame_t arg_slot = ctx->args[i].slot;
264 const struct type *t = get_type_of_local(ctx, arg_slot);
265 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
266 skip_flat_label = alloc_label(ctx);
267 if (unlikely(!skip_flat_label))
269 set_ptr_label = alloc_label(ctx);
270 if (unlikely(!set_ptr_label))
272 next_arg_label = alloc_label(ctx);
273 if (unlikely(!next_arg_label))
275 if (TYPE_IS_FLAT(t)) {
276 g(gen_test_1_cached(ctx, arg_slot, skip_flat_label));
/* small builtin flat value: store it inline together with its tag */
277 if (t->size <= slot_size && TYPE_TAG_IS_BUILTIN(t->tag)) {
278 unsigned copy_size = OP_SIZE_SLOT;
279 if (is_power_of_2(t->size))
280 copy_size = log_2(t->size);
282 copy_size = maximum(copy_size, OP_SIZE_4);
283 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
284 g(gen_imm(ctx, t->tag, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
285 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
286 gen_address_offset();
289 if (ctx->registers[arg_slot] >= 0) {
290 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
291 gen_insn(INSN_MOV, spill_size(t), 0, 0);
292 gen_address_offset();
293 gen_one(ctx->registers[arg_slot]);
296 #if defined(ARCH_S390)
/* s390 without long displacement can't do a 1-byte load here; mask instead */
297 if (copy_size == OP_SIZE_1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
298 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, IMM_PURPOSE_LDR_OFFSET, copy_size));
299 gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
300 gen_one(R_SCRATCH_1);
301 gen_one(R_SCRATCH_1);
302 gen_address_offset();
306 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, ARCH_PREFERS_SX(copy_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, copy_size));
307 gen_insn(ARCH_PREFERS_SX(copy_size) ? INSN_MOVSX : INSN_MOV, copy_size, 0, 0);
308 gen_one(R_SCRATCH_1);
309 gen_address_offset();
312 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
313 gen_insn(INSN_MOV, copy_size, 0, 0);
314 gen_address_offset();
315 gen_one(R_SCRATCH_1);
317 gen_insn(INSN_JMP, 0, 0, 0);
318 gen_four(next_arg_label);
/* large/non-builtin flat value: box it via the flat_to_data upcall */
320 if (ctx->registers[arg_slot] >= 0)
321 g(spill(ctx, arg_slot));
323 g(gen_upcall_start(ctx, 3));
325 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
326 g(gen_upcall_argument(ctx, 0));
328 g(gen_load_constant(ctx, R_ARG1, arg_slot));
329 g(gen_upcall_argument(ctx, 1));
331 g(gen_imm(ctx, (size_t)arg_slot * slot_size, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
332 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
336 g(gen_upcall_argument(ctx, 2));
338 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
340 gen_insn(INSN_JMP, 0, 0, 0);
341 gen_four(set_ptr_label);
/* pointer argument: fetch it (possibly consuming the slot) into R_RET0 */
345 gen_label(skip_flat_label);
346 g(gen_frame_get_pointer(ctx, arg_slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* common pointer store: tag = unknown, u.ptr = R_RET0 */
348 gen_label(set_ptr_label);
349 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
350 g(gen_imm(ctx, TYPE_TAG_unknown, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
351 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
352 gen_address_offset();
355 g(gen_address(ctx, R_SAVED_1, arg_offset_ptr, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
356 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
357 gen_address_offset();
360 gen_label(next_arg_label);
/* finally store the (compressed) reference pointer into slot_r */
363 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
364 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_call - emit code for OPCODE_CALL / OPCODE_CALL_STRICT /
 * OPCODE_CALL_SPARK: build the callee's stack frame below the current
 * one and tail-jump into the callee's generated code.
 *
 * Sequence: check available stack slots (escape if insufficient),
 * initialize the new frame header (available_slots, previous_ip,
 * timestamp, call mode), clear the new frame's flag bitmap, copy or
 * reference each argument into the callee's argument slots, store the
 * callee's function pointer, shift R_FRAME, and jump indirectly through
 * the callee's codegen data (escaping if it is still a thunk).
 *
 * NOTE(review): '®' is a mis-encoded '&reg'; the listing elides
 * numerous lines (variable declarations for v/i/reg, returns, braces,
 * #else/#endif, spill loop body) — confirm against the full source.
 */
371 static bool attr_w gen_call(struct codegen_context *ctx, code_t code, frame_t fn_idx)
371 struct data *new_fn = ctx->local_directory[fn_idx];
372 frame_t required_slots = da(new_fn,function)->frame_slots;
373 frame_t bitmap_slots = da(new_fn,function)->n_bitmap_slots;
375 uint32_t escape_label;
376 int64_t new_fp_offset;
377 uchar_efficient_t call_mode;
379 bool arch_use_flags = ARCH_HAS_FLAGS;
380 #if defined(ARCH_POWER)
381 arch_use_flags = false;
384 escape_label = alloc_escape_label(ctx);
385 if (unlikely(!escape_label))
/* all live register-resident slots must be in memory before the call */
388 for (v = MIN_USEABLE_SLOT; v < function_n_variables(ctx->fn); v++) {
389 if (ctx->registers[v] >= 0) {
/* stack check: available_slots -= required_slots, escape on underflow */
394 g(gen_frame_load_raw(ctx, log_2(sizeof(stack_size_t)), native, 0, frame_offs(available_slots), R_SCRATCH_1));
395 g(gen_imm(ctx, required_slots, IMM_PURPOSE_SUB, i_size(log_2(sizeof(stack_size_t)))));
396 gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(i_size(log_2(sizeof(stack_size_t)))), i_size(log_2(sizeof(stack_size_t))), ALU_SUB, arch_use_flags);
397 gen_one(R_SCRATCH_1);
398 gen_one(R_SCRATCH_1);
401 if (arch_use_flags) {
402 gen_insn(COND_IS_LOGICAL(COND_B) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, log_2(sizeof(stack_size_t)), COND_B, 0);
403 gen_four(escape_label);
405 g(gen_cmp_test_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, R_SCRATCH_1, COND_S, escape_label));
/* the callee frame lives below ours: negative offset from R_FRAME */
408 new_fp_offset = -(ssize_t)(required_slots * slot_size);
410 g(gen_frame_store_raw(ctx, log_2(sizeof(stack_size_t)), 0, new_fp_offset + frame_offs(available_slots), R_SCRATCH_1));
411 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(ip_t)), 0, new_fp_offset + frame_offs(previous_ip), ctx->return_values - da(ctx->fn,function)->code));
412 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), R_SCRATCH_1));
413 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
414 call_mode = code == OPCODE_CALL ? CALL_MODE_NORMAL : code == OPCODE_CALL_STRICT ? CALL_MODE_STRICT : CALL_MODE_SPARK;
415 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(uchar_efficient_t)), 0, new_fp_offset + frame_offs(mode), call_mode));
417 g(gen_clear_bitmap(ctx, frame_offset, R_FRAME, new_fp_offset, bitmap_slots));
/* transfer each argument into the callee's frame */
419 for (i = 0; i < ctx->args_l; i++) {
420 const struct code_arg *src_arg = &ctx->args[i];
421 const struct local_arg *dest_arg = &da(new_fn,function)->args[i];
422 const struct type *t = get_type_of_local(ctx, src_arg->slot);
423 uint32_t non_flat_label, thunk_label, incr_ref_label, next_arg_label;
425 non_flat_label = alloc_label(ctx);
426 if (unlikely(!non_flat_label))
428 thunk_label = alloc_label(ctx);
429 if (unlikely(!thunk_label))
431 incr_ref_label = alloc_label(ctx);
432 if (unlikely(!incr_ref_label))
434 next_arg_label = alloc_label(ctx);
435 if (unlikely(!next_arg_label))
437 if (TYPE_IS_FLAT(t)) {
438 g(gen_test_1_cached(ctx, src_arg->slot, non_flat_label));
/* callee accepts flat data: plain memcpy into its slot */
439 if (dest_arg->may_be_flat) {
440 g(gen_memcpy_from_slot(ctx, R_FRAME, new_fp_offset + (size_t)dest_arg->slot * slot_size, src_arg->slot));
/* callee needs a pointer: box the flat value via flat_to_data */
442 g(gen_upcall_start(ctx, 3));
444 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
445 g(gen_upcall_argument(ctx, 0));
447 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
448 g(gen_upcall_argument(ctx, 1));
450 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
451 g(gen_upcall_argument(ctx, 2));
453 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
455 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_RET0));
457 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
460 if (flag_is_clear(ctx, src_arg->slot))
461 goto skip_ref_argument;
463 gen_insn(INSN_JMP, 0, 0, 0);
464 gen_four(next_arg_label);
466 gen_label(non_flat_label);
/* lend: pass the pointer without taking a reference (callee borrows) */
468 if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_LEND) {
469 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
470 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
471 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
472 gen_insn(INSN_JMP, 0, 0, 0);
473 gen_four(next_arg_label);
/* give: move the pointer, clearing our copy of the slot */
474 } else if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_GIVE) {
475 g(gen_test_1_cached(ctx, src_arg->slot, thunk_label));
476 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
477 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
478 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
479 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
480 gen_insn(INSN_JMP, 0, 0, 0);
481 gen_four(next_arg_label);
/* thunk / owned-pointer path: callee gets an owned reference */
484 gen_label(thunk_label);
485 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
486 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
487 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
488 if (src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT) {
489 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
/* source owned the pointer: ownership transfers, no refcount change */
490 if (flag_is_set(ctx, src_arg->slot)) {
491 g(gen_set_1(ctx, R_FRAME, src_arg->slot, 0, false));
492 flag_set(ctx, src_arg->slot, false);
493 goto skip_ref_argument;
495 if (flag_is_clear(ctx, src_arg->slot))
497 g(gen_test_1(ctx, R_FRAME, src_arg->slot, 0, incr_ref_label, true, TEST_CLEAR));
498 gen_insn(INSN_JMP, 0, 0, 0);
499 gen_four(next_arg_label);
502 gen_label(incr_ref_label);
504 g(gen_upcall_start(ctx, 1));
506 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
507 g(gen_upcall_argument(ctx, 0));
509 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
512 gen_label(next_arg_label);
/* store the callee's function pointer into the new frame header */
515 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
517 g(gen_address(ctx, R_SCRATCH_1, 0, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
518 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
519 gen_one(R_SCRATCH_1);
520 gen_address_offset();
522 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
524 g(gen_frame_store_raw(ctx, OP_SIZE_ADDRESS, 0, frame_offs(function) + new_fp_offset, R_SCRATCH_1));
/* shift the frame pointer down to the callee's frame */
526 #if !defined(ARCH_X86) && !defined(ARCH_PARISC)
527 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_SUB, R_FRAME, R_FRAME, -new_fp_offset, 0));
529 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* load the callee's codegen pointer; escape if it is still a thunk */
532 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
533 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
534 gen_one(R_SCRATCH_1);
535 gen_address_offset();
537 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, ctx->escape_nospill_label));
540 gen_pointer_compression(R_SCRATCH_1);
/* tail-jump into the callee's unoptimized code */
541 #if (defined(ARCH_X86) && !defined(ARCH_X86_X32)) || defined(ARCH_ARM32)
542 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
543 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
544 gen_address_offset_compressed();
546 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
547 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
548 gen_one(R_SCRATCH_1);
549 gen_address_offset_compressed();
551 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
552 gen_one(R_SCRATCH_1);
554 g(clear_flag_cache(ctx));
/*
 * gen_return - emit code for OPCODE_RETURN: copy the current frame's
 * return values into the caller's frame, restore the caller's frame
 * pointer, and jump back into the caller's generated code at the
 * instruction after the call.
 *
 * The caller's return-value descriptors are read from its bytecode
 * (located via previous_ip); each value is either memcpy'd flat (when
 * the call site allows flat returns), boxed via cg_upcall_flat_to_data,
 * or passed as a (possibly newly referenced) pointer.  The final
 * indirect jump indexes the caller's codegen entry table with the
 * return-address code word.
 *
 * NOTE(review): '®1' is a mis-encoded '&reg1'; this listing elides
 * many lines (declarations of i, escape on missing caller frame
 * details, #else/#endif pairs, the scaled_store_done/scaled_jmp_done
 * labels) — confirm control flow against the full source.
 */
561 static bool attr_w gen_return(struct codegen_context *ctx)
561 int64_t new_fp_offset;
562 uint32_t escape_label;
564 int64_t retval_offset;
565 unsigned attr_unused reg1;
567 escape_label = alloc_escape_label(ctx);
568 if (unlikely(!escape_label))
/* caller's frame sits above ours by our full frame size */
571 new_fp_offset = (size_t)da(ctx->fn,function)->frame_slots * slot_size;
573 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), R_SCRATCH_2));
/* no caller function (top frame): escape to the interpreter */
575 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_SCRATCH_2, COND_E, escape_label));
577 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
578 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
579 gen_one(R_SCRATCH_1);
580 gen_address_offset();
/* caller's code is not compiled yet: escape */
582 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
585 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), R_SCRATCH_1));
586 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
/* R_SAVED_1 = address of the caller's return-value descriptors */
588 g(gen_frame_load_raw(ctx, log_2(sizeof(ip_t)), native, 0, frame_offs(previous_ip), R_SCRATCH_1));
590 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
591 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
592 gen_one(R_SCRATCH_2);
593 gen_address_offset();
595 g(gen_lea3(ctx, R_SAVED_1, R_SCRATCH_2, R_SCRATCH_1, log_2(sizeof(code_t)), 0));
/* write each return value into the caller's destination slot */
598 for (i = 0; i < ctx->args_l; i++) {
599 const struct code_arg *src_arg = &ctx->args[i];
600 const struct type *t = get_type_of_local(ctx, src_arg->slot);
601 uint32_t copy_ptr_label, load_write_ptr_label, write_ptr_label, next_arg_label;
603 copy_ptr_label = alloc_label(ctx);
604 if (unlikely(!copy_ptr_label))
607 load_write_ptr_label = alloc_label(ctx);
608 if (unlikely(!load_write_ptr_label))
611 write_ptr_label = alloc_label(ctx);
612 if (unlikely(!write_ptr_label))
615 next_arg_label = alloc_label(ctx);
616 if (unlikely(!next_arg_label))
/* R_SAVED_2 = destination slot index from the caller's bytecode */
619 g(gen_load_code_32(ctx, R_SAVED_2, R_SAVED_1, retval_offset));
621 if (TYPE_IS_FLAT(t)) {
622 uint32_t flat_to_data_label;
623 g(gen_test_1_cached(ctx, src_arg->slot, copy_ptr_label));
625 flat_to_data_label = alloc_label(ctx);
626 if (unlikely(!flat_to_data_label))
/* does the call site accept a flat return value? */
629 #if defined(ARCH_X86)
630 g(gen_address(ctx, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(code_t))));
631 g(gen_imm(ctx, OPCODE_MAY_RETURN_FLAT, IMM_PURPOSE_TEST, log_2(sizeof(code_t))));
632 gen_insn(INSN_TEST, log_2(sizeof(code_t)), 0, 1);
633 gen_address_offset();
636 gen_insn(INSN_JMP_COND, log_2(sizeof(code_t)), COND_E, 0);
637 gen_four(flat_to_data_label);
639 g(gen_load_two(ctx, R_SCRATCH_1, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3)));
641 g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, OPCODE_MAY_RETURN_FLAT, COND_E, flat_to_data_label));
/* flat copy into the caller's slot (x86 can use scaled addressing) */
643 #if defined(ARCH_X86)
644 if (is_power_of_2(t->size) && t->size <= 2U << OP_SIZE_NATIVE) {
645 if (t->size == 2U << OP_SIZE_NATIVE) {
646 g(gen_frame_load_2(ctx, OP_SIZE_NATIVE, src_arg->slot, 0, R_SCRATCH_1, R_SCRATCH_2));
648 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
649 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
652 gen_eight(new_fp_offset + lo_word(OP_SIZE_NATIVE));
653 gen_one(R_SCRATCH_1);
655 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
656 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
659 gen_eight(new_fp_offset + hi_word(OP_SIZE_NATIVE));
660 gen_one(R_SCRATCH_2);
662 g(gen_frame_get(ctx, log_2(t->size), garbage, src_arg->slot, 0, R_SCRATCH_1, ®1));
664 gen_insn(INSN_MOV, log_2(t->size), 0, 0);
665 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
668 gen_eight(new_fp_offset);
/* generic flat copy: compute destination address, memcpy */
674 g(gen_lea3(ctx, R_SCRATCH_2, R_FRAME, R_SAVED_2, OP_SIZE_SLOT, new_fp_offset));
676 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, 0, src_arg->slot));
679 gen_insn(INSN_JMP, 0, 0, 0);
680 gen_four(next_arg_label);
/* call site needs a pointer: box the flat value */
682 gen_label(flat_to_data_label);
684 if (ctx->registers[src_arg->slot] >= 0)
685 g(spill(ctx, src_arg->slot));
687 g(gen_upcall_start(ctx, 3));
689 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
690 g(gen_upcall_argument(ctx, 0));
692 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
693 g(gen_upcall_argument(ctx, 1));
695 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
696 g(gen_upcall_argument(ctx, 2));
698 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
700 if (flag_is_clear(ctx, src_arg->slot))
701 goto skip_ref_argument;
703 gen_insn(INSN_JMP, 0, 0, 0);
704 gen_four(write_ptr_label);
/* pointer return: take a reference unless ownership can be transferred */
707 gen_label(copy_ptr_label);
709 if (unlikely(!(src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT))) {
710 g(gen_upcall_start(ctx, 1));
711 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
712 g(gen_upcall_argument(ctx, 0));
713 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
714 } else if (da(ctx->fn,function)->local_variables_flags[src_arg->slot].may_be_borrowed) {
715 g(gen_test_1_cached(ctx, src_arg->slot, load_write_ptr_label));
716 g(gen_upcall_start(ctx, 1));
717 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
718 g(gen_upcall_argument(ctx, 0));
719 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
722 gen_label(load_write_ptr_label);
724 g(gen_frame_load_slot(ctx, src_arg->slot, R_RET0));
/* store the pointer (R_RET0) at caller slot R_SAVED_2 and set its flag */
727 gen_label(write_ptr_label);
729 #if defined(ARCH_X86)
730 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
731 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
734 gen_eight(new_fp_offset);
736 goto scaled_store_done;
738 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
739 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
740 gen_one(R_SCRATCH_3);
742 gen_one(ARG_SHIFTED_REGISTER);
743 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
746 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
747 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
748 gen_address_offset();
750 goto scaled_store_done;
/* no shifted-add: compute the scaled address manually */
753 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_3, R_SAVED_2, OP_SIZE_SLOT, false));
755 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_3, R_SCRATCH_3, R_FRAME, 0));
757 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
758 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
759 gen_address_offset();
763 g(gen_set_1_variable(ctx, R_SAVED_2, new_fp_offset, true));
765 gen_label(next_arg_label);
/* advance to the next return-value descriptor in the caller's bytecode */
767 retval_offset += 4 + 2 * (ARG_MODE_N >= 3);
/* reload the caller's function and jump back after the call site */
770 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), R_SCRATCH_1));
772 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
773 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
774 gen_one(R_SCRATCH_1);
775 gen_address_offset();
777 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
/* R_SCRATCH_2 = index of the return entry point in the caller's code table */
779 g(gen_load_code_32(ctx, R_SCRATCH_2, R_SAVED_1, retval_offset + 2));
/* restore the caller's frame pointer */
781 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* indirect jump through unoptimized_code[R_SCRATCH_2], per-arch forms */
783 #if defined(ARCH_X86) && !defined(ARCH_X86_X32)
784 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
785 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
786 gen_one(R_SCRATCH_1);
787 gen_one(R_SCRATCH_2);
788 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
790 goto scaled_jmp_done;
792 #if defined(ARCH_X86)
793 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
794 gen_one(R_SCRATCH_1);
795 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
796 gen_one(R_SCRATCH_1);
797 gen_one(R_SCRATCH_2);
798 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
800 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
801 gen_one(R_SCRATCH_1);
803 goto scaled_jmp_done;
805 #if defined(ARCH_ARM32)
806 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
807 gen_one(R_SCRATCH_1);
808 gen_one(R_SCRATCH_1);
809 gen_one(ARG_SHIFTED_REGISTER);
810 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
811 gen_one(R_SCRATCH_2);
813 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
814 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
815 gen_address_offset();
817 goto scaled_jmp_done;
819 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_ADDRESS)) {
820 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
821 gen_one(R_SCRATCH_1);
822 gen_one(R_SCRATCH_1);
823 gen_one(ARG_SHIFTED_REGISTER);
824 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
825 gen_one(R_SCRATCH_2);
827 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
828 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
829 gen_one(R_SCRATCH_1);
830 gen_address_offset();
832 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
833 gen_one(R_SCRATCH_1);
835 goto scaled_jmp_done;
/* generic fallback: shift, add, load, jump */
838 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_2, R_SCRATCH_2, OP_SIZE_ADDRESS, false));
840 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 0));
842 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
843 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
844 gen_one(R_SCRATCH_1);
845 gen_address_offset();
847 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
848 gen_one(R_SCRATCH_1);
850 goto scaled_jmp_done;
/* Forward declarations for array-addressing helpers used by gen_structured(). */
855 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src);
856 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label);
858 static bool attr_w gen_structured(struct codegen_context *ctx, frame_t slot_struct, frame_t slot_elem)
860 uint32_t escape_label;
861 const struct type *struct_type, *elem_type;
865 escape_label = alloc_escape_label(ctx);
866 if (unlikely(!escape_label))
869 struct_type = get_type_of_local(ctx, slot_struct);
870 elem_type = get_type_of_local(ctx, slot_elem);
872 if (TYPE_IS_FLAT(struct_type) && struct_type->tag != TYPE_TAG_flat_option) {
873 if (!TYPE_IS_FLAT(elem_type)) {
876 g(gen_test_1_cached(ctx, slot_struct, escape_label));
877 flag_set(ctx, slot_struct, false);
881 g(gen_test_1_jz_cached(ctx, slot_struct, escape_label));
885 if (ctx->registers[slot_struct] >= 0)
886 g(spill(ctx, slot_struct));
887 g(gen_frame_address(ctx, slot_struct, 0, R_SAVED_1));
889 for (i = 0; i < ctx->args_l; i++) {
890 frame_t param_slot = ctx->args[i].slot;
892 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
893 case OPCODE_STRUCTURED_RECORD: {
894 struct flat_record_definition_entry *e;
895 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_record, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_record));
896 e = &type_def(struct_type,flat_record)->entries[param_slot];
898 g(gen_imm(ctx, e->flat_offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
899 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
904 struct_type = e->subtype;
907 case OPCODE_STRUCTURED_ARRAY: {
908 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_array, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_array));
909 g(gen_test_1_cached(ctx, param_slot, escape_label));
910 flag_set(ctx, param_slot, false);
911 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, 0, R_SCRATCH_1, ®1));
913 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg1, type_def(struct_type,flat_array)->n_elements, COND_AE, escape_label));
915 g(gen_scaled_array_address(ctx, type_def(struct_type,flat_array)->base->size, R_SAVED_1, R_SAVED_1, reg1, 0));
917 struct_type = type_def(struct_type,flat_array)->base;
921 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
924 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
925 gen_one(R_SCRATCH_1);
926 gen_one(ARG_ADDRESS_1);
930 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
933 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
935 g(gen_compare_refcount(ctx, R_SCRATCH_1, REFCOUNT_STEP, COND_AE, escape_label));
937 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
938 case OPCODE_STRUCTURED_RECORD: {
939 const struct type *rec_type, *e_type;
940 rec_type = da_type(ctx->fn, ctx->args[i].type);
941 TYPE_TAG_VALIDATE(rec_type->tag);
942 if (unlikely(rec_type->tag == TYPE_TAG_flat_record))
943 rec_type = type_def(rec_type,flat_record)->base;
944 e_type = type_def(rec_type,record)->types[param_slot];
945 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
946 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, true, TEST));
948 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, false, TEST));
949 struct_type = e_type;
951 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, data_record_offset + (size_t)param_slot * slot_size, 0));
954 case OPCODE_STRUCTURED_OPTION: {
955 unsigned op_size = log_2(sizeof(ajla_option_t));
956 #if defined(ARCH_X86)
957 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
958 g(gen_imm(ctx, param_slot, IMM_PURPOSE_CMP, op_size));
959 gen_insn(INSN_CMP, op_size, 0, 1);
960 gen_address_offset();
963 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
964 gen_four(escape_label);
966 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
967 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
968 gen_one(R_SCRATCH_2);
969 gen_address_offset();
971 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
973 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
976 case OPCODE_STRUCTURED_ARRAY: {
977 const struct type *e_type = da_type(ctx->fn, ctx->args[i].type);
979 g(gen_test_1_cached(ctx, param_slot, escape_label));
980 flag_set(ctx, param_slot, false);
982 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, 0, R_SCRATCH_2, ®2));
984 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
986 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
987 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
989 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
990 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
991 gen_one(R_SCRATCH_1);
992 gen_address_offset();
994 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_1, R_SCRATCH_1, reg2, 0));
996 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_flat, COND_NE, escape_label, R_SCRATCH_3));
998 g(gen_scaled_array_address(ctx, e_type->size, R_SAVED_1, R_SCRATCH_1, reg2, data_array_offset));
1000 struct_type = e_type;
1005 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
1012 g(gen_test_1_cached(ctx, slot_elem, escape_label));
1013 flag_set(ctx, slot_elem, false);
1014 g(gen_memcpy_from_slot(ctx, R_SAVED_1, 0, slot_elem));
1016 uint32_t skip_deref_label;
1017 skip_deref_label = alloc_label(ctx);
1018 if (unlikely(!skip_deref_label))
1021 if (TYPE_IS_FLAT(elem_type))
1022 g(gen_test_1_jz_cached(ctx, slot_elem, escape_label));
1024 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1025 gen_one(R_SCRATCH_1);
1026 gen_one(ARG_ADDRESS_1);
1030 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_SCRATCH_1, COND_E, skip_deref_label));
1032 g(gen_upcall_start(ctx, 1));
1033 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1034 g(gen_upcall_argument(ctx, 0));
1035 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1037 gen_label(skip_deref_label);
1039 g(gen_frame_get_pointer(ctx, slot_elem, (ctx->args[i - 1].flags & OPCODE_STRUCTURED_FREE_VARIABLE) != 0, R_SCRATCH_1));
1041 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1042 gen_one(ARG_ADDRESS_1);
1045 gen_one(R_SCRATCH_1);
/*
 * gen_record_create - emit machine code that builds a record value from
 * the arguments collected in ctx->args[] and stores it into slot_r.
 *
 * Two code paths:
 *  - flat record (TYPE_TAG_flat_record): each argument slot is verified
 *    non-thunk (gen_test_1_cached escapes to the interpreter otherwise)
 *    and then memcpy'd directly into slot_r's frame storage at the
 *    member's flat_offset.
 *  - boxed record: a record object is allocated via the
 *    cg_upcall_data_alloc_record_mayfail upcall, its per-slot flag
 *    bitmap is cleared, and each member is either copied flat,
 *    converted with cg_upcall_flat_to_data, or stored as a pointer
 *    taken from the frame; finally the (compressed) pointer is written
 *    to slot_r.
 *
 * Returns false only on label-allocation failure; emission errors
 * propagate through the g() macro (convention used file-wide).
 *
 * NOTE(review): the embedded numbering in this listing has gaps
 * (e.g. 1052, 1060-1061), so some statements (returns, braces) are
 * elided from this view; comments below describe only what is visible.
 */
1051 static bool attr_w gen_record_create(struct codegen_context *ctx, frame_t slot_r)
1053 	const struct type *t;
1054 	const struct record_definition *def;
1055 	uint32_t escape_label;
1058 	escape_label = alloc_escape_label(ctx);
1059 	if (unlikely(!escape_label))
1062 	t = get_type_of_local(ctx, slot_r);
/* Flat-record fast path: copy members straight into the frame. */
1063 	if (t->tag == TYPE_TAG_flat_record) {
1064 		const struct flat_record_definition *flat_def;
1065 		const struct type *flat_type = t;
1066 		t = type_def(t,flat_record)->base;
1067 		def = type_def(t,record);
1068 		flat_def = type_def(flat_type,flat_record);
/* First pass: every source slot must hold flat data (escape on thunk). */
1069 		for (i = 0; i < ctx->args_l; i++) {
1070 			frame_t var_slot = ctx->args[i].slot;
1071 			g(gen_test_1_cached(ctx, var_slot, escape_label));
1072 			flag_set(ctx, var_slot, false);
/* Second pass: copy each argument to its flat offset within slot_r.
 * ii tracks record-definition slots and skips elided members. */
1074 		for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1075 			frame_t var_slot, flat_offset, record_slot;
1076 			while (unlikely(record_definition_is_elided(def, ii)))
1078 			var_slot = ctx->args[i].slot;
1079 			record_slot = record_definition_slot(def, ii);
1080 			flat_offset = flat_def->entries[record_slot].flat_offset;
1081 			g(gen_memcpy_from_slot(ctx, R_FRAME, (size_t)slot_r * slot_size + flat_offset, var_slot));
/* Boxed-record path: allocate the record via upcall(frame, slot_r). */
1086 	def = type_def(t,record);
1088 	g(gen_upcall_start(ctx, 2));
1090 	g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1091 	g(gen_upcall_argument(ctx, 0));
1093 	g(gen_load_constant(ctx, R_ARG1, slot_r));
1094 	g(gen_upcall_argument(ctx, 1));
1096 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1097 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL return from the allocator -> escape to the interpreter. */
1098 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the new record in R_SAVED_1 across the per-member upcalls. */
1100 	g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1102 	g(gen_clear_bitmap(ctx, 0, R_SAVED_1, data_record_offset, bitmap_slots(def->n_slots)));
1104 	for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1105 		frame_t var_slot, var_flags, record_slot;
1106 		const struct type *var_type, *record_type;
1107 		uint32_t skip_flat_label, set_ptr_label, next_arg_label;
1109 		skip_flat_label = alloc_label(ctx);
1110 		if (unlikely(!skip_flat_label))
1112 		set_ptr_label = alloc_label(ctx);
1113 		if (unlikely(!set_ptr_label))
1115 		next_arg_label = alloc_label(ctx);
1116 		if (unlikely(!next_arg_label))
1119 		while (unlikely(record_definition_is_elided(def, ii)))
1121 		var_slot = ctx->args[i].slot;
1122 		var_type = get_type_of_local(ctx, var_slot);
1123 		var_flags = ctx->args[i].flags;
1124 		record_slot = record_definition_slot(def, ii);
1125 		record_type = def->types[record_slot];
1126 		if (TYPE_IS_FLAT(var_type)) {
/* If the source slot holds a thunk/pointer, fall through to the
 * pointer path at skip_flat_label instead of copying flat data. */
1127 			g(gen_test_1_cached(ctx, var_slot, skip_flat_label));
1128 			if (TYPE_IS_FLAT(record_type)) {
1129 				g(gen_memcpy_from_slot(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, var_slot));
1131 				gen_insn(INSN_JMP, 0, 0, 0);
1132 				gen_four(next_arg_label);
/* Flat source but boxed member: spill the register copy so the
 * flat_to_data upcall reads current data from the frame. */
1134 				if (ctx->registers[var_slot] >= 0)
1135 					g(spill(ctx, var_slot));
1137 				g(gen_upcall_start(ctx, 3));
1139 				g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1140 				g(gen_upcall_argument(ctx, 0));
1142 				g(gen_load_constant(ctx, R_ARG1, var_slot));
1143 				g(gen_upcall_argument(ctx, 1));
1145 				g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)var_slot * slot_size, 0));
1146 				g(gen_upcall_argument(ctx, 2));
1148 				g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1150 				gen_insn(INSN_JMP, 0, 0, 0);
1151 				gen_four(set_ptr_label);
/* Pointer path: take (and possibly consume, per FREE_ARGUMENT flag)
 * the pointer from the frame slot into R_RET0. */
1155 		gen_label(skip_flat_label);
1156 		g(gen_frame_get_pointer(ctx, var_slot, (var_flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1158 		gen_label(set_ptr_label);
1159 		g(gen_address(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1160 		gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1161 		gen_address_offset();
/* Mark the record slot's flag bit: this member holds a pointer. */
1164 		g(gen_set_1(ctx, R_SAVED_1, record_slot, data_record_offset, true));
1166 		gen_label(next_arg_label);
1169 	g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1170 	g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_record_load - emit code loading member rec_slot from the record in
 * slot_1 into slot_r.
 *
 * Handles three record representations:
 *  - flat record: memcpy the member out of slot_1's frame storage.
 *  - boxed record with flat member: check the member's flag bit, then
 *    memcpy out of the record object.
 *  - boxed record with pointer member: load the slot-sized pointer,
 *    escape if it is a thunk, then either borrow it (store without
 *    reference, when OPCODE_STRUCT_MAY_BORROW) or take a full
 *    reference via gen_frame_set_pointer.
 *
 * NOTE(review): embedded numbering shows elided lines (e.g. 1184-1186,
 * 1198-1199); returns/braces between visible statements are not shown.
 */
1175 static bool attr_w gen_record_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, frame_t rec_slot, frame_t flags)
1177 	const struct type *rec_type, *entry_type;
1178 	uint32_t escape_label;
1181 	rec_type = get_type_of_local(ctx, slot_1);
/* A record without a definition can only occur in the dummy/unnamed
 * function; assert that and (presumably, in elided code) bail out. */
1182 	if (unlikely(rec_type->tag == TYPE_TAG_unknown)) {
1183 		ajla_assert_lo(!*da(ctx->fn,function)->function_name, (file_line, "gen_record_load: function %s has record without definition", da(ctx->fn,function)->function_name));
1187 	escape_label = alloc_escape_label(ctx);
1188 	if (unlikely(!escape_label))
1191 	/*debug("gen_record_load: %s: %u, %u", da(ctx->fn,function)->function_name, TYPE_TAG_unknown, rec_type->tag);*/
/* Flat record: member lives directly in the frame at a fixed offset. */
1192 	if (TYPE_IS_FLAT(rec_type)) {
1193 		const struct flat_record_definition_entry *ft = &type_def(rec_type,flat_record)->entries[rec_slot];
1194 		g(gen_test_1_cached(ctx, slot_1, escape_label));
1195 		g(gen_memcpy_to_slot(ctx, slot_r, R_FRAME, (size_t)slot_1 * slot_size + ft->flat_offset));
1196 		flag_set(ctx, slot_1, false);
1197 		flag_set(ctx, slot_r, false);
1200 	entry_type = type_def(rec_type,record)->types[rec_slot];
/* Boxed record: decompress the record pointer (escapes on thunk). */
1202 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_2, ®, escape_label));
1204 	if (TYPE_IS_FLAT(entry_type)) {
/* Flag bit must be clear (member stored flat), else escape. */
1205 		g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, false, TEST));
1206 		g(gen_memcpy_to_slot(ctx, slot_r, reg, (size_t)rec_slot * slot_size + data_record_offset));
1207 		flag_set(ctx, slot_r, false);
/* Destination must stay flat but member is a pointer: always escape. */
1211 	if (flag_must_be_flat(ctx, slot_r)) {
1212 		gen_insn(INSN_JMP, 0, 0, 0);
1213 		gen_four(escape_label);
/* Pointer member: flag bit must be set, else escape. */
1217 	g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, true, TEST));
1219 	g(gen_address(ctx, reg, (size_t)rec_slot * slot_size + data_record_offset, ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1220 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1221 	gen_one(R_SCRATCH_1);
1222 	gen_address_offset();
1224 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
1226 	if (flags & OPCODE_STRUCT_MAY_BORROW) {
/* Borrow: store the raw pointer without taking a reference. */
1227 		g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
1228 		flag_set(ctx, slot_r, false);
1230 		g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1, true, false));
/*
 * gen_option_create_empty_flat - emit code storing the flat option
 * constant 'opt' (no payload) directly into slot_r and clearing its
 * pointer flag.
 */
1235 static bool attr_w gen_option_create_empty_flat(struct codegen_context *ctx, ajla_flat_option_t opt, frame_t slot_r)
1237 	g(gen_frame_store_imm(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, opt));
1238 	flag_set(ctx, slot_r, false);
/*
 * gen_option_create_empty - emit code allocating a boxed option object
 * with tag 'opt' and a zero (empty) payload pointer, storing the
 * compressed pointer into slot_r.
 *
 * Escapes to the interpreter if slot_r must stay flat or if the
 * allocation upcall returns NULL.
 *
 * NOTE(review): numbering gaps (1249-1250, 1254-1256, ...) indicate
 * elided statements, likely including gen_upcall_start and the stores'
 * gen_imm_offset/operand lines.
 */
1242 static bool attr_w gen_option_create_empty(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_r)
1244 	unsigned option_size = log_2(sizeof(ajla_option_t));
1245 	uint32_t escape_label;
1247 	escape_label = alloc_escape_label(ctx);
1248 	if (unlikely(!escape_label))
/* Boxed result not allowed in a must-be-flat slot: escape always. */
1251 	if (flag_must_be_flat(ctx, slot_r)) {
1252 		gen_insn(INSN_JMP, 0, 0, 0);
1253 		gen_four(escape_label);
1257 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1258 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
1259 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Store the option tag... */
1261 	g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1262 	g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1263 	gen_insn(INSN_MOV, option_size, 0, 0);
1264 	gen_address_offset();
/* ...and a zero payload pointer (empty option). */
1267 	g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1268 	g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, OP_SIZE_SLOT));
1269 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1270 	gen_address_offset();
1273 	g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1274 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_option_create - emit code allocating a boxed option with tag 'opt'
 * whose payload is taken from slot_1, storing the result into slot_r.
 *
 * The payload is obtained one of two ways:
 *  - slot_1 is flat data: convert it to a boxed object via the
 *    cg_upcall_flat_to_data upcall;
 *  - slot_1 already holds a pointer: fetch it with
 *    gen_frame_get_pointer (consuming the reference when
 *    OPCODE_FLAG_FREE_ARGUMENT is set).
 * Both paths leave the payload pointer in R_RET0, which is then stored
 * into the option object kept in R_SAVED_1.
 *
 * NOTE(review): numbering gaps (e.g. 1287-1288, 1320, 1342-1343)
 * indicate elided statements in this listing.
 */
1279 static bool attr_w gen_option_create(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_1, frame_t slot_r, frame_t flags)
1281 	unsigned option_size = log_2(sizeof(ajla_option_t));
1282 	const struct type *type;
1283 	uint32_t escape_label, get_pointer_label, got_pointer_label;
1285 	escape_label = alloc_escape_label(ctx);
1286 	if (unlikely(!escape_label))
/* A boxed option cannot live in a must-be-flat slot: escape always. */
1289 	if (flag_must_be_flat(ctx, slot_r)) {
1290 		gen_insn(INSN_JMP, 0, 0, 0);
1291 		gen_four(escape_label);
1295 	get_pointer_label = alloc_label(ctx);
1296 	if (unlikely(!get_pointer_label))
1299 	got_pointer_label = alloc_label(ctx);
1300 	if (unlikely(!got_pointer_label))
1303 	type = get_type_of_local(ctx, slot_1);
/* Allocate the option object first; escape on NULL. */
1305 	g(gen_upcall_start(ctx, 0));
1306 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1307 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
1308 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Preserve the option object across the flat_to_data upcall. */
1310 	g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1312 	g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1313 	g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1314 	gen_insn(INSN_MOV, option_size, 0, 0);
1315 	gen_address_offset();
1318 	if (TYPE_IS_FLAT(type)) {
/* If slot_1 holds a pointer after all, branch to the pointer path. */
1319 		g(gen_test_1_cached(ctx, slot_1, get_pointer_label));
1321 		if (ctx->registers[slot_1] >= 0)
1322 			g(spill(ctx, slot_1));
1324 		g(gen_upcall_start(ctx, 3));
1326 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1327 		g(gen_upcall_argument(ctx, 0));
1329 		g(gen_load_constant(ctx, R_ARG1, slot_1));
1330 		g(gen_upcall_argument(ctx, 1));
1332 		g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1333 		g(gen_upcall_argument(ctx, 2));
1335 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* If the flag is statically known clear, the pointer path is dead
 * code; skip emitting the get_pointer_label body entirely. */
1337 		if (flag_is_clear(ctx, slot_1))
1338 			goto skip_get_pointer_label;
1340 		gen_insn(INSN_JMP, 0, 0, 0);
1341 		gen_four(got_pointer_label);
1344 	gen_label(get_pointer_label);
1345 	g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1347 skip_get_pointer_label:
1348 	gen_label(got_pointer_label);
/* Store the payload pointer (in R_RET0 on both paths) into the option. */
1349 	g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1350 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1351 	gen_address_offset();
1354 	g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1355 	g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_option_cmp - emit code comparing the tag of the option object in
 * register 'reg' against the constant 'opt'.
 *
 * Dual-purpose helper: if 'label' is nonzero, jump to it when the tags
 * differ (used by gen_option_load); otherwise materialize the equality
 * result as a flat boolean in slot_r (used by gen_option_test).
 *
 * Three architecture variants: x86 compares memory against an
 * immediate directly; the generic path loads the tag into R_SCRATCH_2
 * first; a third variant (visible from line 1389 on) uses
 * gen_cmp_dest_reg for targets without a usable flags register.
 * NOTE(review): the #else/#endif lines separating these variants are
 * elided from this listing (numbering gaps at 1369-1370, 1386-1388).
 */
1360 static bool attr_w gen_option_cmp(struct codegen_context *ctx, unsigned reg, frame_t opt, uint32_t label, frame_t slot_r)
1362 	unsigned op_size = log_2(sizeof(ajla_option_t));
1364 #if defined(ARCH_X86)
/* x86: cmp [reg + option offset], imm — no scratch register needed. */
1365 	g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
1366 	g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1367 	gen_insn(INSN_CMP, op_size, 0, 1);
1368 	gen_address_offset();
1371 		gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
1372 		gen_four(escape_label);
1374 		g(gen_frame_set_cond(ctx, op_size, false, COND_E, slot_r));
/* Generic path: load the tag, compare, branch or set condition. */
1389 	g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1390 	gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1391 	gen_one(R_SCRATCH_2);
1392 	gen_address_offset();
1394 	g(gen_cmp_dest_reg(ctx, op_size, R_SCRATCH_2, (unsigned)-1, label ? R_CMP_RESULT : R_SCRATCH_2, opt, COND_E));
1397 		gen_insn(INSN_JMP_REG, i_size(op_size), COND_E, 0);
1398 		gen_one(R_CMP_RESULT);
1401 		g(gen_frame_store(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, R_SCRATCH_2));
/*
 * gen_option_load - emit code extracting the payload of the option in
 * slot_1 into slot_r, escaping unless the option tag equals 'opt'.
 *
 * Escapes to the interpreter when: slot_r must stay flat, slot_1 is a
 * flat option (payload extraction needs a boxed object), the tag does
 * not match, or the payload itself is a thunk. Otherwise the payload
 * pointer is either borrowed (OPCODE_STRUCT_MAY_BORROW) or referenced
 * via gen_frame_set_pointer.
 */
1407 static bool attr_w gen_option_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, ajla_option_t opt, frame_t flags)
1409 	const struct type *type;
1410 	uint32_t escape_label;
1413 	escape_label = alloc_escape_label(ctx);
1414 	if (unlikely(!escape_label))
1417 	if (flag_must_be_flat(ctx, slot_r)) {
1418 		gen_insn(INSN_JMP, 0, 0, 0);
1419 		gen_four(escape_label);
1423 	type = get_type_of_local(ctx, slot_1);
/* Flat options carry no extractable payload here: require a pointer. */
1424 	if (TYPE_IS_FLAT(type)) {
1425 		g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
1428 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
/* Tag must equal opt; gen_option_cmp jumps to escape_label otherwise. */
1429 	g(gen_option_cmp(ctx, reg, opt, escape_label, 0));
1431 	g(gen_address(ctx, reg, offsetof(struct data, u_.option.pointer), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1432 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1433 	gen_one(R_SCRATCH_1);
1434 	gen_address_offset();
1436 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
1438 	if (flags & OPCODE_STRUCT_MAY_BORROW) {
/* Borrow: store raw pointer, no reference taken. */
1439 		g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
1440 		flag_set(ctx, slot_r, false);
1442 		g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1, true, false));
/*
 * gen_option_test_flat - emit code testing whether the flat option in
 * slot_1 equals 'opt', storing the boolean result into slot_r.
 *
 * If 'opt' does not fit in ajla_flat_option_t it can never match a
 * flat option, so the result is statically false (frame cleared).
 */
1448 static bool attr_w gen_option_test_flat(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1450 	unsigned op_size = log_2(sizeof(ajla_flat_option_t));
1451 	uint32_t escape_label;
1453 	escape_label = alloc_escape_label(ctx);
1454 	if (unlikely(!escape_label))
/* slot_1 must hold flat data (escape on thunk/pointer). */
1457 	g(gen_test_1_cached(ctx, slot_1, escape_label));
1459 	flag_set(ctx, slot_1, false);
1460 	flag_set(ctx, slot_r, false);
/* Out-of-range tag: statically false result. */
1462 	if (unlikely(opt != (ajla_flat_option_t)opt)) {
1463 		g(gen_frame_clear(ctx, op_size, slot_r));
1467 	g(gen_frame_load_cmp_imm_set_cond(ctx, op_size, zero_x, slot_1, 0, opt, COND_E, slot_r));
/*
 * gen_option_test - emit code testing whether the boxed option in
 * slot_1 has tag 'opt', storing the boolean result into slot_r.
 *
 * Counterpart of gen_option_test_flat for pointer-held options; the
 * actual tag comparison is delegated to gen_option_cmp with label==0
 * so it materializes the result instead of branching.
 */
1472 static bool attr_w gen_option_test(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1474 	uint32_t escape_label;
1477 	escape_label = alloc_escape_label(ctx);
1478 	if (unlikely(!escape_label))
1481 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1483 	flag_set(ctx, slot_r, false);
/* Tag wider than ajla_option_t can never match: statically false. */
1485 	if (unlikely(opt != (ajla_option_t)opt)) {
1486 		g(gen_frame_clear(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r));
1490 	g(gen_option_cmp(ctx, reg, opt, 0, slot_r));
/*
 * gen_option_ord - emit code loading the ordinal (tag) of the option in
 * slot_1 into slot_r as an integer.
 *
 * Two runtime paths joined at store_label:
 *  - flat option: load the ajla_flat_option_t value from the frame;
 *  - boxed option: decompress the pointer and load the tag from the
 *    option object.
 * The 'flat' parameter is visible in the signature but its use is in
 * lines elided from this listing — TODO confirm against full source.
 */
1495 static bool attr_w gen_option_ord(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, bool flat)
1497 	unsigned op_size = log_2(sizeof(ajla_option_t));
1498 	unsigned op_size_flat = log_2(sizeof(ajla_flat_option_t));
1499 	uint32_t escape_label, ptr_label, store_label;
1500 	unsigned reg, target;
1502 	escape_label = alloc_escape_label(ctx);
1503 	if (unlikely(!escape_label))
1506 	ptr_label = alloc_label(ctx);
1507 	if (unlikely(!ptr_label))
1510 	store_label = alloc_label(ctx);
1511 	if (unlikely(!store_label))
/* Pick a destination register, preferring slot_r's own register. */
1514 	target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
/* Flat path: if slot_1's flag is set it holds a pointer -> ptr_label. */
1517 		g(gen_test_1_cached(ctx, slot_1, ptr_label));
1519 		g(gen_frame_load(ctx, op_size_flat, zero_x, slot_1, 0, target));
/* Flag statically clear: the pointer path is unreachable, skip it. */
1521 		if (flag_is_clear(ctx, slot_1))
1522 			goto skip_ptr_label;
1524 		gen_insn(INSN_JMP, 0, 0, 0);
1525 		gen_four(store_label);
/* Boxed path: load the tag out of the option object. */
1528 	gen_label(ptr_label);
1529 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, ®, escape_label));
1531 	g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1532 	gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1534 	gen_address_offset();
1537 	gen_label(store_label);
1538 	g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
1539 	flag_set(ctx, slot_r, false);
/*
 * gen_array_create - emit code building an array from the ctx->args[]
 * entries (all of the same type) into slot_r.
 *
 * Two paths:
 *  - flat element type: allocate a flat array via
 *    cg_upcall_data_alloc_array_flat_slot_mayfail and memcpy each
 *    argument slot into consecutive elements;
 *  - pointer elements: allocate via
 *    cg_upcall_data_alloc_array_pointers_mayfail and store each
 *    argument's pointer (consuming it when OPCODE_FLAG_FREE_ARGUMENT).
 *
 * Escapes if the element count is too large for uint_default_t, any
 * flat argument holds a thunk, or allocation fails.
 * NOTE(review): numbering gaps show elided lines (e.g. the 'offset'
 * initialization for the pointer path around 1616-1619).
 */
1544 static bool attr_w gen_array_create(struct codegen_context *ctx, frame_t slot_r)
1547 	const struct type *type;
1548 	uint32_t escape_label;
1550 	escape_label = alloc_escape_label(ctx);
1551 	if (unlikely(!escape_label))
1554 	ajla_assert_lo(ctx->args_l != 0, (file_line, "gen_array_create: zero entries"));
/* Too many entries to represent as a signed default int: escape. */
1556 	if (unlikely(ctx->args_l >= sign_bit(uint_default_t))) {
1557 		gen_insn(INSN_JMP, 0, 0, 0);
1558 		gen_four(escape_label);
/* All entries must share one element type (compiler invariant). */
1562 	type = get_type_of_local(ctx, ctx->args[0].slot);
1563 	for (i = 1; i < ctx->args_l; i++) {
1564 		const struct type *t = get_type_of_local(ctx, ctx->args[i].slot);
1565 		if (unlikely(t != type))
1566 			internal(file_line, "gen_array_create: types do not match: %u != %u", type->tag, t->tag);
1569 	if (TYPE_IS_FLAT(type)) {
/* Flat path: every source slot must be flat data (escape on thunk). */
1571 		for (i = 0; i < ctx->args_l; i++) {
1572 			g(gen_test_1_cached(ctx, ctx->args[i].slot, escape_label));
1573 			flag_set(ctx, ctx->args[i].slot, false);
1576 		g(gen_upcall_start(ctx, 3));
1578 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1579 		g(gen_upcall_argument(ctx, 0));
1581 		g(gen_load_constant(ctx, R_ARG1, ctx->args[0].slot));
1582 		g(gen_upcall_argument(ctx, 1));
1584 		g(gen_load_constant(ctx, R_ARG2, ctx->args_l));
1585 		g(gen_upcall_argument(ctx, 2));
1587 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1588 		g(gen_sanitize_returned_pointer(ctx, R_RET0));
1589 		g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1591 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Copy each element; offset advances by the flat element size. */
1593 		offset = data_array_offset;
1594 		for (i = 0; i < ctx->args_l; i++) {
1595 			g(gen_memcpy_from_slot(ctx, R_SAVED_1, offset, ctx->args[i].slot));
1596 			offset += type->size;
/* Pointer path: args_l passed as both length and allocated size. */
1600 		g(gen_upcall_start(ctx, 2));
1602 		g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
1603 		g(gen_upcall_argument(ctx, 0));
1605 		g(gen_load_constant(ctx, R_ARG1, ctx->args_l));
1606 		g(gen_upcall_argument(ctx, 1));
1608 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1609 		g(gen_sanitize_returned_pointer(ctx, R_RET0));
1610 		g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1612 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Load the element-pointer base (arrays of pointers are indirect). */
1614 		g(gen_address(ctx, R_RET0, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
1615 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
1617 		gen_address_offset();
1620 		for (i = 0; i < ctx->args_l; i++) {
1621 			g(gen_frame_get_pointer(ctx, ctx->args[i].slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1622 			g(gen_address(ctx, R_SAVED_2, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1623 			gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1624 			gen_address_offset();
1625 			gen_one(R_SCRATCH_1);
1626 			offset += sizeof(pointer_t);
1629 	g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1630 	g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_array_create_empty_flat - emit code allocating an empty flat
 * array whose element type is given by local-type index 'local_type',
 * storing the compressed pointer into slot_r.
 *
 * Upcall signature (frame, local_type, 0 elements); NULL result
 * escapes to the interpreter.
 */
1634 static bool attr_w gen_array_create_empty_flat(struct codegen_context *ctx, frame_t slot_r, frame_t local_type)
1636 	uint32_t escape_label;
1638 	escape_label = alloc_escape_label(ctx);
1639 	if (unlikely(!escape_label))
1642 	g(gen_upcall_start(ctx, 3));
1644 	g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1645 	g(gen_upcall_argument(ctx, 0));
1647 	g(gen_load_constant(ctx, R_ARG1, local_type));
1648 	g(gen_upcall_argument(ctx, 1));
/* Zero elements: empty array. */
1650 	g(gen_load_constant(ctx, R_ARG2, 0));
1651 	g(gen_upcall_argument(ctx, 2));
1653 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 3));
1654 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
1655 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1657 	g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1658 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_array_create_empty - emit code allocating an empty pointer array
 * (zero length, zero capacity) and storing the compressed pointer into
 * slot_r. NULL allocation result escapes to the interpreter.
 */
1663 static bool attr_w gen_array_create_empty(struct codegen_context *ctx, frame_t slot_r)
1665 	uint32_t escape_label;
1667 	escape_label = alloc_escape_label(ctx);
1668 	if (unlikely(!escape_label))
1671 	g(gen_upcall_start(ctx, 2));
/* length = 0 */
1673 	g(gen_load_constant(ctx, R_ARG0, 0));
1674 	g(gen_upcall_argument(ctx, 0));
/* allocated size = 0 */
1676 	g(gen_load_constant(ctx, R_ARG1, 0));
1677 	g(gen_upcall_argument(ctx, 1));
1679 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1680 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
1681 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1683 	g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1684 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_array_fill - emit code creating an array of slot_2 copies of the
 * value in slot_1, storing the result into slot_r.
 *
 * Paths:
 *  - flat destination array (fixed-size flat_array type): unroll a
 *    compile-time loop of memcpys into slot_r's frame storage.
 *  - OPCODE_ARRAY_FILL_FLAG_SPARSE: obtain the fill value as a pointer
 *    (converting flat data with cg_upcall_flat_to_data if needed) and
 *    call cg_upcall_array_create_sparse(length, pointer).
 *  - flat content, boxed array: cg_upcall_array_create_flat(frame,
 *    length, slot).
 *  - pointer content: cg_upcall_array_create_pointers(frame, ip,
 *    slot_2, pointer).
 *
 * The length in slot_2 must be flat and non-negative; negative lengths
 * escape to the interpreter.
 * NOTE(review): numbering gaps (1700, 1720-1723, ...) indicate elided
 * statements in this listing.
 */
1689 static bool attr_w gen_array_fill(struct codegen_context *ctx, frame_t slot_1, frame_t flags, frame_t slot_2, frame_t slot_r)
1691 	const struct type *content_type, *array_type;
1692 	uint32_t escape_label;
1693 	unsigned reg1, reg4;
1695 	escape_label = alloc_escape_label(ctx);
1696 	if (unlikely(!escape_label))
/* The length slot must hold flat data. */
1699 	g(gen_test_1_cached(ctx, slot_2, escape_label));
1701 	content_type = get_type_of_local(ctx, slot_1);
1702 	array_type = get_type_of_local(ctx, slot_r);
1704 	if (TYPE_IS_FLAT(array_type)) {
/* Fixed-size flat array: element count is known statically, so the
 * fill loop is fully unrolled at code-generation time. */
1705 		int64_t dest_offset;
1707 		const struct flat_array_definition *def = type_def(array_type,flat_array);
1709 		ajla_assert_lo(TYPE_IS_FLAT(content_type), (file_line, "gen_array_fill: array is flat but content is not"));
1711 		g(gen_test_1_cached(ctx, slot_1, escape_label));
1713 		dest_offset = (size_t)slot_r * slot_size;
1714 		for (i = 0; i < def->n_elements; i++) {
1715 			g(gen_memcpy_from_slot(ctx, R_FRAME, dest_offset, slot_1));
1716 			dest_offset += def->base->size;
1718 		flag_set(ctx, slot_1, false);
1719 		flag_set(ctx, slot_r, false);
/* Boxed result paths: upcalls read slot_1 from the frame, so flush
 * any register copy first. */
1724 	if (ctx->registers[slot_1] >= 0)
1725 		g(spill(ctx, slot_1));
1727 	if (unlikely((flags & OPCODE_ARRAY_FILL_FLAG_SPARSE) != 0)) {
1728 		uint32_t get_ptr_label, got_ptr_label;
1730 		get_ptr_label = alloc_label(ctx);
1731 		if (unlikely(!get_ptr_label))
1734 		got_ptr_label = alloc_label(ctx);
1735 		if (unlikely(!got_ptr_label))
1738 		if (TYPE_IS_FLAT(content_type)) {
/* Flat fill value: box it; if the slot already holds a pointer,
 * branch to get_ptr_label instead. */
1739 			g(gen_test_1_cached(ctx, slot_1, get_ptr_label));
1741 			g(gen_upcall_start(ctx, 3));
1743 			g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1744 			g(gen_upcall_argument(ctx, 0));
1746 			g(gen_load_constant(ctx, R_ARG1, slot_1));
1747 			g(gen_upcall_argument(ctx, 1));
1749 			g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1750 			g(gen_upcall_argument(ctx, 2));
1752 			g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* Keep the boxed fill value in R_SCRATCH_4 across label paths. */
1754 			g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_SCRATCH_4, R_RET0));
1756 			gen_insn(INSN_JMP, 0, 0, 0);
1757 			gen_four(got_ptr_label);
1760 		gen_label(get_ptr_label);
1762 		g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_4));
1764 		gen_label(got_ptr_label);
/* Sign-extended length; negative length escapes. */
1766 		g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, 0, R_SCRATCH_1, ®1));
1767 		g(gen_jmp_if_negative(ctx, reg1, escape_label));
1769 		g(gen_upcall_start(ctx, 2));
1770 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg1));
1771 		g(gen_upcall_argument(ctx, 0));
1773 		g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SCRATCH_4));
1774 		g(gen_upcall_argument(ctx, 1));
1776 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
1777 	} else if (TYPE_IS_FLAT(content_type)) {
1778 		g(gen_test_1_cached(ctx, slot_1, escape_label));
1779 		flag_set(ctx, slot_1, false);
1781 		g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, 0, R_SCRATCH_4, ®4));
1782 		g(gen_jmp_if_negative(ctx, reg4, escape_label));
/* R_ARG1 is loaded before R_ARG0 — presumably to avoid clobbering
 * reg4 if it aliases an argument register; verify against full source. */
1784 		g(gen_upcall_start(ctx, 3));
1785 		g(gen_mov(ctx, i_size(OP_SIZE_INT), R_ARG1, reg4));
1786 		g(gen_upcall_argument(ctx, 1));
1788 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1789 		g(gen_upcall_argument(ctx, 0));
1791 		g(gen_load_constant(ctx, R_ARG2, slot_1));
1792 		g(gen_upcall_argument(ctx, 2));
1794 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_flat), 3));
/* Pointer-content path: pass instruction offset so the upcall can
 * re-read the length slot and handle errors at the right ip. */
1796 		if (ctx->registers[slot_2] >= 0)
1797 			g(spill(ctx, slot_2));
1799 		g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1801 		g(gen_upcall_start(ctx, 4));
1803 		g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG3, R_SCRATCH_1));
1804 		g(gen_upcall_argument(ctx, 3));
1806 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1807 		g(gen_upcall_argument(ctx, 0));
1809 		g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
1810 		g(gen_upcall_argument(ctx, 1));
1812 		g(gen_load_constant(ctx, R_ARG2, slot_2));
1813 		g(gen_upcall_argument(ctx, 2));
1815 		g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
/* Result pointer is stored without re-reference (already owned). */
1817 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_string - emit code creating a flat array of 'len' elements
 * of the type identified by 'tag', initialized from constant bytes
 * embedded in the function's code image at 'string', and store the
 * result into slot_r.
 *
 * The bytes are copied at runtime from the function's code area
 * (located via load_function_offset) rather than embedded as
 * immediates, so 'offset' is the string's displacement within the
 * bytecode.
 */
1822 static bool attr_w gen_array_string(struct codegen_context *ctx, type_tag_t tag, uint8_t *string, frame_t len, frame_t slot_r)
1824 	uint32_t escape_label;
1826 	const struct type *type;
1828 	escape_label = alloc_escape_label(ctx);
1829 	if (unlikely(!escape_label))
1832 	g(gen_upcall_start(ctx, 2));
1834 	g(gen_load_constant(ctx, R_ARG0, tag));
1835 	g(gen_upcall_argument(ctx, 0));
1837 	g(gen_load_constant(ctx, R_ARG1, len));
1838 	g(gen_upcall_argument(ctx, 1));
1840 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1841 	g(gen_sanitize_returned_pointer(ctx, R_RET0));
1842 	g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the array in R_SAVED_1: gen_frame_set_pointer consumes R_RET0
 * but the copy below still needs the destination address. */
1844 	g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1846 	g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1847 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
1849 	g(load_function_offset(ctx, R_SCRATCH_3, offsetof(struct data, u_.function.code)));
/* Displacement of the literal bytes within the function's bytecode. */
1851 	offset = string - cast_ptr(uint8_t *, da(ctx->fn,function)->code);
1852 	type = type_get_from_tag(tag);
/* Alignment capped at align_of(code_t): the source lives inside the
 * code stream, which only guarantees code_t alignment. */
1853 	g(gen_memcpy_raw(ctx, R_SAVED_1, data_array_offset, R_SCRATCH_3, offset, (size_t)len * type->size, minimum(type->align, align_of(code_t))));
/*
 * gen_scaled_array_address - emit code computing
 *   reg_dst = reg_src + reg_index * element_size + offset_src
 * i.e. the address of element reg_index in an array at reg_src.
 *
 * Strategies, in order of preference:
 *  - power-of-two element size: x86 LEA with scale (shift <= 3),
 *    shifted-add on architectures that have it, or an explicit shift;
 *  - general size with hardware multiply: INSN_ALU MUL by immediate;
 *  - no multiply (fallback): decompose element_size into a sum of
 *    powers of two using shift-and-add (the e_size/first_match loop).
 * Finally the base and constant offset are added (s390 can fold the
 * displacement into a single LEA3).
 *
 * NOTE(review): numbering gaps (1865-1868, 1875-1876, ...) indicate
 * elided operand-emission lines and preprocessor branches; the
 * ARCH_HAS_MUL-style condition guarding the fallback loop is not
 * visible in this listing.
 */
1858 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src)
1860 	if (is_power_of_2(element_size)) {
1861 		unsigned shift = log_2(element_size);
1862 #if defined(ARCH_X86)
/* x86 scaled addressing supports scale factors 1,2,4,8 (shift <= 3). */
1863 		if (shift <= 3 && imm_is_32bit(offset_src)) {
1864 			gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
1869 			gen_eight(offset_src);
1873 		if (ARCH_HAS_SHIFTED_ADD(shift)) {
/* e.g. ARM's add reg, reg, reg lsl #shift. */
1874 			gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1877 			gen_one(ARG_SHIFTED_REGISTER);
1878 			gen_one(ARG_SHIFT_LSL | shift);
/* No shifted-add: shift the index into reg_dst explicitly. */
1885 		g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_dst, reg_index, shift, 0));
1886 		reg_index = reg_dst;
/* Non-power-of-two with hardware multiply available. */
1890 		g(gen_imm(ctx, element_size, IMM_PURPOSE_MUL, i_size(OP_SIZE_ADDRESS)));
1891 		gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_MUL, ALU_WRITES_FLAGS(ALU_MUL, is_imm()));
1895 		reg_index = reg_dst;
/* Fallback: multiply by shift-and-add over the set bits of
 * element_size; R_CONST_IMM holds the original index value. */
1897 		size_t e_size = element_size;
1899 		bool first_match = true;
1901 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_CONST_IMM, reg_index));
1902 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), reg_dst, reg_index));
1903 		reg_index = reg_dst;
1906 			g(gen_load_constant(ctx, reg_index, 0));
/* First set bit: plain shift; later bits: shifted-add or shift+add. */
1912 				g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_index, reg_index, sh, false));
1913 				first_match = false;
1914 			} else if (ARCH_HAS_SHIFTED_ADD(sh)) {
1915 				gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1918 				gen_one(ARG_SHIFTED_REGISTER);
1919 				gen_one(ARG_SHIFT_LSL | sh);
1920 				gen_one(R_CONST_IMM);
1923 				g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_CONST_IMM, R_CONST_IMM, sh, false));
1926 				g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_index, reg_index, R_CONST_IMM, 0));
/* Add the base register and constant displacement. */
1934 #if defined(ARCH_S390)
/* s390 can fold base + index + displacement into one LEA3. */
1935 	if (offset_src && s390_inline_address(offset_src)) {
1936 		gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), 0, 0);
1941 		gen_eight(offset_src);
1945 	g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_dst, reg_index, reg_src, 0));
1950 	g(gen_imm(ctx, offset_src, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
1951 	gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
/*
 * gen_scaled_array_load - emit code loading element reg_idx from the
 * array at reg_src (+ offset_src) into frame slot slot_r.
 *
 * Fast paths:
 *  - x86: a single scaled-index MOV when the element size is a
 *    power of two <= 8; result goes to slot_r's register if it has
 *    one, otherwise through R_SCRATCH_2 and a frame store;
 *  - s390: base+index addressing for 1-byte elements (MOVSX) when the
 *    displacement fits and the extended-immediate facility exists.
 * Generic path: compute the element address with
 * gen_scaled_array_address, then memcpy into the slot.
 */
1959 static bool attr_w gen_scaled_array_load(struct codegen_context *ctx, unsigned reg_src, unsigned reg_idx, int64_t offset_src, frame_t slot_r)
1961 	const struct type *t = get_type_of_local(ctx, slot_r);
1962 #if defined(ARCH_X86)
1963 	if (is_power_of_2(t->size)) {
1964 		unsigned shift = log_2(t->size);
/* Scale limited to 8 (shift <= 3) and to the native operand size. */
1965 		if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
1966 			short reg = ctx->registers[slot_r];
1967 			gen_insn(INSN_MOV, shift, 0, 0);
1968 			gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1969 			gen_one(ARG_ADDRESS_2 + shift);
1972 			gen_eight(offset_src);
/* Slot not register-allocated: spill the loaded value to the frame. */
1975 				g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, shift));
1976 				gen_insn(INSN_MOV, shift, 0, 0);
1977 				gen_address_offset();
1978 				gen_one(R_SCRATCH_2);
1985 #if defined(ARCH_S390)
1986 	if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
1987 		short reg = ctx->registers[slot_r];
1988 		gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
1989 		gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1990 		gen_one(ARG_ADDRESS_2);
1993 		gen_eight(offset_src);
1996 			g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_1));
1997 			gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
1998 			gen_address_offset();
1999 			gen_one(R_SCRATCH_2);
/* Generic path: address arithmetic then a slot-sized memcpy. */
2005 	g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, reg_idx, 0));
2007 	g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, offset_src));
/*
 * Store the flat value in frame slot slot_1 into an array element at
 * reg_src (+ scaled index, + offset_src).  Mirror image of
 * gen_scaled_array_load: x86 and s390 have direct-addressing fast paths;
 * the generic path computes the address and memcpys from the slot.
 * NOTE(review): this listing is an elided excerpt; comments describe only
 * the code that is visible here.
 */
2012 static bool attr_w gen_scaled_array_store(struct codegen_context *ctx, unsigned reg_src, int64_t offset_src, frame_t slot_1)
2014 	const struct type *t = get_type_of_local(ctx, slot_1);
2015 #if defined(ARCH_X86)
/* x86: power-of-2 sizes up to 8 bytes, 32-bit displacement. */
2016 	if (is_power_of_2(t->size)) {
2017 		unsigned shift = log_2(t->size);
2018 		if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
2019 			short reg = ctx->registers[slot_1];
/* Fetch the source value from the frame into R_SCRATCH_3. */
2021 			g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_OFFSET, shift));
2022 			gen_insn(INSN_MOV, shift, 0, 0);
2023 			gen_one(R_SCRATCH_3);
2024 			gen_address_offset();
/* Store it through a scaled-index address operand. */
2028 			gen_insn(INSN_MOV, shift, 0, 0);
2029 			gen_one(ARG_ADDRESS_2 + shift);
2031 			gen_one(R_SCRATCH_2);
2032 			gen_eight(offset_src);
2039 #if defined(ARCH_S390)
/* s390: byte-sized elements with inline-encodable offset. */
2040 	if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
2041 		short reg = ctx->registers[slot_1];
2043 		g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_1));
2044 		gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
2045 		gen_one(R_SCRATCH_3);
2046 		gen_address_offset();
2050 		gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
2051 		gen_one(ARG_ADDRESS_2);
2053 		gen_one(R_SCRATCH_2);
2054 		gen_eight(offset_src);
/* Generic path: materialize the element address, then copy the slot's
 * bytes out to it. */
2060 	g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, R_SCRATCH_2, 0));
2062 	g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, offset_src, slot_1));
/*
 * Compare reg_len against an array's entry count and jump to escape_label
 * when the condition 'cond' holds.  'allocated' selects which count is
 * checked: n_allocated_entries (capacity) when true, n_used_entries
 * (length) when false.  On x86 the comparison reads the count straight
 * from memory; the generic path loads it into R_SCRATCH_3 first
 * (sign-extending on architectures that prefer it).
 */
2069 	size_t offset = !allocated ? offsetof(struct data, u_.array_flat.n_used_entries) : offsetof(struct data, u_.array_flat.n_allocated_entries);
2070 #if defined(ARCH_X86)
2071 	g(gen_address(ctx, reg_array, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2072 	gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1);
2074 	gen_address_offset();
2076 	gen_insn(INSN_JMP_COND, OP_SIZE_INT, cond, 0);
2077 	gen_four(escape_label);
2079 	g(gen_address(ctx, reg_array, offset, ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2080 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2081 	gen_one(R_SCRATCH_3);
2082 	gen_address_offset();
2084 	g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size(OP_SIZE_INT), reg_len, R_SCRATCH_3, cond, escape_label));
/*
 * Generate code for an indexed array load: slot_r = slot_1[slot_idx].
 * Handles three representations:
 *   - flat arrays embedded in the frame (TYPE_TAG_flat_array),
 *   - heap arrays with flat elements (array_flat / array_slice),
 *   - heap arrays of pointers (array_pointers).
 * flags & OPCODE_ARRAY_INDEX_IN_RANGE suppresses the bounds check;
 * flags & OPCODE_STRUCT_MAY_BORROW lets the result pointer be stored
 * without taking a reference.  Anything else escapes to the interpreter.
 * NOTE(review): this listing is an elided excerpt; comments describe only
 * the code that is visible here.
 */
2091 	const struct type *t = get_type_of_local(ctx, slot_1);
2092 	const struct type *tr = get_type_of_local(ctx, slot_r);
2093 	uint32_t escape_label;
2094 	unsigned reg1, reg2;
2096 	escape_label = alloc_escape_label(ctx);
2097 	if (unlikely(!escape_label))
/* Case 1: the array is a flat array stored directly in the frame. */
2100 	if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2101 		const struct flat_array_definition *def = type_def(t,flat_array);
2103 		g(gen_test_2_cached(ctx, slot_1, slot_idx, escape_label));
2105 		flag_set(ctx, slot_1, false);
2106 		flag_set(ctx, slot_idx, false);
2108 		g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, 0, R_SCRATCH_2, &reg2));
/* Bounds check against the statically known element count. */
2110 		if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2111 			g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg2, def->n_elements, COND_AE, escape_label));
2113 		g(gen_scaled_array_load(ctx, R_FRAME, reg2, (size_t)slot_1 * slot_size, slot_r));
/* Case 2/3: heap array — get the (decompressed) data pointer. */
2117 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg1, escape_label));
2119 	g(gen_test_1_cached(ctx, slot_idx, escape_label));
2120 	flag_set(ctx, slot_idx, false);
2121 	g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, 0, R_SCRATCH_2, &reg2));
2123 	if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2124 		g(gen_check_array_len(ctx, reg1, false, reg2, COND_AE, escape_label));
/* Case 2: the result type is flat — the array may be array_flat or
 * array_slice.  Tags above array_slice escape.  For a slice, the element
 * base is biased by flat_data_minus_data_array_offset; select between
 * base pointers with a conditional move (per-architecture sequences). */
2126 	if (TYPE_IS_FLAT(tr)) {
2128 		g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_slice, COND_A, escape_label, R_SCRATCH_4));
2129 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, reg1));
2130 #if defined(ARCH_X86) || defined(ARCH_S390)
2131 #if defined(ARCH_X86)
/* Without CMOV (or the s390 equivalent facility) fall through to the
 * generic branch-based sequence. */
2132 		if (unlikely(!cpu_test_feature(CPU_FEATURE_cmov)))
2134 		if (unlikely(!cpu_test_feature(CPU_FEATURE_misc_45)))
2137 		g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2138 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2139 		gen_one(R_SCRATCH_3);
2140 		gen_address_offset();
2143 		g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2144 		gen_insn(INSN_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2145 		gen_one(R_SCRATCH_1);
2146 		gen_one(R_SCRATCH_1);
2147 		gen_address_offset();
2148 #elif defined(ARCH_PARISC)
/* PA-RISC: fused compare-and-conditional-move against the tag. */
2149 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2150 		gen_insn(INSN_CMP_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2151 		gen_one(R_SCRATCH_1);
2152 		gen_one(R_SCRATCH_1);
2153 		gen_address_offset();
2154 		gen_one(R_SCRATCH_4);
2156 		gen_eight(DATA_TAG_array_slice);
2157 #elif defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
/* No CMOV: XOR the tag so zero means "slice", then branch over the load. */
2158 		g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_SCRATCH_4, R_SCRATCH_4, DATA_TAG_array_slice, 0));
2160 		label = alloc_label(ctx);
2161 		if (unlikely(!label))
2164 		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
2165 		gen_one(R_SCRATCH_4);
2168 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2169 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2170 		gen_one(R_SCRATCH_1);
2171 		gen_address_offset();
2175 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2176 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2177 		gen_one(R_SCRATCH_3);
2178 		gen_address_offset();
2180 #if defined(ARCH_POWER)
2181 		if (!cpu_test_feature(CPU_FEATURE_v203))
2184 #if defined(ARCH_SPARC)
2188 		gen_insn(INSN_CMOV, i_size(OP_SIZE_ADDRESS), COND_E, 0);
2189 		gen_one(R_SCRATCH_1);
2190 		gen_one(R_SCRATCH_1);
2191 		gen_one(R_SCRATCH_3);
/* Compare tag into a dedicated result register, then register-predicated
 * move (MOVR). */
2193 		g(gen_imm(ctx, DATA_TAG_array_slice, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
2194 		gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, COND_E, 0);
2195 		gen_one(R_CMP_RESULT);
2196 		gen_one(R_SCRATCH_4);
2199 		gen_insn(INSN_MOVR, OP_SIZE_NATIVE, COND_NE, 0);
2200 		gen_one(R_SCRATCH_1);
2201 		gen_one(R_SCRATCH_1);
2202 		gen_one(R_CMP_RESULT);
2203 		gen_one(R_SCRATCH_3);
/* Fallback: conditional branch around a plain move. */
2209 		label = alloc_label(ctx);
2210 		if (unlikely(!label))
2212 		gen_insn(INSN_JMP_COND, OP_SIZE_4, COND_NE, 0);
2215 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, R_SCRATCH_3));
/* Element base resolved — do the scaled load from the data area. */
2219 		g(gen_scaled_array_load(ctx, R_SCRATCH_1, reg2, data_array_offset, slot_r));
2220 		flag_set(ctx, slot_r, false);
/* Case 3: result is a pointer.  If the destination must stay flat we
 * cannot produce a pointer — escape unconditionally. */
2223 	if (flag_must_be_flat(ctx, slot_r)) {
2224 		gen_insn(INSN_JMP, 0, 0, 0);
2225 		gen_four(escape_label);
2229 	g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
2231 	g(gen_address(ctx, reg1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2232 	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2233 	gen_one(R_SCRATCH_1);
2234 	gen_address_offset();
/* Indexed slot-sized load of the element pointer, using whatever
 * addressing forms the architecture offers. */
2236 #if defined(ARCH_X86) || defined(ARCH_ARM)
2237 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2238 	gen_one(R_SCRATCH_1);
2239 	gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
2240 	gen_one(R_SCRATCH_1);
2244 	goto scaled_load_done;
2246 #if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390) || defined(ARCH_SPARC)
2247 	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2249 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2250 	gen_one(R_SCRATCH_1);
2251 	gen_one(ARG_ADDRESS_2);
2252 	gen_one(R_SCRATCH_1);
2253 	gen_one(R_SCRATCH_2);
2256 	goto scaled_load_done;
2258 	if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
2259 		gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
2260 		gen_one(R_SCRATCH_2);
2261 		gen_one(ARG_SHIFTED_REGISTER);
2262 		gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
2264 		gen_one(R_SCRATCH_1);
2266 		gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2267 		gen_one(R_SCRATCH_1);
2268 		gen_one(ARG_ADDRESS_1);
2269 		gen_one(R_SCRATCH_2);
2272 		goto scaled_load_done;
/* Last resort: explicit shift + add, then a simple indirect load. */
2275 	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2277 	g(gen_3address_alu(ctx, OP_SIZE_ADDRESS, ALU_ADD, R_SCRATCH_2, R_SCRATCH_2, R_SCRATCH_1, 0));
2279 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2280 	gen_one(R_SCRATCH_1);
2281 	gen_one(ARG_ADDRESS_1);
2282 	gen_one(R_SCRATCH_2);
/* Loaded element pointer must not be a thunk. */
2285 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
/* Either borrow the pointer (no refcount bump) or take ownership. */
2287 	if (flags & OPCODE_STRUCT_MAY_BORROW) {
2288 		g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, R_SCRATCH_1));
2289 		flag_set(ctx, slot_r, false);
2291 		g(gen_frame_set_pointer(ctx, slot_r, R_SCRATCH_1, true, false));
/*
 * Generate code computing the length of the array in slot_1.
 * If slot_2 == NO_FRAME_T the length itself is stored into slot_r;
 * otherwise slot_r receives the boolean (length > slot_2), and when
 * 'fused' the comparison instead feeds a conditional jump with
 * displacement offs_false taken on the false outcome.
 * NOTE(review): this listing is an elided excerpt; comments describe only
 * the code that is visible here.
 */
2299 	const struct type *t = get_type_of_local(ctx, slot_1);
2300 	uint32_t escape_label;
2301 	unsigned reg, target;
2303 	escape_label = alloc_escape_label(ctx);
2304 	if (unlikely(!escape_label))
2307 	if (slot_2 != NO_FRAME_T) {
2308 		g(gen_test_1_cached(ctx, slot_2, escape_label));
2309 		flag_set(ctx, slot_2, false);
/* Flat arrays have a compile-time constant length. */
2312 	if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2314 		target = R_SCRATCH_1;
2315 		g(gen_load_constant(ctx, target, type_def(t,flat_array)->n_elements));
2317 	} else if (slot_2 == NO_FRAME_T) {
2318 		g(gen_frame_store_imm(ctx, OP_SIZE_INT, slot_r, 0, (unsigned)type_def(t,flat_array)->n_elements));
2320 		g(gen_frame_load_cmp_imm_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, 0, type_def(t,flat_array)->n_elements, COND_G, slot_r));
2322 	flag_set(ctx, slot_r, false);
/* Heap array: fetch the data pointer. */
2324 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
/* The single n_used_entries load below is only valid if all three array
 * variants place their count at the same offset... */
2326 	if (offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_slice.n_entries) ||
2327 	    offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_pointers.n_used_entries)) {
/* ...and the single COND_A tag check below relies on the three array tags
 * being consecutive and all non-array-like tags comparing higher. */
2331 	if (DATA_TAG_array_flat != DATA_TAG_array_slice - 1 ||
2332 	    DATA_TAG_array_slice != DATA_TAG_array_pointers - 1 ||
2333 	    DATA_TAG_array_same < DATA_TAG_array_flat ||
2334 	    DATA_TAG_array_btree < DATA_TAG_array_flat ||
2335 	    DATA_TAG_array_incomplete < DATA_TAG_array_flat) {
2340 	g(gen_compare_ptr_tag(ctx, reg, DATA_TAG_array_pointers, COND_A, escape_label, R_SCRATCH_2));
2342 	if (slot_2 == NO_FRAME_T) {
2343 		target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
2345 		target = R_SCRATCH_1;
2348 #if defined(ARCH_X86) || defined(ARCH_S390)
/* Fused compare: compare the register holding slot_2 directly against the
 * in-memory count and branch. */
2349 	if (fused && ctx->registers[slot_2] >= 0) {
2350 		g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2351 		gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1 + COND_IS_LOGICAL(COND_GE));
2352 		gen_one(ctx->registers[slot_2]);
2353 		gen_address_offset();
2355 		g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_GE, -1U, -1U));
/* Otherwise load the count into 'target'. */
2359 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2360 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2362 	gen_address_offset();
/* 'garbage' extension is allowed when the int fits the compare width. */
2366 	enum extend ex = OP_SIZE_INT == i_size_cmp(OP_SIZE_INT) + (unsigned)zero ? garbage : sign_x;
2368 	if (ARCH_HAS_JMP_2REGS(COND_LE)) {
2369 		g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, 0, R_SCRATCH_2, &reg2));
2370 		g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, target, reg2));
2374 	g(gen_frame_load_cmp(ctx, OP_SIZE_INT, COND_IS_LOGICAL(COND_LE), ex, false, slot_2, 0, target));
2375 	g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, -1U, -1U));
/* No two-register jump and no usable flags: compute the comparison into
 * R_CMP_RESULT and branch on zero. */
2377 	g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, 0, R_SCRATCH_2, &reg2));
2378 	g(gen_cmp_dest_reg(ctx, i_size_cmp(OP_SIZE_INT), target, reg2, R_CMP_RESULT, 0, COND_G));
2379 	g(gen_jump(ctx, offs_false, OP_SIZE_NATIVE, COND_E, R_CMP_RESULT, -1U));
/* Non-fused outputs: store the length or the boolean result. */
2381 	} else if (slot_2 == NO_FRAME_T) {
2382 		g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
2384 		g(gen_frame_load_cmp_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, 0, R_SCRATCH_1, COND_G, slot_r));
2386 	flag_set(ctx, slot_r, false);
/*
 * Generate code for taking a sub-array slot_array[slot_from .. slot_to],
 * storing the result in slot_r.  The actual work is done by the
 * cg_upcall_array_sub upcall; this routine marshals the four arguments,
 * spills any register-cached slots so the upcall sees current frame
 * memory, and escapes to the interpreter if the upcall returns NULL.
 * OPCODE_FLAG_FREE_ARGUMENT asks the upcall to consume the array
 * reference — but only when the slot actually owns one (see upcall_label).
 */
2393 	const struct type *t = get_type_of_local(ctx, slot_array);
2394 	uint32_t escape_label, upcall_label;
2396 	escape_label = alloc_escape_label(ctx);
2397 	if (unlikely(!escape_label))
2400 	upcall_label = alloc_label(ctx);
2401 	if (unlikely(!upcall_label))
/* A flat (non-pointer) array cannot be sliced here — escape. */
2404 	if (unlikely(TYPE_IS_FLAT(t))) {
2405 		g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2408 	g(gen_test_2_cached(ctx, slot_from, slot_to, escape_label));
/* The upcall reads the frame directly — flush register-cached slots. */
2410 	if (ctx->registers[slot_array] >= 0)
2411 		g(spill(ctx, slot_array));
2412 	if (ctx->registers[slot_from] >= 0)
2413 		g(spill(ctx, slot_from));
2414 	if (ctx->registers[slot_to] >= 0)
2415 		g(spill(ctx, slot_to));
2417 	g(gen_upcall_start(ctx, 4));
2419 	g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, R_ARG0));
2420 	g(gen_upcall_argument(ctx, 0));
2422 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, R_ARG1));
2423 	g(gen_upcall_argument(ctx, 1));
2425 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_to, 0, R_ARG2));
2426 	g(gen_upcall_argument(ctx, 2));
/* Fourth argument: whether the callee should free the array reference. */
2428 	g(gen_load_constant(ctx, R_ARG3, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2429 	g(gen_upcall_argument(ctx, 3));
/* If the slot is flagged (borrowed, not owned), override "free" with 0. */
2431 	if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2432 		g(gen_test_1_cached(ctx, slot_array, upcall_label));
2433 		g(gen_load_constant(ctx, R_ARG3, 0));
2434 		g(gen_upcall_argument(ctx, 3));
2437 	gen_label(upcall_label);
2438 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
/* NULL return means the upcall could not do it natively — escape. */
2440 	g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
/* If the source was consumed and the result goes elsewhere, clear it. */
2442 	if (slot_array != slot_r) {
2443 		if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2444 			g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2445 			g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2446 			flag_set(ctx, slot_array, false);
2450 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * Generate code for skipping the first slot_from elements of slot_array,
 * storing the remainder in slot_r.  Structurally identical to
 * gen_array_sub, but with three arguments and the cg_upcall_array_skip
 * upcall.  OPCODE_FLAG_FREE_ARGUMENT is downgraded to 0 at runtime when
 * the slot does not own its reference (branch to upcall_label).
 */
2457 	const struct type *t = get_type_of_local(ctx, slot_array);
2458 	uint32_t escape_label, upcall_label;
2460 	escape_label = alloc_escape_label(ctx);
2461 	if (unlikely(!escape_label))
2464 	upcall_label = alloc_label(ctx);
2465 	if (unlikely(!upcall_label))
/* Flat arrays are not handled natively — escape. */
2468 	if (unlikely(TYPE_IS_FLAT(t))) {
2469 		g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2472 	g(gen_test_1_cached(ctx, slot_from, escape_label));
/* Flush register-cached slots: the upcall reads the frame in memory. */
2474 	if (ctx->registers[slot_array] >= 0)
2475 		g(spill(ctx, slot_array));
2476 	if (ctx->registers[slot_from] >= 0)
2477 		g(spill(ctx, slot_from));
2479 	g(gen_upcall_start(ctx, 3));
2481 	g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, R_ARG0));
2482 	g(gen_upcall_argument(ctx, 0));
2484 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, R_ARG1));
2485 	g(gen_upcall_argument(ctx, 1));
2487 	g(gen_load_constant(ctx, R_ARG2, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2488 	g(gen_upcall_argument(ctx, 2));
2490 	if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2491 		g(gen_test_1_cached(ctx, slot_array, upcall_label));
2492 		g(gen_load_constant(ctx, R_ARG2, 0));
2493 		g(gen_upcall_argument(ctx, 2));
2496 	gen_label(upcall_label);
2497 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
/* NULL result: fall back to the interpreter. */
2499 	g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
2501 	if (slot_array != slot_r) {
2502 		if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2503 			g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2504 			g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2505 			flag_set(ctx, slot_array, false);
2509 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * Generate code appending array slot_2 onto array slot_1, result in
 * slot_r.  Both operands must be non-thunk heap pointers and must not be
 * incomplete arrays — otherwise escape.  The concatenation itself is the
 * cg_upcall_array_join upcall; the FREE_ARGUMENT flags decide whether the
 * operand references are transferred to it.
 */
2516 	uint32_t escape_label;
2518 	escape_label = alloc_escape_label(ctx);
2519 	if (unlikely(!escape_label))
/* Flat operands cannot be joined here — escape if a slot holds one. */
2522 	if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_1))))
2523 		g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2524 	if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_2))))
2525 		g(gen_test_1_jz_cached(ctx, slot_2, escape_label));
/* Both pointers must be evaluated (not thunks). */
2527 	g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, R_SCRATCH_1));
2528 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, slot_1, escape_label));
2529 	g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_2, 0, R_SCRATCH_2));
2530 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, slot_2, escape_label));
2531 	g(gen_barrier(ctx));
/* Incomplete arrays need interpreter handling. */
2533 	g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_1));
2534 	g(gen_compare_da_tag(ctx, R_SCRATCH_2, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_2));
/* Take (or borrow) the operand references; slot_2 first, parked in
 * R_SAVED_1 across the second fetch. */
2536 	g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SAVED_1));
2537 	g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
2538 	g(gen_upcall_start(ctx, 2));
2539 	g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG0, R_SCRATCH_1));
2540 	g(gen_upcall_argument(ctx, 0));
2541 	g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SAVED_1));
2542 	g(gen_upcall_argument(ctx, 1));
2543 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2544 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * Generate the fast path for appending one flat element (slot_2) to the
 * flat array in slot_1, result in slot_r.  Works only when the caller
 * transfers ownership (OPCODE_FLAG_FREE_ARGUMENT), the array's tag is
 * array_flat with refcount permitting in-place update, and there is spare
 * capacity (n_used < n_allocated); every other situation escapes.
 */
2550 	uint32_t escape_label;
2553 	escape_label = alloc_escape_label(ctx);
2554 	if (unlikely(!escape_label))
/* In-place append requires an owned reference — otherwise always escape. */
2557 	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2558 		gen_insn(INSN_JMP, 0, 0, 0);
2559 		gen_four(escape_label);
2563 	g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2564 	g(gen_test_1_cached(ctx, slot_2, escape_label));
2565 	flag_set(ctx, slot_2, false);
2567 	g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
/* Must be a uniquely-referenced flat array to mutate in place. */
2569 	g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_flat, escape_label, R_SCRATCH_1));
2571 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2572 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2573 	gen_one(R_SCRATCH_2);
2574 	gen_address_offset();
/* Escape when used == allocated (no spare capacity). */
2576 	g(gen_check_array_len(ctx, reg, true, R_SCRATCH_2, COND_E, escape_label));
/* n_used_entries += 1 */
2578 	g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SCRATCH_2, 1, 0));
2580 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2581 	gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2582 	gen_address_offset();
2583 	gen_one(R_SCRATCH_1);
/* Store the new element at the old n_used index. */
2585 	g(gen_scaled_array_store(ctx, reg, data_array_offset, slot_2));
/* Move the array pointer to slot_r (compressing it first if pointers are
 * stored compressed) and release slot_1's ownership. */
2587 	if (slot_1 != slot_r) {
2588 #if !defined(POINTER_COMPRESSION)
2589 		g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2591 		unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2592 		g(gen_compress_pointer(ctx, target, reg));
2593 		g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2595 		g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2596 		g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2597 		flag_set(ctx, slot_1, false);
/*
 * Generate the fast path for appending one pointer element (slot_2) to
 * the pointer array in slot_1, result in slot_r.  Counterpart of
 * gen_array_append_one_flat for DATA_TAG_array_pointers: requires an
 * owned reference (OPCODE_FLAG_FREE_ARGUMENT), a uniquely-referenced
 * pointer array, and spare capacity; otherwise escapes.
 */
2605 	uint32_t escape_label;
2608 	escape_label = alloc_escape_label(ctx);
2609 	if (unlikely(!escape_label))
/* No owned reference: in-place append impossible, escape unconditionally. */
2612 	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2613 		gen_insn(INSN_JMP, 0, 0, 0);
2614 		gen_four(escape_label);
2618 	g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2620 	g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
2622 	g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_pointers, escape_label, R_SCRATCH_1));
/* Load the current n_used_entries (kept in R_SAVED_2 across the upcalls
 * gen_frame_get_pointer may emit). */
2624 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2625 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2627 	gen_address_offset();
/* Escape when there is no spare capacity. */
2629 	g(gen_check_array_len(ctx, reg, true, R_SAVED_2, COND_E, escape_label));
/* Obtain the element pointer, consuming slot_2's reference if requested. */
2631 	g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SCRATCH_2));
/* n_used_entries += 1 */
2633 	g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SAVED_2, 1, 0));
2635 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2636 	gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2637 	gen_address_offset();
2638 	gen_one(R_SCRATCH_1);
/* Fetch the element-pointer array and store the new element at the old
 * n_used index. */
2640 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2641 	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2642 	gen_one(R_SCRATCH_3);
2643 	gen_address_offset();
2645 	g(gen_scaled_array_address(ctx, slot_size, R_SAVED_2, R_SCRATCH_3, R_SAVED_2, 0));
2647 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2648 	gen_one(ARG_ADDRESS_1);
2651 	gen_one(R_SCRATCH_2);
/* Transfer the array pointer to slot_r (compressed if configured) and
 * clear slot_1's ownership. */
2653 	if (slot_1 != slot_r) {
2654 #if !defined(POINTER_COMPRESSION)
2655 		g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
2657 		unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2658 		g(gen_compress_pointer(ctx, target, reg));
2659 		g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
2661 		g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2662 		g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2663 		flag_set(ctx, slot_1, false);
2669 static bool attr_w gen_io(struct codegen_context *ctx, frame_t code, frame_t slot_1, frame_t slot_2, frame_t slot_3)
2671 uint32_t reload_label;
2674 reload_label = alloc_reload_label(ctx);
2675 if (unlikely(!reload_label))
2679 mem_free(ctx->var_aux);
2680 ctx->var_aux = NULL;
2682 ctx->var_aux = mem_alloc_array_mayfail(mem_alloc_mayfail, frame_t *, 0, 0, slot_1 + slot_2, sizeof(frame_t), &ctx->err);
2683 if (unlikely(!ctx->var_aux))
2686 for (i = 0; i < slot_1 + slot_2; i++)
2687 ctx->var_aux[i] = get_uint32(ctx);
2688 for (i = 0; i < slot_3; i++)
2691 for (i = 0; i < slot_2; i++) {
2692 frame_t input_slot = ctx->var_aux[slot_1 + i];
2693 if (ctx->registers[input_slot] >= 0)
2694 g(spill(ctx, input_slot));
2697 /*gen_insn(INSN_JMP, 0, 0, 0); gen_four(alloc_escape_label(ctx));*/
2699 g(gen_upcall_start(ctx, 3));
2700 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
2701 g(gen_upcall_argument(ctx, 0));
2703 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
2704 g(gen_upcall_argument(ctx, 1));
2706 g(gen_load_constant(ctx, R_ARG2, ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3));
2707 g(gen_upcall_argument(ctx, 2));
2708 /*debug("arg2: %08x", ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3);*/
2710 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2711 g(gen_sanitize_returned_pointer(ctx, R_RET0));
2712 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_ADDRESS, R_RET0, ptr_to_num(POINTER_FOLLOW_THUNK_GO), COND_NE, reload_label));
2714 for (i = 0; i < slot_1; i++) {
2715 frame_t output_slot = ctx->var_aux[i];
2716 if (ctx->registers[output_slot] >= 0)
2717 g(unspill(ctx, output_slot));
2719 for (i = 0; i < slot_1; i++) {
2720 frame_t output_slot = ctx->var_aux[i];
2721 flag_set_unknown(ctx, output_slot);
2722 if (da(ctx->fn,function)->local_variables_flags[output_slot].must_be_flat) {
2723 uint32_t escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2724 if (unlikely(!escape_label))
2726 g(gen_test_1(ctx, R_FRAME, output_slot, 0, escape_label, false, TEST));
2728 if (da(ctx->fn,function)->local_variables_flags[output_slot].must_be_data) {
2729 uint32_t escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2730 if (unlikely(!escape_label))
2732 g(gen_frame_load(ctx, OP_SIZE_SLOT, garbage, slot_1, 0, R_SCRATCH_1));
2733 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, slot_1, escape_label));