2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
/*
 * gen_flat_move_copy - emit code that copies a flat (by-value) slot
 * slot_1 into slot_r.
 *
 * NOTE(review): this extraction is missing lines (the embedded original
 * line numbers are not contiguous) - error-return statements, braces and
 * the final return are not visible; comments describe only what is shown.
 */
19 static bool attr_w gen_flat_move_copy(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r)
21 uint32_t escape_label;
/* Escape label falls back to the interpreter on unsupported cases. */
23 escape_label = alloc_escape_label(ctx);
24 if (unlikely(!escape_label))
/* Presumably escapes when slot_1's pointer flag is set (value not flat) - confirm. */
27 g(gen_test_1_cached(ctx, slot_1, escape_label));
/* Copy the flat slot contents from slot_1 to slot_r. */
29 g(gen_memcpy_slots(ctx, slot_r, slot_1));
/* Record in the compile-time flag cache that both slots now hold flat data. */
31 flag_set(ctx, slot_1, false);
32 flag_set(ctx, slot_r, false);
/*
 * gen_ref_move_copy - emit code that moves or copies a reference
 * (pointer-holding) slot slot_1 into slot_r, handling the
 * OPCODE_REF_MOVE / OPCODE_REF_COPY / OPCODE_REF_MOVE_CLEAR variants.
 *
 * NOTE(review): extraction is missing lines (error returns, some braces);
 * also '®' below appears to be mis-encoded '&reg' - confirm against the
 * original source before editing.
 */
37 static bool attr_w gen_ref_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* Destination must stay flat but we have a reference: always escape. */
40 if (flag_must_be_flat(ctx, slot_r)) {
41 uint32_t escape_label = alloc_escape_label(ctx);
42 if (unlikely(!escape_label))
44 gen_insn(INSN_JMP, 0, 0, 0);
45 gen_four(escape_label);
48 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
/* Source may be a thunk but destination must be evaluated data:
 * escape if the loaded pointer is a thunk, then remember slot_r is not one. */
49 if (!da(ctx->fn,function)->local_variables_flags[slot_1].must_be_data &&
50 da(ctx->fn,function)->local_variables_flags[slot_r].must_be_data) {
51 uint32_t escape_label = alloc_escape_label(ctx);
52 if (unlikely(!escape_label))
54 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
55 ctx->flag_cache[slot_r] |= FLAG_CACHE_IS_NOT_THUNK;
/* Store the pointer into slot_r and mark it as holding a reference. */
57 g(gen_frame_store(ctx, OP_SIZE_SLOT, slot_r, 0, reg));
58 g(gen_set_1(ctx, R_FRAME, slot_r, 0, true));
59 flag_set(ctx, slot_r, true);
/* REF_COPY: both slots keep the pointer, so take an extra reference. */
60 if (code == OPCODE_REF_COPY) {
61 g(gen_upcall_start(ctx, 1));
62 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
63 g(gen_upcall_argument(ctx, 0));
64 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* REF_MOVE of an owned (never-borrowed) pointer: just clear source flag. */
65 } else if (code == OPCODE_REF_MOVE && !da(ctx->fn,function)->local_variables_flags[slot_1].may_be_borrowed) {
66 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
67 flag_set(ctx, slot_1, false);
/* Otherwise the source may be borrowed: test the runtime flag and take a
 * reference only when the pointer was borrowed (TEST_CLEAR path). */
70 if (unlikely(!(label_id = alloc_label(ctx))))
72 if (flag_is_set(ctx, slot_1)) {
73 g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
76 if (flag_is_clear(ctx, slot_1))
78 g(gen_test_1(ctx, R_FRAME, slot_1, 0, label_id, false, TEST_CLEAR));
80 g(gen_upcall_start(ctx, 1));
81 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
82 g(gen_upcall_argument(ctx, 0));
83 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
/* MOVE_CLEAR additionally zeroes the source slot contents. */
86 if (code == OPCODE_REF_MOVE_CLEAR)
87 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
88 flag_set(ctx, slot_1, false);
/*
 * gen_box_move_copy - emit code that boxes a flat slot_1 into a
 * heap-allocated pointer stored in slot_r (OPCODE_BOX_MOVE_CLEAR also
 * clears the source slot).
 *
 * NOTE(review): extraction is missing lines (error returns, braces,
 * closing return); comments describe only what is visible.
 */
93 static bool attr_w gen_box_move_copy(struct codegen_context *ctx, code_t code, frame_t slot_1, frame_t slot_r)
/* Destination must stay flat but boxing produces a pointer: escape. */
95 if (flag_must_be_flat(ctx, slot_r)) {
96 uint32_t escape_label = alloc_escape_label(ctx);
97 if (unlikely(!escape_label))
99 gen_insn(INSN_JMP, 0, 0, 0);
100 gen_four(escape_label);
/* The upcall reads the slot from memory, so spill a register-resident slot. */
104 if (slot_is_register(ctx, slot_1))
105 g(spill(ctx, slot_1));
/* Upcall cg_upcall_ipret_copy_variable_to_pointer(frame, slot_1, deref). */
107 g(gen_upcall_start(ctx, 3));
109 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
110 g(gen_upcall_argument(ctx, 0));
112 g(gen_load_constant(ctx, R_ARG1, slot_1));
113 g(gen_upcall_argument(ctx, 1));
/* Third argument: whether the source is consumed (MOVE_CLEAR). */
115 g(gen_load_constant(ctx, R_ARG2, code == OPCODE_BOX_MOVE_CLEAR));
116 g(gen_upcall_argument(ctx, 2));
118 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_copy_variable_to_pointer), 3));
120 if (code == OPCODE_BOX_MOVE_CLEAR) {
121 g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
122 flag_set(ctx, slot_1, false);
/* Store the returned boxed pointer into the destination slot. */
125 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_eval - emit code that forces evaluation of slot_1: if the slot
 * holds a thunk, escape to the interpreter to evaluate it.
 *
 * NOTE(review): extraction is missing lines (error returns, closing
 * brace/return); '®' below appears to be mis-encoded '&reg' - confirm.
 */
130 static bool attr_w gen_eval(struct codegen_context *ctx, frame_t slot_1)
132 uint32_t escape_label, skip_label;
135 escape_label = alloc_escape_label(ctx);
136 if (unlikely(!escape_label))
139 skip_label = alloc_label(ctx);
140 if (unlikely(!skip_label))
/* If the slot's flag is clear (flat data, no pointer), skip the thunk test. */
143 g(gen_test_1_jz_cached(ctx, slot_1, skip_label));
145 g(gen_frame_get_slot(ctx, slot_1, R_SCRATCH_1, ®));
/* Escape when the loaded pointer is an unevaluated thunk. */
146 g(gen_ptr_is_thunk(ctx, reg, slot_1, escape_label));
148 gen_label(skip_label);
/*
 * gen_jump - emit a (conditional) jump to a bytecode-relative target.
 * jmp_offset is in bytes relative to the current bytecode position;
 * reg1/reg2 select the comparison form: two registers -> compare-and-jump,
 * one register -> jump-on-zero, none -> plain or flags-based jump.
 * -1U means "no register".
 *
 * NOTE(review): extraction is missing lines (the #else/#endif arms and
 * closing braces are not all visible); comments cover visible code only.
 */
155 static bool attr_w gen_jump(struct codegen_context *ctx, int32_t jmp_offset, unsigned op_size, unsigned cond, unsigned reg1, unsigned reg2)
/* Translate byte offset to a bytecode instruction index and lazily
 * allocate a label for that target. */
155 ip_t ip = (ctx->current_position - da(ctx->fn,function)->code) + (jmp_offset / (int)sizeof(code_t));
156 if (likely(!ctx->code_labels[ip])) {
157 ctx->code_labels[ip] = alloc_label(ctx);
158 if (unlikely(!ctx->code_labels[ip]))
/* Two registers: full compare + conditional jump. */
161 if (reg1 != -1U && reg2 != -1U) {
162 g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size_cmp(op_size), reg1, reg2, cond, ctx->code_labels[ip]));
/* One register: jump when (reg1 cond 0). */
163 } else if (reg1 != -1U) {
164 g(gen_jmp_on_zero(ctx, op_size, reg1, cond, ctx->code_labels[ip]));
/* No registers: unconditional jump or jump on already-set CPU flags. */
165 } else if (cond == COND_ALWAYS) {
166 gen_insn(INSN_JMP, 0, 0, 0);
167 gen_four(ctx->code_labels[ip]);
168 #if defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_PARISC)
/* These architectures have a separate FP condition test jump. */
169 } else if (cond & COND_FP) {
170 gen_insn(INSN_JMP_FP_TEST, 0, cond, 0);
171 gen_four(ctx->code_labels[ip]);
174 gen_insn(COND_IS_LOGICAL(cond) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, i_size_cmp(op_size), cond, 0);
175 gen_four(ctx->code_labels[ip]);
/*
 * gen_cond_jump - emit a jump taken when the flat option value in `slot`
 * is zero (COND_E after comparing/testing against zero).
 *
 * NOTE(review): extraction is missing lines (e.g. the gen_imm/#else arms
 * around the memory-compare path); '®1' appears to be mis-encoded
 * '&reg1' - confirm against the original source.
 */
182 static bool attr_w gen_cond_jump(struct codegen_context *ctx, frame_t slot, int32_t jmp_offset)
182 unsigned size = log_2(sizeof(ajla_flat_option_t));
183 size_t attr_unused offset;
185 if (slot_is_register(ctx, slot)) {
/* x86/s390 can compare the slot directly in frame memory. */
188 #if defined(ARCH_S390) || defined(ARCH_X86)
189 offset = (size_t)slot * slot_size;
190 #if defined(ARCH_S390)
191 if (size != OP_SIZE_1)
194 g(gen_address(ctx, R_FRAME, offset, IMM_PURPOSE_MVI_CLI_OFFSET, size));
195 gen_insn(INSN_CMP, size, 0, 2);
196 gen_address_offset();
200 g(gen_jump(ctx, jmp_offset, size, COND_E, -1U, -1U));
/* Generic path: load the value into a register, then jump if zero. */
205 g(gen_frame_get(ctx, size, i_size(size) == size ? garbage : native, slot, R_SCRATCH_1, ®1));
206 g(gen_jump(ctx, jmp_offset, size, COND_E, reg1, -1U));
/*
 * gen_load_fn_or_curry - emit code that allocates a function_reference
 * data object, points it at a function (direct local-directory entry, or
 * indirect pointer taken from slot_fn when currying), attaches the
 * current arguments (flat values inlined, others boxed), and stores the
 * resulting pointer into slot_r.
 *
 * NOTE(review): extraction is missing lines (the curry/non-curry
 * if/else framing, error returns and several closing braces are not
 * visible); comments below describe only the statements shown.
 */
210 static bool attr_w gen_load_fn_or_curry(struct codegen_context *ctx, frame_t fn_idx, frame_t slot_fn, frame_t slot_r, unsigned flags)
/* fn_idx == NO_FRAME_T selects the "curry" form (function from slot_fn). */
212 bool curry = fn_idx == NO_FRAME_T;
213 uint32_t escape_label;
216 escape_label = alloc_escape_label(ctx);
217 if (unlikely(!escape_label))
/* Allocate a function_reference with ctx->args_l argument slots;
 * escape if the allocation upcall returns NULL. */
220 g(gen_upcall_start(ctx, 1));
222 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
223 g(gen_upcall_argument(ctx, 0));
225 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_function_reference_mayfail), 1));
226 g(gen_sanitize_returned_pointer(ctx, R_RET0));
227 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the new object in R_SAVED_1 across the upcalls below. */
229 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Direct case: store local_directory[fn_idx] and is_indirect = 0. */
232 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
234 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.direct), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
235 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
236 gen_address_offset();
237 gen_one(R_SCRATCH_1);
239 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
240 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
241 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
242 gen_address_offset();
/* Curry case: take the function pointer from slot_fn and is_indirect = 1. */
245 g(gen_frame_get_pointer(ctx, slot_fn, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
247 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.u.indirect), IMM_PURPOSE_STR_OFFSET, OP_SIZE_ADDRESS));
248 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
249 gen_address_offset();
250 gen_one(R_SCRATCH_1);
252 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.function_reference.is_indirect), IMM_PURPOSE_STR_OFFSET, log_2(sizeof(bool))));
253 g(gen_imm(ctx, 1, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(uchar_efficient_t))));
254 gen_insn(INSN_MOV, log_2(sizeof(uchar_efficient_t)), 0, 0);
255 gen_address_offset();
/* Copy every pending argument into the function_reference. */
259 for (i = 0; i < ctx->args_l; i++) {
260 uintptr_t arg_offset_tag = offsetof(struct data, u_.function_reference.arguments[i].tag);
261 uintptr_t arg_offset_ptr = offsetof(struct data, u_.function_reference.arguments[i].u.ptr);
262 uintptr_t arg_offset_slot = offsetof(struct data, u_.function_reference.arguments[i].u.slot);
263 frame_t arg_slot = ctx->args[i].slot;
264 const struct type *t = get_type_of_local(ctx, arg_slot);
265 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
266 skip_flat_label = alloc_label(ctx);
267 if (unlikely(!skip_flat_label))
269 set_ptr_label = alloc_label(ctx);
270 if (unlikely(!set_ptr_label))
272 next_arg_label = alloc_label(ctx);
273 if (unlikely(!next_arg_label))
/* Flat argument small enough to embed: store tag + raw value inline. */
275 if (TYPE_IS_FLAT(t)) {
276 g(gen_test_1_cached(ctx, arg_slot, skip_flat_label));
277 if (t->size <= slot_size && TYPE_TAG_IS_BUILTIN(t->tag)) {
278 unsigned copy_size = OP_SIZE_SLOT;
279 if (is_power_of_2(t->size))
280 copy_size = log_2(t->size);
282 copy_size = maximum(copy_size, OP_SIZE_4);
283 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
284 g(gen_imm(ctx, t->tag, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
285 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
286 gen_address_offset();
/* Value still in a CPU register: store it directly. */
289 if (slot_is_register(ctx, arg_slot)) {
290 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
291 gen_insn(INSN_MOV, spill_size(t), 0, 0);
292 gen_address_offset();
293 gen_one(ctx->registers[arg_slot]);
/* Otherwise load from the frame, then store into the argument slot. */
296 #if defined(ARCH_S390)
297 if (copy_size == OP_SIZE_1 && !cpu_test_feature(CPU_FEATURE_long_displacement)) {
298 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, IMM_PURPOSE_LDR_OFFSET, copy_size));
299 gen_insn(INSN_MOV_MASK, OP_SIZE_NATIVE, MOV_MASK_0_8, 0);
300 gen_one(R_SCRATCH_1);
301 gen_one(R_SCRATCH_1);
302 gen_address_offset();
306 g(gen_address(ctx, R_FRAME, (size_t)arg_slot * slot_size, ARCH_PREFERS_SX(copy_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, copy_size));
307 gen_insn(ARCH_PREFERS_SX(copy_size) ? INSN_MOVSX : INSN_MOV, copy_size, 0, 0);
308 gen_one(R_SCRATCH_1);
309 gen_address_offset();
312 g(gen_address(ctx, R_SAVED_1, arg_offset_slot, IMM_PURPOSE_STR_OFFSET, copy_size));
313 gen_insn(INSN_MOV, copy_size, 0, 0);
314 gen_address_offset();
315 gen_one(R_SCRATCH_1);
317 gen_insn(INSN_JMP, 0, 0, 0);
318 gen_four(next_arg_label);
/* Flat but too large/non-builtin: box it via cg_upcall_flat_to_data. */
320 if (slot_is_register(ctx, arg_slot))
321 g(spill(ctx, arg_slot));
323 g(gen_upcall_start(ctx, 3));
325 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
326 g(gen_upcall_argument(ctx, 0));
328 g(gen_load_constant(ctx, R_ARG1, arg_slot));
329 g(gen_upcall_argument(ctx, 1));
331 g(gen_imm(ctx, (size_t)arg_slot * slot_size, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
332 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
336 g(gen_upcall_argument(ctx, 2));
338 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
340 gen_insn(INSN_JMP, 0, 0, 0);
341 gen_four(set_ptr_label);
/* Non-flat argument: take its pointer (optionally consuming the slot). */
345 gen_label(skip_flat_label);
346 g(gen_frame_get_pointer(ctx, arg_slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* Common pointer path: tag = unknown, store pointer in the argument. */
348 gen_label(set_ptr_label);
349 g(gen_address(ctx, R_SAVED_1, arg_offset_tag, IMM_PURPOSE_STR_OFFSET, log_2(sizeof(type_tag_t))));
350 g(gen_imm(ctx, TYPE_TAG_unknown, IMM_PURPOSE_STORE_VALUE, log_2(sizeof(type_tag_t))));
351 gen_insn(INSN_MOV, log_2(sizeof(type_tag_t)), 0, 0);
352 gen_address_offset();
355 g(gen_address(ctx, R_SAVED_1, arg_offset_ptr, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
356 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
357 gen_address_offset();
360 gen_label(next_arg_label);
/* Publish the (compressed) function_reference pointer into slot_r. */
363 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
364 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_call - emit code for OPCODE_CALL / OPCODE_CALL_STRICT /
 * OPCODE_CALL_SPARK: check stack space, build the callee's frame below
 * the current one (new_fp_offset is negative), copy/transfer the
 * arguments, then jump into the callee's unoptimized generated code.
 *
 * NOTE(review): extraction is missing many lines (spill loop body,
 * #else/#endif arms, goto targets, closing braces); comments describe
 * only the statements visible here.
 */
369 static bool attr_w gen_call(struct codegen_context *ctx, code_t code, frame_t fn_idx)
371 struct data *new_fn = ctx->local_directory[fn_idx];
372 frame_t required_slots = da(new_fn,function)->frame_slots;
373 frame_t bitmap_slots = da(new_fn,function)->n_bitmap_slots;
375 uint32_t escape_label;
376 int64_t new_fp_offset;
377 uchar_efficient_t call_mode;
/* On POWER the flags-based overflow check is disabled - use a sign test. */
379 bool arch_use_flags = ARCH_HAS_FLAGS;
380 #if defined(ARCH_POWER)
381 arch_use_flags = false;
384 escape_label = alloc_escape_label(ctx);
385 if (unlikely(!escape_label))
/* Spill all register-resident variables before the call. */
388 for (v = MIN_USEABLE_SLOT; v < function_n_variables(ctx->fn); v++) {
389 if (slot_is_register(ctx, v)) {
/* available_slots -= required_slots; escape if it underflows. */
394 g(gen_frame_load_raw(ctx, log_2(sizeof(stack_size_t)), native, 0, frame_offs(available_slots), false, R_SCRATCH_1));
395 g(gen_imm(ctx, required_slots, IMM_PURPOSE_SUB, i_size(log_2(sizeof(stack_size_t)))));
396 gen_insn(INSN_ALU + ARCH_PARTIAL_ALU(i_size(log_2(sizeof(stack_size_t)))), i_size(log_2(sizeof(stack_size_t))), ALU_SUB, arch_use_flags);
397 gen_one(R_SCRATCH_1);
398 gen_one(R_SCRATCH_1);
401 if (arch_use_flags) {
402 gen_insn(COND_IS_LOGICAL(COND_B) ? INSN_JMP_COND_LOGICAL : INSN_JMP_COND, log_2(sizeof(stack_size_t)), COND_B, 0);
403 gen_four(escape_label);
405 g(gen_cmp_test_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, R_SCRATCH_1, COND_S, escape_label));
/* Initialize the new frame header (it sits below the current frame). */
408 new_fp_offset = -(ssize_t)(required_slots * slot_size);
410 g(gen_frame_store_raw(ctx, log_2(sizeof(stack_size_t)), 0, new_fp_offset + frame_offs(available_slots), R_SCRATCH_1));
411 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(ip_t)), 0, new_fp_offset + frame_offs(previous_ip), ctx->return_values - da(ctx->fn,function)->code));
412 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
413 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
414 call_mode = code == OPCODE_CALL ? CALL_MODE_NORMAL : code == OPCODE_CALL_STRICT ? CALL_MODE_STRICT : CALL_MODE_SPARK;
415 g(gen_frame_store_imm_raw(ctx, log_2(sizeof(uchar_efficient_t)), 0, new_fp_offset + frame_offs(mode), call_mode));
/* Clear the callee's pointer-flag bitmap. */
417 g(gen_clear_bitmap(ctx, frame_offset, R_FRAME, new_fp_offset, bitmap_slots));
/* Transfer each argument into the callee frame. */
419 for (i = 0; i < ctx->args_l; i++) {
420 const struct code_arg *src_arg = &ctx->args[i];
421 const struct local_arg *dest_arg = &da(new_fn,function)->args[i];
422 const struct type *t = get_type_of_local(ctx, src_arg->slot);
423 uint32_t non_flat_label, thunk_label, incr_ref_label, next_arg_label;
425 non_flat_label = alloc_label(ctx);
426 if (unlikely(!non_flat_label))
428 thunk_label = alloc_label(ctx);
429 if (unlikely(!thunk_label))
431 incr_ref_label = alloc_label(ctx);
432 if (unlikely(!incr_ref_label))
434 next_arg_label = alloc_label(ctx);
435 if (unlikely(!next_arg_label))
/* Flat source: memcpy into the callee slot if it accepts flat data,
 * otherwise box it with cg_upcall_flat_to_data. */
437 if (TYPE_IS_FLAT(t)) {
438 g(gen_test_1_cached(ctx, src_arg->slot, non_flat_label));
439 if (dest_arg->may_be_flat) {
440 g(gen_memcpy_from_slot(ctx, R_FRAME, new_fp_offset + (size_t)dest_arg->slot * slot_size, src_arg->slot));
442 g(gen_upcall_start(ctx, 3));
444 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
445 g(gen_upcall_argument(ctx, 0));
447 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
448 g(gen_upcall_argument(ctx, 1));
450 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
451 g(gen_upcall_argument(ctx, 2));
453 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
455 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, R_RET0));
457 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
460 if (flag_is_clear(ctx, src_arg->slot))
461 goto skip_ref_argument;
463 gen_insn(INSN_JMP, 0, 0, 0);
464 gen_four(next_arg_label);
/* Pointer source: lend, give, or share-with-refcount as permitted. */
466 gen_label(non_flat_label);
468 if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_LEND) {
469 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
470 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
471 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
472 gen_insn(INSN_JMP, 0, 0, 0);
473 gen_four(next_arg_label);
474 } else if (dest_arg->may_be_borrowed && src_arg->flags & OPCODE_CALL_MAY_GIVE) {
475 g(gen_test_1_cached(ctx, src_arg->slot, thunk_label));
476 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
477 g(gen_ptr_is_thunk(ctx, reg, src_arg->slot, thunk_label));
478 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
479 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
480 gen_insn(INSN_JMP, 0, 0, 0);
481 gen_four(next_arg_label);
/* Thunk or non-borrowable pointer: store owned and fix refcounts. */
484 gen_label(thunk_label);
485 g(gen_set_1(ctx, R_FRAME, dest_arg->slot, new_fp_offset, true));
486 g(gen_frame_get_slot(ctx, src_arg->slot, R_SCRATCH_1, ®));
487 g(gen_frame_store_raw(ctx, OP_SIZE_SLOT, dest_arg->slot, new_fp_offset, reg));
488 if (src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT) {
489 g(gen_frame_clear_raw(ctx, OP_SIZE_SLOT, src_arg->slot));
490 if (flag_is_set(ctx, src_arg->slot)) {
491 g(gen_set_1(ctx, R_FRAME, src_arg->slot, 0, false));
492 flag_set(ctx, src_arg->slot, false);
493 goto skip_ref_argument;
495 if (flag_is_clear(ctx, src_arg->slot))
497 g(gen_test_1(ctx, R_FRAME, src_arg->slot, 0, incr_ref_label, true, TEST_CLEAR));
498 gen_insn(INSN_JMP, 0, 0, 0);
499 gen_four(next_arg_label);
/* Shared pointer: take an extra reference via upcall. */
502 gen_label(incr_ref_label);
504 g(gen_upcall_start(ctx, 1));
506 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg));
507 g(gen_upcall_argument(ctx, 0));
509 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
512 gen_label(next_arg_label);
/* Store the callee function pointer into the new frame header. */
515 g(load_function_offset(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.local_directory[fn_idx])));
517 g(gen_address(ctx, R_SCRATCH_1, 0, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
518 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
519 gen_one(R_SCRATCH_1);
520 gen_address_offset();
522 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
524 g(gen_frame_store_raw(ctx, OP_SIZE_ADDRESS, 0, frame_offs(function) + new_fp_offset, R_SCRATCH_1));
/* Move the frame pointer down to the callee frame. */
526 #if !defined(ARCH_X86) && !defined(ARCH_PARISC)
527 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_SUB, R_FRAME, R_FRAME, -new_fp_offset, 0));
529 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* Load the callee's codegen object; escape if it is still a thunk. */
532 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
533 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
534 gen_one(R_SCRATCH_1);
535 gen_address_offset();
537 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, ctx->escape_nospill_label));
540 gen_pointer_compression(R_SCRATCH_1);
/* Tail-jump into the callee's unoptimized code entry point. */
541 #if (defined(ARCH_X86) && !defined(ARCH_X86_X32)) || defined(ARCH_ARM32)
542 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
543 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
544 gen_address_offset_compressed();
546 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code_base), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
547 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
548 gen_one(R_SCRATCH_1);
549 gen_address_offset_compressed();
551 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
552 gen_one(R_SCRATCH_1);
/* Flag cache is invalid after the call boundary. */
554 g(clear_flag_cache(ctx));
/*
 * gen_return - emit code for returning from the current function: find
 * the caller's frame (at +new_fp_offset), copy each return value into
 * the caller's destination slots (flat copy or pointer store as
 * dictated by the OPCODE_MAY_RETURN_FLAT bit of the caller's bytecode),
 * restore the frame pointer and indirect-jump back into the caller's
 * generated code at previous_ip.
 *
 * NOTE(review): extraction is missing many lines (the scaled_store_done /
 * scaled_jmp_done labels, #else/#endif arms, error returns, closing
 * braces are not visible); comments cover only the statements shown.
 */
559 static bool attr_w gen_return(struct codegen_context *ctx)
561 int64_t new_fp_offset;
562 uint32_t escape_label;
564 int64_t retval_offset;
565 unsigned attr_unused reg1;
567 escape_label = alloc_escape_label(ctx);
568 if (unlikely(!escape_label))
/* Caller frame sits above ours by frame_slots * slot_size bytes. */
571 new_fp_offset = (size_t)da(ctx->fn,function)->frame_slots * slot_size;
/* Escape when there is no caller function (top frame). */
573 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_2));
575 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_SCRATCH_2, COND_E, escape_label));
/* Escape when the caller's codegen object is still an unevaluated thunk. */
577 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
578 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
579 gen_one(R_SCRATCH_1);
580 gen_address_offset();
582 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
/* Propagate the timestamp to the caller frame. */
585 g(gen_frame_load_raw(ctx, log_2(sizeof(timestamp_t)), garbage, 0, frame_offs(timestamp), false, R_SCRATCH_1));
586 g(gen_frame_store_raw(ctx, log_2(sizeof(timestamp_t)), 0, new_fp_offset + frame_offs(timestamp), R_SCRATCH_1));
/* R_SAVED_1 = address of the caller's bytecode at previous_ip. */
588 g(gen_frame_load_raw(ctx, log_2(sizeof(ip_t)), native, 0, frame_offs(previous_ip), false, R_SCRATCH_1));
590 g(gen_address(ctx, R_SCRATCH_2, offsetof(struct data, u_.function.code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
591 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
592 gen_one(R_SCRATCH_2);
593 gen_address_offset();
595 g(gen_lea3(ctx, R_SAVED_1, R_SCRATCH_2, R_SCRATCH_1, log_2(sizeof(code_t)), 0));
/* Copy each return value into the caller's destination slot. */
598 for (i = 0; i < ctx->args_l; i++) {
599 const struct code_arg *src_arg = &ctx->args[i];
600 const struct type *t = get_type_of_local(ctx, src_arg->slot);
601 uint32_t copy_ptr_label, load_write_ptr_label, write_ptr_label, next_arg_label;
603 copy_ptr_label = alloc_label(ctx);
604 if (unlikely(!copy_ptr_label))
607 load_write_ptr_label = alloc_label(ctx);
608 if (unlikely(!load_write_ptr_label))
611 write_ptr_label = alloc_label(ctx);
612 if (unlikely(!write_ptr_label))
615 next_arg_label = alloc_label(ctx);
616 if (unlikely(!next_arg_label))
/* R_SAVED_2 = destination slot index read from the caller's bytecode. */
619 g(gen_load_code_32(ctx, R_SAVED_2, R_SAVED_1, retval_offset));
621 if (TYPE_IS_FLAT(t)) {
622 uint32_t flat_to_data_label;
623 g(gen_test_1_cached(ctx, src_arg->slot, copy_ptr_label));
625 flat_to_data_label = alloc_label(ctx);
626 if (unlikely(!flat_to_data_label))
/* Check the caller's OPCODE_MAY_RETURN_FLAT bit; box if it is clear. */
629 #if defined(ARCH_X86)
630 g(gen_address(ctx, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3), IMM_PURPOSE_LDR_OFFSET, log_2(sizeof(code_t))));
631 g(gen_imm(ctx, OPCODE_MAY_RETURN_FLAT, IMM_PURPOSE_TEST, log_2(sizeof(code_t))));
632 gen_insn(INSN_TEST, log_2(sizeof(code_t)), 0, 1);
633 gen_address_offset();
636 gen_insn(INSN_JMP_COND, log_2(sizeof(code_t)), COND_E, 0);
637 gen_four(flat_to_data_label);
639 g(gen_load_two(ctx, R_SCRATCH_1, R_SAVED_1, retval_offset + 2 + 2 * (ARG_MODE_N >= 3)));
641 g(gen_cmp_test_imm_jmp(ctx, INSN_TEST, OP_SIZE_NATIVE, R_SCRATCH_1, OPCODE_MAY_RETURN_FLAT, COND_E, flat_to_data_label));
/* Flat copy into the caller's slot (x86 can address it directly). */
643 #if defined(ARCH_X86)
644 if (is_power_of_2(t->size) && t->size <= 2U << OP_SIZE_NATIVE) {
645 if (t->size == 2U << OP_SIZE_NATIVE) {
646 g(gen_frame_load_2(ctx, OP_SIZE_NATIVE, src_arg->slot, 0, R_SCRATCH_1, R_SCRATCH_2));
648 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
649 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
652 gen_eight(new_fp_offset + lo_word(OP_SIZE_NATIVE));
653 gen_one(R_SCRATCH_1);
655 gen_insn(INSN_MOV, OP_SIZE_NATIVE, 0, 0);
656 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
659 gen_eight(new_fp_offset + hi_word(OP_SIZE_NATIVE));
660 gen_one(R_SCRATCH_2);
662 g(gen_frame_get(ctx, log_2(t->size), garbage, src_arg->slot, R_SCRATCH_1, ®1));
664 gen_insn(INSN_MOV, log_2(t->size), 0, 0);
665 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
668 gen_eight(new_fp_offset);
/* Generic path: compute the destination address, then memcpy. */
674 g(gen_lea3(ctx, R_SCRATCH_2, R_FRAME, R_SAVED_2, OP_SIZE_SLOT, new_fp_offset));
676 g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, 0, src_arg->slot));
679 gen_insn(INSN_JMP, 0, 0, 0);
680 gen_four(next_arg_label);
/* Caller cannot accept flat data: box the value via upcall. */
682 gen_label(flat_to_data_label);
684 if (slot_is_register(ctx, src_arg->slot))
685 g(spill(ctx, src_arg->slot));
687 g(gen_upcall_start(ctx, 3));
689 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
690 g(gen_upcall_argument(ctx, 0));
692 g(gen_load_constant(ctx, R_ARG1, src_arg->slot));
693 g(gen_upcall_argument(ctx, 1));
695 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)src_arg->slot * slot_size, 0));
696 g(gen_upcall_argument(ctx, 2));
698 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
700 if (flag_is_clear(ctx, src_arg->slot))
701 goto skip_ref_argument;
703 gen_insn(INSN_JMP, 0, 0, 0);
704 gen_four(write_ptr_label);
/* Pointer return: keep the reference alive if it is not given away. */
707 gen_label(copy_ptr_label);
709 if (unlikely(!(src_arg->flags & OPCODE_FLAG_FREE_ARGUMENT))) {
710 g(gen_upcall_start(ctx, 1));
711 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
712 g(gen_upcall_argument(ctx, 0));
713 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
714 } else if (da(ctx->fn,function)->local_variables_flags[src_arg->slot].may_be_borrowed) {
715 g(gen_test_1_cached(ctx, src_arg->slot, load_write_ptr_label));
716 g(gen_upcall_start(ctx, 1));
717 g(gen_frame_load_slot(ctx, src_arg->slot, R_ARG0));
718 g(gen_upcall_argument(ctx, 0));
719 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_reference_owned), 1));
722 gen_label(load_write_ptr_label);
724 g(gen_frame_load_slot(ctx, src_arg->slot, R_RET0));
/* Store the pointer into the caller's slot (slot index in R_SAVED_2). */
727 gen_label(write_ptr_label);
729 #if defined(ARCH_X86)
730 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
731 gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
734 gen_eight(new_fp_offset);
736 goto scaled_store_done;
738 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
739 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
740 gen_one(R_SCRATCH_3);
742 gen_one(ARG_SHIFTED_REGISTER);
743 gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
746 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
747 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
748 gen_address_offset();
750 goto scaled_store_done;
/* No shifted add: shift and add manually to form the address. */
753 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_3, R_SAVED_2, OP_SIZE_SLOT, false));
755 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_3, R_SCRATCH_3, R_FRAME, 0));
757 g(gen_address(ctx, R_SCRATCH_3, new_fp_offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
758 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
759 gen_address_offset();
/* Mark the destination slot as holding a pointer. */
763 g(gen_set_1_variable(ctx, R_SAVED_2, new_fp_offset, true));
765 gen_label(next_arg_label);
/* Advance to the next return-value descriptor in the caller's bytecode. */
767 retval_offset += 4 + 2 * (ARG_MODE_N >= 3);
/* Reload the caller function and jump into its unoptimized code at the
 * return address index loaded from the bytecode. */
770 g(gen_frame_load_raw(ctx, OP_SIZE_ADDRESS, zero_x, 0, new_fp_offset + frame_offs(function), false, R_SCRATCH_1));
772 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.function.codegen), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
773 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
774 gen_one(R_SCRATCH_1);
775 gen_address_offset();
777 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
779 g(gen_load_code_32(ctx, R_SCRATCH_2, R_SAVED_1, retval_offset + 2));
/* Restore the frame pointer to the caller's frame. */
781 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_FRAME, R_FRAME, new_fp_offset, 0));
/* Architecture-specific scaled indirect jump through
 * u_.codegen.unoptimized_code[R_SCRATCH_2]. */
783 #if defined(ARCH_X86) && !defined(ARCH_X86_X32)
784 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
785 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
786 gen_one(R_SCRATCH_1);
787 gen_one(R_SCRATCH_2);
788 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
790 goto scaled_jmp_done;
792 #if defined(ARCH_X86)
793 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
794 gen_one(R_SCRATCH_1);
795 gen_one(ARG_ADDRESS_2 + OP_SIZE_ADDRESS);
796 gen_one(R_SCRATCH_1);
797 gen_one(R_SCRATCH_2);
798 gen_eight(offsetof(struct data, u_.codegen.unoptimized_code));
800 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
801 gen_one(R_SCRATCH_1);
803 goto scaled_jmp_done;
805 #if defined(ARCH_ARM32)
806 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
807 gen_one(R_SCRATCH_1);
808 gen_one(R_SCRATCH_1);
809 gen_one(ARG_SHIFTED_REGISTER);
810 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
811 gen_one(R_SCRATCH_2);
813 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
814 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
815 gen_address_offset();
817 goto scaled_jmp_done;
819 if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_ADDRESS)) {
820 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
821 gen_one(R_SCRATCH_1);
822 gen_one(R_SCRATCH_1);
823 gen_one(ARG_SHIFTED_REGISTER);
824 gen_one(ARG_SHIFT_LSL | OP_SIZE_ADDRESS);
825 gen_one(R_SCRATCH_2);
827 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
828 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
829 gen_one(R_SCRATCH_1);
830 gen_address_offset();
832 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
833 gen_one(R_SCRATCH_1);
835 goto scaled_jmp_done;
/* Fallback: shift, add, load and indirect-jump. */
838 g(gen_3address_rot_imm(ctx, OP_SIZE_NATIVE, ROT_SHL, R_SCRATCH_2, R_SCRATCH_2, OP_SIZE_ADDRESS, false));
840 g(gen_3address_alu(ctx, OP_SIZE_NATIVE, ALU_ADD, R_SCRATCH_1, R_SCRATCH_1, R_SCRATCH_2, 0));
842 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.codegen.unoptimized_code), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
843 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
844 gen_one(R_SCRATCH_1);
845 gen_address_offset();
847 gen_insn(INSN_JMP_INDIRECT, 0, 0, 0);
848 gen_one(R_SCRATCH_1);
850 goto scaled_jmp_done;
/* Forward declarations for array helpers used by gen_structured below
 * and defined later in the file. */
855 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src);
856 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label);
858 static bool attr_w gen_structured(struct codegen_context *ctx, frame_t slot_struct, frame_t slot_elem)
860 uint32_t escape_label;
861 const struct type *struct_type, *elem_type;
865 escape_label = alloc_escape_label(ctx);
866 if (unlikely(!escape_label))
869 struct_type = get_type_of_local(ctx, slot_struct);
870 elem_type = get_type_of_local(ctx, slot_elem);
872 if (TYPE_IS_FLAT(struct_type) && struct_type->tag != TYPE_TAG_flat_option) {
873 if (!TYPE_IS_FLAT(elem_type)) {
876 g(gen_test_1_cached(ctx, slot_struct, escape_label));
877 flag_set(ctx, slot_struct, false);
881 g(gen_test_1_jz_cached(ctx, slot_struct, escape_label));
885 if (slot_is_register(ctx, slot_struct))
886 g(spill(ctx, slot_struct));
887 g(gen_frame_address(ctx, slot_struct, 0, R_SAVED_1));
889 for (i = 0; i < ctx->args_l; i++) {
890 frame_t param_slot = ctx->args[i].slot;
892 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
893 case OPCODE_STRUCTURED_RECORD: {
894 struct flat_record_definition_entry *e;
895 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_record, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_record));
896 e = &type_def(struct_type,flat_record)->entries[param_slot];
898 g(gen_imm(ctx, e->flat_offset, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
899 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
904 struct_type = e->subtype;
907 case OPCODE_STRUCTURED_ARRAY: {
908 ajla_assert_lo(struct_type->tag == TYPE_TAG_flat_array, (file_line, "gen_structured: invalid tag %u, expected %u", struct_type->tag, TYPE_TAG_flat_array));
909 g(gen_test_1_cached(ctx, param_slot, escape_label));
910 flag_set(ctx, param_slot, false);
911 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_1, ®1));
913 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg1, type_def(struct_type,flat_array)->n_elements, COND_AE, escape_label));
915 g(gen_scaled_array_address(ctx, type_def(struct_type,flat_array)->base->size, R_SAVED_1, R_SAVED_1, reg1, 0));
917 struct_type = type_def(struct_type,flat_array)->base;
921 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
924 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
925 gen_one(R_SCRATCH_1);
926 gen_one(ARG_ADDRESS_1);
930 g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, NO_FRAME_T, escape_label));
933 g(gen_decompress_pointer(ctx, ARCH_PREFERS_SX(OP_SIZE_SLOT), R_SCRATCH_1, R_SCRATCH_1, 0));
935 g(gen_compare_refcount(ctx, R_SCRATCH_1, REFCOUNT_STEP, COND_AE, escape_label));
937 switch (ctx->args[i].flags & OPCODE_STRUCTURED_MASK) {
938 case OPCODE_STRUCTURED_RECORD: {
939 const struct type *rec_type, *e_type;
940 rec_type = da_type(ctx->fn, ctx->args[i].type);
941 TYPE_TAG_VALIDATE(rec_type->tag);
942 if (unlikely(rec_type->tag == TYPE_TAG_flat_record))
943 rec_type = type_def(rec_type,flat_record)->base;
944 e_type = type_def(rec_type,record)->types[param_slot];
945 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
946 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, true, TEST));
948 g(gen_test_1(ctx, R_SCRATCH_1, param_slot, data_record_offset, escape_label, false, TEST));
949 struct_type = e_type;
951 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, data_record_offset + (size_t)param_slot * slot_size, 0));
954 case OPCODE_STRUCTURED_OPTION: {
955 unsigned op_size = log_2(sizeof(ajla_option_t));
956 #if defined(ARCH_X86)
957 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
958 g(gen_imm(ctx, param_slot, IMM_PURPOSE_CMP, op_size));
959 gen_insn(INSN_CMP, op_size, 0, 1);
960 gen_address_offset();
963 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
964 gen_four(escape_label);
966 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
967 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
968 gen_one(R_SCRATCH_2);
969 gen_address_offset();
971 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
973 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
976 case OPCODE_STRUCTURED_ARRAY: {
977 const struct type *e_type = da_type(ctx->fn, ctx->args[i].type);
979 g(gen_test_1_cached(ctx, param_slot, escape_label));
980 flag_set(ctx, param_slot, false);
982 g(gen_frame_get(ctx, OP_SIZE_INT, native, param_slot, R_SCRATCH_2, ®2));
984 g(gen_check_array_len(ctx, R_SCRATCH_1, false, reg2, COND_AE, escape_label));
986 if (!TYPE_IS_FLAT(e_type) || (e_type->tag == TYPE_TAG_flat_option && !(ctx->args[i].flags & OPCODE_STRUCTURED_FLAG_END))) {
987 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
989 g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
990 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
991 gen_one(R_SCRATCH_1);
992 gen_address_offset();
994 g(gen_scaled_array_address(ctx, slot_size, R_SAVED_1, R_SCRATCH_1, reg2, 0));
996 g(gen_compare_ptr_tag(ctx, R_SCRATCH_1, DATA_TAG_array_flat, COND_NE, escape_label, R_SCRATCH_3));
998 g(gen_scaled_array_address(ctx, e_type->size, R_SAVED_1, R_SCRATCH_1, reg2, data_array_offset));
1000 struct_type = e_type;
1005 internal(file_line, "gen_structured: invalid structured flags %x", (unsigned)ctx->args[i].flags);
1012 g(gen_test_1_cached(ctx, slot_elem, escape_label));
1013 flag_set(ctx, slot_elem, false);
1014 g(gen_memcpy_from_slot(ctx, R_SAVED_1, 0, slot_elem));
1016 uint32_t skip_deref_label;
1017 skip_deref_label = alloc_label(ctx);
1018 if (unlikely(!skip_deref_label))
1021 if (TYPE_IS_FLAT(elem_type))
1022 g(gen_test_1_jz_cached(ctx, slot_elem, escape_label));
1024 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1025 gen_one(R_SCRATCH_1);
1026 gen_one(ARG_ADDRESS_1);
1030 g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_SCRATCH_1, COND_E, skip_deref_label));
1032 g(gen_upcall_start(ctx, 1));
1033 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_SCRATCH_1));
1034 g(gen_upcall_argument(ctx, 0));
1035 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_pointer_dereference), 1));
1037 gen_label(skip_deref_label);
1039 g(gen_frame_get_pointer(ctx, slot_elem, (ctx->args[i - 1].flags & OPCODE_STRUCTURED_FREE_VARIABLE) != 0, R_SCRATCH_1));
1041 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1042 gen_one(ARG_ADDRESS_1);
1045 gen_one(R_SCRATCH_1);
/*
 * gen_record_create - emit code that builds a record value into slot_r
 * from the arguments in ctx->args.
 *
 * Two paths:
 *  - flat record: members are memcpy'd directly into the frame at slot_r;
 *  - boxed record: a record data object is allocated via an upcall, its
 *    flag bitmap cleared, and each member stored (flat copy, flat-to-data
 *    conversion upcall, or pointer store with the flag bit set).
 *
 * Presumably returns false on code-generation failure, propagated through
 * the g() macro (convention visible throughout this file).
 */
1051 static bool attr_w gen_record_create(struct codegen_context *ctx, frame_t slot_r)
1053 const struct type *t;
1054 const struct record_definition *def;
1055 uint32_t escape_label;
1058 escape_label = alloc_escape_label(ctx);
1059 if (unlikely(!escape_label))
1062 t = get_type_of_local(ctx, slot_r);
/* Flat-record destination: assemble the record in place in the frame. */
1063 if (t->tag == TYPE_TAG_flat_record) {
1064 const struct flat_record_definition *flat_def;
1065 const struct type *flat_type = t;
1066 t = type_def(t,flat_record)->base;
1067 def = type_def(t,record);
1068 flat_def = type_def(flat_type,flat_record);
/* Escape to the interpreter if any argument is a thunk; otherwise the
 * cached flag is known clear afterwards. */
1069 for (i = 0; i < ctx->args_l; i++) {
1070 frame_t var_slot = ctx->args[i].slot;
1071 g(gen_test_1_cached(ctx, var_slot, escape_label));
1072 flag_set(ctx, var_slot, false);
/* Copy each argument to its flat offset; ii tracks the record-definition
 * slot index and skips elided slots. */
1074 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1075 frame_t var_slot, flat_offset, record_slot;
1076 while (unlikely(record_definition_is_elided(def, ii)))
1078 var_slot = ctx->args[i].slot;
1079 record_slot = record_definition_slot(def, ii);
1080 flat_offset = flat_def->entries[record_slot].flat_offset;
1081 g(gen_memcpy_from_slot(ctx, R_FRAME, (size_t)slot_r * slot_size + flat_offset, var_slot));
1086 def = type_def(t,record);
/* Boxed record: allocate the data object (frame pointer + result slot are
 * the upcall arguments); escape on allocation failure (NULL return). */
1088 g(gen_upcall_start(ctx, 2));
1090 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1091 g(gen_upcall_argument(ctx, 0));
1093 g(gen_load_constant(ctx, R_ARG1, slot_r));
1094 g(gen_upcall_argument(ctx, 1));
1096 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_record_mayfail), 2));
1097 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1098 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the new record in R_SAVED_1 across the per-member upcalls below. */
1100 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Clear the per-slot flag bitmap of the freshly allocated record. */
1102 g(gen_clear_bitmap(ctx, 0, R_SAVED_1, data_record_offset, bitmap_slots(def->n_slots)));
/* Store each argument into the record. skip_flat_label: argument is not
 * flat-representable here; set_ptr_label: a pointer (from conversion or
 * from the slot) is ready in R_RET0; next_arg_label: member done. */
1104 for (i = 0, ii = 0; i < ctx->args_l; i++, ii++) {
1105 frame_t var_slot, var_flags, record_slot;
1106 const struct type *var_type, *record_type;
1107 uint32_t skip_flat_label, set_ptr_label, next_arg_label;
1109 skip_flat_label = alloc_label(ctx);
1110 if (unlikely(!skip_flat_label))
1112 set_ptr_label = alloc_label(ctx);
1113 if (unlikely(!set_ptr_label))
1115 next_arg_label = alloc_label(ctx);
1116 if (unlikely(!next_arg_label))
1119 while (unlikely(record_definition_is_elided(def, ii)))
1121 var_slot = ctx->args[i].slot;
1122 var_type = get_type_of_local(ctx, var_slot);
1123 var_flags = ctx->args[i].flags;
1124 record_slot = record_definition_slot(def, ii);
1125 record_type = def->types[record_slot];
1126 if (TYPE_IS_FLAT(var_type)) {
1127 g(gen_test_1_cached(ctx, var_slot, skip_flat_label));
/* Flat argument into flat record slot: plain copy, then next member. */
1128 if (TYPE_IS_FLAT(record_type)) {
1129 g(gen_memcpy_from_slot(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, var_slot));
1131 gen_insn(INSN_JMP, 0, 0, 0);
1132 gen_four(next_arg_label);
/* Flat argument into pointer slot: box it via cg_upcall_flat_to_data
 * (args: frame, slot index, address of the slot's data in the frame). */
1134 if (slot_is_register(ctx, var_slot))
1135 g(spill(ctx, var_slot));
1137 g(gen_upcall_start(ctx, 3));
1139 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1140 g(gen_upcall_argument(ctx, 0));
1142 g(gen_load_constant(ctx, R_ARG1, var_slot));
1143 g(gen_upcall_argument(ctx, 1));
1145 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)var_slot * slot_size, 0));
1146 g(gen_upcall_argument(ctx, 2));
1148 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
1150 gen_insn(INSN_JMP, 0, 0, 0);
1151 gen_four(set_ptr_label);
/* Argument is already a pointer: fetch it (optionally consuming the
 * source slot when OPCODE_FLAG_FREE_ARGUMENT is set). */
1155 gen_label(skip_flat_label);
1156 g(gen_frame_get_pointer(ctx, var_slot, (var_flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
/* Store the pointer into the record slot and set its flag bit. */
1158 gen_label(set_ptr_label);
1159 g(gen_address(ctx, R_SAVED_1, data_record_offset + (size_t)record_slot * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1160 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1161 gen_address_offset();
1164 g(gen_set_1(ctx, R_SAVED_1, record_slot, data_record_offset, true));
1166 gen_label(next_arg_label);
/* Publish the finished record: compress and store the pointer in slot_r. */
1169 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1170 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_record_load - emit code loading field rec_slot of the record in
 * slot_1 into slot_r.
 *
 * Handles: unknown record type (assert only, function presumably escapes
 * on a line not shown), flat record (byte copy from the frame), flat
 * field of a boxed record (copy from the data object), and pointer field
 * (load the slot pointer and hand it to gen_frame_set_pointer_2).
 */
1175 static bool attr_w gen_record_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, frame_t rec_slot, frame_t flags)
1177 const struct type *rec_type, *entry_type;
1178 uint32_t escape_label;
1181 rec_type = get_type_of_local(ctx, slot_1);
/* A record without a definition can only occur in the unnamed function. */
1182 if (unlikely(rec_type->tag == TYPE_TAG_unknown)) {
1183 ajla_assert_lo(!*da(ctx->fn,function)->function_name, (file_line, "gen_record_load: function %s has record without definition", da(ctx->fn,function)->function_name));
1187 escape_label = alloc_escape_label(ctx);
1188 if (unlikely(!escape_label))
1191 /*debug("gen_record_load: %s: %u, %u", da(ctx->fn,function)->function_name, TYPE_TAG_unknown, rec_type->tag);*/
/* Flat record: copy the field bytes straight out of the frame. */
1192 if (TYPE_IS_FLAT(rec_type)) {
1193 const struct flat_record_definition_entry *ft = &type_def(rec_type,flat_record)->entries[rec_slot];
1194 g(gen_test_1_cached(ctx, slot_1, escape_label));
1195 g(gen_memcpy_to_slot(ctx, slot_r, R_FRAME, (size_t)slot_1 * slot_size + ft->flat_offset));
1196 flag_set(ctx, slot_1, false);
1197 flag_set(ctx, slot_r, false);
1200 entry_type = type_def(rec_type,record)->types[rec_slot];
/* Boxed record: get the (decompressed) record pointer, escaping on thunk. */
1202 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_2, &reg, escape_label));
/* Flat field of a boxed record: the flag bit must be clear (data, not
 * pointer); then copy the bytes from the data object. */
1204 if (TYPE_IS_FLAT(entry_type)) {
1205 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, false, TEST));
1206 g(gen_memcpy_to_slot(ctx, slot_r, reg, (size_t)rec_slot * slot_size + data_record_offset));
1207 flag_set(ctx, slot_r, false);
/* Destination must stay flat but the field is a pointer: always escape. */
1211 if (flag_must_be_flat(ctx, slot_r)) {
1212 gen_insn(INSN_JMP, 0, 0, 0);
1213 gen_four(escape_label);
/* Pointer field: flag bit must be set; load the slot-sized pointer. */
1217 g(gen_test_1(ctx, reg, rec_slot, data_record_offset, escape_label, true, TEST));
1219 g(gen_address(ctx, reg, (size_t)rec_slot * slot_size + data_record_offset, ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1220 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1221 gen_one(R_SCRATCH_1);
1222 gen_address_offset();
1224 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/*
 * gen_option_create_empty_flat - emit code storing the flat option
 * constant opt into slot_r (no payload) and mark the slot's flag clear.
 */
1229 static bool attr_w gen_option_create_empty_flat(struct codegen_context *ctx, ajla_flat_option_t opt, frame_t slot_r)
1231 g(gen_frame_store_imm(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, opt));
1232 flag_set(ctx, slot_r, false);
/*
 * gen_option_create_empty - emit code allocating a boxed option object
 * with ordinal opt and a zero (empty) payload pointer, storing the
 * compressed pointer into slot_r.
 *
 * If slot_r is required to stay flat, the boxed representation is not
 * allowed and the code unconditionally escapes to the interpreter.
 */
1236 static bool attr_w gen_option_create_empty(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_r)
1238 unsigned option_size = log_2(sizeof(ajla_option_t));
1239 uint32_t escape_label;
1241 escape_label = alloc_escape_label(ctx);
1242 if (unlikely(!escape_label))
1245 if (flag_must_be_flat(ctx, slot_r)) {
1246 gen_insn(INSN_JMP, 0, 0, 0);
1247 gen_four(escape_label);
/* Allocate the option data object; NULL return means out of memory,
 * handled by escaping. */
1251 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1252 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1253 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Store the option ordinal. */
1255 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1256 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1257 gen_insn(INSN_MOV, option_size, 0, 0);
1258 gen_address_offset();
/* Zero the payload pointer: this option carries no value. */
1261 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1262 g(gen_imm(ctx, 0, IMM_PURPOSE_STORE_VALUE, OP_SIZE_SLOT));
1263 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1264 gen_address_offset();
1267 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1268 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_option_create - emit code allocating a boxed option with ordinal
 * opt whose payload comes from slot_1, storing the result into slot_r.
 *
 * The payload is obtained either by boxing a flat slot via the
 * cg_upcall_flat_to_data upcall, or by taking the slot's pointer
 * (optionally consuming it when OPCODE_FLAG_FREE_ARGUMENT is set).
 */
1273 static bool attr_w gen_option_create(struct codegen_context *ctx, ajla_option_t opt, frame_t slot_1, frame_t slot_r, frame_t flags)
1275 unsigned option_size = log_2(sizeof(ajla_option_t));
1276 const struct type *type;
1277 uint32_t escape_label, get_pointer_label, got_pointer_label;
1279 escape_label = alloc_escape_label(ctx);
1280 if (unlikely(!escape_label))
/* A boxed option cannot satisfy a must-be-flat destination: escape. */
1283 if (flag_must_be_flat(ctx, slot_r)) {
1284 gen_insn(INSN_JMP, 0, 0, 0);
1285 gen_four(escape_label);
1289 get_pointer_label = alloc_label(ctx);
1290 if (unlikely(!get_pointer_label))
1293 got_pointer_label = alloc_label(ctx);
1294 if (unlikely(!got_pointer_label))
1297 type = get_type_of_local(ctx, slot_1);
/* Allocate the option object first; keep it in R_SAVED_1 across the
 * possible flat_to_data upcall below. */
1299 g(gen_upcall_start(ctx, 0));
1300 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_option_mayfail), 0));
1301 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1302 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1304 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Store the ordinal into the new object. */
1306 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.option.option), IMM_PURPOSE_STR_OFFSET, option_size));
1307 g(gen_imm(ctx, opt, IMM_PURPOSE_STORE_VALUE, option_size));
1308 gen_insn(INSN_MOV, option_size, 0, 0);
1309 gen_address_offset();
/* Flat payload: box the frame slot via cg_upcall_flat_to_data
 * (args: frame, slot index, address of the slot data). */
1312 if (TYPE_IS_FLAT(type)) {
1313 g(gen_test_1_cached(ctx, slot_1, get_pointer_label));
1315 if (slot_is_register(ctx, slot_1))
1316 g(spill(ctx, slot_1));
1318 g(gen_upcall_start(ctx, 3));
1320 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1321 g(gen_upcall_argument(ctx, 0));
1323 g(gen_load_constant(ctx, R_ARG1, slot_1));
1324 g(gen_upcall_argument(ctx, 1));
1326 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1327 g(gen_upcall_argument(ctx, 2));
1329 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* If the flag is known clear, the pointer path is unreachable: skip
 * emitting both the jump and the get_pointer code. */
1331 if (flag_is_clear(ctx, slot_1))
1332 goto skip_get_pointer_label;
1334 gen_insn(INSN_JMP, 0, 0, 0);
1335 gen_four(got_pointer_label);
/* Pointer payload: fetch it from the frame into R_RET0. */
1338 gen_label(get_pointer_label);
1339 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_RET0));
1341 skip_get_pointer_label:
1342 gen_label(got_pointer_label);
/* Store the payload pointer into the option and publish the result. */
1343 g(gen_address(ctx, R_SAVED_1, offsetof(struct data, u_.option.pointer), IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1344 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1345 gen_address_offset();
1348 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1349 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_option_cmp - emit code comparing the ordinal of the boxed option
 * pointed to by reg against the constant opt.
 *
 * Two modes (selected by 'label'):
 *  - label != 0: jump to label when the ordinal differs from opt;
 *  - label == 0: store the boolean comparison result into slot_r.
 *
 * Per-architecture paths: x86 compares memory against an immediate
 * directly; other paths load the ordinal into R_SCRATCH_2 first.
 */
1354 static bool attr_w gen_option_cmp(struct codegen_context *ctx, unsigned reg, frame_t opt, uint32_t label, frame_t slot_r)
1356 unsigned op_size = log_2(sizeof(ajla_option_t));
1358 #if defined(ARCH_X86)
/* x86: cmp [reg + option offset], imm — no load needed. */
1359 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), IMM_PURPOSE_LDR_OFFSET, op_size));
1360 g(gen_imm(ctx, opt, IMM_PURPOSE_CMP, op_size));
1361 gen_insn(INSN_CMP, op_size, 0, 1);
1362 gen_address_offset();
1365 gen_insn(INSN_JMP_COND, op_size, COND_NE, 0);
1366 gen_four(escape_label);
/* Generic: load the ordinal (sign-extended where preferred), then
 * compare-and-jump against the immediate. */
1368 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1369 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1370 gen_one(R_SCRATCH_2);
1371 gen_address_offset();
1373 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, i_size(op_size), R_SCRATCH_2, param_slot, COND_NE, escape_label));
1376 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_SAVED_1, R_SCRATCH_1, offsetof(struct data, u_.option.pointer), 0));
1379 g(gen_frame_set_cond(ctx, op_size, false, COND_E, slot_r));
/* Path for architectures with an explicit comparison-result register:
 * load the ordinal, compute the E/NE result into a register, and either
 * branch on it or store it as the flat boolean result. */
1383 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1384 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1385 gen_one(R_SCRATCH_2);
1386 gen_address_offset();
1388 g(gen_cmp_dest_reg(ctx, op_size, R_SCRATCH_2, (unsigned)-1, label ? R_CMP_RESULT : R_SCRATCH_2, opt, COND_E));
1391 gen_insn(INSN_JMP_REG, i_size(op_size), COND_E, 0);
1392 gen_one(R_CMP_RESULT);
1395 g(gen_frame_store(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r, 0, R_SCRATCH_2));
/*
 * gen_option_load - emit code extracting the payload of option slot_1
 * into slot_r, escaping unless the option's ordinal equals opt.
 *
 * A flat option has no payload, so a flat source always escapes (the
 * flag must be set, i.e. the slot must hold a pointer, to proceed).
 */
1401 static bool attr_w gen_option_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, ajla_option_t opt, frame_t flags)
1403 const struct type *type;
1404 uint32_t escape_label;
1407 escape_label = alloc_escape_label(ctx);
1408 if (unlikely(!escape_label))
/* Destination must stay flat, but the payload is a pointer: escape. */
1411 if (flag_must_be_flat(ctx, slot_r)) {
1412 gen_insn(INSN_JMP, 0, 0, 0);
1413 gen_four(escape_label);
1417 type = get_type_of_local(ctx, slot_1);
/* Flat representation carries no payload: escape if the flag is clear. */
1418 if (TYPE_IS_FLAT(type)) {
1419 g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
/* Decompress the option pointer and check the ordinal matches opt. */
1422 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
1423 g(gen_option_cmp(ctx, reg, opt, escape_label, 0));
/* Load the payload pointer and store it (with reference handling) into
 * the destination slot. */
1425 g(gen_address(ctx, reg, offsetof(struct data, u_.option.pointer), ARCH_PREFERS_SX(OP_SIZE_SLOT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_SLOT));
1426 gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
1427 gen_one(R_SCRATCH_1);
1428 gen_address_offset();
1430 g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/*
 * gen_option_test_flat - emit code testing whether the flat option in
 * slot_1 has ordinal opt, storing the boolean result into slot_r.
 */
1435 static bool attr_w gen_option_test_flat(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1437 unsigned op_size = log_2(sizeof(ajla_flat_option_t));
1438 uint32_t escape_label;
1440 escape_label = alloc_escape_label(ctx);
1441 if (unlikely(!escape_label))
/* Source must not be a thunk; afterwards both slots are known flat. */
1444 g(gen_test_1_cached(ctx, slot_1, escape_label));
1446 flag_set(ctx, slot_1, false);
1447 flag_set(ctx, slot_r, false);
/* opt does not fit in ajla_flat_option_t: it can never match, so the
 * result is a constant false (clear the result slot). */
1449 if (unlikely(opt != (ajla_flat_option_t)opt)) {
1450 g(gen_frame_clear(ctx, op_size, slot_r));
/* Compare the slot value against opt and materialize the E condition. */
1454 g(gen_frame_load_cmp_imm_set_cond(ctx, op_size, zero_x, slot_1, opt, COND_E, slot_r));
/*
 * gen_option_test - emit code testing whether the boxed option in
 * slot_1 has ordinal opt, storing the boolean result into slot_r.
 */
1459 static bool attr_w gen_option_test(struct codegen_context *ctx, frame_t slot_1, frame_t opt, frame_t slot_r)
1461 uint32_t escape_label;
1464 escape_label = alloc_escape_label(ctx);
1465 if (unlikely(!escape_label))
/* Get the decompressed option pointer, escaping on thunk. */
1468 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
1470 flag_set(ctx, slot_r, false);
/* opt does not fit in ajla_option_t: constant false result. */
1472 if (unlikely(opt != (ajla_option_t)opt)) {
1473 g(gen_frame_clear(ctx, log_2(sizeof(ajla_flat_option_t)), slot_r));
/* label == 0 selects gen_option_cmp's store-result-to-slot_r mode. */
1477 g(gen_option_cmp(ctx, reg, opt, 0, slot_r));
/*
 * gen_option_ord - emit code loading the ordinal of the option in
 * slot_1 into slot_r as an integer.
 *
 * The value may be flat (ordinal stored directly in the frame slot) or
 * boxed (ordinal read from the data object); both paths converge at
 * store_label. 'flat' is accepted by the signature but not referenced
 * in the lines visible here.
 */
1482 static bool attr_w gen_option_ord(struct codegen_context *ctx, frame_t slot_1, frame_t slot_r, bool flat)
1484 unsigned op_size = log_2(sizeof(ajla_option_t));
1485 unsigned op_size_flat = log_2(sizeof(ajla_flat_option_t));
1486 uint32_t escape_label, ptr_label, store_label;
1487 unsigned reg, target;
1489 escape_label = alloc_escape_label(ctx);
1490 if (unlikely(!escape_label))
1493 ptr_label = alloc_label(ctx);
1494 if (unlikely(!ptr_label))
1497 store_label = alloc_label(ctx);
1498 if (unlikely(!store_label))
/* Pick the destination register (the slot's own register if it has one). */
1501 target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
/* Flat path: flag set means "pointer", handled at ptr_label. */
1504 g(gen_test_1_cached(ctx, slot_1, ptr_label));
1506 g(gen_frame_load(ctx, op_size_flat, zero_x, slot_1, 0, false, target));
/* If the flag is known clear, the pointer path can be omitted entirely. */
1508 if (flag_is_clear(ctx, slot_1))
1509 goto skip_ptr_label;
1511 gen_insn(INSN_JMP, 0, 0, 0);
1512 gen_four(store_label);
/* Boxed path: decompress the pointer and load the ordinal field. */
1515 gen_label(ptr_label);
1516 g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
1518 g(gen_address(ctx, reg, offsetof(struct data, u_.option.option), ARCH_PREFERS_SX(op_size) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, op_size));
1519 gen_insn(ARCH_PREFERS_SX(op_size) ? INSN_MOVSX : INSN_MOV, op_size, 0, 0);
1521 gen_address_offset();
/* Common tail: store the ordinal as an int into the result slot. */
1524 gen_label(store_label);
1525 g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
1526 flag_set(ctx, slot_r, false);
/*
 * gen_array_create - emit code building an array in slot_r from the
 * ctx->args entries, which must all share one element type.
 *
 * Flat element type: allocate a flat array via upcall and memcpy each
 * element in. Otherwise: allocate an array-of-pointers and store each
 * argument's pointer into it.
 */
1531 static bool attr_w gen_array_create(struct codegen_context *ctx, frame_t slot_r)
1534 const struct type *type;
1535 uint32_t escape_label;
1537 escape_label = alloc_escape_label(ctx);
1538 if (unlikely(!escape_label))
1541 ajla_assert_lo(ctx->args_l != 0, (file_line, "gen_array_create: zero entries"));
/* Too many entries to represent as a non-negative default int: punt to
 * the interpreter. */
1543 if (unlikely(ctx->args_l >= sign_bit(uint_default_t))) {
1544 gen_insn(INSN_JMP, 0, 0, 0);
1545 gen_four(escape_label);
/* All elements must have the same local type. */
1549 type = get_type_of_local(ctx, ctx->args[0].slot);
1550 for (i = 1; i < ctx->args_l; i++) {
1551 const struct type *t = get_type_of_local(ctx, ctx->args[i].slot);
1552 if (unlikely(t != type))
1553 internal(file_line, "gen_array_create: types do not match: %u != %u", type->tag, t->tag);
/* Flat-element array. */
1556 if (TYPE_IS_FLAT(type)) {
/* No element may be a thunk. */
1558 for (i = 0; i < ctx->args_l; i++) {
1559 g(gen_test_1_cached(ctx, ctx->args[i].slot, escape_label));
1560 flag_set(ctx, ctx->args[i].slot, false);
/* Allocate: args are frame, the first slot (for the element type), and
 * the element count. */
1563 g(gen_upcall_start(ctx, 3));
1565 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1566 g(gen_upcall_argument(ctx, 0));
1568 g(gen_load_constant(ctx, R_ARG1, ctx->args[0].slot));
1569 g(gen_upcall_argument(ctx, 1));
1571 g(gen_load_constant(ctx, R_ARG2, ctx->args_l));
1572 g(gen_upcall_argument(ctx, 2));
1574 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_slot_mayfail), 3));
1575 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1576 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1578 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Copy elements into consecutive positions of the flat array body. */
1580 offset = data_array_offset;
1581 for (i = 0; i < ctx->args_l; i++) {
1582 g(gen_memcpy_from_slot(ctx, R_SAVED_1, offset, ctx->args[i].slot));
1583 offset += type->size;
/* Pointer-element array: allocate with n_used == n_allocated == args_l. */
1587 g(gen_upcall_start(ctx, 2));
1589 g(gen_load_constant(ctx, R_ARG0, ctx->args_l));
1590 g(gen_upcall_argument(ctx, 0));
1592 g(gen_load_constant(ctx, R_ARG1, ctx->args_l));
1593 g(gen_upcall_argument(ctx, 1));
1595 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1596 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1597 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1599 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
/* Load the array's internal pointer base (destination register is on a
 * line not visible here; used below as R_SAVED_2). */
1601 g(gen_address(ctx, R_RET0, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
1602 gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
1604 gen_address_offset();
/* Fetch each element pointer (consuming the source slot when flagged)
 * and store it into the pointer array. */
1607 for (i = 0; i < ctx->args_l; i++) {
1608 g(gen_frame_get_pointer(ctx, ctx->args[i].slot, (ctx->args[i].flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1609 g(gen_address(ctx, R_SAVED_2, offset, IMM_PURPOSE_STR_OFFSET, OP_SIZE_SLOT));
1610 gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
1611 gen_address_offset();
1612 gen_one(R_SCRATCH_1);
1613 offset += sizeof(pointer_t);
/* Publish the array pointer into slot_r. */
1616 g(gen_compress_pointer(ctx, R_SAVED_1, R_SAVED_1));
1617 g(gen_frame_set_pointer(ctx, slot_r, R_SAVED_1, false, true));
/*
 * gen_array_create_empty_flat - emit code allocating an empty flat
 * array of the element type identified by local_type, storing the
 * compressed result pointer into slot_r.
 */
1621 static bool attr_w gen_array_create_empty_flat(struct codegen_context *ctx, frame_t slot_r, frame_t local_type)
1623 uint32_t escape_label;
1625 escape_label = alloc_escape_label(ctx);
1626 if (unlikely(!escape_label))
/* Upcall args: frame, local type index, element count 0. */
1629 g(gen_upcall_start(ctx, 3));
1631 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1632 g(gen_upcall_argument(ctx, 0));
1634 g(gen_load_constant(ctx, R_ARG1, local_type));
1635 g(gen_upcall_argument(ctx, 1));
1637 g(gen_load_constant(ctx, R_ARG2, 0));
1638 g(gen_upcall_argument(ctx, 2));
1640 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_types_ptr_mayfail), 3));
1641 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL means allocation failure: escape to the interpreter. */
1642 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1644 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1645 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_array_create_empty - emit code allocating an empty pointer array
 * (with ARRAY_PREALLOC_SIZE preallocated capacity and zero used
 * entries), storing the compressed result pointer into slot_r.
 */
1650 static bool attr_w gen_array_create_empty(struct codegen_context *ctx, frame_t slot_r)
1652 uint32_t escape_label;
1654 escape_label = alloc_escape_label(ctx);
1655 if (unlikely(!escape_label))
1658 g(gen_upcall_start(ctx, 2));
1660 g(gen_load_constant(ctx, R_ARG0, ARRAY_PREALLOC_SIZE));
1661 g(gen_upcall_argument(ctx, 0));
1663 g(gen_load_constant(ctx, R_ARG1, 0));
1664 g(gen_upcall_argument(ctx, 1));
1666 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_pointers_mayfail), 2));
1667 g(gen_sanitize_returned_pointer(ctx, R_RET0));
/* NULL means allocation failure: escape to the interpreter. */
1668 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
1670 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1671 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/*
 * gen_array_fill - emit code creating an array in slot_r filled with
 * slot_2 copies of the value in slot_1.
 *
 * Paths, in order:
 *  - flat destination array type: replicate the flat value directly in
 *    the frame, def->n_elements times;
 *  - OPCODE_ARRAY_FILL_FLAG_SPARSE: obtain a (boxed) pointer for the
 *    fill value and call cg_upcall_array_create_sparse;
 *  - flat content: cg_upcall_array_create_flat;
 *  - otherwise: cg_upcall_array_create_pointers.
 */
1676 static bool attr_w gen_array_fill(struct codegen_context *ctx, frame_t slot_1, frame_t flags, frame_t slot_2, frame_t slot_r)
1678 const struct type *content_type, *array_type;
1679 uint32_t escape_label;
1680 unsigned reg1, reg4;
1682 escape_label = alloc_escape_label(ctx);
1683 if (unlikely(!escape_label))
/* The length must not be a thunk. */
1686 g(gen_test_1_cached(ctx, slot_2, escape_label));
1688 content_type = get_type_of_local(ctx, slot_1);
1689 array_type = get_type_of_local(ctx, slot_r);
/* Flat fixed-size array destination: build it in place in the frame. */
1691 if (TYPE_IS_FLAT(array_type)) {
1692 int64_t dest_offset;
1694 const struct flat_array_definition *def = type_def(array_type,flat_array);
1696 ajla_assert_lo(TYPE_IS_FLAT(content_type), (file_line, "gen_array_fill: array is flat but content is not"));
1698 g(gen_test_1_cached(ctx, slot_1, escape_label));
1700 dest_offset = (size_t)slot_r * slot_size;
1701 for (i = 0; i < def->n_elements; i++) {
1702 g(gen_memcpy_from_slot(ctx, R_FRAME, dest_offset, slot_1));
1703 dest_offset += def->base->size;
1705 flag_set(ctx, slot_1, false);
1706 flag_set(ctx, slot_r, false);
1711 if (slot_is_register(ctx, slot_1))
1712 g(spill(ctx, slot_1));
/* Sparse array: the fill value must become a pointer first (boxing flat
 * content via cg_upcall_flat_to_data when necessary). */
1714 if (unlikely((flags & OPCODE_ARRAY_FILL_FLAG_SPARSE) != 0)) {
1715 uint32_t get_ptr_label, got_ptr_label;
1717 get_ptr_label = alloc_label(ctx);
1718 if (unlikely(!get_ptr_label))
1721 got_ptr_label = alloc_label(ctx);
1722 if (unlikely(!got_ptr_label))
1725 if (TYPE_IS_FLAT(content_type)) {
1726 g(gen_test_1_cached(ctx, slot_1, get_ptr_label));
1728 g(gen_upcall_start(ctx, 3));
1730 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1731 g(gen_upcall_argument(ctx, 0));
1733 g(gen_load_constant(ctx, R_ARG1, slot_1));
1734 g(gen_upcall_argument(ctx, 1));
1736 g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, R_ARG2, R_FRAME, (size_t)slot_1 * slot_size, 0));
1737 g(gen_upcall_argument(ctx, 2));
1739 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_flat_to_data), 3));
/* Keep the boxed pointer in R_SCRATCH_4 across the label join. */
1741 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_SCRATCH_4, R_RET0));
1743 gen_insn(INSN_JMP, 0, 0, 0);
1744 gen_four(got_ptr_label);
1747 gen_label(get_ptr_label);
1749 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_4));
1751 gen_label(got_ptr_label);
/* Length: sign-extended int; a negative length escapes. */
1753 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_1, &reg1));
1754 g(gen_jmp_if_negative(ctx, reg1, escape_label));
1756 g(gen_upcall_start(ctx, 2));
1757 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, reg1));
1758 g(gen_upcall_argument(ctx, 0));
1760 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SCRATCH_4));
1761 g(gen_upcall_argument(ctx, 1));
1763 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_sparse), 2));
/* Dense flat content: let the runtime replicate the flat slot value. */
1764 } else if (TYPE_IS_FLAT(content_type)) {
1765 g(gen_test_1_cached(ctx, slot_1, escape_label));
1766 flag_set(ctx, slot_1, false);
1768 g(gen_frame_get(ctx, OP_SIZE_INT, sign_x, slot_2, R_SCRATCH_4, &reg4));
1769 g(gen_jmp_if_negative(ctx, reg4, escape_label));
1771 g(gen_upcall_start(ctx, 3));
1772 g(gen_mov(ctx, i_size(OP_SIZE_INT), R_ARG1, reg4));
1773 g(gen_upcall_argument(ctx, 1));
1775 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1776 g(gen_upcall_argument(ctx, 0));
1778 g(gen_load_constant(ctx, R_ARG2, slot_1));
1779 g(gen_upcall_argument(ctx, 2));
1781 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 3));
/* Dense pointer content: pass the instruction offset and the length
 * slot so the runtime can read the count itself. */
1783 if (slot_is_register(ctx, slot_2))
1784 g(spill(ctx, slot_2));
1786 g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
1788 g(gen_upcall_start(ctx, 4));
1790 g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG3, R_SCRATCH_1));
1791 g(gen_upcall_argument(ctx, 3));
1793 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
1794 g(gen_upcall_argument(ctx, 0));
1796 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
1797 g(gen_upcall_argument(ctx, 1));
1799 g(gen_load_constant(ctx, R_ARG2, slot_2));
1800 g(gen_upcall_argument(ctx, 2));
1802 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_create_pointers), 4));
/* Store the result pointer; note the final argument differs from the
 * other creators here (no reference taken/compression flag — see
 * gen_frame_set_pointer's contract). */
1804 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_string - emit code creating a flat array of 'len' elements
 * of the type tagged 'tag', initialized from a literal embedded in the
 * function's bytecode ('string' points into da(ctx->fn,function)->code).
 *
 * The literal is copied at runtime relative to the function's code
 * pointer, so the generated code stays position-independent with
 * respect to the bytecode.
 */
1809 static bool attr_w gen_array_string(struct codegen_context *ctx, type_tag_t tag, uint8_t *string, frame_t len, frame_t slot_r)
1811 uint32_t escape_label;
1813 const struct type *type;
1815 escape_label = alloc_escape_label(ctx);
1816 if (unlikely(!escape_label))
1819 g(gen_upcall_start(ctx, 2));
1821 g(gen_load_constant(ctx, R_ARG0, tag));
1822 g(gen_upcall_argument(ctx, 0));
1824 g(gen_load_constant(ctx, R_ARG1, len));
1825 g(gen_upcall_argument(ctx, 1));
1827 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_data_alloc_array_flat_tag_mayfail), 2));
1828 g(gen_sanitize_returned_pointer(ctx, R_RET0));
1829 g(gen_jmp_on_zero(ctx, OP_SIZE_ADDRESS, R_RET0, COND_E, escape_label));
/* Keep the uncompressed pointer for the memcpy; publish the compressed
 * one into slot_r first. */
1831 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SAVED_1, R_RET0));
1833 g(gen_compress_pointer(ctx, R_RET0, R_RET0));
1834 g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, true));
/* Compute the literal's runtime address from the function code base. */
1836 g(load_function_offset(ctx, R_SCRATCH_3, offsetof(struct data, u_.function.code)));
1838 offset = string - cast_ptr(uint8_t *, da(ctx->fn,function)->code);
1839 type = type_get_from_tag(tag);
/* Alignment is capped at align_of(code_t) because the source lives
 * inside the code stream. */
1840 g(gen_memcpy_raw(ctx, R_SAVED_1, data_array_offset, R_SCRATCH_3, offset, (size_t)len * type->size, minimum(type->align, align_of(code_t))));
/*
 * gen_scaled_array_address - emit code computing
 *   reg_dst = reg_src + reg_index * element_size + offset_src.
 *
 * Strategy by element_size:
 *  - power of two: x86 LEA with scale (shift <= 3), shifted-add where
 *    the architecture supports it, or an explicit shift;
 *  - multiply immediate when available;
 *  - otherwise a shift-and-add decomposition of element_size using
 *    R_CONST_IMM as a scratch copy of the index.
 * The final addition of reg_src and offset_src uses an s390 LEA-style
 * form when the displacement permits, else ALU adds.
 */
1845 static bool attr_w gen_scaled_array_address(struct codegen_context *ctx, size_t element_size, unsigned reg_dst, unsigned reg_src, unsigned reg_index, int64_t offset_src)
1847 if (is_power_of_2(element_size)) {
1848 unsigned shift = log_2(element_size);
1849 #if defined(ARCH_X86)
/* x86: a single LEA covers base + index<<shift + disp32. */
1850 if (shift <= 3 && imm_is_32bit(offset_src)) {
1851 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), shift, 0);
1856 gen_eight(offset_src);
/* Architectures with add-with-shifted-operand (e.g. ARM-style). */
1860 if (ARCH_HAS_SHIFTED_ADD(shift)) {
1861 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1864 gen_one(ARG_SHIFTED_REGISTER);
1865 gen_one(ARG_SHIFT_LSL | shift);
/* Fallback: materialize index << shift into reg_dst. */
1872 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_dst, reg_index, shift, 0));
1873 reg_index = reg_dst;
/* Non-power-of-2 with a multiply-immediate instruction. */
1877 g(gen_imm(ctx, element_size, IMM_PURPOSE_MUL, i_size(OP_SIZE_ADDRESS)));
1878 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_MUL, ALU_WRITES_FLAGS(ALU_MUL, is_imm()));
1882 reg_index = reg_dst;
/* Non-power-of-2 without multiply: decompose element_size into a sum of
 * powers of two; R_CONST_IMM holds the original index, reg_index
 * accumulates the scaled result. */
1884 size_t e_size = element_size;
1886 bool first_match = true;
1888 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_CONST_IMM, reg_index));
1889 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), reg_dst, reg_index));
1890 reg_index = reg_dst;
/* element_size == 0 would leave the accumulator empty: start from 0. */
1893 g(gen_load_constant(ctx, reg_index, 0));
/* First set bit: shift in place; later bits: add index<<sh (directly
 * with a shifted operand, or via shifting R_CONST_IMM then adding). */
1899 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, reg_index, reg_index, sh, false));
1900 first_match = false;
1901 } else if (ARCH_HAS_SHIFTED_ADD(sh)) {
1902 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
1905 gen_one(ARG_SHIFTED_REGISTER);
1906 gen_one(ARG_SHIFT_LSL | sh);
1907 gen_one(R_CONST_IMM);
1910 g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_CONST_IMM, R_CONST_IMM, sh, false));
1913 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_index, reg_index, R_CONST_IMM, 0));
/* Combine the scaled index with the base and displacement. */
1921 #if defined(ARCH_S390)
/* s390: base + index + displacement in one LEA-style instruction when
 * the displacement fits the inline-address form. */
1922 if (offset_src && s390_inline_address(offset_src)) {
1923 gen_insn(INSN_LEA3, i_size(OP_SIZE_ADDRESS), 0, 0);
1928 gen_eight(offset_src);
1932 g(gen_3address_alu(ctx, i_size(OP_SIZE_ADDRESS), ALU_ADD, reg_dst, reg_index, reg_src, 0));
1937 g(gen_imm(ctx, offset_src, IMM_PURPOSE_ADD, i_size(OP_SIZE_ADDRESS)));
1938 gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, is_imm()));
/*
 * gen_scaled_array_load - emit code loading the array element at
 * reg_src + reg_idx * t->size + offset_src into slot_r, where t is
 * slot_r's local type.
 *
 * Fast paths: x86 scaled addressing for power-of-2 element sizes up to
 * 8 bytes; s390 single-byte loads with long displacement. The generic
 * path computes the address via gen_scaled_array_address and memcpys
 * into the slot.
 */
1946 static bool attr_w gen_scaled_array_load(struct codegen_context *ctx, unsigned reg_src, unsigned reg_idx, int64_t offset_src, frame_t slot_r)
1948 const struct type *t = get_type_of_local(ctx, slot_r);
1949 #if defined(ARCH_X86)
/* x86: one MOV with [base + idx*scale + disp32] addressing. Loads into
 * the slot's register if it has one, else via R_SCRATCH_2 + store. */
1950 if (is_power_of_2(t->size)) {
1951 unsigned shift = log_2(t->size);
1952 if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
1953 short reg = ctx->registers[slot_r];
1954 gen_insn(INSN_MOV, shift, 0, 0);
1955 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1956 gen_one(ARG_ADDRESS_2 + shift);
1959 gen_eight(offset_src);
/* Slot not in a register: spill the loaded value to the frame. */
1962 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, shift));
1963 gen_insn(INSN_MOV, shift, 0, 0);
1964 gen_address_offset();
1965 gen_one(R_SCRATCH_2);
1972 #if defined(ARCH_S390)
/* s390: single byte with base+index addressing, gated on the required
 * CPU facilities. */
1973 if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
1974 short reg = ctx->registers[slot_r];
1975 gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
1976 gen_one(reg >= 0 ? reg : R_SCRATCH_2);
1977 gen_one(ARG_ADDRESS_2);
1980 gen_eight(offset_src);
1983 g(gen_address(ctx, R_FRAME, (size_t)slot_r * slot_size, IMM_PURPOSE_STR_OFFSET, OP_SIZE_1));
1984 gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
1985 gen_address_offset();
1986 gen_one(R_SCRATCH_2);
/* Generic path: compute the element address, then copy t->size bytes
 * into the destination slot. */
1992 g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, reg_idx, 0));
1994 g(gen_memcpy_to_slot(ctx, slot_r, R_SCRATCH_2, offset_src));
/*
 * gen_scaled_array_store - emit code that stores the flat value held in
 * frame slot slot_1 into an array element addressed relative to reg_src
 * (+ offset_src).  Mirror image of gen_scaled_array_load.
 *
 * NOTE(review): partial extraction — the index register appears to be
 * staged in R_SCRATCH_2 by lines not visible in this chunk; confirm
 * against the full source.
 */
1999 static bool attr_w gen_scaled_array_store(struct codegen_context *ctx, unsigned reg_src, int64_t offset_src, frame_t slot_1)
2001 	const struct type *t = get_type_of_local(ctx, slot_1);
2002 #if defined(ARCH_X86)
/* x86 fast path: power-of-2 element sizes use a scaled addressing mode */
2003 	if (is_power_of_2(t->size)) {
2004 		unsigned shift = log_2(t->size);
2005 		if (shift <= 3 && shift <= OP_SIZE_NATIVE && imm_is_32bit(offset_src)) {
2006 			short reg = ctx->registers[slot_1];
			/* slot spilled to the frame: load it into R_SCRATCH_3 first */
2008 			g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_OFFSET, shift));
2009 			gen_insn(INSN_MOV, shift, 0, 0);
2010 			gen_one(R_SCRATCH_3);
2011 			gen_address_offset();
			/* store the value through the scaled address */
2015 			gen_insn(INSN_MOV, shift, 0, 0);
2016 			gen_one(ARG_ADDRESS_2 + shift);
2018 			gen_one(R_SCRATCH_2);
2019 			gen_eight(offset_src);
2026 #if defined(ARCH_S390)
/* s390 fast path for 1-byte elements, same facility checks as the load */
2027 	if (t->size == 1 && s390_inline_address(offset_src) && cpu_test_feature(CPU_FEATURE_long_displacement) && cpu_test_feature(CPU_FEATURE_extended_imm)) {
2028 		short reg = ctx->registers[slot_1];
2030 		g(gen_address(ctx, R_FRAME, (size_t)slot_1 * slot_size, IMM_PURPOSE_LDR_SX_OFFSET, OP_SIZE_1));
2031 		gen_insn(INSN_MOVSX, OP_SIZE_1, 0, 0);
2032 		gen_one(R_SCRATCH_3);
2033 		gen_address_offset();
2037 		gen_insn(INSN_MOV, OP_SIZE_1, 0, 0);
2038 		gen_one(ARG_ADDRESS_2);
2040 		gen_one(R_SCRATCH_2);
2041 		gen_eight(offset_src);
/* generic path: compute the destination address, then memcpy the slot
   contents into the array element */
2047 	g(gen_scaled_array_address(ctx, t->size, R_SCRATCH_2, reg_src, R_SCRATCH_2, 0));
2049 	g(gen_memcpy_from_slot(ctx, R_SCRATCH_2, offset_src, slot_1));
/*
 * gen_check_array_len - emit a bounds/capacity check: compare reg_len
 * against the array's n_used_entries (or n_allocated_entries when
 * 'allocated' is set) and jump to escape_label when 'cond' holds.
 * reg_array holds a pointer to the array's struct data.
 */
2054 static bool attr_w gen_check_array_len(struct codegen_context *ctx, unsigned reg_array, bool allocated, unsigned reg_len, unsigned cond, uint32_t escape_label)
2056 	size_t offset = !allocated ? offsetof(struct data, u_.array_flat.n_used_entries) : offsetof(struct data, u_.array_flat.n_allocated_entries);
2057 #if defined(ARCH_X86)
/* x86 can compare a register directly against memory */
2058 	g(gen_address(ctx, reg_array, offset, IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2059 	gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1);
2061 	gen_address_offset();
2063 	gen_insn(INSN_JMP_COND, OP_SIZE_INT, cond, 0);
2064 	gen_four(escape_label);
/* other architectures: load the entry count into R_SCRATCH_3 first
   (sign-extending where the architecture prefers it), then compare */
2066 	g(gen_address(ctx, reg_array, offset, ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2067 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2068 	gen_one(R_SCRATCH_3);
2069 	gen_address_offset();
2071 	g(gen_cmp_test_jmp(ctx, INSN_CMP, i_size(OP_SIZE_INT), reg_len, R_SCRATCH_3, cond, escape_label));
/*
 * gen_array_load - emit code for indexed array read: slot_r = slot_1[slot_idx].
 *
 * Flat-typed arrays are read straight out of the frame with a scaled load.
 * Otherwise the array pointer is dereferenced and dispatched on its data
 * tag: flat/slice arrays produce a flat element, pointer arrays produce a
 * boxed pointer stored via gen_frame_set_pointer_2.  Unsupported shapes
 * (thunks, btrees, out-of-range indices) jump to escape_label and fall
 * back to the interpreter.
 *
 * Fix: restored `&reg1` / `&reg2` arguments that had been corrupted into
 * the `®` character by HTML-entity mangling (`&reg` -> U+00AE).
 */
2076 static bool attr_w gen_array_load(struct codegen_context *ctx, frame_t slot_1, frame_t slot_idx, frame_t slot_r, frame_t flags)
2078 	const struct type *t = get_type_of_local(ctx, slot_1);
2079 	const struct type *tr = get_type_of_local(ctx, slot_r);
2080 	uint32_t escape_label;
2081 	unsigned reg1, reg2;
2083 	escape_label = alloc_escape_label(ctx);
2084 	if (unlikely(!escape_label))
/* statically flat array type: index the frame slot directly */
2087 	if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2088 		const struct flat_array_definition *def = type_def(t,flat_array);
2090 		g(gen_test_2_cached(ctx, slot_1, slot_idx, escape_label));
2092 		flag_set(ctx, slot_1, false);
2093 		flag_set(ctx, slot_idx, false);
2095 		g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, &reg2));
		/* bounds check unless the compiler proved the index in range */
2097 		if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2098 			g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_INT, reg2, def->n_elements, COND_AE, escape_label));
2100 		g(gen_scaled_array_load(ctx, R_FRAME, reg2, (size_t)slot_1 * slot_size, slot_r));
/* general case: slot_1 holds a pointer to array data */
2104 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg1, escape_label));
2106 	g(gen_test_1_cached(ctx, slot_idx, escape_label));
2107 	flag_set(ctx, slot_idx, false);
2108 	g(gen_frame_get(ctx, OP_SIZE_INT, native, slot_idx, R_SCRATCH_2, &reg2));
2110 	if (!(flags & OPCODE_ARRAY_INDEX_IN_RANGE))
2111 		g(gen_check_array_len(ctx, reg1, false, reg2, COND_AE, escape_label));
/* flat result: accept DATA_TAG_array_flat or DATA_TAG_array_slice; for a
   slice, redirect the base pointer to its flat data.  The tag test leaves
   the tag in R_SCRATCH_4, so COND_E below means "is a slice". */
2113 	if (TYPE_IS_FLAT(tr)) {
2115 		g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_slice, COND_A, escape_label, R_SCRATCH_4));
2116 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, reg1));
2117 #if defined(ARCH_X86) || defined(ARCH_S390)
2118 #if defined(ARCH_X86)
		/* without CMOV (or the s390 equivalent facility), fall back */
2119 		if (unlikely(!cpu_test_feature(CPU_FEATURE_cmov)))
2121 		if (unlikely(!cpu_test_feature(CPU_FEATURE_misc_45)))
2124 		g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2125 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2126 		gen_one(R_SCRATCH_3);
2127 		gen_address_offset();
		/* conditionally replace the base with the slice's flat data */
2130 		g(gen_address(ctx, reg1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2131 		gen_insn(INSN_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2132 		gen_one(R_SCRATCH_1);
2133 		gen_one(R_SCRATCH_1);
2134 		gen_address_offset();
2135 #elif defined(ARCH_PARISC)
2136 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2137 		gen_insn(INSN_CMP_CMOV, OP_SIZE_ADDRESS, COND_E, 0);
2138 		gen_one(R_SCRATCH_1);
2139 		gen_one(R_SCRATCH_1);
2140 		gen_address_offset();
2141 		gen_one(R_SCRATCH_4);
2143 		gen_eight(DATA_TAG_array_slice);
2144 #elif defined(ARCH_LOONGARCH64) || defined(ARCH_MIPS) || defined(ARCH_RISCV64)
		/* no cmov: branch around the slice-pointer load instead */
2145 		g(gen_3address_alu_imm(ctx, OP_SIZE_NATIVE, ALU_XOR, R_SCRATCH_4, R_SCRATCH_4, DATA_TAG_array_slice, 0));
2147 		label = alloc_label(ctx);
2148 		if (unlikely(!label))
2151 		gen_insn(INSN_JMP_REG, OP_SIZE_NATIVE, COND_NE, 0);
2152 		gen_one(R_SCRATCH_4);
2155 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2156 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2157 		gen_one(R_SCRATCH_1);
2158 		gen_address_offset();
2162 		g(gen_address(ctx, R_SCRATCH_1, offsetof(struct data, u_.array_slice.flat_data_minus_data_array_offset), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2163 		gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2164 		gen_one(R_SCRATCH_3);
2165 		gen_address_offset();
2167 #if defined(ARCH_POWER)
2168 		if (!cpu_test_feature(CPU_FEATURE_v203))
2171 #if defined(ARCH_SPARC)
2175 		gen_insn(INSN_CMOV, i_size(OP_SIZE_ADDRESS), COND_E, 0);
2176 		gen_one(R_SCRATCH_1);
2177 		gen_one(R_SCRATCH_1);
2178 		gen_one(R_SCRATCH_3);
		/* arch with compare-into-register + conditional move-on-register */
2180 		g(gen_imm(ctx, DATA_TAG_array_slice, IMM_PURPOSE_CMP, OP_SIZE_NATIVE));
2181 		gen_insn(INSN_CMP_DEST_REG, OP_SIZE_NATIVE, COND_E, 0);
2182 		gen_one(R_CMP_RESULT);
2183 		gen_one(R_SCRATCH_4);
2186 		gen_insn(INSN_MOVR, OP_SIZE_NATIVE, COND_NE, 0);
2187 		gen_one(R_SCRATCH_1);
2188 		gen_one(R_SCRATCH_1);
2189 		gen_one(R_CMP_RESULT);
2190 		gen_one(R_SCRATCH_3);
		/* portable fallback: explicit branch over the move */
2196 		label = alloc_label(ctx);
2197 		if (unlikely(!label))
2199 		gen_insn(INSN_JMP_COND, OP_SIZE_4, COND_NE, 0);
2202 		g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_SCRATCH_1, R_SCRATCH_3));
		/* base pointer now addresses flat data; do the element load */
2206 		g(gen_scaled_array_load(ctx, R_SCRATCH_1, reg2, data_array_offset, slot_r));
2207 		flag_set(ctx, slot_r, false);
/* pointer result */
2210 	if (flag_must_be_flat(ctx, slot_r)) {
		/* result slot must stay flat but source is a pointer array:
		   always escape to the interpreter */
2211 		gen_insn(INSN_JMP, 0, 0, 0);
2212 		gen_four(escape_label);
2216 	g(gen_compare_ptr_tag(ctx, reg1, DATA_TAG_array_pointers, COND_NE, escape_label, R_SCRATCH_3));
2218 	g(gen_address(ctx, reg1, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2219 	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2220 	gen_one(R_SCRATCH_1);
2221 	gen_address_offset();
2223 #if defined(ARCH_X86) || defined(ARCH_ARM)
	/* scaled addressing mode available: load the pointer in one insn */
2224 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2225 	gen_one(R_SCRATCH_1);
2226 	gen_one(ARG_ADDRESS_2 + OP_SIZE_SLOT);
2227 	gen_one(R_SCRATCH_1);
2231 	goto scaled_load_done;
2233 #if defined(ARCH_LOONGARCH64) || defined(ARCH_PARISC) || defined(ARCH_POWER) || defined(ARCH_S390) || defined(ARCH_SPARC)
	/* base+index addressing: pre-shift the index by the slot size */
2234 	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2236 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2237 	gen_one(R_SCRATCH_1);
2238 	gen_one(ARG_ADDRESS_2);
2239 	gen_one(R_SCRATCH_1);
2240 	gen_one(R_SCRATCH_2);
2243 	goto scaled_load_done;
2245 	if (ARCH_HAS_SHIFTED_ADD(OP_SIZE_SLOT)) {
		/* fold the shift into the add, then load through the sum */
2246 		gen_insn(INSN_ALU, i_size(OP_SIZE_ADDRESS), ALU_ADD, ALU_WRITES_FLAGS(ALU_ADD, false));
2247 		gen_one(R_SCRATCH_2);
2248 		gen_one(ARG_SHIFTED_REGISTER);
2249 		gen_one(ARG_SHIFT_LSL | OP_SIZE_SLOT);
2251 		gen_one(R_SCRATCH_1);
2253 		gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2254 		gen_one(R_SCRATCH_1);
2255 		gen_one(ARG_ADDRESS_1);
2256 		gen_one(R_SCRATCH_2);
2259 		goto scaled_load_done;
	/* generic fallback: shift, add, load */
2262 	g(gen_3address_rot_imm(ctx, OP_SIZE_ADDRESS, ROT_SHL, R_SCRATCH_2, reg2, OP_SIZE_SLOT, false));
2264 	g(gen_3address_alu(ctx, OP_SIZE_ADDRESS, ALU_ADD, R_SCRATCH_2, R_SCRATCH_2, R_SCRATCH_1, 0));
2266 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_SLOT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_SLOT, 0, 0);
2267 	gen_one(R_SCRATCH_1);
2268 	gen_one(ARG_ADDRESS_1);
2269 	gen_one(R_SCRATCH_2);
/* publish the loaded pointer (reference counting handled inside) */
2272 	g(gen_frame_set_pointer_2(ctx, slot_r, R_SCRATCH_1, flags, escape_label));
/*
 * gen_array_len - emit code for the array-length operation.
 *
 * When slot_2 == NO_FRAME_T the length is stored into slot_r; otherwise
 * the length is compared against slot_2 (COND_G) and a boolean condition
 * is materialized.  When 'fused' is set, the comparison feeds a branch
 * directly: the false edge jumps by offs_false instead of writing slot_r.
 * Flat array types have a compile-time constant length (n_elements);
 * otherwise the length is read from the data structure's n_used_entries
 * (the three array variants share that field offset — checked below).
 *
 * Fix: restored `&reg` / `&reg2` arguments that had been corrupted into
 * the `®` character by HTML-entity mangling (`&reg` -> U+00AE).
 */
2277 static bool attr_w gen_array_len(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, bool fused, int32_t offs_false)
2279 	const struct type *t = get_type_of_local(ctx, slot_1);
2280 	uint32_t escape_label;
2281 	unsigned reg, target;
2283 	escape_label = alloc_escape_label(ctx);
2284 	if (unlikely(!escape_label))
2287 	if (slot_2 != NO_FRAME_T) {
2288 		g(gen_test_1_cached(ctx, slot_2, escape_label));
2289 		flag_set(ctx, slot_2, false);
/* flat array type: length is the constant n_elements */
2292 	if (unlikely(t->tag == TYPE_TAG_flat_array)) {
2294 			target = R_SCRATCH_1;
2295 			g(gen_load_constant(ctx, target, type_def(t,flat_array)->n_elements));
2297 		} else if (slot_2 == NO_FRAME_T) {
2298 			g(gen_frame_store_imm(ctx, OP_SIZE_INT, slot_r, 0, (unsigned)type_def(t,flat_array)->n_elements));
2300 			g(gen_frame_load_cmp_imm_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, type_def(t,flat_array)->n_elements, COND_G, slot_r));
2302 		flag_set(ctx, slot_r, false);
/* pointer case: dereference and read the entry count */
2304 	g(gen_frame_decompress_slot(ctx, slot_1, R_SCRATCH_1, &reg, escape_label));
/* compile-time layout assumptions: all three array variants keep their
   entry count at the same offset, and the tags are ordered so one
   unsigned compare rejects same/btree/incomplete arrays */
2306 	if (offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_slice.n_entries) ||
2307 	    offsetof(struct data, u_.array_flat.n_used_entries) != offsetof(struct data, u_.array_pointers.n_used_entries)) {
2311 	if (DATA_TAG_array_flat != DATA_TAG_array_slice - 1 ||
2312 	    DATA_TAG_array_slice != DATA_TAG_array_pointers - 1 ||
2313 	    DATA_TAG_array_same < DATA_TAG_array_flat ||
2314 	    DATA_TAG_array_btree < DATA_TAG_array_flat ||
2315 	    DATA_TAG_array_incomplete < DATA_TAG_array_flat) {
2320 	g(gen_compare_ptr_tag(ctx, reg, DATA_TAG_array_pointers, COND_A, escape_label, R_SCRATCH_2));
2322 	if (slot_2 == NO_FRAME_T) {
2323 		target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SCRATCH_1);
2325 		target = R_SCRATCH_1;
2328 #if defined(ARCH_X86) || defined(ARCH_S390)
	/* fused fast path: compare slot_2's register directly against the
	   in-memory entry count and branch on the result */
2329 	if (fused && slot_is_register(ctx, slot_2)) {
2330 		g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2331 		gen_insn(INSN_CMP, OP_SIZE_INT, 0, 1 + COND_IS_LOGICAL(COND_GE));
2332 		gen_one(ctx->registers[slot_2]);
2333 		gen_address_offset();
2335 		g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_GE, -1U, -1U));
/* load the entry count into 'target' */
2339 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2340 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2342 	gen_address_offset();
2346 		enum extend ex = OP_SIZE_INT == i_size_cmp(OP_SIZE_INT) + (unsigned)zero ? garbage : sign_x;
2348 		if (ARCH_HAS_JMP_2REGS(COND_LE)) {
2349 			g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, &reg2));
2350 			g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, target, reg2));
2354 			g(gen_frame_load_cmp(ctx, OP_SIZE_INT, COND_IS_LOGICAL(COND_LE), ex, false, slot_2, 0, false, target));
2355 			g(gen_jump(ctx, offs_false, i_size_cmp(OP_SIZE_INT), COND_LE, -1U, -1U));
2357 			g(gen_frame_get(ctx, OP_SIZE_INT, ex, slot_2, R_SCRATCH_2, &reg2));
2358 			g(gen_cmp_dest_reg(ctx, i_size_cmp(OP_SIZE_INT), target, reg2, R_CMP_RESULT, 0, COND_G));
2359 			g(gen_jump(ctx, offs_false, OP_SIZE_NATIVE, COND_E, R_CMP_RESULT, -1U));
/* non-fused: either store the length or materialize the comparison */
2361 	} else if (slot_2 == NO_FRAME_T) {
2362 		g(gen_frame_store(ctx, OP_SIZE_INT, slot_r, 0, target));
2364 		g(gen_frame_load_cmp_set_cond(ctx, OP_SIZE_INT, zero_x, slot_2, R_SCRATCH_1, COND_G, slot_r));
2366 	flag_set(ctx, slot_r, false);
/*
 * gen_array_sub - emit code for the array-subrange operation
 * (slot_r = slot_array[slot_from .. slot_to]) by upcalling
 * cg_upcall_array_sub.  Input slots are spilled so the upcall sees
 * the in-frame values; a zero return pointer escapes to the
 * interpreter.  OPCODE_FLAG_FREE_ARGUMENT lets the upcall consume
 * the source array (arg 3), but only when the source is not a thunk.
 */
2371 static bool attr_w gen_array_sub(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_to, frame_t slot_r, frame_t flags)
2373 	const struct type *t = get_type_of_local(ctx, slot_array);
2374 	uint32_t escape_label, upcall_label;
2376 	escape_label = alloc_escape_label(ctx);
2377 	if (unlikely(!escape_label))
2380 	upcall_label = alloc_label(ctx);
2381 	if (unlikely(!upcall_label))
/* flat-typed arrays are not handled here */
2384 	if (unlikely(TYPE_IS_FLAT(t))) {
2385 		g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2388 	g(gen_test_2_cached(ctx, slot_from, slot_to, escape_label));
/* the upcall reads arguments from the frame, so spill register slots */
2390 	if (slot_is_register(ctx, slot_array))
2391 		g(spill(ctx, slot_array));
2392 	if (slot_is_register(ctx, slot_from))
2393 		g(spill(ctx, slot_from));
2394 	if (slot_is_register(ctx, slot_to))
2395 		g(spill(ctx, slot_to));
2397 	g(gen_upcall_start(ctx, 4));
2399 	g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2400 	g(gen_upcall_argument(ctx, 0));
2402 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2403 	g(gen_upcall_argument(ctx, 1));
2405 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_to, 0, false, R_ARG2));
2406 	g(gen_upcall_argument(ctx, 2));
/* arg3: whether the upcall may free the source array */
2408 	g(gen_load_constant(ctx, R_ARG3, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2409 	g(gen_upcall_argument(ctx, 3));
/* if the source may be a thunk, force arg3 to 0 (don't free a thunk) */
2411 	if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2412 		g(gen_test_1_cached(ctx, slot_array, upcall_label));
2413 		g(gen_load_constant(ctx, R_ARG3, 0));
2414 		g(gen_upcall_argument(ctx, 3));
2417 	gen_label(upcall_label);
2418 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_sub), 4));
/* NULL result: the upcall could not handle it; escape */
2420 	g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
2422 	if (slot_array != slot_r) {
2423 		if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
			/* ownership moved into the upcall: clear the source slot */
2424 			g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2425 			g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2426 			flag_set(ctx, slot_array, false);
2430 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_skip - emit code for the array-skip operation
 * (slot_r = slot_array[slot_from ..]) by upcalling cg_upcall_array_skip.
 * Same structure as gen_array_sub but with a single index argument:
 * spill inputs, pass the free-source flag (forced to 0 when the source
 * may be a thunk), escape on a NULL result, and clear the source slot
 * when ownership was transferred.
 */
2435 static bool attr_w gen_array_skip(struct codegen_context *ctx, frame_t slot_array, frame_t slot_from, frame_t slot_r, frame_t flags)
2437 	const struct type *t = get_type_of_local(ctx, slot_array);
2438 	uint32_t escape_label, upcall_label;
2440 	escape_label = alloc_escape_label(ctx);
2441 	if (unlikely(!escape_label))
2444 	upcall_label = alloc_label(ctx);
2445 	if (unlikely(!upcall_label))
/* flat-typed arrays are not handled here */
2448 	if (unlikely(TYPE_IS_FLAT(t))) {
2449 		g(gen_test_1_jz_cached(ctx, slot_array, escape_label));
2452 	g(gen_test_1_cached(ctx, slot_from, escape_label));
/* the upcall reads arguments from the frame, so spill register slots */
2454 	if (slot_is_register(ctx, slot_array))
2455 		g(spill(ctx, slot_array));
2456 	if (slot_is_register(ctx, slot_from))
2457 		g(spill(ctx, slot_from));
2459 	g(gen_upcall_start(ctx, 3));
2461 	g(gen_frame_load_raw(ctx, OP_SIZE_SLOT, garbage, slot_array, 0, false, R_ARG0));
2462 	g(gen_upcall_argument(ctx, 0));
2464 	g(gen_frame_load_raw(ctx, OP_SIZE_INT, garbage, slot_from, 0, false, R_ARG1));
2465 	g(gen_upcall_argument(ctx, 1));
/* arg2: whether the upcall may free the source array */
2467 	g(gen_load_constant(ctx, R_ARG2, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0));
2468 	g(gen_upcall_argument(ctx, 2));
/* if the source may be a thunk, force arg2 to 0 (don't free a thunk) */
2470 	if ((flags & OPCODE_FLAG_FREE_ARGUMENT) != 0) {
2471 		g(gen_test_1_cached(ctx, slot_array, upcall_label));
2472 		g(gen_load_constant(ctx, R_ARG2, 0));
2473 		g(gen_upcall_argument(ctx, 2));
2476 	gen_label(upcall_label);
2477 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_skip), 3));
/* NULL result: the upcall could not handle it; escape */
2479 	g(gen_jmp_on_zero(ctx, OP_SIZE_SLOT, R_RET0, COND_E, escape_label));
2481 	if (slot_array != slot_r) {
2482 		if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
			/* ownership moved into the upcall: clear the source slot */
2483 			g(gen_set_1(ctx, R_FRAME, slot_array, 0, false));
2484 			g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_array));
2485 			flag_set(ctx, slot_array, false);
2489 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_append - emit code for array concatenation
 * (slot_r = slot_1 ++ slot_2) via the cg_upcall_array_join upcall.
 * Escapes to the interpreter when either operand is a thunk or an
 * incomplete array.  The FREE_ARGUMENT flags decide whether the
 * operands' references are consumed by gen_frame_get_pointer.
 */
2494 static bool attr_w gen_array_append(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2496 	uint32_t escape_label;
2498 	escape_label = alloc_escape_label(ctx);
2499 	if (unlikely(!escape_label))
/* flat operands must carry a pointer here; otherwise escape */
2502 	if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_1))))
2503 		g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2504 	if (unlikely(TYPE_IS_FLAT(get_type_of_local(ctx, slot_2))))
2505 		g(gen_test_1_jz_cached(ctx, slot_2, escape_label));
/* both operands must be evaluated (non-thunk) ... */
2507 	g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_1, 0, false, R_SCRATCH_1));
2508 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_1, slot_1, escape_label));
2509 	g(gen_frame_load(ctx, OP_SIZE_SLOT, zero_x, slot_2, 0, false, R_SCRATCH_2));
2510 	g(gen_ptr_is_thunk(ctx, R_SCRATCH_2, slot_2, escape_label));
/* ... and must not be incomplete arrays */
2512 	g(gen_compare_da_tag(ctx, R_SCRATCH_1, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_1));
2513 	g(gen_compare_da_tag(ctx, R_SCRATCH_2, DATA_TAG_array_incomplete, COND_E, escape_label, R_SCRATCH_2));
/* take (or copy) the operand references and hand them to the upcall;
   slot_2's pointer is parked in R_SAVED_1 across the slot_1 fetch */
2515 	g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SAVED_1));
2516 	g(gen_frame_get_pointer(ctx, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0, R_SCRATCH_1));
2517 	g(gen_upcall_start(ctx, 2));
2518 	g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG0, R_SCRATCH_1));
2519 	g(gen_upcall_argument(ctx, 0));
2520 	g(gen_mov(ctx, i_size(OP_SIZE_SLOT), R_ARG1, R_SAVED_1));
2521 	g(gen_upcall_argument(ctx, 1));
2522 	g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_array_join), 2));
2523 	g(gen_frame_set_pointer(ctx, slot_r, R_RET0, false, false));
/*
 * gen_array_append_one_flat - emit code appending one flat element
 * (slot_2) in place to a uniquely-owned flat array (slot_1), storing the
 * result in slot_r.  Requires OPCODE_FLAG_FREE_ARGUMENT (in-place update
 * consumes the source); otherwise, or when the array is shared, the wrong
 * tag, or full, it escapes to the interpreter.
 *
 * Fix: restored the `&reg` argument that had been corrupted into the `®`
 * character by HTML-entity mangling (`&reg` -> U+00AE).
 */
2527 static bool attr_w gen_array_append_one_flat(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2529 	uint32_t escape_label;
2532 	escape_label = alloc_escape_label(ctx);
2533 	if (unlikely(!escape_label))
/* in-place append only when we own the source array */
2536 	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2537 		gen_insn(INSN_JMP, 0, 0, 0);
2538 		gen_four(escape_label);
2542 	g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2543 	g(gen_test_1_cached(ctx, slot_2, escape_label));
2544 	flag_set(ctx, slot_2, false);
2546 	g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
/* must be an unshared flat array */
2548 	g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_flat, escape_label, R_SCRATCH_1));
/* load n_used_entries and escape when the array is at capacity */
2550 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2551 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2552 	gen_one(R_SCRATCH_2);
2553 	gen_address_offset();
2555 	g(gen_check_array_len(ctx, reg, true, R_SCRATCH_2, COND_E, escape_label));
/* bump the entry count */
2557 	g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SCRATCH_2, 1, 0));
2559 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_flat.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2560 	gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2561 	gen_address_offset();
2562 	gen_one(R_SCRATCH_1);
/* write the new element at the old count's position */
2564 	g(gen_scaled_array_store(ctx, reg, data_array_offset, slot_2));
2566 	if (slot_1 != slot_r) {
2567 #if !defined(POINTER_COMPRESSION)
2568 		g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
		/* compressed pointers: re-compress before publishing */
2570 		unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2571 		g(gen_compress_pointer(ctx, target, reg));
2572 		g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
		/* reference moved to slot_r: clear the source slot */
2574 		g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2575 		g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2576 		flag_set(ctx, slot_1, false);
/*
 * gen_array_append_one - emit code appending one boxed pointer (slot_2)
 * in place to a uniquely-owned pointer array (slot_1), storing the
 * result in slot_r.  Pointer-array counterpart of
 * gen_array_append_one_flat: requires OPCODE_FLAG_FREE_ARGUMENT, and
 * escapes when the array is shared, the wrong tag, or at capacity.
 *
 * Fix: restored the `&reg` argument that had been corrupted into the `®`
 * character by HTML-entity mangling (`&reg` -> U+00AE).
 */
2582 static bool attr_w gen_array_append_one(struct codegen_context *ctx, frame_t slot_1, frame_t slot_2, frame_t slot_r, frame_t flags)
2584 	uint32_t escape_label;
2587 	escape_label = alloc_escape_label(ctx);
2588 	if (unlikely(!escape_label))
/* in-place append only when we own the source array */
2591 	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT))) {
2592 		gen_insn(INSN_JMP, 0, 0, 0);
2593 		gen_four(escape_label);
2597 	g(gen_test_1_jz_cached(ctx, slot_1, escape_label));
2599 	g(gen_frame_decompress_slot(ctx, slot_1, R_SAVED_1, &reg, escape_label));
/* must be an unshared pointer array */
2601 	g(gen_compare_tag_and_refcount(ctx, reg, DATA_TAG_array_pointers, escape_label, R_SCRATCH_1));
/* load n_used_entries (into R_SAVED_2 — it must survive the pointer
   fetch below) and escape when the array is at capacity */
2603 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), ARCH_PREFERS_SX(OP_SIZE_INT) ? IMM_PURPOSE_LDR_SX_OFFSET : IMM_PURPOSE_LDR_OFFSET, OP_SIZE_INT));
2604 	gen_insn(ARCH_PREFERS_SX(OP_SIZE_INT) ? INSN_MOVSX : INSN_MOV, OP_SIZE_INT, 0, 0);
2606 	gen_address_offset();
2608 	g(gen_check_array_len(ctx, reg, true, R_SAVED_2, COND_E, escape_label));
/* take (or copy) the element's reference */
2610 	g(gen_frame_get_pointer(ctx, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0, R_SCRATCH_2));
/* bump the entry count */
2612 	g(gen_3address_alu_imm(ctx, i_size(OP_SIZE_INT), ALU_ADD, R_SCRATCH_1, R_SAVED_2, 1, 0));
2614 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.n_used_entries), IMM_PURPOSE_STR_OFFSET, OP_SIZE_INT));
2615 	gen_insn(INSN_MOV, OP_SIZE_INT, 0, 0);
2616 	gen_address_offset();
2617 	gen_one(R_SCRATCH_1);
/* fetch the element storage and write the new pointer at the old count */
2619 	g(gen_address(ctx, reg, offsetof(struct data, u_.array_pointers.pointer), IMM_PURPOSE_LDR_OFFSET, OP_SIZE_ADDRESS));
2620 	gen_insn(INSN_MOV, OP_SIZE_ADDRESS, 0, 0);
2621 	gen_one(R_SCRATCH_3);
2622 	gen_address_offset();
2624 	g(gen_scaled_array_address(ctx, slot_size, R_SAVED_2, R_SCRATCH_3, R_SAVED_2, 0));
2626 	gen_insn(INSN_MOV, OP_SIZE_SLOT, 0, 0);
2627 	gen_one(ARG_ADDRESS_1);
2630 	gen_one(R_SCRATCH_2);
2632 	if (slot_1 != slot_r) {
2633 #if !defined(POINTER_COMPRESSION)
2634 		g(gen_frame_set_pointer(ctx, slot_r, reg, false, true));
		/* compressed pointers: re-compress before publishing */
2636 		unsigned target = gen_frame_target(ctx, slot_r, NO_FRAME_T, NO_FRAME_T, R_SAVED_1);
2637 		g(gen_compress_pointer(ctx, target, reg));
2638 		g(gen_frame_set_pointer(ctx, slot_r, target, false, true));
		/* reference moved to slot_r: clear the source slot */
2640 		g(gen_frame_clear(ctx, OP_SIZE_SLOT, slot_1));
2641 		g(gen_set_1(ctx, R_FRAME, slot_1, 0, false));
2642 		flag_set(ctx, slot_1, false);
2648 static bool attr_w gen_io(struct codegen_context *ctx, frame_t code, frame_t slot_1, frame_t slot_2, frame_t slot_3)
2650 uint32_t reload_label, escape_label;
2653 reload_label = alloc_reload_label(ctx);
2654 if (unlikely(!reload_label))
2658 mem_free(ctx->var_aux);
2659 ctx->var_aux = NULL;
2661 ctx->var_aux = mem_alloc_array_mayfail(mem_alloc_mayfail, frame_t *, 0, 0, slot_1 + slot_2, sizeof(frame_t), &ctx->err);
2662 if (unlikely(!ctx->var_aux))
2665 for (i = 0; i < slot_1 + slot_2; i++)
2666 ctx->var_aux[i] = get_uint32(ctx);
2667 for (i = 0; i < slot_3; i++)
2670 for (i = 0; i < slot_2; i++) {
2671 frame_t input_slot = ctx->var_aux[slot_1 + i];
2672 if (slot_is_register(ctx, input_slot))
2673 g(spill(ctx, input_slot));
2676 /*gen_insn(INSN_JMP, 0, 0, 0); gen_four(alloc_escape_label(ctx));*/
2678 g(gen_upcall_start(ctx, 3));
2679 g(gen_mov(ctx, i_size(OP_SIZE_ADDRESS), R_ARG0, R_FRAME));
2680 g(gen_upcall_argument(ctx, 0));
2682 g(gen_load_constant(ctx, R_ARG1, ctx->instr_start - da(ctx->fn,function)->code));
2683 g(gen_upcall_argument(ctx, 1));
2685 g(gen_load_constant(ctx, R_ARG2, ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3));
2686 g(gen_upcall_argument(ctx, 2));
2687 /*debug("arg2: %08x", ((uint32_t)code << 24) | ((uint32_t)slot_1 << 16) | ((uint32_t)slot_2 << 8) | slot_3);*/
2689 g(gen_upcall(ctx, offsetof(struct cg_upcall_vector_s, cg_upcall_ipret_io), 3));
2690 g(gen_sanitize_returned_pointer(ctx, R_RET0));
2691 g(gen_cmp_test_imm_jmp(ctx, INSN_CMP, OP_SIZE_ADDRESS, R_RET0, ptr_to_num(POINTER_FOLLOW_THUNK_GO), COND_NE, reload_label));
2693 for (i = 0; i < slot_1; i++) {
2694 frame_t output_slot = ctx->var_aux[i];
2695 flag_set_unknown(ctx, output_slot);
2698 escape_label = alloc_escape_label_for_ip(ctx, ctx->current_position);
2699 if (unlikely(!escape_label))
2702 g(gen_test_variables(ctx, ctx->var_aux, slot_1, escape_label));