codegen: add a 'size' argument to ALU_WRITES_FLAGS
[ajla.git] / ipfn.c
blob7417402557240ad0c73409f0822d05304dc6c1d4
1 /*
2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
9 * version.
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
19 #include "ajla.h"
21 #ifndef FILE_OMIT
23 #include "args.h"
24 #include "mem_al.h"
25 #include "data.h"
26 #include "array.h"
27 #include "arrayu.h"
28 #include "task.h"
29 #include "pcode.h"
30 #include "ipio.h"
31 #include "funct.h"
32 #include "os.h"
34 #include "ipfn.h"
36 shared_var bool ipret_strict_calls shared_init(false);
37 shared_var bool ipret_is_privileged shared_init(false);
38 shared_var bool ipret_compile shared_init(false);
40 static const timestamp_t break_ticks = 1;
42 void eval_both(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2)
/*
 * Force evaluation of the thunks held in up to two frame slots, then suspend
 * the current frame until they are resolved.  Either slot may be NO_FRAME_T
 * (or already flat), in which case it is skipped.
 *
 * NOTE(review): pointer_follow / pointer_follow_wait are project macros with
 * embedded control flow; the "SUBMIT_EX(ex_); goto brkN" and "break"
 * arguments below are expanded inside them as the success/failure paths.
 */
44 struct execution_control *ex = frame_execution_control(fp);
45 if (slot_1 != NO_FRAME_T) {
46 if (!frame_variable_is_flat(fp, slot_1)) {
47 pointer_t *ptr = frame_pointer(fp, slot_1);
48 struct data attr_unused *result;
/* Submit slot_1's thunk for evaluation; on success fall through and
 * take an extra reference on the execution control. */
49 pointer_follow(ptr, true, result, PF_PREPARE0, fp, ip,
50 SUBMIT_EX(ex_); goto brk1,
51 break);
53 execution_control_acquire(ex);
55 brk1:
56 if (slot_2 != NO_FRAME_T) {
57 if (!frame_variable_is_flat(fp, slot_2)) {
58 pointer_t *ptr = frame_pointer(fp, slot_2);
59 struct data attr_unused *result;
/* Same as above for slot_2 (PF_PREPARE1 distinguishes the second wait). */
60 pointer_follow(ptr, true, result, PF_PREPARE1, fp, ip,
61 SUBMIT_EX(ex_); goto brk2,
62 break);
64 execution_control_acquire(ex);
66 brk2:
/* Park this frame until the submitted evaluations complete. */
67 pointer_follow_wait(fp, ip);
70 static void pointer_copy_owned(frame_s *fp, frame_t src_slot, frame_t dest_slot)
/*
 * Copy the pointer stored in src_slot into dest_slot, taking a new owned
 * reference so both slots hold an independently-dereferenceable pointer.
 * No-op when the two slots are the same.
 */
72 pointer_t ptr;
73 if (dest_slot == src_slot)
74 return;
75 ptr = *frame_pointer(fp, src_slot);
/* Free whatever dest_slot held before installing the copy, then bump the
 * reference count of the shared target. */
76 frame_free_and_set_pointer(fp, dest_slot, ptr);
77 pointer_reference_owned(ptr);
80 void attr_hot_fastcall ipret_fill_function_reference_from_slot(struct data *function_reference, arg_t a, frame_s *fp, frame_t slot, bool deref)
/*
 * Store the value in frame slot 'slot' as curried argument 'a' of
 * 'function_reference'.  Flat values are copied by value; boxed values are
 * stored as a pointer reference (consuming the slot's reference when 'deref'
 * is true).  A NULL function_reference signals an earlier allocation
 * failure: only release the slot (if requested) and bail out.
 */
82 const struct type *type;
83 pointer_t ptr;
85 if (unlikely(!function_reference)) {
86 if (deref)
87 frame_free_and_clear(fp, slot);
88 return;
91 ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "ipret_fill_function_reference_from_slot: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));
93 if (frame_variable_is_flat(fp, slot)) {
/* Flat value: copy the raw bytes together with their type descriptor. */
94 type = frame_get_type_of_local(fp, slot);
95 data_fill_function_reference_flat(function_reference, a, type, frame_var(fp, slot));
96 } else {
/* Boxed value: transfer (or duplicate, per 'deref') the pointer. */
97 ptr = frame_get_pointer_reference(fp, slot, deref);
98 data_fill_function_reference(function_reference, a, ptr);
103 static struct thunk *build_thunk(pointer_t *fn_ptr, arg_t n_arguments, struct data **function_reference)
/*
 * Allocate a function reference with room for n_arguments curried arguments
 * and wrap it in a function-call thunk.  On success returns the thunk and
 * stores the (not yet filled) reference in *function_reference; the caller
 * fills in the arguments afterwards.  On allocation failure returns an
 * exception thunk and sets *function_reference to NULL (which the fill
 * helper above recognizes).
 */
105 struct thunk *result;
106 ajla_error_t err;
108 *function_reference = data_alloc_function_reference_mayfail(n_arguments, &err pass_file_line);
109 if (unlikely(!*function_reference))
110 goto fail_err;
/* Direct (non-indirect) reference to the resolved function pointer. */
111 da(*function_reference,function_reference)->is_indirect = false;
112 da(*function_reference,function_reference)->u.direct = fn_ptr;
114 if (unlikely(!thunk_alloc_function_call(pointer_data(*function_reference), 1, &result, &err))) {
115 data_dereference(*function_reference);
116 goto fail_err;
119 return result;
121 fail_err:
122 *function_reference = NULL;
123 return thunk_alloc_exception_error(err, NULL, NULL, NULL pass_file_line);
126 static void *ipret_op_build_thunk(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
/*
 * Slow path for operators whose operand(s) are thunks or exceptions: either
 * propagate an exception result, force strict evaluation, or build a lazy
 * thunk that will apply the operator's pcode function to the operands.
 * Returns one of the POINTER_FOLLOW_THUNK_* dispatch codes.
 *
 * slot_2 may be NO_FRAME_T (unary op) or a frame_t-encoded constant.
 */
128 unsigned flags;
129 pointer_t *fn_ptr;
130 code_t code;
131 void *ex;
132 struct data *function_reference;
133 struct thunk *result;
134 frame_t slot_1_eval = NO_FRAME_T;
135 frame_t slot_2_eval = NO_FRAME_T;
/* Strip the addressing-mode multiplier to get the bare opcode. */
137 code = *ip % OPCODE_MODE_MULT;
138 if (code == OPCODE_IS_EXCEPTION)
139 strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
/* Normalize "operation with constant" opcode variants to their plain
 * two-operand form for fixed and int types. */
140 if (code >= OPCODE_FIXED_OP + zero && code < OPCODE_FIXED_OP + OPCODE_FIXED_TYPE_MULT * TYPE_FIXED_N) {
141 code_t op = (code - OPCODE_FIXED_OP) % OPCODE_FIXED_TYPE_MULT;
142 if (op >= OPCODE_FIXED_OP_C && op < OPCODE_FIXED_OP_UNARY) {
143 code -= OPCODE_FIXED_OP_C;
146 if (code >= OPCODE_INT_OP && code < OPCODE_INT_OP + OPCODE_INT_TYPE_MULT * TYPE_INT_N) {
147 code_t op = (code - OPCODE_INT_OP) % OPCODE_INT_TYPE_MULT;
148 if (op >= OPCODE_INT_OP_C && op < OPCODE_INT_OP_UNARY) {
149 code -= OPCODE_INT_OP_C;
152 if (code >= OPCODE_REAL_OP && code < OPCODE_REAL_OP + OPCODE_REAL_TYPE_MULT * TYPE_REAL_N) {
153 code_t op = (code - OPCODE_REAL_OP) % OPCODE_REAL_TYPE_MULT;
154 if (op == OPCODE_REAL_OP_is_exception || op == OPCODE_REAL_OP_is_exception_alt1 || op == OPCODE_REAL_OP_is_exception_alt2)
155 strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
/* Inspect operand 1.  pointer_follow_thunk_noeval's three arms are:
 * retry (thunk was resolved meanwhile), exception, and still-a-thunk. */
158 if (frame_test_flag(fp, slot_1) && pointer_is_thunk(*frame_pointer(fp, slot_1))) {
159 pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
160 return POINTER_FOLLOW_THUNK_RETRY,
161 if (strict_flag & FLAG_TESTING_FOR_EXCEPTION) {
/* is_exception on an exception: the answer is simply "true". */
162 frame_free(fp, slot_r);
163 barrier_aliasing();
164 *frame_slot(fp, slot_r, ajla_flat_option_t) = 1;
165 barrier_aliasing();
166 return POINTER_FOLLOW_THUNK_GO;
/* Normally an exception operand short-circuits into the result;
 * with NEED_BOTH we must look at operand 2 as well. */
168 if (!(strict_flag & FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL)) {
169 pointer_copy_owned(fp, slot_1, slot_r);
170 return POINTER_FOLLOW_THUNK_GO;
172 strict_flag |= FLAG_FIRST_EXCEPTION;
173 break,
174 slot_1_eval = slot_1; break
/* Same inspection for operand 2 (skipped for unary ops and inline
 * constants). */
178 if (slot_2 != NO_FRAME_T && !frame_t_is_const(slot_2) && frame_test_flag(fp, slot_2) && pointer_is_thunk(*frame_pointer(fp, slot_2))) {
179 pointer_follow_thunk_noeval(frame_pointer(fp, slot_2),
180 return POINTER_FOLLOW_THUNK_RETRY,
181 if ((strict_flag & (FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL | FLAG_FIRST_EXCEPTION)) != FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL) {
182 pointer_copy_owned(fp, slot_2, slot_r);
183 return POINTER_FOLLOW_THUNK_GO;
185 break,
186 slot_2_eval = slot_2; break
/* Strict mode: instead of building a lazy thunk, evaluate the pending
 * operand(s) now and retry the instruction afterwards. */
190 if (strict_flag & OPCODE_OP_FLAG_STRICT) {
191 if (slot_1_eval != NO_FRAME_T || slot_2_eval != NO_FRAME_T) {
192 eval_both(fp, ip, slot_1_eval, slot_2_eval);
193 return POINTER_FOLLOW_THUNK_EXIT;
195 return POINTER_FOLLOW_THUNK_RETRY;
/* Lazy path: locate the pcode function implementing this operator. */
198 flags = 0;
199 if (slot_2 == NO_FRAME_T) {
200 flags |= PCODE_FIND_OP_UNARY;
/* Unary conversion from the default int type needs an explicit marker. */
201 if (type_is_equal(frame_get_type_of_local(fp, slot_1), type_get_int(INT_DEFAULT_N)) &&
202 !type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)))
203 flags |= PCODE_CONVERT_FROM_INT;
205 if (code == OPCODE_IS_EXCEPTION)
206 ex = pcode_find_is_exception(fp, ip, &fn_ptr);
207 else if (code == OPCODE_EXCEPTION_CLASS || code == OPCODE_EXCEPTION_TYPE || code == OPCODE_EXCEPTION_AUX)
208 ex = pcode_find_get_exception(code - OPCODE_EXCEPTION_CLASS, fp, ip, &fn_ptr);
209 else
210 ex = pcode_find_op_function(frame_get_type_of_local(fp, slot_1), frame_get_type_of_local(fp, slot_r), code, flags, fp, ip, &fn_ptr);
211 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
212 return ex;
/* Build the application thunk and curry the operand(s) into it. */
214 result = build_thunk(fn_ptr, slot_2 != NO_FRAME_T ? 2 : 1, &function_reference);
215 ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
216 if (slot_2 != NO_FRAME_T) {
217 ajla_error_t err;
218 struct data *d;
219 const struct type *type = frame_get_type_of_local(fp, slot_1);
220 if (!frame_t_is_const(slot_2)) {
221 ipret_fill_function_reference_from_slot(function_reference, 1, fp, slot_2, false);
222 } else {
/* Inline constant operand: materialize it as flat data of operand 1's
 * type.  The X-macro switch below validates that the constant fits;
 * an int constant that does not fit is promoted to a long integer
 * (do_mpint), while a non-fitting fixed constant is a codegen bug. */
223 int32_t c = frame_t_get_const(slot_2);
224 union {
225 #define f(n, s, u, sz, bits) \
226 s cat(int_val_,bits);
227 for_all_int(f, for_all_empty)
228 #undef f
229 #define f(n, s, u, sz, bits) \
230 s cat(sfixed_val_,bits); \
231 u cat(ufixed_val_,bits);
232 for_all_fixed(f)
233 #undef f
234 unsigned char flat[1 << (TYPE_INT_N - 1)];
235 } un;
236 switch (type->tag) {
237 #define f(n, s, u, sz, bits) \
238 case TYPE_TAG_integer + n: \
239 un.cat(int_val_,bits) = c; \
240 if (unlikely(c != un.cat(int_val_,bits)))\
241 goto do_mpint; \
242 break; \
243 case TYPE_TAG_fixed + 2 * n + TYPE_TAG_fixed_signed:\
244 un.cat(sfixed_val_,bits) = c; \
245 if (unlikely(c != un.cat(sfixed_val_,bits)))\
246 internal(file_line, "ipret_op_build_thunk: invalid constant %ld for type %u", (long)c, type->tag);\
247 break; \
248 case TYPE_TAG_fixed + 2 * n + TYPE_TAG_fixed_unsigned:\
249 un.cat(ufixed_val_,bits) = c; \
250 if (unlikely(c < 0) || unlikely((u)c != un.cat(ufixed_val_,bits)))\
251 internal(file_line, "ipret_op_build_thunk: invalid constant %ld for type %u", (long)c, type->tag);\
252 break;
253 for_all_fixed(f)
254 #undef f
255 default:
256 internal(file_line, "ipret_op_build_thunk: invalid type tag %u", type->tag);
258 d = data_alloc_flat_mayfail(type->tag, un.flat, type->size, &err pass_file_line);
259 if (unlikely(!d)) {
260 data_fill_function_reference(function_reference, 1, pointer_error(err, NULL, NULL pass_file_line));
261 } else {
262 data_fill_function_reference(function_reference, 1, pointer_data(d));
/* "if (false)" gates the do_mpint label so the long-integer fallback
 * is reachable only via the goto above. */
265 if (false) {
266 do_mpint:
267 d = data_alloc_longint_mayfail(32, &err pass_file_line);
268 if (unlikely(!d)) {
269 data_fill_function_reference(function_reference, 1, pointer_error(err, NULL, NULL pass_file_line));
270 } else {
271 int32_t c = frame_t_get_const(slot_2);
272 mpint_import_from_variable(&da(d,longint)->mp, int32_t, c);
273 data_fill_function_reference(function_reference, 1, pointer_data(d));
/* Install the lazy result (or the exception thunk from build_thunk). */
278 frame_free_and_set_pointer(fp, slot_r, pointer_thunk(result));
280 return POINTER_FOLLOW_THUNK_GO;
283 #define UNBOX_THUNK 1
284 #define UNBOX_DID_SOMETHING 2
285 #define UNBOX_LONGINT 4
286 static int attr_hot_fastcall ipret_unbox_value(frame_s *fp, const struct type *type, frame_t slot)
/*
 * Try to turn a boxed flat value back into an in-frame flat value.
 * Returns a bitmask: UNBOX_THUNK if the slot still holds a thunk,
 * UNBOX_LONGINT if it holds a long integer (cannot be flattened),
 * UNBOX_DID_SOMETHING if the value was copied into the frame and the
 * box dereferenced, or 0 if the slot was already flat.
 */
288 ajla_assert(TYPE_IS_FLAT(type), (file_line, "ipret_unbox_value: non-flat type %u", type->tag));
289 if (frame_test_flag(fp, slot)) {
290 pointer_t ptr = *frame_pointer(fp, slot);
291 if (pointer_is_thunk(ptr))
292 return UNBOX_THUNK;
293 if (da_tag(pointer_get_data(ptr)) == DATA_TAG_longint) {
/* Long integers only appear for int-typed slots. */
294 ajla_assert(TYPE_TAG_IS_INT(type->tag), (file_line, "ipret_unbox_value: unexpected longint, type %u", type->tag));
295 return UNBOX_LONGINT;
/* Copy the flat payload into the frame, then drop the box. */
297 memcpy_fast(frame_var(fp, slot), da_flat(pointer_get_data(ptr)), type->size);
298 frame_clear_flag(fp, slot);
299 pointer_dereference(ptr);
300 return UNBOX_DID_SOMETHING;
302 return 0;
305 static bool test_and_copy_nan(frame_s attr_unused *fp, const code_t attr_unused *ip, unsigned char type_tag, frame_t attr_unused slot, frame_t attr_unused slot_r)
/*
 * If the flat real value in 'slot' is a NaN, propagate it to 'slot_r' and
 * return true; otherwise return false.  NaN of the same real type is copied
 * flat; if the result type differs, the NaN becomes an AJLA_ERROR_NAN
 * exception instead.  For non-real type tags this compiles to "return
 * false" (the X-macro emits cases for real types only).
 */
307 switch (type_tag) {
308 #define f(n, t, nt, pack, unpack) \
309 case TYPE_TAG_real + n: { \
310 t val; \
311 barrier_aliasing(); \
312 val = *frame_slot(fp, slot, t); \
313 barrier_aliasing(); \
314 if (unlikely(cat(isnan_,t)(val))) { \
315 if (type_tag == frame_get_type_of_local(fp, slot_r)->tag) {\
316 barrier_aliasing(); \
317 *frame_slot(fp, slot_r, t) = val;\
318 barrier_aliasing(); \
319 } else { \
320 frame_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NAN), fp, ip pass_file_line));\
322 return true; \
324 break; \
326 for_all_real(f, for_all_empty)
327 #undef f
329 return false;
332 void * attr_hot_fastcall thunk_fixed_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
/*
 * Slow path for fixed/real operators whose operands may be boxed, thunks or
 * NaN.  Unboxes flat operands in place; NaN operands short-circuit into
 * slot_r; remaining thunks go to ipret_op_build_thunk.  Returns
 * POINTER_FOLLOW_THUNK_RETRY when the operands are now flat and the fast
 * path should be re-executed.
 */
334 const struct type *type;
335 int converted;
337 type = frame_get_type_of_local(fp, slot_1);
338 ajla_assert((TYPE_TAG_IS_FIXED(type->tag) || TYPE_TAG_IS_REAL(type->tag)) &&
339 (slot_2 == NO_FRAME_T || frame_t_is_const(slot_2) || frame_get_type_of_local(fp, slot_2) == type),
340 (file_line, "thunk_fixed_operator: invalid types on opcode %04x: %u, %u, %u",
341 *ip,
342 type->tag,
343 slot_2 == NO_FRAME_T || frame_t_is_const(slot_2) ? type->tag : frame_get_type_of_local(fp, slot_2)->tag,
344 frame_get_type_of_local(fp, slot_r)->tag));
/* NaN check only applies once the operand is flat in the frame. */
346 converted = ipret_unbox_value(fp, type, slot_1);
347 if (!frame_test_flag(fp, slot_1) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_1, slot_r)))
348 return POINTER_FOLLOW_THUNK_GO;
349 if (slot_2 != NO_FRAME_T && !frame_t_is_const(slot_2)) {
350 converted |= ipret_unbox_value(fp, type, slot_2);
351 if (!frame_test_flag(fp, slot_2) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_2, slot_r)))
352 return POINTER_FOLLOW_THUNK_GO;
/* Any operand still a thunk: defer to the thunk-building slow path. */
354 if (converted & UNBOX_THUNK)
355 return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
357 return POINTER_FOLLOW_THUNK_RETRY;
361 void * attr_hot_fastcall is_thunk_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag)
/*
 * Implement the is_exception test on a boxed slot: stores 1 in slot_r if
 * the value is an exception (or a flat NaN), 0 otherwise.  An unevaluated
 * thunk produces a lazy result via ipret_op_build_thunk.
 */
363 ajla_flat_option_t value;
364 pointer_t *ptr = frame_pointer(fp, slot_1);
365 if (!pointer_is_thunk(*ptr)) {
366 struct data *d = pointer_get_data(*ptr);
367 value = 0;
/* Flat real data may encode an exception-like state as NaN. */
368 if (da_tag(d) == DATA_TAG_flat) {
369 value = data_is_nan(da(d,flat)->data_type, da_flat(d));
371 goto return_val;
/* Arms: resolved meanwhile -> retry; exception -> true; thunk -> lazy. */
374 pointer_follow_thunk_noeval(ptr,
375 return POINTER_FOLLOW_THUNK_RETRY,
376 value = 1; goto return_val,
377 goto create_thunk;
380 return_val:
381 frame_free(fp, slot_r);
382 barrier_aliasing();
383 *frame_slot(fp, slot_r, ajla_flat_option_t) = value;
384 barrier_aliasing();
385 return POINTER_FOLLOW_THUNK_GO;
387 create_thunk:
388 return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
391 void * attr_hot_fastcall thunk_get_param(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, unsigned mode)
/*
 * Extract an exception parameter from the value in slot_1 into slot_r:
 * mode 0 = error class, 1 = error type, 2 = auxiliary code.  Flat NaN
 * values are treated as the synthetic (EC_SYNC, AJLA_ERROR_NAN, 0)
 * exception.  Non-exception values yield an INVALID_OPERATION error;
 * unevaluated thunks produce a lazy result.
 */
393 struct thunk *ex;
394 pointer_t *ptr;
395 int result;
397 ajla_assert(slot_r == slot_1 || !frame_test_flag(fp, slot_r), (file_line, "thunk_get_param: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
/* Flat (unboxed) value: only a NaN qualifies as an "exception". */
399 if (unlikely(!frame_test_flag(fp, slot_1))) {
400 const struct type *type;
401 type = frame_get_type_of_local(fp, slot_1);
402 if (likely(data_is_nan(type->tag, frame_var(fp, slot_1)))) {
403 have_nan:
404 switch (mode) {
405 case 0:
406 result = EC_SYNC;
407 break;
408 case 1:
409 result = AJLA_ERROR_NAN;
410 break;
411 case 2:
412 result = 0;
413 break;
414 default:
415 internal(file_line, "thunk_get_param: invalid mode %u", mode);
417 goto set_result;
419 goto not_thunk;
/* Boxed value: flat data may still be NaN; otherwise follow the thunk. */
422 ptr = frame_pointer(fp, slot_1);
423 if (!pointer_is_thunk(*ptr)) {
424 struct data *data = pointer_get_data(*ptr);
425 if (likely(da_tag(data) == DATA_TAG_flat)) {
426 if (likely(data_is_nan(da(data,flat)->data_type, da_flat(data)))) {
427 goto have_nan;
430 goto not_thunk;
432 pointer_follow_thunk_noeval(ptr,
433 return POINTER_FOLLOW_THUNK_RETRY,
434 goto have_ex,
435 goto create_thunk;
/* Real exception thunk: read the requested field of its error record. */
438 have_ex:
439 ex = pointer_get_thunk(*ptr);
440 switch (mode) {
441 case 0:
442 result = ex->u.exception.err.error_class;
443 break;
444 case 1:
445 result = ex->u.exception.err.error_type;
446 break;
447 case 2:
448 result = ex->u.exception.err.error_aux;
449 break;
450 default:
451 internal(file_line, "thunk_get_param: invalid mode %u", mode);
454 set_result:
455 frame_free(fp, slot_r);
456 barrier_aliasing();
457 *frame_slot(fp, slot_r, int_default_t) = result;
458 barrier_aliasing();
460 return POINTER_FOLLOW_THUNK_GO;
462 not_thunk:
463 frame_free_and_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line));
465 return POINTER_FOLLOW_THUNK_GO;
467 create_thunk:
468 return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
471 int_default_t ipret_system_property(int_default_t idx)
/*
 * Return the value of the compile-time/runtime system property 'idx'
 * (SystemProperty_* enumeration), or -1 for an unknown property.  Charset
 * properties exist only on the platforms that need them (#ifdef'd below).
 */
473 int_default_t result;
475 switch (idx) {
476 case SystemProperty_OS:
477 #if defined(OS_DOS)
478 result = SystemProperty_OS_DOS;
479 #elif defined(OS_OS2)
480 result = SystemProperty_OS_OS2;
481 #elif defined(OS_CYGWIN)
482 result = SystemProperty_OS_Cygwin;
483 #elif defined(OS_WIN32)
484 result = SystemProperty_OS_Windows;
485 #else
486 result = SystemProperty_OS_Posix;
487 #endif
488 break;
489 #if defined(OS_DOS) || defined(OS_OS2) || defined(OS_WIN32)
490 case SystemProperty_Charset:
491 result = os_charset();
492 break;
493 #endif
494 #if defined(OS_WIN32)
495 case SystemProperty_Charset_Console:
496 result = os_charset_console();
497 break;
498 #endif
/* Bitmasks of the fixed-int and real widths compiled in. */
499 case SystemProperty_Fixed:
500 result = INT_MASK;
501 break;
502 case SystemProperty_Real:
503 result = REAL_MASK;
504 break;
505 case SystemProperty_Privileged:
506 result = ipret_is_privileged;
507 break;
508 case SystemProperty_Compile:
509 result = ipret_compile;
510 break;
511 default:
512 return -1;
515 return result;
518 void * attr_hot_fastcall ipret_get_system_property(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
/*
 * Interpreter wrapper around ipret_system_property: reads the property
 * index from slot_1 (which may require evaluation or raise an exception)
 * and stores the property value into slot_r as a flat int_default_t.
 */
520 void *ex;
521 array_index_t idx_l;
522 int_default_t idx, result;
523 pointer_t result_ptr;
525 ex = ipret_get_index(fp, ip, fp, slot_1, NULL, &idx_l, &result_ptr pass_file_line);
526 if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
/* An exception from index evaluation becomes the result directly. */
527 if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
528 frame_free_and_set_pointer(fp, slot_r, result_ptr);
529 return POINTER_FOLLOW_THUNK_GO;
532 idx = index_to_int(idx_l);
533 index_free(&idx_l);
535 result = ipret_system_property(idx);
537 frame_free(fp, slot_r);
538 barrier_aliasing();
539 *frame_slot(fp, slot_r, int_default_t) = result;
540 barrier_aliasing();
542 return POINTER_FOLLOW_THUNK_GO;
546 static bool int_to_mpint(mpint_t *m, const unsigned char *ptr, unsigned intx, ajla_error_t *err)
/*
 * Initialize the multi-precision integer *m from the flat machine integer
 * at 'ptr', whose width is selected by the int-type index 'intx'.
 * Returns false (with *err set) on allocation failure.  The switch cases
 * are generated by the for_all_int X-macro, one per supported width.
 */
548 #define f(n, s, u, sz, bits) \
549 case n: { \
550 bool ret; \
551 barrier_aliasing(); \
552 ret = cat(mpint_init_from_,s)(m, *cast_ptr(const s *, ptr), err);\
553 barrier_aliasing(); \
554 return ret; \
556 switch (intx) {
557 for_all_int(f, for_all_empty)
558 default:
559 internal(file_line, "int_to_mpint: invalid type %d", intx);
561 #undef f
562 not_reached();
563 return false;
566 static mpint_t * attr_hot_fastcall int_get_mpint(frame_s *fp, frame_t slot, mpint_t *storage, unsigned intx, ajla_error_t *err)
/*
 * Obtain a multi-precision view of the integer in 'slot'.  If the slot
 * already holds a boxed long integer, return a pointer to its mpint
 * in place (no copy); otherwise convert the flat value (or the inline
 * constant encoded in 'slot') into *storage and return storage.
 * Returns NULL with *err set on failure.  The caller is responsible for
 * freeing *storage iff the returned pointer equals 'storage'.
 */
568 unsigned char *flat;
569 if (frame_t_is_const(slot)) {
570 if (unlikely(!mpint_init_from_int32_t(storage, frame_t_get_const(slot), err)))
571 return NULL;
572 return storage;
574 if (frame_test_flag(fp, slot)) {
575 struct data *d = pointer_get_data(*frame_pointer(fp, slot));
576 if (likely(da_tag(d) == DATA_TAG_longint))
577 return &da(d,longint)->mp;
/* Boxed flat data: fall through to the conversion below. */
578 flat = da_flat(d);
579 } else {
580 flat = frame_var(fp, slot);
582 if (unlikely(!int_to_mpint(storage, flat, intx, err)))
583 return NULL;
584 return storage;
587 static struct data * attr_hot_fastcall int_allocate_result(frame_s *fp, frame_t slot, unsigned long bits, pointer_t *to_free, ajla_error_t *err)
/*
 * Get a writable long-integer data block for the result slot, reusing the
 * slot's existing longint in place when it is exclusively owned.  Otherwise
 * allocate a fresh longint sized for 'bits'.  The previous pointer (if any)
 * is handed back via *to_free for the caller to dereference AFTER the
 * operation, since an operand may alias the result slot.  Returns NULL
 * with *err set (and the slot flag cleared) on allocation failure.
 */
589 struct data *d;
591 *to_free = pointer_empty();
593 if (frame_test_and_set_flag(fp, slot)) {
594 pointer_t ptr = *frame_pointer(fp, slot);
595 if (!pointer_is_thunk(ptr)) {
596 d = pointer_get_data(ptr);
597 if (da_tag(d) == DATA_TAG_longint && data_is_writable(d))
598 return d;
600 *to_free = ptr;
603 d = data_alloc_longint_mayfail(bits, err pass_file_line);
604 if (unlikely(!d)) {
605 frame_clear_flag(fp, slot);
606 return NULL;
608 *frame_pointer(fp, slot) = pointer_data(d);
609 return d;
612 void * attr_hot_fastcall thunk_int_binary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, mpint_t *res, ajla_error_t *err))
/*
 * Slow path for binary integer operators: operands may be boxed long
 * integers, thunks, or flat values that were unboxed here.  Performs the
 * operation in multi-precision arithmetic via 'do_op', then tries to
 * shrink the result back to the flat type with mpint_export.  Any failure
 * (OOM, overflow in do_op) stores an exception in slot_r.
 */
614 ajla_error_t err;
615 const struct type *type;
616 unsigned intx;
617 int converted;
618 mpint_t s1, s2;
619 mpint_t *val1, *val2;
620 struct data *result;
621 pointer_t to_free;
623 type = frame_get_type_of_local(fp, slot_1);
624 if (!frame_t_is_const(slot_2)) {
625 ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
626 frame_get_type_of_local(fp, slot_2) == type &&
627 frame_get_type_of_local(fp, slot_r) == type,
628 (file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u, %u, %u",
629 *ip,
630 type->tag,
631 frame_get_type_of_local(fp, slot_2)->tag,
632 frame_get_type_of_local(fp, slot_r)->tag));
633 } else {
634 ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
635 frame_get_type_of_local(fp, slot_r) == type,
636 (file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u",
637 *ip,
638 type->tag,
639 frame_get_type_of_local(fp, slot_r)->tag));
642 converted = 0;
644 converted |= ipret_unbox_value(fp, type, slot_1);
645 if (!frame_t_is_const(slot_2))
646 converted |= ipret_unbox_value(fp, type, slot_2);
/* Thunk operands: build a lazy application instead. */
648 if (converted & UNBOX_THUNK)
649 return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
/* Everything is flat now (no longint involved): retry the fast path. */
651 if (converted == UNBOX_DID_SOMETHING)
652 return POINTER_FOLLOW_THUNK_RETRY;
654 intx = TYPE_TAG_IDX_INT(type->tag);
/* val1/val2 are either in-place longint views or temporaries in s1/s2;
 * free the temporaries on every exit path below. */
656 if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
657 goto fail_oom_1;
658 if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
659 goto fail_oom_2;
660 if (unlikely(!(result = int_allocate_result(fp, slot_r, maximum(mpint_estimate_bits(val1), mpint_estimate_bits(val2)), &to_free, &err))))
661 goto fail_oom_3;
662 if (unlikely(!do_op(val1, val2, &da(result,longint)->mp, &err)))
663 goto fail_oom_3;
664 if (val1 == &s1)
665 mpint_free(&s1);
666 if (val2 == &s2)
667 mpint_free(&s2);
/* The result slot's previous pointer is released only now, because an
 * operand may have aliased it. */
668 if (!pointer_is_empty(to_free))
669 pointer_dereference(to_free);
/* If the result fits the flat type, demote it back into the frame. */
671 if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
672 frame_clear_flag(fp, slot_r);
673 data_dereference(result);
675 return POINTER_FOLLOW_THUNK_GO;
677 fail_oom_3:
678 if (!pointer_is_empty(to_free))
679 pointer_dereference(to_free);
680 if (val2 == &s2)
681 mpint_free(&s2);
682 fail_oom_2:
683 if (val1 == &s1)
684 mpint_free(&s1);
685 fail_oom_1:
686 frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
687 return POINTER_FOLLOW_THUNK_GO;
690 void * attr_hot_fastcall thunk_int_binary_logical_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, ajla_flat_option_t *res, ajla_error_t *err))
/*
 * Slow path for integer comparison operators: like
 * thunk_int_binary_operator, but the result is a flat ajla_flat_option_t
 * (boolean) written directly into slot_r, so no result longint is
 * allocated.
 */
692 ajla_error_t err;
693 const struct type *type;
694 unsigned intx;
695 int converted;
696 mpint_t s1, s2;
697 mpint_t *val1, *val2;
699 type = frame_get_type_of_local(fp, slot_1);
700 if (!frame_t_is_const(slot_2)) {
701 ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
702 frame_get_type_of_local(fp, slot_2) == type &&
703 frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
704 (file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u, %u",
705 *ip,
706 type->tag,
707 frame_get_type_of_local(fp, slot_2)->tag,
708 frame_get_type_of_local(fp, slot_r)->tag));
709 } else {
710 ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
711 frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
712 (file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u",
713 *ip,
714 type->tag,
715 frame_get_type_of_local(fp, slot_r)->tag));
718 converted = 0;
720 converted |= ipret_unbox_value(fp, type, slot_1);
721 if (!frame_t_is_const(slot_2))
722 converted |= ipret_unbox_value(fp, type, slot_2);
724 if (converted & UNBOX_THUNK)
725 return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
727 if (converted == UNBOX_DID_SOMETHING)
728 return POINTER_FOLLOW_THUNK_RETRY;
730 intx = TYPE_TAG_IDX_INT(type->tag);
732 if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
733 goto fail_oom_1;
734 if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
735 goto fail_oom_2;
/* Comparison writes its flat boolean straight into the result slot. */
736 barrier_aliasing();
737 if (unlikely(!do_op(val1, val2, frame_slot(fp, slot_r, ajla_flat_option_t), &err))) {
738 barrier_aliasing();
739 goto fail_oom_3;
741 barrier_aliasing();
742 if (val1 == &s1)
743 mpint_free(&s1);
744 if (val2 == &s2)
745 mpint_free(&s2);
747 return POINTER_FOLLOW_THUNK_GO;
749 fail_oom_3:
750 if (val2 == &s2)
751 mpint_free(&s2);
752 fail_oom_2:
753 if (val1 == &s1)
754 mpint_free(&s1);
755 fail_oom_1:
756 frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
757 return POINTER_FOLLOW_THUNK_GO;
760 void * attr_hot_fastcall thunk_int_unary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, mpint_t *res, ajla_error_t *err))
/*
 * Unary counterpart of thunk_int_binary_operator: single operand, same
 * unbox / build-thunk / multi-precision-compute / export-to-flat sequence.
 */
762 ajla_error_t err;
763 const struct type *type;
764 unsigned intx;
765 int converted;
766 mpint_t s1;
767 mpint_t *val1;
768 struct data *result;
769 pointer_t to_free;
771 type = frame_get_type_of_local(fp, slot_1);
772 ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
773 frame_get_type_of_local(fp, slot_r) == type,
774 (file_line, "thunk_int_unary_operator: invalid types on opcode %04x: %u, %u",
775 *ip,
776 type->tag,
777 frame_get_type_of_local(fp, slot_r)->tag));
779 converted = 0;
781 converted |= ipret_unbox_value(fp, type, slot_1);
783 if (converted & UNBOX_THUNK)
784 return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
786 if (converted == UNBOX_DID_SOMETHING)
787 return POINTER_FOLLOW_THUNK_RETRY;
789 intx = TYPE_TAG_IDX_INT(type->tag);
791 if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
792 goto fail_oom_1;
793 if (unlikely(!(result = int_allocate_result(fp, slot_r, mpint_estimate_bits(val1), &to_free, &err))))
794 goto fail_oom_3;
795 if (unlikely(!do_op(val1, &da(result,longint)->mp, &err)))
796 goto fail_oom_3;
797 if (val1 == &s1)
798 mpint_free(&s1);
/* Release the result slot's previous pointer only after the operation,
 * since the operand may have aliased it. */
799 if (!pointer_is_empty(to_free))
800 pointer_dereference(to_free);
/* Demote back to a flat integer when the result fits. */
802 if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
803 frame_clear_flag(fp, slot_r);
804 data_dereference(result);
806 return POINTER_FOLLOW_THUNK_GO;
808 fail_oom_3:
809 if (!pointer_is_empty(to_free))
810 pointer_dereference(to_free);
811 if (val1 == &s1)
812 mpint_free(&s1);
813 fail_oom_1:
814 frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
815 return POINTER_FOLLOW_THUNK_GO;
818 ip_t attr_hot_fastcall ipret_int_ldc_long(frame_s *fp, frame_t slot, const code_t *ip)
/*
 * Load a long-integer constant embedded in the instruction stream into
 * 'slot'.  The stream holds a 32-bit word count followed by the raw words.
 * Returns the number of code_t units consumed (2 for the count + the
 * payload), which is the same whether the load succeeds or an allocation
 * failure stores an exception instead.
 */
820 ajla_error_t err;
822 uint32_t n_words_32;
823 ip_t n_words;
824 struct data *d;
826 n_words_32 = get_unaligned_32(ip);
827 n_words = (ip_t)n_words_32;
/* Guard against ip_t being narrower than 32 bits. */
828 ajla_assert(n_words == n_words_32, (file_line, "ipret_int_ldc_long: n_words overflow: %lu != %lu", (unsigned long)n_words_32, (unsigned long)n_words));
830 d = data_alloc_longint_mayfail((n_words + sizeof(code_t) - 1) / sizeof(code_t), &err pass_file_line);
831 if (unlikely(!d))
832 goto fail;
834 if (unlikely(!mpint_import_from_code(&da(d,longint)->mp, ip + 2, n_words, &err))) {
835 data_dereference(d);
836 goto fail;
839 frame_set_pointer(fp, slot, pointer_data(d));
841 return 2 + n_words;
843 fail:
844 frame_set_pointer(fp, slot, pointer_error(err, fp, ip pass_file_line));
846 return 2 + n_words;
850 pointer_t attr_fastcall convert_fixed_to_mpint(uintbig_t val, bool uns)
/*
 * Box the widest fixed-width integer 'val' as a long-integer data block;
 * 'uns' selects unsigned interpretation (and adds one bit of headroom to
 * the initial allocation).  Returns a data pointer, or an exception
 * pointer on allocation failure.
 */
852 ajla_error_t err;
853 struct data *d;
855 d = data_alloc_longint_mayfail(sizeof(uintbig_t) * 8 + uns, &err pass_file_line);
856 if (unlikely(!d))
857 goto fail;
858 if (unlikely(!cat(mpint_set_from_,TYPE_INT_MAX)(&da(d,longint)->mp, (intbig_t)val, uns, &err))) {
859 goto fail_deref;
861 return pointer_data(d);
863 fail_deref:
864 data_dereference(d);
865 fail:
866 return pointer_error(err, NULL, NULL pass_file_line);
869 pointer_t attr_fastcall convert_real_to_mpint(frame_s *fp, frame_t src_slot, const struct type *src_type)
/*
 * Convert the flat real value in src_slot to a boxed long integer.
 * A zero-sized longint is allocated first and its mpint re-initialized
 * from the real value (the initial mp is freed to let the per-type
 * mpint_init_from_* routine size it).  Returns a data pointer, or an
 * exception pointer on failure.
 */
871 unsigned char attr_unused *src_ptr;
872 ajla_error_t err;
873 struct data *d;
875 d = data_alloc_longint_mayfail(0, &err pass_file_line);
876 if (unlikely(!d))
877 goto fail;
/* Discard the placeholder mp; it is re-created by the case below. */
878 mpint_free(&da(d,longint)->mp);
880 #define re(n, rtype, ntype, pack, unpack) \
881 case TYPE_TAG_real + n: { \
882 if (unlikely(!cat(mpint_init_from_,rtype)(&da(d,longint)->mp, cast_ptr(rtype *, src_ptr), &err))) {\
883 data_free_r1(d); \
884 goto fail; \
886 break; \
889 barrier_aliasing();
890 src_ptr = frame_var(fp, src_slot);
891 switch (src_type->tag) {
892 for_all_real(re, for_all_empty)
893 default:
894 internal(file_line, "convert_real_to_mpint: invalid type %u", src_type->tag);
896 barrier_aliasing();
897 return pointer_data(d);
899 #undef re
901 fail:
902 barrier_aliasing();
903 return pointer_error(err, NULL, NULL pass_file_line);
906 static attr_noinline void convert_mpint_to_real(frame_s *fp, frame_t dest_slot, const struct type *dest_type, const mpint_t attr_unused *mp)
/*
 * Convert the long integer *mp to the flat real type of dest_slot and
 * store it in the frame.  The per-width export routines are generated by
 * the for_all_real X-macro; an unexpected tag is an internal error.
 */
908 unsigned char attr_unused *dest_ptr;
910 #define re(n, rtype, ntype, pack, unpack) \
911 case TYPE_TAG_real + n: \
912 cat(mpint_export_to_,rtype)(mp, cast_ptr(rtype *, dest_ptr));\
913 break;
915 barrier_aliasing();
916 dest_ptr = frame_var(fp, dest_slot);
917 switch (dest_type->tag) {
918 for_all_real(re, for_all_empty)
919 default:
920 internal(file_line, "convert_mpint_to_real: invalid type %u", dest_type->tag);
922 barrier_aliasing();
924 #undef re
927 void * attr_hot_fastcall thunk_convert(frame_s *fp, const code_t *ip, frame_t src_slot, frame_t dest_slot, unsigned strict_flag)
/*
 * Slow path for type-conversion opcodes when the source is boxed: handles
 * longint sources converting to int (kept or demoted), fixed (exported,
 * with DOESNT_FIT exception on overflow) and real (mpint export)
 * destinations.  Thunk sources defer to ipret_op_build_thunk; freshly
 * unboxed flat sources cause a fast-path retry.  Conversion combinations
 * not reachable from valid code are internal errors.
 */
929 int converted;
930 const struct type *src_type;
931 const struct type *dest_type;
932 ajla_error_t err;
933 struct data *d;
935 if (unlikely(src_slot == dest_slot))
936 return POINTER_FOLLOW_THUNK_GO;
938 src_type = frame_get_type_of_local(fp, src_slot);
939 dest_type = frame_get_type_of_local(fp, dest_slot);
941 converted = ipret_unbox_value(fp, src_type, src_slot);
942 if (unlikely(converted == UNBOX_THUNK)) {
943 return ipret_op_build_thunk(fp, ip, src_slot, NO_FRAME_T, dest_slot, strict_flag);
945 if (converted == UNBOX_DID_SOMETHING) {
946 return POINTER_FOLLOW_THUNK_RETRY;
/* int-typed source narrowing/keeping: only a longint box can remain. */
949 if (type_is_equal(dest_type, type_get_int(INT_DEFAULT_N))) {
950 if (likely(TYPE_TAG_IS_INT(src_type->tag))) {
951 if (likely(converted == UNBOX_LONGINT)) {
952 goto convert_longint;
955 } else {
956 if (unlikely(!type_is_equal(src_type, type_get_int(INT_DEFAULT_N))))
957 goto int_err;
958 if (TYPE_TAG_IS_FIXED(dest_type->tag)) {
959 if (likely(converted == UNBOX_LONGINT)) {
960 bool res;
/* Signed vs. unsigned export; failure means the value does not
 * fit the destination fixed type. */
961 if (!TYPE_TAG_FIXED_IS_UNSIGNED(dest_type->tag))
962 res = mpint_export(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
963 else
964 res = mpint_export_unsigned(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
965 if (unlikely(!res))
966 frame_set_pointer(fp, dest_slot, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_DOESNT_FIT), fp, ip pass_file_line));
967 return POINTER_FOLLOW_THUNK_GO;
969 } else if (TYPE_TAG_IS_INT(dest_type->tag)) {
970 if (likely(converted == UNBOX_LONGINT)) {
971 goto convert_longint;
973 } else if (likely(TYPE_TAG_IS_REAL(dest_type->tag))) {
974 if (likely(converted == UNBOX_LONGINT)) {
975 convert_mpint_to_real(fp, dest_slot, dest_type, &da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp);
976 return POINTER_FOLLOW_THUNK_GO;
980 goto int_err;
982 convert_longint:
/* Try to demote the longint to a flat int; if it does not fit, share
 * the longint box with the destination slot. */
983 d = pointer_get_data(*frame_pointer(fp, src_slot));
984 if (mpint_export(&da(d,longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_INT(dest_type->tag), &err)) {
985 return POINTER_FOLLOW_THUNK_GO;
987 pointer_copy_owned(fp, src_slot, dest_slot);
988 return POINTER_FOLLOW_THUNK_GO;
990 int_err:
991 internal(file_line, "thunk_convert: invalid conversion %u->%u (%d)", src_type->tag, dest_type->tag, converted);
992 return POINTER_FOLLOW_THUNK_RETRY;
996 static bool attr_hot_fastcall ipret_unbox_bool(frame_s *fp, frame_t slot)
/*
 * Unbox a boolean slot in place.  Returns true if the slot still holds an
 * unevaluated thunk (nothing done); otherwise the option value is copied
 * into the frame, the box dereferenced, and false is returned.  A slot
 * that was already flat also returns false.
 */
998 if (frame_test_flag(fp, slot)) {
999 pointer_t ptr = *frame_pointer(fp, slot);
1000 if (pointer_is_thunk(ptr))
1001 return true;
1002 barrier_aliasing();
1003 *frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(pointer_get_data(ptr),option)->option;
1004 barrier_aliasing();
1005 frame_clear_flag(fp, slot);
1006 pointer_dereference(ptr);
1008 return false;
/*
 * Fallback for a boolean operator whose operand slot(s) contain thunks.
 * Decodes the boolean sub-opcode from *ip, unboxes whichever operands are
 * already evaluated and applies short-circuit rules so the result can often
 * be produced without forcing the other operand.
 * val1/val2 use 2 as a sentinel for "operand is still a thunk".
 */
void * attr_hot_fastcall thunk_bool_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	code_t code;
	ajla_flat_option_t val1, val2, result;

	/* decode the boolean sub-opcode from the instruction word */
	code = *ip;
	code %= OPCODE_MODE_MULT;
	code = (code - OPCODE_BOOL_OP) / OPCODE_BOOL_OP_MULT;

	val1 = val2 = 2;
	if (!ipret_unbox_bool(fp, slot_1)) {
		barrier_aliasing();
		val1 = *frame_slot(fp, slot_1, ajla_flat_option_t);
		barrier_aliasing();
		/* short-circuit decisions possible from the first operand alone */
		switch (code) {
			case OPCODE_BOOL_OP_less:
				/* 1 < x is false for x in {0,1} */
				if (val1) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_less_equal:
				/* 0 <= x is always true */
				if (!val1) {
					result = 1;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_greater:
				/* 0 and x == 0; 0 > x is false */
				if (!val1) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_greater_equal:
				/* 1 or x == 1; 1 >= x is true */
				if (val1) {
					result = 1;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_not:
				/* unary: no second operand needed */
				result = val1 ^ 1;
				goto have_result;
		}
	}
	if (slot_2 != NO_FRAME_T && !ipret_unbox_bool(fp, slot_2)) {
		barrier_aliasing();
		val2 = *frame_slot(fp, slot_2, ajla_flat_option_t);
		barrier_aliasing();
		/* short-circuit decisions possible from the second operand alone */
		switch (code) {
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_and:
				if (!val2) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_less_equal:
			case OPCODE_BOOL_OP_or:
				if (val2) {
					result = 1;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_greater:
				if (val2) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_greater_equal:
				if (!val2) {
					result = 1;
					goto have_result;
				}
				break;
		}
	}
	/* both operands turned out flat: let the interpreter redo the flat op */
	if (!((val1 | val2) & 2)) {
		return POINTER_FOLLOW_THUNK_RETRY;
	}
	if (val1 & val2 & 2) {
		/* both operands are thunks; for these short-circuiting ops an
		   exception in one operand may be masked by the other's value */
		switch (code) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_less_equal:
			case OPCODE_BOOL_OP_greater:
			case OPCODE_BOOL_OP_greater_equal:
				strict_flag |= FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL;
		}
	}
	return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

have_result:
	/* result decided without forcing the remaining thunk: store it flat */
	frame_free(fp, slot_r);
	barrier_aliasing();
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	barrier_aliasing();
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Slow path of a boolean jump whose flag slot holds a thunk: force the
 * thunk (possibly suspending execution or propagating an exception), then
 * flatten the evaluated option into the slot and tell the interpreter to
 * retry the jump instruction.
 */
void * attr_hot_fastcall thunk_bool_jump(frame_s *fp, const code_t *ip, frame_t slot)
{
	pointer_t *thunk = frame_pointer(fp, slot);
	struct data *data;

	pointer_follow(thunk, true, data, PF_WAIT, fp, ip,
		return ex_,
		return POINTER_FOLLOW_THUNK_EXCEPTION
	);

	barrier_aliasing();
	*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(data,option)->option;
	barrier_aliasing();
	frame_clear_flag(fp, slot);
	data_dereference(data);
	return POINTER_FOLLOW_THUNK_RETRY;
}
/*
 * Copy a variable between (possibly different) frames.
 * Flat-to-flat copies are done with memcpy_fast; otherwise the value is
 * transferred as a pointer, boxing a flat source when the destination is
 * not flat. When 'deref' is set, the source slot's reference is consumed.
 * The destination slot must not already have its pointer flag set.
 */
void attr_fastcall ipret_copy_variable(frame_s *src_fp, frame_t src_slot, frame_s *dst_fp, frame_t dst_slot, bool deref)
{
	pointer_t ptr;
	const struct type *src_type;
	ajla_assert(!frame_test_flag(dst_fp, dst_slot), (file_line, "ipret_copy_variable: flag already set for destination slot %"PRIuMAX"", (uintmax_t)dst_slot));
	src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		ptr = frame_get_pointer_reference(src_fp, src_slot, deref);
	} else {
		const struct type *dst_type = frame_get_type_of_local(dst_fp, dst_slot);
		if (likely(TYPE_IS_FLAT(dst_type))) {
			ajla_assert(type_is_equal(src_type, dst_type), (file_line, "ipret_copy_variable: copying between different types (%u,%u,%u) -> (%u,%u,%u)", src_type->tag, src_type->size, src_type->align, dst_type->tag, dst_type->size, dst_type->align));
			memcpy_fast(frame_var(dst_fp, dst_slot), frame_var(src_fp, src_slot), dst_type->size);
			return;
		} else {
			/* flat source, boxed destination: box the flat value */
			ptr = flat_to_data(src_type, frame_var(src_fp, src_slot));
		}
	}
	frame_set_pointer(dst_fp, dst_slot, ptr);
}
1155 pointer_t ipret_copy_variable_to_pointer(frame_s *src_fp, frame_t src_slot, bool deref)
1157 const struct type *src_type = frame_get_type_of_local(src_fp, src_slot);
1158 if (!frame_variable_is_flat(src_fp, src_slot)) {
1159 return frame_get_pointer_reference(src_fp, src_slot, deref);
1160 } else {
1161 return flat_to_data(src_type, frame_var(src_fp, src_slot));
/* Context shared by the call-cache comparison callbacks (see
 * saved_cache_compare, cache_entry_compare, ipret_call_cache). */
struct data_compare_context {
	struct ipret_call_cache_arg *arguments;	/* evaluated call arguments */
	arg_t n_arguments;
	arg_t n_return_values;
	ajla_error_t err;	/* set when a comparison fails (e.g. OOM) */
};
1173 static int saved_cache_compare(struct data *saved_cache, size_t idx, struct data_compare_context *ctx)
1175 size_t ptr_idx = idx * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values);
1176 arg_t ai;
1177 for (ai = 0; ai < ctx->n_arguments; ai++) {
1178 int c;
1179 c = data_compare(da(saved_cache,saved_cache)->pointers[ptr_idx + ai], ctx->arguments[ai].ptr, &ctx->err);
1180 if (c)
1181 return c;
1183 return 0;
/*
 * Binary-search the function's on-disk call cache for an entry whose
 * arguments equal ctx->arguments. Returns a pointer to the entry's first
 * return-value pointer, or NULL when no entry matches. A comparison
 * failure is reported through ctx->err by saved_cache_compare.
 */
static pointer_t *saved_cache_find(struct data *function, struct data_compare_context *ctx)
{
	struct data *saved_cache = da(function,function)->loaded_cache;
	size_t n_entries = da(saved_cache,saved_cache)->n_entries;
	size_t result;
	int cmp;
	/*debug("searching: %s, %zu", da(function,function)->function_name, n_entries);*/
	binary_search(size_t, n_entries, result, !(cmp = saved_cache_compare(saved_cache, result, ctx)), cmp < 0, return NULL);
	/*debug("found it: %s, %zu", da(function,function)->function_name, result);*/
	/* entries are laid out as [arguments..., returns...]; skip the arguments */
	return &da(saved_cache,saved_cache)->pointers[result * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values) + (size_t)ctx->n_arguments];
}
/*
 * tree_find_for_insert callback ordering a live cache entry against the
 * data_compare_context passed via e2. On out-of-memory inside
 * data_compare, the error is recorded in ctx->err and 0 ("equal") is
 * returned; the caller detects the error by checking ctx->err.
 */
static int cache_entry_compare(const struct tree_entry *e1, uintptr_t e2)
{
	struct cache_entry *c1 = get_struct(e1, struct cache_entry, entry);
	struct data_compare_context *ctx = cast_ptr(struct data_compare_context *, num_to_ptr(e2));
	arg_t ai;
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		int c;
		c = data_compare(c1->arguments[ai], ctx->arguments[ai].ptr, MEM_DONT_TRY_TO_FREE);
		if (c == -1 || c == 1)
			return c;
		if (c == DATA_COMPARE_OOM) {
			ctx->err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
			return 0;
		}
		/* any other nonzero result is a programming error */
		if (unlikely(c))
			internal(file_line, "cache_entry_compare: data_compare returned %d", c);
	}
	return 0;
}
/*
 * Completion callback for one return value of a cached call.
 * Stores the evaluated pointer into the cache_entry_return; when the last
 * pending return value arrives, wakes everything waiting on the entry.
 */
static void cache_evaluated(void *cookie, pointer_t ptr)
{
	struct cache_entry_return *ret = cookie;
	struct cache_entry *c;
	pointer_reference_owned(ptr);
	ret->ptr = ptr;
	c = ret->ce;
	address_lock(c, DEPTH_THUNK);
	/*debug("cache evaluated: %p, pending %u", c, c->n_pending);*/
	if (likely(!--c->n_pending)) {
		/* last result: wake_up_wait_list releases the passed mutex */
		wake_up_wait_list(&c->wait_list, address_get_mutex(c, DEPTH_THUNK), true);
	} else {
		address_unlock(c, DEPTH_THUNK);
	}
}
/*
 * Handle a cached call (OPCODE_CALL_CACHE / OPCODE_CALL_SAVE and their
 * indirect variants): deep-evaluate all arguments, look the call up first
 * in the cache loaded from disk and then in the in-memory cache tree, and
 * either reuse the cached results, wait for an in-flight evaluation, or
 * create a new cache entry and submit the call for evaluation.
 *
 * Locking: the function's cache tree is protected by the function's
 * address lock, taken for read first and upgraded to write only when an
 * insert is needed (or always, when rwmutex is not supported).
 *
 * Returns a POINTER_FOLLOW_THUNK_* code. 'arguments' and 'return_values'
 * are always freed before returning (ret1 path).
 */
void * attr_fastcall ipret_call_cache(frame_s *fp, const code_t *ip, pointer_t *direct_function, struct ipret_call_cache_arg *arguments, frame_t *return_values, frame_t free_fn_slot)
{
	struct thunk *thunk = NULL;
	struct data_compare_context ctx;
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct cache_entry *c = NULL; /* avoid warning */
	void *ex;
	arg_t ai;
	bool wr_lock;
	struct thunk **results;
	struct data *function_reference;
	struct data *function = pointer_get_data(*direct_function);
	arg_t n_arguments = da(function,function)->n_arguments;
	arg_t n_return_values = da(function,function)->n_return_values;
	bool save = *ip % OPCODE_MODE_MULT == OPCODE_CALL_SAVE || *ip % OPCODE_MODE_MULT == OPCODE_CALL_INDIRECT_SAVE;

	ctx.err.error_class = EC_NONE;

	for (ai = 0; ai < n_arguments; ai++)
		arguments[ai].need_free_ptr = false;
	/* deep-evaluate every argument; comparing cache keys needs full data */
	for (ai = 0; ai < n_arguments; ai++) {
		struct function_argument *f_arg = arguments[ai].f_arg;
		if (unlikely(f_arg != NULL)) {
			if (f_arg->tag == TYPE_TAG_unknown) {
				ex = pointer_deep_eval(&f_arg->u.ptr, fp, ip, &thunk);
				arguments[ai].ptr = pointer_reference(&f_arg->u.ptr);
				arguments[ai].need_free_ptr = true;
			} else {
				arguments[ai].ptr = flat_to_data(type_get_from_tag(f_arg->tag), f_arg->u.slot);
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			}
		} else {
			frame_t slot = arguments[ai].slot;
			if (!frame_variable_is_flat(fp, slot)) {
				ex = frame_pointer_deep_eval(fp, ip, slot, &thunk);
				arguments[ai].ptr = *frame_pointer(fp, slot);
			} else {
				arguments[ai].ptr = flat_to_data(frame_get_type_of_local(fp, slot), frame_var(fp, slot));
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			}
		}
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			/* 'thunk' carries the exception; acquire the lock so the
			   common ret_c/ret2 cleanup path can be used */
			if (!rwmutex_supported) {
				wr_lock = true;
				address_write_lock(function);
			} else {
				wr_lock = false;
				address_read_lock(function);
			}
			goto ret_c;
		}
		if (ex != POINTER_FOLLOW_THUNK_GO)
			goto ret1;
	}

	ctx.arguments = arguments;
	ctx.n_arguments = n_arguments;
	ctx.n_return_values = n_return_values;

	/* first, consult the cache that was loaded from disk (read-only) */
	if (da(function,function)->loaded_cache) {
		pointer_t *results;
		/*debug("loaded cache: %s", da(function,function)->function_name);*/
		ctx.err.error_class = EC_NONE;
		results = saved_cache_find(function, &ctx);
		if (results || unlikely(ctx.err.error_class != EC_NONE)) {
			/* hit (or error): consume argument slots and store results */
			for (ai = 0; ai < n_arguments; ai++) {
				if (arguments[ai].deref) {
					frame_t slot = arguments[ai].slot;
					if (frame_test_and_clear_flag(fp, slot))
						pointer_dereference(*frame_pointer(fp, slot));
					*frame_pointer(fp, slot) = pointer_empty();
				}
			}
			if (unlikely(free_fn_slot != NO_FRAME_T)) {
				frame_free_and_clear(fp, free_fn_slot);
			}
			if (unlikely(ctx.err.error_class != EC_NONE)) {
				for (ai = 0; ai < n_return_values; ai++) {
					frame_set_pointer(fp, return_values[ai], pointer_error(ctx.err, NULL, NULL pass_file_line));
				}
			} else {
				for (ai = 0; ai < n_return_values; ai++) {
					pointer_t ptr = pointer_reference(&results[ai]);
					frame_set_pointer(fp, return_values[ai], ptr);
				}
			}
			ex = POINTER_FOLLOW_THUNK_GO;
			goto ret1;
		}
	}

	if (!rwmutex_supported) {
		wr_lock = true;
		address_write_lock(function);
	} else {
		wr_lock = false;
		address_read_lock(function);
	}

again:
	ctx.err.error_class = EC_NONE;
	e = tree_find_for_insert(&da(function,function)->cache, cache_entry_compare, ptr_to_num(&ctx), &ins);
	if (e) {
		if (unlikely(ctx.err.error_class != EC_NONE)) {
			/* comparison failed (OOM): drop the lock, trim caches, retry */
			if (!wr_lock)
				address_read_unlock(function);
			else
				address_write_unlock(function);
			if (ctx.err.error_type == AJLA_ERROR_OUT_OF_MEMORY && mem_trim_cache()) {
				if (!wr_lock)
					address_read_lock(function);
				else
					address_write_lock(function);
				goto again;
			}
			/* give up: re-take a read lock for the ret_c cleanup */
			wr_lock = false;
			address_read_lock(function);
			goto ret_c;
		}
		c = get_struct(e, struct cache_entry, entry);
		address_lock(c, DEPTH_THUNK);
		goto have_c;
	}
	/* entry not found: inserting requires the write lock */
	if (!wr_lock) {
		address_read_unlock(function);
		wr_lock = true;
		address_write_lock(function);
		goto again;
	}

	/* build a new cache entry and submit the call for evaluation */
	c = struct_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry, arguments, n_arguments, MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c))
		goto oom1;
	c->save = save;
	c->returns = mem_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry_return *, 0, 0, n_return_values, sizeof(struct cache_entry_return), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c->returns))
		goto oom2;
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = NULL;
	}
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = function_evaluate_prepare(MEM_DONT_TRY_TO_FREE);
		if (unlikely(!c->returns[ai].ex))
			goto oom3;
	}
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(arguments[ai].ptr);
		c->arguments[ai] = arguments[ai].ptr;
	}
	results = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, n_return_values, sizeof(struct thunk *), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!results))
		goto oom3;
	if (!(function_reference = data_alloc_function_reference_mayfail(n_arguments, MEM_DONT_TRY_TO_FREE pass_file_line)))
		goto oom4;
	da(function_reference,function_reference)->is_indirect = false;
	da(function_reference,function_reference)->u.direct = direct_function;
	if (unlikely(!thunk_alloc_function_call(pointer_data(function_reference), n_return_values, results, MEM_DONT_TRY_TO_FREE))) {
		data_free_r1(function_reference);
		/* staged unwinding of the partially built entry */
oom4:
		mem_free(results);
oom3:
		for (ai = 0; ai < n_return_values; ai++) {
			if (c->returns[ai].ex)
				execution_control_free(c->returns[ai].ex);
		}
		mem_free(c->returns);
oom2:
		mem_free(c);
oom1:
		/* allocation failed: trim caches and retry, or fail with OOM */
		address_write_unlock(function);
		if (mem_trim_cache()) {
			address_write_lock(function);
			goto again;
		}
		address_write_lock(function);
		ctx.err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
		goto ret_c;
	}
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(c->arguments[ai]);
		data_fill_function_reference(function_reference, ai, c->arguments[ai]);
	}
	address_lock(c, DEPTH_THUNK);
	/*debug("evaluaring: %p", c);*/
	c->n_pending = n_return_values;
	list_init(&c->wait_list);
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ce = c;
		function_evaluate_submit(c->returns[ai].ex, pointer_thunk(results[ai]), cache_evaluated, &c->returns[ai]);
	}
	mem_free(results);

	tree_insert_after_find(&c->entry, &ins);

have_c:
	if (!c->save && unlikely(save))
		c->save = true;
	if (c->n_pending) {
		/* evaluation still in flight: sleep on the entry's wait list */
		struct execution_control *exx;
		/*debug("waiting on %p, pending %u", c, c->n_pending);*/
		exx = frame_execution_control(fp);
		exx->wait[0].mutex_to_lock = address_get_mutex(c, DEPTH_THUNK);
		list_add(&c->wait_list, &exx->wait[0].wait_entry);
		address_unlock(c, DEPTH_THUNK);
		pointer_follow_wait(fp, ip);
		ex = POINTER_FOLLOW_THUNK_EXIT;
		goto ret2;
	}
	address_unlock(c, DEPTH_THUNK);

ret_c:
	/* consume argument slots, free the function slot, then store the
	   cached results (or the pending exception) into the frame */
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].deref) {
			frame_t slot = arguments[ai].slot;
			if (frame_test_and_clear_flag(fp, slot))
				pointer_dereference(*frame_pointer(fp, slot));
			*frame_pointer(fp, slot) = pointer_empty();
		}
	}
	if (unlikely(free_fn_slot != NO_FRAME_T)) {
		frame_free_and_clear(fp, free_fn_slot);
	}
	if (likely(!thunk) && unlikely(ctx.err.error_class != EC_NONE))
		thunk = thunk_alloc_exception_error(ctx.err, NULL, NULL, NULL pass_file_line);
	for (ai = 0; ai < n_return_values; ai++) {
		pointer_t ptr;
		if (likely(!thunk)) {
			ptr = pointer_reference(&c->returns[ai].ptr);
		} else {
			/* one exception thunk shared by all return slots */
			if (ai)
				thunk_reference(thunk);
			ptr = pointer_thunk(thunk);
		}
		frame_set_pointer(fp, return_values[ai], ptr);
	}
	ex = POINTER_FOLLOW_THUNK_GO;

ret2:
	if (likely(!wr_lock))
		address_read_unlock(function);
	else
		address_write_unlock(function);

ret1:
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].need_free_ptr) {
			pointer_dereference(arguments[ai].ptr);
		}
	}
	mem_free(arguments);
	mem_free(return_values);
	return ex;
}
/*
 * Slow path of ipret_get_index: the slot is negative, boxed, or a thunk.
 * On a negative value, either reports AJLA_ERROR_NEGATIVE_INDEX through
 * *thunk (when is_negative is NULL) or sets *is_negative and returns GO
 * without initializing *idx. Longint data yields an mp-backed index.
 */
static attr_noinline void * ipret_get_index_complicated(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
again:
	if (likely(!frame_test_flag(fp_slot, slot))) {
		int_default_t in;
		barrier_aliasing();
		in = *frame_slot(fp_slot, slot, int_default_t);
		barrier_aliasing();
		if (unlikely(in < 0)) {
negative:
			if (!is_negative) {
				*thunk = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NEGATIVE_INDEX), fp, ip pass_position);
				return POINTER_FOLLOW_THUNK_EXCEPTION;
			}
			/* caller handles negative indices itself; *idx stays unset */
			*is_negative = true;
			return POINTER_FOLLOW_THUNK_GO;
		}
		index_from_int_(idx, in pass_position);
	} else {
		pointer_t *ptr = frame_pointer(fp_slot, slot);
		struct data *d;

		pointer_follow(ptr, true, d, PF_WAIT, fp, ip,
			return ex_,
			thunk_reference(thunk_);
			*thunk = pointer_thunk(thunk_);
			return POINTER_FOLLOW_THUNK_EXCEPTION;
		);
		if (da_tag(d) == DATA_TAG_flat) {
			/* boxed flat integer: unbox it and take the fast path */
			ipret_unbox_value(fp_slot, type_get_int(INT_DEFAULT_N), slot);
			goto again;
		}
		if (unlikely(mpint_negative(&da(d,longint)->mp))) {
			goto negative;
		}
		index_from_mp_(idx, &da(d,longint)->mp pass_position);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Read an array index from an integer slot into *idx.
 * Fast path: flat non-negative int_default_t. Everything else (negative,
 * boxed, thunk) is delegated to ipret_get_index_complicated; see there
 * for the is_negative / *thunk contract.
 */
void * attr_hot_fastcall ipret_get_index(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
	if (likely(fp == fp_slot))
		ajla_assert(frame_get_type_of_local(fp_slot, slot)->tag == type_get_int(INT_DEFAULT_N)->tag, (file_line, "ipret_get_index: invalid type %u", (unsigned)frame_get_type_of_local(fp_slot, slot)->tag));
	if (likely(!frame_test_flag(fp_slot, slot))) {
		int_default_t in;
		barrier_aliasing();
		in = *frame_slot(fp_slot, slot, int_default_t);
		barrier_aliasing();
		if (unlikely(in < 0))
			goto complicated;
		index_from_int_(idx, in pass_position);
	} else {
		/* note: also reached via goto from the fast path above */
complicated:
		return ipret_get_index_complicated(fp, ip, fp_slot, slot, is_negative, idx, thunk pass_position);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
1552 void * attr_hot_fastcall ipret_record_load_create_thunk(frame_s *fp, const code_t *ip, frame_t record, frame_t record_slot, frame_t result_slot)
1554 pointer_t *fn_ptr;
1555 void *ex;
1556 struct data *function_reference;
1557 struct thunk *result;
1559 ex = pcode_find_record_option_load_function(PCODE_FUNCTION_RECORD_LOAD, record_slot, fp, ip, &fn_ptr);
1561 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1562 return ex;
1564 result = build_thunk(fn_ptr, 1, &function_reference);
1565 ipret_fill_function_reference_from_slot(function_reference, 0, fp, record, false);
1567 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1569 return POINTER_FOLLOW_THUNK_GO;
1572 void * attr_hot_fastcall ipret_option_load_create_thunk(frame_s *fp, const code_t *ip, frame_t option, frame_t option_idx, frame_t result_slot)
1574 pointer_t *fn_ptr;
1575 void *ex;
1576 struct data *function_reference;
1577 struct thunk *result;
1579 ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_LOAD, option_idx, fp, ip, &fn_ptr);
1581 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1582 return ex;
1584 result = build_thunk(fn_ptr, 1, &function_reference);
1585 ipret_fill_function_reference_from_slot(function_reference, 0, fp, option, false);
1587 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1589 return POINTER_FOLLOW_THUNK_GO;
/*
 * Test whether the (thunked) option in slot_1 has tag 'option'.
 * If the slot's thunk is an exception it is copied to slot_r; if the
 * thunk finished meanwhile the interpreter retries; otherwise a thunk
 * applying the pcode option-test function is stored in slot_r.
 */
void * attr_hot_fastcall thunk_option_test(frame_s *fp, const code_t *ip, frame_t slot_1, ajla_option_t option, frame_t slot_r)
{
	pointer_t *fn_ptr;
	void *ex;
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
		break
	);

	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_TEST, option, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		return ex;

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Compute the ordinal of the (thunked) option in slot_1.
 * Same structure as thunk_option_test: propagate an exception, retry a
 * finished thunk, otherwise store a thunk applying the pcode option-ord
 * function in slot_r.
 */
void * attr_hot_fastcall thunk_option_ord(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
{
	pointer_t *fn_ptr;
	void *ex;
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
		break
	);

	ex = pcode_find_option_ord_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		return ex;

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
1645 void * attr_hot_fastcall ipret_array_load_create_thunk(frame_s *fp, const code_t *ip, frame_t array, frame_t index, frame_t result_slot)
1647 pointer_t *fn_ptr;
1648 void *ex;
1649 struct data *function_reference;
1650 struct thunk *result;
1652 ex = pcode_find_array_load_function(fp, ip, &fn_ptr);
1654 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1655 return ex;
1657 result = build_thunk(fn_ptr, 2, &function_reference);
1658 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array, false);
1659 ipret_fill_function_reference_from_slot(function_reference, 1, fp, index, false);
1661 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1663 return POINTER_FOLLOW_THUNK_GO;
1666 static attr_noinline void *array_len_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t result_slot)
1668 pointer_t *fn_ptr;
1669 void *ex;
1670 struct data *function_reference;
1671 struct thunk *result;
1673 ex = pcode_find_array_len_function(fp, ip, &fn_ptr);
1675 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1676 return ex;
1678 result = build_thunk(fn_ptr, 1, &function_reference);
1679 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
1681 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1683 return POINTER_FOLLOW_THUNK_GO;
1686 static attr_noinline void *array_len_greater_than_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t length_slot, frame_t result_slot)
1688 pointer_t *fn_ptr;
1689 void *ex;
1690 struct data *function_reference;
1691 struct thunk *result;
1693 ex = pcode_find_array_len_greater_than_function(fp, ip, &fn_ptr);
1695 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1696 return ex;
1698 result = build_thunk(fn_ptr, 2, &function_reference);
1699 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
1700 ipret_fill_function_reference_from_slot(function_reference, 1, fp, length_slot, false);
1702 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1704 return POINTER_FOLLOW_THUNK_GO;
1707 static attr_noinline void *array_sub_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t end_slot, frame_t result_slot, unsigned flags)
1709 pointer_t *fn_ptr;
1710 void *ex;
1711 struct data *function_reference;
1712 struct thunk *result;
1714 ex = pcode_find_array_sub_function(fp, ip, &fn_ptr);
1716 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1717 return ex;
1719 result = build_thunk(fn_ptr, 3, &function_reference);
1720 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
1721 ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
1722 ipret_fill_function_reference_from_slot(function_reference, 2, fp, end_slot, false);
1724 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1726 return POINTER_FOLLOW_THUNK_GO;
1729 static attr_noinline void *array_skip_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t result_slot, unsigned flags)
1731 pointer_t *fn_ptr;
1732 void *ex;
1733 struct data *function_reference;
1734 struct thunk *result;
1736 ex = pcode_find_array_skip_function(fp, ip, &fn_ptr);
1738 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1739 return ex;
1741 result = build_thunk(fn_ptr, 2, &function_reference);
1742 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
1743 ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
1745 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1747 return POINTER_FOLLOW_THUNK_GO;
/*
 * Try to resolve the array slot into plain data: collapse finished
 * thunks and incomplete-array chains. Returns true when a thunk is still
 * pending or an incomplete segment remains; false when the slot holds
 * ordinary (or flat) array data.
 */
static bool array_resolve_thunk(frame_s *fp, frame_t slot)
{
	pointer_t *ptr;
	if (unlikely(frame_variable_is_flat(fp, slot)))
		return false;
retry:
	ptr = frame_pointer(fp, slot);
	if (likely(frame_test_flag(fp, slot))) {
		if (unlikely(pointer_is_thunk(*ptr))) {
			if (thunk_is_finished(pointer_get_thunk(*ptr))) {
				/* replace the finished thunk with its result and re-check */
				pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
				goto retry;
			}
			return true;
		}
	}
	if (da_tag(pointer_get_data(*ptr)) == DATA_TAG_flat) {
	}
	if (da_tag(pointer_get_data(*ptr)) == DATA_TAG_array_incomplete) {
		/* take ownership before collapsing the incomplete chain */
		if (unlikely(!frame_test_and_set_flag(fp, slot)))
			data_reference(pointer_get_data(*ptr));
		array_incomplete_collapse(ptr);
		return true;
	}
	return false;
}
/*
 * Walk an (possibly incomplete-chained) array until *idx elements have
 * been skipped. On success stores the reached segment in *result and
 * returns GO; returns RETRY when the array is shorter than *idx (an
 * out-of-range signal, not a real retry). *can_modify is set to the
 * deepest pointer that may be modified in place, or NULL when some
 * segment on the way is shared.
 */
static void *array_walk(frame_s *fp, const code_t *ip, pointer_t *ptr, array_index_t *idx, unsigned flags, pointer_t *result, pointer_t **can_modify)
{
	struct data *a;
	array_index_t this_len;

	*can_modify = ptr;

again:
	pointer_follow(ptr, false, a, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		return ex_,
		thunk_reference(thunk_);
		*result = pointer_thunk(thunk_);
		return POINTER_FOLLOW_THUNK_EXCEPTION
	);

	if (unlikely(index_eq_int(*idx, 0))) {
		*result = *ptr;
		return POINTER_FOLLOW_THUNK_GO;
	}

	if (unlikely(da_tag(a) == DATA_TAG_array_incomplete)) {
		if (!data_is_writable(a))
			*can_modify = NULL;	/* shared segment: no in-place edits */
		this_len = array_len(pointer_get_data(da(a,array_incomplete)->first));
		if (!index_ge_index(this_len, *idx)) {
			/* index goes past this segment: continue in the next one */
			index_sub(idx, this_len);
			index_free(&this_len);
			ptr = &da(a,array_incomplete)->next;
			if (*can_modify)
				*can_modify = ptr;
			goto again;
		}
		index_free(&this_len);
		*result = pointer_data(a);
	} else {
		this_len = array_len(a);
		if (unlikely(!index_ge_index(this_len, *idx))) {
			index_free(&this_len);
			return POINTER_FOLLOW_THUNK_RETRY; /* this means index out of range, not a retry */
		}
		index_free(&this_len);
		*result = pointer_data(a);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Compute the length of the array in slot_a into integer slot_r.
 * Flat arrays carry their length in the type; otherwise the chain of
 * DATA_TAG_array_incomplete segments is walked and partial lengths are
 * summed. When a segment is still a thunk and the operation is not
 * strict, a thunk computing the length is stored instead.
 */
void * attr_hot_fastcall ipret_array_len(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, unsigned flags)
{
	const struct type *type;
	pointer_t *ptr;
	array_index_t idx_len;
	ajla_error_t err;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)), (file_line, "ipret_array_len: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		/* flat array: the length is a static property of the type */
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		barrier_aliasing();
		*frame_slot(fp, slot_r, int_default_t) = flat_def->n_elements;
		barrier_aliasing();
		return POINTER_FOLLOW_THUNK_GO;
	}

	index_from_int(&idx_len, 0);
	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	while (1) {
		struct data *array_data;
		struct data *this_ptr;
		array_index_t this_len;

		pointer_follow(ptr, false, array_data, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
			index_free(&idx_len);
			if (!(flags & OPCODE_OP_FLAG_STRICT)) {
				ex_ = array_len_create_thunk(fp, ip, slot_a, slot_r);
			}
			return ex_,
			index_free(&idx_len);
			thunk_reference(thunk_);
			frame_set_pointer(fp, slot_r, pointer_thunk(thunk_));
			return POINTER_FOLLOW_THUNK_GO
		);
		if (da_tag(array_data) == DATA_TAG_array_incomplete)
			this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		else
			this_ptr = array_data;

		this_len = array_len(this_ptr);

		if (unlikely(!index_add_(&idx_len, this_len, &err pass_file_line))) {
			/* NOTE(review): jumping to array_len_error skips freeing
			   idx_len — presumably index_add_ releases it on failure;
			   TODO confirm against index_add_'s contract */
			index_free(&this_len);
			goto array_len_error;
		}
		index_free(&this_len);

		if (da_tag(array_data) == DATA_TAG_array_incomplete) {
			ptr = &da(array_data,array_incomplete)->next;
			continue;
		}

		break;
	}

	if (likely(!index_is_mp(idx_len))) {
		/* length fits into int_default_t: store it flat */
		int_default_t len = index_to_int(idx_len);
		index_free(&idx_len);
		barrier_aliasing();
		*frame_slot(fp, slot_r, int_default_t) = len;
		barrier_aliasing();
	} else {
		/* length overflowed into a long integer */
		struct data *d;
		d = data_alloc_longint_mayfail(0, &err pass_file_line);
		if (unlikely(!d)) {
			index_free(&idx_len);
array_len_error:
			frame_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
		} else {
			mpint_free(&da(d,longint)->mp);
			index_free_get_mp(&idx_len, &da(d,longint)->mp);
			frame_set_pointer(fp, slot_r, pointer_data(d));
		}
	}
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Test whether the array in slot_a has more than slot_l elements,
 * storing the flat boolean into slot_r. Walks only as far as needed
 * (length + 1 elements), so it works on lazily produced arrays; when a
 * needed segment is a thunk and the operation is not strict, a thunk
 * computing the comparison is created instead.
 */
void * attr_hot_fastcall ipret_array_len_greater_than(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_l, unsigned flags)
{
	const struct type *type;
	bool neg = false;
	int result = 1;
	pointer_t *ptr;
	void *ex;
	array_index_t remaining_length;
	pointer_t res_ptr;
	pointer_t *can_modify;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_flat_option()), (file_line, "ipret_array_len_greater_than: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len_greater_than: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	ex = ipret_get_index(fp, ip, fp, slot_l, &neg, &remaining_length, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto err;
		return ex;
	}
	if (unlikely(neg)) {
		/* any length is greater than a negative number */
		/* NOTE(review): on this path ipret_get_index returned without
		   initializing remaining_length, yet ret_result frees it —
		   looks suspicious unless index_free tolerates that; TODO confirm */
		result = 1;
		goto ret_result;
	}

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		/* flat array: compare against the static length */
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(remaining_length, flat_def->n_elements))
			result = 0;
		goto ret_result;
	}

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* walk length + 1 elements: success means "strictly greater" */
	index_add_int(&remaining_length, 1);
	ex = array_walk(fp, ip, ptr, &remaining_length, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			/* RETRY from array_walk means the array is too short */
			result = 0;
			goto ret_result;
		}
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto err_free;
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_len_greater_than_create_thunk(fp, ip, slot_a, slot_l, slot_r);
		index_free(&remaining_length);
		return ex;
	}

ret_result:
	barrier_aliasing();
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	barrier_aliasing();
	index_free(&remaining_length);
	return POINTER_FOLLOW_THUNK_GO;

err_free:
	index_free(&remaining_length);
err:
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Compute the subarray [slot_start, slot_end) of the array in slot_a and
 * store the resulting pointer into slot_r.
 *
 * Returns POINTER_FOLLOW_THUNK_GO when a result (value or exception) was
 * stored, or another POINTER_FOLLOW_* code / thunk when evaluation must be
 * deferred.  With OPCODE_FLAG_FREE_ARGUMENT the source slot is consumed.
 */
void * attr_hot_fastcall ipret_array_sub(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, frame_t slot_end, unsigned flags)
{
	array_index_t start, end, end_tmp;
	pointer_t res_ptr;
	pointer_t *can_modify;
	void *ex;
	pointer_t *ptr;
	struct data *acc = NULL;	/* accumulated result, joined chunk by chunk */

	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_sub: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	/* Evaluate both indices; each may itself be a thunk or raise. */
	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto except;
		return ex;
	}
	ex = ipret_get_index(fp, ip, fp, slot_end, NULL, &end, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto except_start;
		index_free(&start);
		return ex;
	}
	/* end must not be smaller than start */
	if (!index_ge_index(end, start)) {
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line);
		goto except_end;
	}

	/* Fast path: the source is a flat (unboxed) array stored in the frame;
	 * copy the requested element range into a fresh flat array. */
	if (frame_variable_is_flat(fp, slot_a)) {
		struct data *d;
		ajla_error_t err;
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		/* equivalent to end > n_elements, using the ge primitive */
		if (index_ge_int(end, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto except_end;
		}
		st = index_to_int(start);
		len = index_to_int(end) - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
		if (unlikely(!d)) {
			res_ptr = pointer_error(err, fp, ip pass_file_line);
			goto except_end;
		}
		memcpy(da_array_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);
		goto except_end;	/* shared exit path also frees start/end */
	}

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* Walk the (possibly incomplete) array chain far enough to cover 'end';
	 * end_tmp is a scratch copy because array_walk consumes its index. */
	index_copy(&end_tmp, end);
	ex = array_walk(fp, ip, ptr, &end_tmp, flags, &res_ptr, &can_modify);
	index_free(&end_tmp);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			/* the array ended before 'end' */
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto except_end;
		}
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto except_end;
		/* Not evaluated yet: build a thunk that redoes the operation lazily. */
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_sub_create_thunk(fp, ip, slot_a, slot_start, slot_end, slot_r, flags);
		index_free(&start);
		index_free(&end);
		return ex;
	}

	/* Destructive extraction is only allowed when we own the sole reference. */
	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
		can_modify = NULL;

	/* Iterate over the chunks of an incomplete-array chain, slicing each
	 * chunk that overlaps [start, end) and joining the slices into acc. */
	while (1) {
		struct data *array_data;
		struct data *this_ptr;
		array_index_t this_len;

#if 0
		if (pointer_is_thunk(*ptr)) {
			struct stack_trace st;
			stack_trace_init(&st);
			stack_trace_capture(&st, fp, ip, 20);
			stack_trace_print(&st);
			stack_trace_free(&st);
		}
#endif
		array_data = pointer_get_data(*ptr);

		if (da_tag(array_data) == DATA_TAG_array_incomplete)
			this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		else
			this_ptr = array_data;

		this_len = array_len(this_ptr);

		if (!index_ge_index(this_len, start)) {
			/* chunk is entirely before 'start': shift both bounds left */
			index_sub(&start, this_len);
			index_sub(&end, this_len);
			index_free(&this_len);
		} else {
			bool done = false;
			array_index_t this_step;
			ajla_error_t err;
			struct data *t;

			/* Detach the chunk when we may cannibalize it in place. */
			if (can_modify) {
				if (da_tag(array_data) == DATA_TAG_array_incomplete)
					da(array_data,array_incomplete)->first = pointer_empty();
				else
					*ptr = pointer_empty();
			}

			/* this_step = number of elements taken from this chunk */
			if (!index_ge_index(this_len, end)) {
				index_sub3(&this_step, this_len, start);
			} else {
				index_sub3(&this_step, end, start);
				done = true;	/* 'end' falls inside this chunk */
			}
			/*debug("start %lu, end %lu, this_len %lu, this_step %lu", start, end, this_len, this_step);*/
			index_free(&this_len);
			index_sub(&end, this_step);
			index_sub(&end, start);
			/* array_sub consumes 'start' and 'this_step' */
			t = array_sub(this_ptr, start, this_step, can_modify != NULL, &err);
			index_from_int(&start, 0);	/* subsequent chunks start at 0 */

			if (unlikely(!t)) {
				res_ptr = pointer_error(err, fp, ip pass_file_line);
				goto except_end;
			}

			if (!acc) {
				acc = t;
			} else {
				acc = array_join(acc, t, &err);
				if (unlikely(!acc)) {
					res_ptr = pointer_error(err, fp, ip pass_file_line);
					goto except_end;
				}
			}

			if (done) {
				res_ptr = pointer_data(acc);
				acc = NULL;	/* ownership moved to res_ptr */
				break;
			}
		}

		/* Need more elements: follow the incomplete-array chain. */
		if (unlikely(da_tag(array_data) != DATA_TAG_array_incomplete)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			break;
		}
		ptr = &da(array_data,array_incomplete)->next;
	}

except_end:
	index_free(&end);
except_start:
	index_free(&start);
except:
	if (acc)
		data_dereference(acc);
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		/* the chunk pointer may have been stolen above (pointer_empty) */
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		else
			frame_free_and_clear(fp, slot_a);
	}
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
2152 void * attr_hot_fastcall ipret_array_skip(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, unsigned flags)
2154 array_index_t start, len;
2155 pointer_t res_ptr;
2156 pointer_t *can_modify;
2157 void *ex;
2158 pointer_t *ptr;
2159 struct data *a, *ta, *ts;
2160 ajla_error_t err;
2161 bool deref;
2163 ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_skip: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
2165 ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
2166 if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
2167 if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
2168 goto ret;
2169 return ex;
2172 if (frame_variable_is_flat(fp, slot_a)) {
2173 struct data *d;
2174 ajla_error_t err;
2175 int_default_t st, len;
2176 const struct type *type = frame_get_type_of_local(fp, slot_a);
2177 const struct flat_array_definition *flat_def = type_def(type,flat_array);
2178 if (index_ge_int(start, flat_def->n_elements + 1)) {
2179 res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
2180 goto ret_free_start;
2182 st = index_to_int(start);
2183 len = flat_def->n_elements - st;
2184 d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
2185 if (unlikely(!d)) {
2186 res_ptr = pointer_error(err, fp, ip pass_file_line);
2187 goto ret_free_start;
2189 memcpy(da_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
2190 res_ptr = pointer_data(d);
2191 goto ret_free_start;
2194 ptr = frame_pointer(fp, slot_a);
2196 if (flags & OPCODE_OP_FLAG_STRICT) {
2197 array_resolve_thunk(fp, slot_a);
2200 ex = array_walk(fp, ip, ptr, &start, flags, &res_ptr, &can_modify);
2201 if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
2202 if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
2203 res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
2204 goto ret_free_start;
2206 if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
2207 goto ret_free_start;
2208 if (!(flags & OPCODE_OP_FLAG_STRICT))
2209 ex = array_skip_create_thunk(fp, ip, slot_a, slot_start, slot_r, flags);
2210 index_free(&start);
2211 return ex;
2214 if (unlikely(index_eq_int(start, 0))) {
2215 pointer_reference_owned(res_ptr);
2216 goto ret_free_start;
2219 if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
2220 can_modify = NULL;
2222 a = pointer_get_data(res_ptr);
2223 if (da_tag(a) == DATA_TAG_array_incomplete) {
2224 ta = pointer_get_data(da(a,array_incomplete)->first);
2225 } else {
2226 ta = a;
2229 len = array_len(ta);
2230 index_sub(&len, start);
2231 if (unlikely(index_eq_int(len, 0)) && da_tag(a) == DATA_TAG_array_incomplete) {
2232 res_ptr = pointer_reference(&da(a,array_incomplete)->next);
2233 index_free(&len);
2234 goto ret_free_start;
2236 deref = false;
2237 if (can_modify) {
2238 *can_modify = pointer_empty();
2239 deref = true;
2241 ts = array_sub(ta, start, len, deref, &err);
2242 if (unlikely(!ts)) {
2243 res_ptr = pointer_error(err, fp, ip pass_file_line);
2244 goto ret;
2247 if (a != ta) {
2248 if (deref) {
2249 da(a,array_incomplete)->first = pointer_data(ts);
2250 res_ptr = pointer_data(a);
2251 } else {
2252 struct data *inc;
2253 pointer_t next = pointer_reference(&da(a,array_incomplete)->next);
2254 inc = data_alloc_array_incomplete(ts, next, &err pass_file_line);
2255 if (unlikely(!inc)) {
2256 data_dereference(ts);
2257 pointer_dereference(next);
2258 res_ptr = pointer_error(err, fp, ip pass_file_line);
2259 goto ret;
2261 res_ptr = pointer_data(inc);
2263 } else {
2264 res_ptr = pointer_data(ts);
2266 goto ret;
2268 ret_free_start:
2269 index_free(&start);
2270 ret:
2271 if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
2272 if (pointer_is_empty(*frame_pointer(fp, slot_a)))
2273 frame_clear_flag(fp, slot_a);
2274 else
2275 frame_free_and_clear(fp, slot_a);
2277 frame_set_pointer(fp, slot_r, res_ptr);
2278 return POINTER_FOLLOW_THUNK_GO;
/*
 * Append the array ptr_2 after the array ptr_1, storing the result in *ptr_r.
 * Both input pointers are owned by this function (they are consumed or
 * passed into the result).  fn_ptr, when non-NULL, is the lazy "append"
 * function used to build a thunk if ptr_1 is not fully evaluated yet.
 */
static void attr_hot ipret_array_append_pointers(frame_s *fp, const code_t *ip, pointer_t *ptr_r, pointer_t ptr_1, pointer_t ptr_2, pointer_t *fn_ptr)
{
	ajla_error_t err;
	struct data *d;

	if (unlikely(fn_ptr != NULL)) {
		if (unlikely(pointer_is_thunk(ptr_1))) {
			struct thunk *result;
			struct data *function_reference;

			/* An exception in ptr_1 propagates directly; ptr_2 is dropped. */
			if (pointer_is_thunk(ptr_1) && thunk_tag_volatile(pointer_get_thunk(ptr_1)) == THUNK_TAG_EXCEPTION) {
				*ptr_r = ptr_1;
				pointer_dereference(ptr_2);
				return;
			}

			/* ptr_1 unevaluated: result = thunk(append, ptr_1, ptr_2) */
			result = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, ptr_1);
			data_fill_function_reference(function_reference, 1, ptr_2);
			*ptr_r = pointer_thunk(result);
			return;
		} else if (likely(da_tag(pointer_get_data(ptr_1)) == DATA_TAG_array_incomplete)) {
			struct data *first;
			pointer_t last;
			struct thunk *thunk;
			struct data *function_reference, *result;

			/* ptr_1 = first ++ last: keep 'first' eagerly and defer
			 * appending ptr_2 to the unevaluated tail 'last'. */
			array_incomplete_decompose(pointer_get_data(ptr_1), &first, &last);

			thunk = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, last);
			data_fill_function_reference(function_reference, 1, ptr_2);

			result = data_alloc_array_incomplete(first, pointer_thunk(thunk), &err pass_file_line);
			if (unlikely(!result)) {
				data_dereference(first);
				pointer_dereference(pointer_thunk(thunk));
				*ptr_r = pointer_error(err, fp, ip pass_file_line);
			} else {
				*ptr_r = pointer_data(result);
			}
			return;
		}
	}

	/* Appending to an empty array yields ptr_2 unchanged. */
	if (unlikely(array_is_empty(pointer_get_data(ptr_1)))) {
		*ptr_r = ptr_2;
		pointer_dereference(ptr_1);
		return;
	}

	/* ptr_2 not fully evaluated (or itself incomplete): wrap ptr_1 as the
	 * complete head of a new incomplete array with ptr_2 as continuation. */
	if (unlikely(pointer_is_thunk(ptr_2)) || unlikely(da_tag(pointer_get_data(ptr_2)) == DATA_TAG_array_incomplete)) {
		struct data *result;
		result = data_alloc_array_incomplete(pointer_get_data(ptr_1), ptr_2, &err pass_file_line);
		if (unlikely(!result)) {
			pointer_dereference(ptr_1);
			pointer_dereference(ptr_2);
			*ptr_r = pointer_error(err, fp, ip pass_file_line);
		} else {
			*ptr_r = pointer_data(result);
			/* merge adjacent evaluated incomplete nodes */
			if (!pointer_is_thunk(ptr_2))
				array_incomplete_collapse(ptr_r);
		}
		return;
	}

	/* Both sides fully evaluated: join eagerly (array_join consumes both). */
	d = array_join(pointer_get_data(ptr_1), pointer_get_data(ptr_2), &err);
	if (!d)
		*ptr_r = pointer_error(err, fp, ip pass_file_line);
	else
		*ptr_r = pointer_data(d);
}
2354 void * attr_hot_fastcall ipret_array_append(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
2356 pointer_t *fn_ptr = NULL;
2357 pointer_t ptr_1, ptr_2, *ptr_r;
2359 if (unlikely(array_resolve_thunk(fp, slot_1))) {
2360 void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
2361 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
2362 return ex;
2364 array_resolve_thunk(fp, slot_2);
2366 ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
2367 ptr_2 = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);
2369 ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
2371 frame_set_flag(fp, slot_r);
2372 ptr_r = frame_pointer(fp, slot_r);
2374 ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);
2376 return POINTER_FOLLOW_THUNK_GO;
/*
 * Fast path for appending a single flat element (slot_2) to the array in
 * slot_1, mutating the array in place when it is uniquely owned and has
 * spare capacity (growing it geometrically otherwise).  Falls back to the
 * generic ipret_array_append_one on any condition it cannot handle.
 */
void * attr_hot_fastcall ipret_array_append_one_flat(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	const int minimum_size = 16;	/* initial/minimum flat-array capacity */
	ajla_error_t sink;
	pointer_t ptr;
	struct data *data;
	const struct type *type;
	/* In-place append requires consuming slot_1 ... */
	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT)))
		goto fallback;
	/* ... a flat element ... */
	if (unlikely(!frame_variable_is_flat(fp, slot_2)))
		goto fallback;
	/* ... and an owned, already-evaluated array pointer. */
	if (unlikely(!frame_test_flag(fp, slot_1)))
		goto fallback;
	ptr = *frame_pointer(fp, slot_1);
	if (unlikely(pointer_is_thunk(ptr)))
		goto fallback;
	data = pointer_get_data(ptr);
	if (unlikely(da_tag(data) != DATA_TAG_array_flat)) {
		/* An empty pointers-array can be replaced by a fresh flat array. */
		if (likely(da_tag(data) == DATA_TAG_array_pointers) && likely(!da(data,array_pointers)->n_used_entries)) {
			type = frame_get_type_of_local(fp, slot_2);
			data = data_alloc_array_flat_mayfail(type, minimum_size, 0, false, &sink pass_file_line);
			if (unlikely(!data))
				goto fallback;
			pointer_dereference(ptr);
			goto do_copy;
		}
		goto fallback;
	}
	if (unlikely(!data_is_writable(data)))
		goto fallback;
	if (unlikely(da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries)) {
		/* Full: grow to double the used size (at least minimum_size). */
		struct data *new_data;
		int_default_t new_size = (uint_default_t)da(data,array_flat)->n_used_entries * 2;
		new_size = maximum(new_size, minimum_size);
		/* overflow check on the doubling */
		if (unlikely(new_size < 0) ||
		    unlikely(new_size <= da(data,array_flat)->n_used_entries))
			goto fallback;
		type = da(data,array_flat)->type;
		new_data = data_alloc_array_flat_mayfail(type, new_size, da(data,array_flat)->n_used_entries, false, &sink pass_file_line);
		if (unlikely(!new_data))
			goto fallback;
		memcpy(da_array_flat(new_data), da_array_flat(data), da(data,array_flat)->n_used_entries * type->size);
		data_free_r1(data);	/* we hold the only reference */
		data = new_data;
		goto do_copy;
	}
do_copy:
	/* Copy the new element into the first free position. */
	type = da(data,array_flat)->type;
	memcpy_fast(da_array_flat(data) + (size_t)da(data,array_flat)->n_used_entries * type->size, frame_var(fp, slot_2), type->size);
	da(data,array_flat)->n_used_entries++;

	/* Move ownership of the (possibly reallocated) array to slot_r. */
	frame_clear_flag(fp, slot_1);
	*frame_pointer(fp, slot_1) = pointer_empty();
	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);

	return POINTER_FOLLOW_THUNK_GO;
fallback:
	return ipret_array_append_one(fp, ip, slot_r, slot_1, slot_2, flags);
}
/*
 * Generic path for appending one element (slot_2) to the array in slot_1,
 * storing the result in slot_r.  Tries an in-place append into a uniquely
 * owned pointers-array; otherwise boxes the element as a one-element array
 * and delegates to ipret_array_append_pointers.
 */
void * attr_hot_fastcall ipret_array_append_one(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	ajla_error_t err;
	pointer_t *fn_ptr = NULL;
	pointer_t ptr_1, ptr_2, ptr_e, *ptr_r;
	struct data *data;

	/* Unresolved thunks in slot_1: fetch the lazy append function. */
	if (unlikely(array_resolve_thunk(fp, slot_1))) {
		void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
		if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
			return ex;
	}

	ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);

	/* Flat element: box it into a one-element flat array. */
	if (frame_variable_is_flat(fp, slot_2)) {
		const struct type *type = frame_get_type_of_local(fp, slot_2);
		data = data_alloc_array_flat_mayfail(type, 1, 1, false, &err pass_file_line);
		if (unlikely(!data))
			goto no_flat;	/* allocation failed: treat as non-flat */
		memcpy_fast(da_array_flat(data), frame_var(fp, slot_2), type->size);
		ptr_2 = pointer_data(data);
		goto have_data;
	}

no_flat:
	ptr_e = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);

	/* In-place append into ptr_1's pointers-array when it is evaluated,
	 * of the right representation, and uniquely owned. */
	if (unlikely(pointer_is_thunk(ptr_1)))
		goto fallback;
	data = pointer_get_data(ptr_1);
	if (unlikely(da_tag(data) != DATA_TAG_array_pointers))
		goto fallback;
	if (unlikely(!data_is_writable(data)))
		goto fallback;
	if (unlikely(da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries)) {
		/* Full: grow to double the used size (at least 16). */
		struct data *new_data;
		size_t new_size = (size_t)da(data,array_pointers)->n_used_entries * 2;
		new_size = maximum(new_size, 16);
		/* overflow check on the doubling */
		if (unlikely(new_size <= (size_t)da(data,array_pointers)->n_used_entries))
			goto fallback;
		new_data = data_alloc_array_pointers_mayfail(new_size, da(data,array_pointers)->n_used_entries, &err pass_file_line);
		if (unlikely(!new_data))
			goto fallback;
		memcpy(da(new_data,array_pointers)->pointer, da(data,array_pointers)->pointer, da(data,array_pointers)->n_used_entries * sizeof(pointer_t));
		data_free_r1(data);	/* we hold the only reference */
		data = new_data;
	}
	da(data,array_pointers)->pointer[da(data,array_pointers)->n_used_entries++] = ptr_e;

	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);

	return POINTER_FOLLOW_THUNK_GO;

fallback:
	/* Box the element into a one-element pointers-array instead. */
	data = data_alloc_array_pointers_mayfail(1, 1, &err pass_file_line);
	if (likely(data != NULL)) {
		da(data,array_pointers)->pointer[0] = ptr_e;
		ptr_2 = pointer_data(data);
	} else {
		pointer_dereference(ptr_e);
		ptr_2 = pointer_error(err, fp, ip pass_file_line);
	}

have_data:
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append_one: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	frame_set_flag(fp, slot_r);
	ptr_r = frame_pointer(fp, slot_r);

	/* Append the one-element array ptr_2 after ptr_1 (consumes both). */
	ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);

	return POINTER_FOLLOW_THUNK_GO;
}
2515 static int_default_t get_array_pointers(unsigned char *flat, const struct type attr_unused *type, int_default_t attr_unused n_elements, pointer_t *ptr, void *context)
2517 struct data *na = context;
2518 if (unlikely(flat != NULL))
2519 return 0;
2520 ajla_assert(da(na,array_pointers)->n_used_entries < da(na,array_pointers)->n_allocated_entries, (file_line, "get_array_pointers: array overrun"));
2521 da(na,array_pointers)->pointer[da(na,array_pointers)->n_used_entries++] = pointer_reference(ptr);
2522 return 1;
/*
 * Iteration callback for ipret_array_flatten: copy a run of flat elements
 * into the destination flat array passed via 'context'.  Returns the number
 * of elements consumed, or 0 to stop when a boxed segment is reached.
 */
static int_default_t get_array_flat(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t attr_unused *ptr, void *context)
{
	size_t size, i;
	struct data *na = context;
	unsigned char *dest;
	if (unlikely(!flat))
		return 0;	/* only flat segments are accepted */
	ajla_assert(da(na,array_flat)->n_used_entries + n_elements <= da(na,array_flat)->n_allocated_entries, (file_line, "get_array_flat: array overrun"));
	size = (size_t)n_elements * type->size;
	dest = da_array_flat(na) + type->size * (size_t)da(na,array_flat)->n_used_entries;
	/* Byte-wise copy that skips zero bytes: the destination was allocated
	 * zero-filled (ipret_array_flatten passes clear=true), so the result
	 * equals a plain memcpy.  NOTE(review): presumably written this way
	 * deliberately (e.g. w.r.t. uninitialized padding bytes) — confirm
	 * before replacing with memcpy. */
	for (i = 0; i < size; i++, flat++, dest++) {
		if (*flat != 0)
			*dest = *flat;
	}
	da(na,array_flat)->n_used_entries += n_elements;
	return n_elements;
}
2543 static int_default_t get_array_type(unsigned char *flat, const struct type *type, int_default_t attr_unused n_elements, pointer_t attr_unused *ptr, void *context)
2545 if (flat)
2546 *cast_ptr(const struct type **, context) = type;
2547 return 0;
/*
 * Flatten the array in slot_1 into a single contiguous representation and
 * store it in slot_r: a flat array when every element is flat, otherwise a
 * single pointers-array.  If slot_r's declared type is a flat array of the
 * exact resulting length, the data is copied directly into the frame.
 * Bails out ("do_nothing" = plain copy) whenever flattening is impossible
 * or pointless.
 */
void * attr_fastcall ipret_array_flatten(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, unsigned flags)
{
	ajla_error_t sink;
	pointer_t *ptr;
	struct data *data, *na;
	array_index_t len_long;
	int_default_t len;
	bool success;
	const struct type *flat_type;
	const struct type *array_type = frame_get_type_of_local(fp, slot_r);

	/* already flat in the frame — nothing to do */
	if (frame_variable_is_flat(fp, slot_1))
		goto do_nothing;

	/* Force evaluation of the whole incomplete-array chain first. */
	ptr = frame_pointer(fp, slot_1);
next_ptr:
	pointer_follow(ptr, false, data, PF_WAIT, fp, ip,
		return ex_,
		goto do_nothing
	);
	if (unlikely(da_tag(data) == DATA_TAG_array_incomplete)) {
		ptr = &da(data,array_incomplete)->next;
		goto next_ptr;
	}

	array_resolve_thunk(fp, slot_1);

	data = pointer_get_data(*frame_pointer(fp, slot_1));
	/* A single exactly-sized flat array is already flattened. */
	if (da_tag(data) == DATA_TAG_array_flat && da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries) {
		if (array_type->tag == TYPE_TAG_flat_array) {
			/* may still be storable directly into the flat slot_r */
			pointer_t p = frame_get_pointer_reference(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
			na = pointer_get_data(p);
			goto try_to_flatten;
		}
		goto do_nothing;
	}
	/* A single exactly-sized, non-reallocated pointers-array: keep as-is. */
	if (da_tag(data) == DATA_TAG_array_pointers && da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries && da(data,array_pointers)->pointer == da(data,array_pointers)->pointer_array) {
		goto do_nothing;
	}

	/* Total length must fit in int_default_t. */
	len_long = array_len(data);
	if (unlikely(!index_is_int(len_long))) {
		index_free(&len_long);
		goto do_nothing;
	}
	len = index_to_int(len_long);
	index_free(&len_long);

	/* Probe the first segment to learn whether elements are flat. */
	flat_type = NULL;
	index_from_int(&len_long, 0);
	array_onstack_iterate(fp, slot_1, &len_long, get_array_type, &flat_type);
	index_free(&len_long);

	if (!flat_type) {
		/* boxed elements: gather all pointers into one pointers-array */
		na = data_alloc_array_pointers_mayfail(len, 0, &sink pass_file_line);
		if (unlikely(!na))
			goto do_nothing;
		index_from_int(&len_long, 0);
		success = array_onstack_iterate(fp, slot_1, &len_long, get_array_pointers, na);
		index_free(&len_long);
		if (unlikely(!success)) {
			/* na may already hold references — full dereference */
			pointer_dereference(pointer_data(na));
			goto do_nothing;
		}
	} else {
		/* flat elements: copy everything into one zero-filled flat array
		 * (the 'true' argument clears it; see get_array_flat) */
		na = data_alloc_array_flat_mayfail(flat_type, len, 0, true, &sink pass_file_line);
		if (unlikely(!na))
			goto do_nothing;
		index_from_int(&len_long, 0);
		success = array_onstack_iterate(fp, slot_1, &len_long, get_array_flat, na);
		index_free(&len_long);
		if (unlikely(!success)) {
			data_free_r1(na);
			goto do_nothing;
		}
	}

	if (flags & OPCODE_FLAG_FREE_ARGUMENT)
		frame_free_and_clear(fp, slot_1);

try_to_flatten:
	/* If slot_r is a flat-array slot of exactly this length, unbox. */
	if (array_type->tag == TYPE_TAG_flat_array && da_tag(na) == DATA_TAG_array_flat) {
		struct flat_array_definition *fa = type_def(array_type,flat_array);
		if (fa->n_elements == da(na,array_flat)->n_used_entries) {
			memcpy(frame_var(fp, slot_r), da_array_flat(na), array_type->size);
			pointer_dereference(pointer_data(na));
			return POINTER_FOLLOW_THUNK_GO;
		}
	}

	frame_set_pointer(fp, slot_r, pointer_data(na));

	return POINTER_FOLLOW_THUNK_GO;

do_nothing:
	/* fall back to a plain copy of slot_1 into slot_r */
	ipret_copy_variable(fp, slot_1, fp, slot_r, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Submit background evaluation of all not-yet-compiled functions referenced
 * from 'function's local directory, so they are likely ready when called.
 * Best-effort: allocation failures are silently ignored.
 */
void attr_fastcall ipret_prefetch_functions(struct data *function)
{
	frame_t x;
	for (x = 0; x < da(function,function)->local_directory_size; x++) {
		ajla_error_t sink;
		pointer_t *lfnp, lfn;
		lfnp = da(function,function)->local_directory[x];
		/* cheap unlocked pre-test before taking a real reference */
		if (pointer_is_thunk(pointer_locked_read(lfnp))) {
			struct execution_control *ex;
			lfn = pointer_reference(lfnp);
			/* re-check under the reference: someone may have evaluated
			 * it meanwhile, or it may be a non-call thunk */
			if (unlikely(!pointer_is_thunk(lfn)) || thunk_tag(pointer_get_thunk(lfn)) != THUNK_TAG_FUNCTION_CALL) {
				pointer_dereference(lfn);
				continue;
			}
			ex = function_evaluate_prepare(&sink);
			if (likely(ex != NULL))
				function_evaluate_submit(ex, lfn, NULL, NULL);	/* consumes lfn */
			else
				pointer_dereference(lfn);	/* best-effort: drop on OOM */
		}
	}
}
/*
 * Split the call chain between frames 'high' and 'low' into two separately
 * schedulable execution controls: the frames from top_fp down to 'high' get
 * a new stack (returned), while 'low' and below stay on the old stack under
 * a freshly allocated execution control whose results are delivered through
 * blackhole thunks planted into low's return slots.
 *
 * Returns the relocated top frame pointer, or NULL on allocation failure
 * (in which case the chain is left unbroken).
 */
static attr_noinline frame_s *ipret_break(frame_s *top_fp, frame_s *high, frame_s *low)
{
	frame_s *fp;
	struct execution_control *high_ex, *low_ex;
	struct data *high_function, *low_function;
	ajla_error_t sink;
	struct thunk *t;
	struct thunk **result;
	const code_t *ip;
	arg_t ia;

	/* Arguments may be borrowed (not owned) by their frames; after the
	 * split both halves must own their pointers, so take a reference for
	 * every borrowed, still-present argument from top_fp down to low. */
	fp = top_fp;
	do {
		struct data *function = get_frame(fp)->function;
		const struct local_arg *la = da(function,function)->args;
		for (ia = 0; ia < da(function,function)->n_arguments; ia++, la++) {
			frame_t slot;
			pointer_t ptr;
			if (!la->may_be_borrowed)
				continue;
			slot = la->slot;
			ptr = *frame_pointer(fp, slot);
			if (!pointer_is_empty(ptr) && !frame_test_and_set_flag(fp, slot))
				pointer_reference_owned(ptr);
		}
	} while ((fp = frame_up(fp)) != low);

	high_ex = frame_execution_control(high);

	/* Move the frames above 'low' onto a new stack. */
	top_fp = stack_split(top_fp, low, &high, &sink);
	if (unlikely(!top_fp))
		goto err0;

	low_ex = execution_control_alloc(&sink);
	if (unlikely(!low_ex))
		goto err1;

	/* low_ex takes over the old stack and the parent's callback/thunk;
	 * waiters of the original computation now wait on low_ex. */
	low_ex->stack = frame_stack_bottom(low);
	low_ex->stack->ex = low_ex;
	low_ex->callback = high_ex->callback;

	t = high_ex->thunk;
	low_ex->thunk = t;
	if (t) {
		address_lock(t, DEPTH_THUNK);
		t->u.function_call.u.execution_control = low_ex;
		list_take(&low_ex->wait_list, &high_ex->wait_list);
		address_unlock(t, DEPTH_THUNK);
	}

	/* high_ex keeps running the split-off upper frames on the new stack. */
	high_ex->stack = frame_stack_bottom(high);
	high_ex->stack->ex = high_ex;

	high_function = get_frame(high)->function;
	result = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, da(high_function,function)->n_return_values, sizeof(struct thunk *), &sink);
	if (unlikely(!result))
		goto err2;

	/* Blackholes stand in for high's return values until it finishes. */
	if (unlikely(!thunk_alloc_blackhole(high_ex, da(get_frame(high)->function,function)->n_return_values, result, &sink)))
		goto err3;

	/* Resume 'low' at the return-processing code after its call site and
	 * plant the blackhole thunks into the destination slots encoded there. */
	low_function = get_frame(low)->function;
	ip = da(low_function,function)->code + get_frame(high)->previous_ip;
	low_ex->current_frame = low;

	ia = 0;
	do {
		frame_t dst_slot;

		dst_slot = get_max_param(ip, 0);
		frame_set_pointer(low, dst_slot, pointer_thunk(result[ia]));

		ip += max_param_size(1) + 1;	/* advance over one return-slot operand */
	} while (++ia < da(high_function,function)->n_return_values);

	low_ex->current_ip = frame_ip(low, ip);

	mem_free(result);

	/*get_frame(low)->timestamp++;*/

	return top_fp;

err3:
	mem_free(result);
err2:
	/* NOTE(review): by this point low_ex has already taken over high_ex's
	 * thunk, wait list and the old stack, so high_ex appears to be the
	 * orphaned control here — but freeing high_ex (not low_ex) while the
	 * split already happened looks asymmetric; verify this error path. */
	mem_free(high_ex);
err1:
	stack_free(frame_stack_bottom(top_fp));
err0:
	return NULL;
}
/*
 * Walk the call chain from fp upward and split off ("break") frames whose
 * callers have been waiting longer than break_ticks (or that were sparked),
 * submitting the split-off parts as independent tasks.
 *
 * waiting: 0 = called from the timer tick, >0 = called on behalf of a
 * waiting computation (may return NULL after handing work off).
 * *something_breakable is set when an unbroken breakable frame remains.
 * Returns the execution control to continue with, or NULL.
 */
static void * attr_hot_fastcall ipret_break_the_chain(frame_s *fp, const code_t *ip, int waiting, bool *something_breakable)
{
	frame_s *top_fp = fp;
	frame_s *prev_fp;
	struct execution_control *ex;
	timestamp_t t = get_frame(fp)->timestamp++;

	if (!waiting) {
		/* account a profiling sample to the currently running function */
		struct data *top_fn = get_frame(fp)->function;
		if (unlikely(profiling)) {
			profile_counter_t profiling_counter;
			profiling_counter = load_relaxed(&da(top_fn,function)->profiling_counter);
			profiling_counter += profile_sample();
			store_relaxed(&da(top_fn,function)->profiling_counter, profiling_counter);
		}
	}

	*something_breakable = false;

	/* breaking is disabled inside atomic sections */
	if (unlikely(frame_execution_control(fp)->atomic != 0))
		goto no_break;

	while (1) {
		prev_fp = frame_up(fp);
		if (frame_is_top(prev_fp))
			break;
		if (get_frame(fp)->mode == CALL_MODE_STRICT)
			goto skip_this;	/* strict calls must not be parallelized */
		/* Break when explicitly sparked, or when the caller has been
		 * stuck for more than break_ticks timestamp increments. */
		if (get_frame(fp)->mode == CALL_MODE_SPARK || (likely(!ipret_strict_calls) && (timestamp_t)(t - get_frame(prev_fp)->timestamp) > break_ticks)) {
			struct execution_control *low_ex, *high_ex;
			frame_s *new_fp;
			/*debug("break: %s - %s (%u - %u - %u)", da(get_frame(prev_fp)->function,function)->function_name, da(get_frame(fp)->function,function)->function_name, t, get_frame(prev_fp)->timestamp, get_frame(fp)->timestamp);*/
			/*debug("break %"PRIuMAX"", (uintmax_t)++break_count);*/
			new_fp = ipret_break(top_fp, fp, prev_fp);
			if (unlikely(!new_fp))
				break;	/* allocation failure: give up breaking */
			low_ex = frame_execution_control(prev_fp);
			if (waiting > 0) {
				/* keep the upper half parked; continue scanning the
				 * lower half ourselves */
				high_ex = frame_execution_control(new_fp);
				high_ex->current_frame = new_fp;
#if 0
				task_submit(low_ex, true);
				return NULL;
#else
				waiting = -1;
				goto cont_with_low_ex;
#endif
			}
#if 0
			task_submit(low_ex, true);
			top_fp = new_fp;
			break;
#else
			/* submit the upper (split-off) half and continue walking
			 * the lower half looking for further break points */
			high_ex = frame_execution_control(new_fp);
			high_ex->current_frame = new_fp;
			high_ex->current_ip = frame_ip(new_fp, ip);
			task_submit(high_ex, true);
cont_with_low_ex:
			prev_fp = top_fp = low_ex->current_frame;
			ip = da(get_frame(top_fp)->function,function)->code + low_ex->current_ip;
			/*t = get_frame(top_fp)->timestamp;*/
#endif
		} else {
			*something_breakable = true;
		}
skip_this:
		fp = prev_fp;
	}
no_break:
	if (waiting > 0)
		return NULL;
	ex = frame_execution_control(top_fp);
	ex->current_frame = top_fp;
	ex->current_ip = frame_ip(top_fp, ip);
	return ex;
}
2844 bool attr_fastcall ipret_break_waiting_chain(frame_s *fp, ip_t ip)
2846 bool something_breakable;
2847 struct execution_control *ex;
2849 ex = ipret_break_the_chain(fp, da(get_frame(fp)->function,function)->code + ip, 1, &something_breakable);
2850 if (ex)
2851 task_submit(ex, true);
2853 return something_breakable;
2856 void * attr_hot_fastcall ipret_tick(frame_s *fp, const code_t *ip)
2858 bool sink;
2859 struct execution_control *ex;
2861 waiting_list_break();
2863 ex = ipret_break_the_chain(fp, ip, 0, &sink);
2865 return task_schedule(ex);
2868 #endif