alpha: fix compilation failure in do_cvt_to_int
[ajla.git] / ipfn.c
blobc8f61337fba51c5d3826f1155e879cfefdc8116e
1 /*
2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
9 * version.
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
19 #include "ajla.h"
21 #ifndef FILE_OMIT
23 #include "args.h"
24 #include "mem_al.h"
25 #include "data.h"
26 #include "array.h"
27 #include "arrayu.h"
28 #include "task.h"
29 #include "pcode.h"
30 #include "ipio.h"
31 #include "funct.h"
32 #include "os.h"
34 #include "ipfn.h"
/* Global interpreter flags, settable from the command line / embedder. */
shared_var bool ipret_strict_calls shared_init(false);	/* evaluate call arguments strictly instead of lazily */
shared_var bool ipret_is_privileged shared_init(false);	/* reported via SystemProperty_Privileged */
shared_var bool ipret_compile shared_init(false);	/* reported via SystemProperty_Compile */

/* Number of timer ticks after which a long-running operation yields. */
static const timestamp_t break_ticks = 1;
/*
 * Force evaluation of up to two slots (either may be NO_FRAME_T) and park
 * the current execution control until both are resolved.
 *
 * For each non-flat slot holding a thunk, pointer_follow with PF_PREPARE*
 * registers this execution on the thunk's wakeup list (SUBMIT_EX submits the
 * prepared wait structure); execution_control_acquire takes one reference
 * per registered wait.  pointer_follow_wait finally suspends the frame.
 * NOTE(review): the brk1/brk2 labels are jumped to from inside the
 * pointer_follow macro's wait branch, skipping the acquire for that slot.
 */
void eval_both(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2)
{
	struct execution_control *ex = frame_execution_control(fp);
	if (slot_1 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_1)) {
			pointer_t *ptr = frame_pointer(fp, slot_1);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE0, fp, ip,
				SUBMIT_EX(ex_); goto brk1,
				break);
		}
		execution_control_acquire(ex);
	}
brk1:
	if (slot_2 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_2)) {
			pointer_t *ptr = frame_pointer(fp, slot_2);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE1, fp, ip,
				SUBMIT_EX(ex_); goto brk2,
				break);
		}
		execution_control_acquire(ex);
	}
brk2:
	pointer_follow_wait(fp, ip);
}
70 static void pointer_copy_owned(frame_s *fp, frame_t src_slot, frame_t dest_slot)
72 pointer_t ptr;
73 if (dest_slot == src_slot)
74 return;
75 ptr = *frame_pointer(fp, src_slot);
76 frame_free_and_set_pointer(fp, dest_slot, ptr);
77 pointer_reference_owned(ptr);
/*
 * Store the value of frame slot 'slot' as curried argument 'a' of a
 * function-reference object.  If function_reference is NULL (allocation of
 * the reference failed earlier), only perform the requested dereference so
 * the slot's refcount stays balanced.  Flat values are copied inline;
 * boxed values are transferred as a pointer (taking or stealing a
 * reference according to 'deref').
 */
void attr_hot_fastcall ipret_fill_function_reference_from_slot(struct data *function_reference, arg_t a, frame_s *fp, frame_t slot, bool deref)
{
	const struct type *type;
	pointer_t ptr;

	if (unlikely(!function_reference)) {
		if (deref)
			frame_free_and_clear(fp, slot);
		return;
	}

	ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "ipret_fill_function_reference_from_slot: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));

	if (frame_variable_is_flat(fp, slot)) {
		type = frame_get_type_of_local(fp, slot);
		data_fill_function_reference_flat(function_reference, a, type, frame_var(fp, slot));
	} else {
		ptr = frame_get_pointer_reference(fp, slot, deref);
		data_fill_function_reference(function_reference, a, ptr);
	}
}
/*
 * Build a function-call thunk that will invoke *fn_ptr with n_arguments
 * curried arguments.  On success *function_reference is the freshly
 * allocated reference (caller fills in the arguments) and the returned
 * thunk owns it.  On allocation failure *function_reference is set to NULL
 * and an exception thunk carrying the error is returned instead.
 */
static struct thunk *build_thunk(pointer_t *fn_ptr, arg_t n_arguments, struct data **function_reference)
{
	struct thunk *result;
	ajla_error_t err;

	*function_reference = data_alloc_function_reference_mayfail(n_arguments, &err pass_file_line);
	if (unlikely(!*function_reference))
		goto fail_err;
	/* Direct (non-indirect) reference to the target function. */
	da(*function_reference,function_reference)->is_indirect = false;
	da(*function_reference,function_reference)->u.direct = fn_ptr;

	if (unlikely(!thunk_alloc_function_call(pointer_data(*function_reference), 1, &result, &err))) {
		data_dereference(*function_reference);
		goto fail_err;
	}

	return result;

fail_err:
	*function_reference = NULL;
	return thunk_alloc_exception_error(err, NULL, NULL, NULL pass_file_line);
}
/*
 * Slow path for operators whose operand(s) are thunks or exceptions.
 * Either propagates an exception result, forces strict evaluation, or
 * builds a lazy thunk that will apply the pcode implementation of the
 * operator to the (still unevaluated) operands.
 *
 * slot_2 may be NO_FRAME_T for unary operators.  Returns one of the
 * POINTER_FOLLOW_THUNK_* dispositions.
 */
static void *ipret_op_build_thunk(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	unsigned flags;
	pointer_t *fn_ptr;
	code_t code;
	void *ex;
	struct data *function_reference;
	struct thunk *result;
	frame_t slot_1_eval = NO_FRAME_T;
	frame_t slot_2_eval = NO_FRAME_T;

	code = *ip % OPCODE_MODE_MULT;
	/* "is exception" tests must answer true for an exception operand
	 * instead of propagating it. */
	if (code == OPCODE_IS_EXCEPTION)
		strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
	if (code >= OPCODE_REAL_OP && code < OPCODE_REAL_OP + OPCODE_REAL_TYPE_MULT * TYPE_REAL_N) {
		code_t op = (code - OPCODE_REAL_OP) % OPCODE_REAL_TYPE_MULT;
		if (op == OPCODE_REAL_OP_is_exception || op == OPCODE_REAL_OP_is_exception_alt1 || op == OPCODE_REAL_OP_is_exception_alt2)
			strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
	}

	/* Inspect operand 1 if it is a thunk: exception thunks are resolved
	 * here; blocked thunks are remembered in slot_1_eval. */
	if (frame_test_flag(fp, slot_1) && pointer_is_thunk(*frame_pointer(fp, slot_1))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
			return POINTER_FOLLOW_THUNK_RETRY,
			if (strict_flag & FLAG_TESTING_FOR_EXCEPTION) {
				frame_free(fp, slot_r);
				barrier_aliasing();
				*frame_slot(fp, slot_r, ajla_flat_option_t) = 1;
				barrier_aliasing();
				return POINTER_FOLLOW_THUNK_GO;
			}
			if (!(strict_flag & FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL)) {
				pointer_copy_owned(fp, slot_1, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			}
			/* bool and/or: one exception is not yet fatal. */
			strict_flag |= FLAG_FIRST_EXCEPTION;
			break,
			slot_1_eval = slot_1; break
		);
	}

	/* Same for operand 2 (binary operators only). */
	if (slot_2 != NO_FRAME_T && frame_test_flag(fp, slot_2) && pointer_is_thunk(*frame_pointer(fp, slot_2))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_2),
			return POINTER_FOLLOW_THUNK_RETRY,
			if ((strict_flag & (FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL | FLAG_FIRST_EXCEPTION)) != FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL) {
				pointer_copy_owned(fp, slot_2, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			}
			break,
			slot_2_eval = slot_2; break
		);
	}

	/* Strict mode: evaluate the blocked operand(s) now instead of
	 * building a lazy thunk. */
	if (strict_flag & OPCODE_OP_FLAG_STRICT) {
		if (slot_1_eval != NO_FRAME_T || slot_2_eval != NO_FRAME_T) {
			eval_both(fp, ip, slot_1_eval, slot_2_eval);
			return POINTER_FOLLOW_THUNK_EXIT;
		}
		return POINTER_FOLLOW_THUNK_RETRY;
	}

	/* Locate the pcode function implementing this opcode. */
	flags = 0;
	if (slot_2 == NO_FRAME_T) {
		flags |= PCODE_FIND_OP_UNARY;
		if (type_is_equal(frame_get_type_of_local(fp, slot_1), type_get_int(INT_DEFAULT_N)) &&
		    !type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)))
			flags |= PCODE_CONVERT_FROM_INT;
	}
	if (code == OPCODE_IS_EXCEPTION)
		ex = pcode_find_is_exception(fp, ip, &fn_ptr);
	else if (code == OPCODE_EXCEPTION_CLASS || code == OPCODE_EXCEPTION_TYPE || code == OPCODE_EXCEPTION_AUX)
		ex = pcode_find_get_exception(code - OPCODE_EXCEPTION_CLASS, fp, ip, &fn_ptr);
	else
		ex = pcode_find_op_function(frame_get_type_of_local(fp, slot_1), frame_get_type_of_local(fp, slot_r), code, flags, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		return ex;

	/* Curry the operands into a lazy call thunk and store it. */
	result = build_thunk(fn_ptr, slot_2 != NO_FRAME_T ? 2 : 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
	if (slot_2 != NO_FRAME_T)
		ipret_fill_function_reference_from_slot(function_reference, 1, fp, slot_2, false);

	frame_free_and_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
/* Result bits of ipret_unbox_value. */
#define UNBOX_THUNK 1		/* slot still holds an unevaluated thunk */
#define UNBOX_DID_SOMETHING 2	/* boxed flat value was copied into the slot */
#define UNBOX_LONGINT 4		/* slot holds a long integer (stays boxed) */
/*
 * Try to turn a boxed slot of flat type back into an inline flat value.
 * Returns a combination of the UNBOX_* bits above; 0 means the slot was
 * already flat.
 */
static int attr_hot_fastcall ipret_unbox_value(frame_s *fp, const struct type *type, frame_t slot)
{
	ajla_assert(TYPE_IS_FLAT(type), (file_line, "ipret_unbox_value: non-flat type %u", type->tag));
	if (frame_test_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (pointer_is_thunk(ptr))
			return UNBOX_THUNK;
		if (da_tag(pointer_get_data(ptr)) == DATA_TAG_longint) {
			/* Long integers only arise for integer-typed slots. */
			ajla_assert(TYPE_TAG_IS_INT(type->tag), (file_line, "ipret_unbox_value: unexpected longint, type %u", type->tag));
			return UNBOX_LONGINT;
		}
		/* Copy the flat payload inline and drop the heap reference. */
		memcpy_fast(frame_var(fp, slot), da_flat(pointer_get_data(ptr)), type->size);
		frame_clear_flag(fp, slot);
		pointer_dereference(ptr);
		return UNBOX_DID_SOMETHING;
	}
	return 0;
}
/*
 * If the real value in 'slot' is a NaN, materialize the result of the
 * operation directly: copy the NaN to slot_r when the result type matches,
 * otherwise store an AJLA_ERROR_NAN exception pointer.  Returns true when
 * the NaN was handled (caller should stop), false otherwise.
 * The case bodies are generated per real type by the for_all_real X-macro.
 */
static bool test_and_copy_nan(frame_s attr_unused *fp, const code_t attr_unused *ip, unsigned char type_tag, frame_t attr_unused slot, frame_t attr_unused slot_r)
{
	switch (type_tag) {
#define f(n, t, nt, pack, unpack)					\
		case TYPE_TAG_real + n: {				\
			t val;						\
			barrier_aliasing();				\
			val = *frame_slot(fp, slot, t);			\
			barrier_aliasing();				\
			if (unlikely(cat(isnan_,t)(val))) {		\
				if (type_tag == frame_get_type_of_local(fp, slot_r)->tag) {\
					barrier_aliasing();		\
					*frame_slot(fp, slot_r, t) = val;\
					barrier_aliasing();		\
				} else {				\
					frame_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NAN), fp, ip pass_file_line));\
				}					\
				return true;				\
			}						\
			break;						\
		}
		for_all_real(f, for_all_empty)
#undef f
	}
	return false;
}
/*
 * Slow path for fixed-width integer and real operators: unbox the
 * operand(s); short-circuit NaN operands; if any operand is still a thunk,
 * delegate to ipret_op_build_thunk; if unboxing made progress, ask the
 * caller to retry the fast path.
 */
void * attr_hot_fastcall thunk_fixed_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	const struct type *type;
	int converted;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert((TYPE_TAG_IS_FIXED(type->tag) || TYPE_TAG_IS_REAL(type->tag)) &&
		    (slot_2 == NO_FRAME_T || frame_get_type_of_local(fp, slot_2) == type),
		    (file_line, "thunk_fixed_operator: invalid types on opcode %04x: %u, %u, %u",
		    *ip,
		    type->tag,
		    slot_2 == NO_FRAME_T ? type->tag : frame_get_type_of_local(fp, slot_2)->tag,
		    frame_get_type_of_local(fp, slot_r)->tag));

	converted = ipret_unbox_value(fp, type, slot_1);
	/* A flat NaN operand decides the result immediately. */
	if (!frame_test_flag(fp, slot_1) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_1, slot_r)))
		return POINTER_FOLLOW_THUNK_GO;
	if (slot_2 != NO_FRAME_T) {
		converted |= ipret_unbox_value(fp, type, slot_2);
		if (!frame_test_flag(fp, slot_2) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_2, slot_r)))
			return POINTER_FOLLOW_THUNK_GO;
	}

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	return POINTER_FOLLOW_THUNK_RETRY;
}
/*
 * Implement the "is exception" test when the operand may be a thunk.
 * Evaluated data answers 0 (or the NaN test for flat reals); an exception
 * thunk answers 1; a blocked thunk defers to ipret_op_build_thunk.
 */
void * attr_hot_fastcall is_thunk_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag)
{
	ajla_flat_option_t value;
	pointer_t *ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		struct data *d = pointer_get_data(*ptr);
		value = 0;
		if (da_tag(d) == DATA_TAG_flat) {
			/* Flat reals: NaN counts as an exception. */
			value = data_is_nan(da(d,flat)->data_type, da_flat(d));
		}
		goto return_val;
	}

	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,
		value = 1; goto return_val,
		goto create_thunk;
	);

return_val:
	frame_free(fp, slot_r);
	barrier_aliasing();
	*frame_slot(fp, slot_r, ajla_flat_option_t) = value;
	barrier_aliasing();
	return POINTER_FOLLOW_THUNK_GO;

create_thunk:
	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
}
/*
 * Extract one field of an exception into slot_r as an int_default_t:
 * mode 0 = error class, 1 = error type, 2 = auxiliary code.
 * Flat NaN values are treated as an EC_SYNC/AJLA_ERROR_NAN exception.
 * A non-exception operand yields an INVALID_OPERATION exception in slot_r;
 * a blocked thunk defers to ipret_op_build_thunk.
 */
void * attr_hot_fastcall thunk_get_param(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, unsigned mode)
{
	struct thunk *ex;
	pointer_t *ptr;
	int result;

	ajla_assert(slot_r == slot_1 || !frame_test_flag(fp, slot_r), (file_line, "thunk_get_param: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	if (unlikely(!frame_test_flag(fp, slot_1))) {
		/* Flat operand: only a NaN qualifies. */
		const struct type *type;
		type = frame_get_type_of_local(fp, slot_1);
		if (likely(data_is_nan(type->tag, frame_var(fp, slot_1)))) {
have_nan:
			switch (mode) {
				case 0:
					result = EC_SYNC;
					break;
				case 1:
					result = AJLA_ERROR_NAN;
					break;
				case 2:
					result = 0;
					break;
				default:
					internal(file_line, "thunk_get_param: invalid mode %u", mode);
			}
			goto set_result;
		}
		goto not_thunk;
	}

	ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		/* Boxed flat data may still hold a NaN. */
		struct data *data = pointer_get_data(*ptr);
		if (likely(da_tag(data) == DATA_TAG_flat)) {
			if (likely(data_is_nan(da(data,flat)->data_type, da_flat(data)))) {
				goto have_nan;
			}
		}
		goto not_thunk;
	}

	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,
		goto have_ex,
		goto create_thunk;
	);

have_ex:
	ex = pointer_get_thunk(*ptr);
	switch (mode) {
		case 0:
			result = ex->u.exception.err.error_class;
			break;
		case 1:
			result = ex->u.exception.err.error_type;
			break;
		case 2:
			result = ex->u.exception.err.error_aux;
			break;
		default:
			internal(file_line, "thunk_get_param: invalid mode %u", mode);
	}

set_result:
	frame_free(fp, slot_r);
	barrier_aliasing();
	*frame_slot(fp, slot_r, int_default_t) = result;
	barrier_aliasing();

	return POINTER_FOLLOW_THUNK_GO;

not_thunk:
	frame_free_and_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line));

	return POINTER_FOLLOW_THUNK_GO;

create_thunk:
	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
}
400 int_default_t ipret_system_property(int_default_t idx)
402 int_default_t result;
404 switch (idx) {
405 case SystemProperty_OS:
406 #if defined(OS_DOS)
407 result = SystemProperty_OS_DOS;
408 #elif defined(OS_OS2)
409 result = SystemProperty_OS_OS2;
410 #elif defined(OS_CYGWIN)
411 result = SystemProperty_OS_Cygwin;
412 #elif defined(OS_WIN32)
413 result = SystemProperty_OS_Windows;
414 #else
415 result = SystemProperty_OS_Posix;
416 #endif
417 break;
418 #if defined(OS_DOS) || defined(OS_OS2) || defined(OS_WIN32)
419 case SystemProperty_Charset:
420 result = os_charset();
421 break;
422 #endif
423 #if defined(OS_WIN32)
424 case SystemProperty_Charset_Console:
425 result = os_charset_console();
426 break;
427 #endif
428 case SystemProperty_Fixed:
429 result = INT_MASK;
430 break;
431 case SystemProperty_Real:
432 result = REAL_MASK;
433 break;
434 case SystemProperty_Privileged:
435 result = ipret_is_privileged;
436 break;
437 case SystemProperty_Compile:
438 result = ipret_compile;
439 break;
440 default:
441 return -1;
444 return result;
/*
 * Bytecode entry point for the system-property query: evaluate the index
 * argument in slot_1 (may require thunk evaluation or raise an exception),
 * then store ipret_system_property()'s answer into slot_r.
 */
void * attr_hot_fastcall ipret_get_system_property(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
{
	void *ex;
	array_index_t idx_l;
	int_default_t idx, result;
	pointer_t result_ptr;

	ex = ipret_get_index(fp, ip, fp, slot_1, NULL, &idx_l, &result_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			/* Propagate the exception as the result. */
			frame_free_and_set_pointer(fp, slot_r, result_ptr);
			return POINTER_FOLLOW_THUNK_GO;
		}
		return ex;
	}
	idx = index_to_int(idx_l);
	index_free(&idx_l);

	result = ipret_system_property(idx);

	frame_free(fp, slot_r);
	barrier_aliasing();
	*frame_slot(fp, slot_r, int_default_t) = result;
	barrier_aliasing();

	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Initialize the mpint 'm' from a flat integer of width class 'intx'
 * stored at 'ptr'.  The per-width case bodies come from the for_all_int
 * X-macro.  Returns false (with *err set) on allocation failure.
 */
static bool int_to_mpint(mpint_t *m, const unsigned char *ptr, unsigned intx, ajla_error_t *err)
{
#define f(n, s, u, sz, bits)						\
	case n: {							\
		bool ret;						\
		barrier_aliasing();					\
		ret = cat(mpint_init_from_,s)(m, *cast_ptr(const s *, ptr), err);\
		barrier_aliasing();					\
		return ret;						\
	}
	switch (intx) {
		for_all_int(f, for_all_empty)
		default:
			internal(file_line, "int_to_mpint: invalid type %d", intx);
	}
#undef f
	not_reached();
	return false;
}
/*
 * Get an mpint view of an integer slot.  If the slot already holds a
 * longint, return a pointer to its embedded mpint (no copy, not owned by
 * the caller).  Otherwise convert the flat value into *storage and return
 * storage; the caller must mpint_free it.  Returns NULL with *err set on
 * failure.
 */
static mpint_t * attr_hot_fastcall int_get_mpint(frame_s *fp, frame_t slot, mpint_t *storage, unsigned intx, ajla_error_t *err)
{
	unsigned char *flat;
	if (frame_test_flag(fp, slot)) {
		struct data *d = pointer_get_data(*frame_pointer(fp, slot));
		if (likely(da_tag(d) == DATA_TAG_longint))
			return &da(d,longint)->mp;
		/* Boxed flat value. */
		flat = da_flat(d);
	} else {
		flat = frame_var(fp, slot);
	}
	if (unlikely(!int_to_mpint(storage, flat, intx, err)))
		return NULL;
	return storage;
}
/*
 * Obtain a writable longint result object for 'slot', reusing the slot's
 * existing longint when it is exclusively owned.  Any previous pointer that
 * must be released later is stored in *to_free (pointer_empty() if none) —
 * the caller releases it only after it no longer reads the old operands.
 * On allocation failure clears the slot flag and returns NULL.
 */
static struct data * attr_hot_fastcall int_allocate_result(frame_s *fp, frame_t slot, unsigned long bits, pointer_t *to_free, ajla_error_t *err)
{
	struct data *d;

	*to_free = pointer_empty();

	if (frame_test_and_set_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (!pointer_is_thunk(ptr)) {
			d = pointer_get_data(ptr);
			/* Reuse in place when uniquely referenced. */
			if (da_tag(d) == DATA_TAG_longint && data_is_writable(d))
				return d;
		}
		*to_free = ptr;
	}

	d = data_alloc_longint_mayfail(bits, err pass_file_line);
	if (unlikely(!d)) {
		frame_clear_flag(fp, slot);
		return NULL;
	}
	*frame_pointer(fp, slot) = pointer_data(d);
	return d;
}
/*
 * Slow path for binary integer operators that may involve long integers.
 * Unboxes the operands; thunks defer to ipret_op_build_thunk; plain
 * progress asks for a fast-path retry.  Otherwise performs the operation
 * in arbitrary precision via do_op and attempts to demote the result back
 * to the flat width (mpint_export); if it does not fit, the longint result
 * stays boxed in slot_r.  All errors produce an exception in slot_r.
 */
void * attr_hot_fastcall thunk_int_binary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, mpint_t *res, ajla_error_t *err))
{
	ajla_error_t err;
	const struct type *type;
	unsigned intx;
	int converted;
	mpint_t s1, s2;
	mpint_t *val1, *val2;
	struct data *result;
	pointer_t to_free;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		    frame_get_type_of_local(fp, slot_2) == type &&
		    frame_get_type_of_local(fp, slot_r) == type,
		    (file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u, %u, %u",
		    *ip,
		    type->tag,
		    frame_get_type_of_local(fp, slot_2)->tag,
		    frame_get_type_of_local(fp, slot_r)->tag));

	converted = 0;

	converted |= ipret_unbox_value(fp, type, slot_1);
	converted |= ipret_unbox_value(fp, type, slot_2);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	/* Both operands became flat — the fast path can handle them now. */
	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
		goto fail_oom_1;
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
		goto fail_oom_2;
	if (unlikely(!(result = int_allocate_result(fp, slot_r, maximum(mpint_estimate_bits(val1), mpint_estimate_bits(val2)), &to_free, &err))))
		goto fail_oom_3;
	if (unlikely(!do_op(val1, val2, &da(result,longint)->mp, &err)))
		goto fail_oom_3;
	/* Release temporaries only after do_op — val1/val2 may alias the
	 * slot data kept alive by to_free. */
	if (val1 == &s1)
		mpint_free(&s1);
	if (val2 == &s2)
		mpint_free(&s2);
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	/* Demote to flat representation when the value fits. */
	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	}
	return POINTER_FOLLOW_THUNK_GO;

fail_oom_3:
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	if (val2 == &s2)
		mpint_free(&s2);
fail_oom_2:
	if (val1 == &s1)
		mpint_free(&s1);
fail_oom_1:
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Unary counterpart of thunk_int_binary_operator: unbox the operand,
 * defer thunks, retry on progress, otherwise apply do_op in arbitrary
 * precision and demote the result back to flat width when it fits.
 */
void * attr_hot_fastcall thunk_int_unary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, mpint_t *res, ajla_error_t *err))
{
	ajla_error_t err;
	const struct type *type;
	unsigned intx;
	int converted;
	mpint_t s1;
	mpint_t *val1;
	struct data *result;
	pointer_t to_free;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		    frame_get_type_of_local(fp, slot_r) == type,
		    (file_line, "thunk_int_unary_operator: invalid types on opcode %04x: %u, %u",
		    *ip,
		    type->tag,
		    frame_get_type_of_local(fp, slot_r)->tag));

	converted = 0;

	converted |= ipret_unbox_value(fp, type, slot_1);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);

	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
		goto fail_oom_1;
	if (unlikely(!(result = int_allocate_result(fp, slot_r, mpint_estimate_bits(val1), &to_free, &err))))
		goto fail_oom_3;
	if (unlikely(!do_op(val1, &da(result,longint)->mp, &err)))
		goto fail_oom_3;
	if (val1 == &s1)
		mpint_free(&s1);
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	/* Demote to flat representation when the value fits. */
	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	}
	return POINTER_FOLLOW_THUNK_GO;

fail_oom_3:
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	if (val1 == &s1)
		mpint_free(&s1);
fail_oom_1:
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Like thunk_int_binary_operator but for comparisons: do_op produces an
 * ajla_flat_option_t written directly into slot_r, so no longint result
 * object is needed.
 */
void * attr_hot_fastcall thunk_int_binary_logical_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, ajla_flat_option_t *res, ajla_error_t *err))
{
	ajla_error_t err;
	const struct type *type;
	unsigned intx;
	int converted;
	mpint_t s1, s2;
	mpint_t *val1, *val2;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		    frame_get_type_of_local(fp, slot_2) == type &&
		    frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
		    (file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u, %u",
		    *ip,
		    type->tag,
		    frame_get_type_of_local(fp, slot_2)->tag,
		    frame_get_type_of_local(fp, slot_r)->tag));

	converted = 0;

	converted |= ipret_unbox_value(fp, type, slot_1);
	converted |= ipret_unbox_value(fp, type, slot_2);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
		goto fail_oom_1;
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
		goto fail_oom_2;
	barrier_aliasing();
	if (unlikely(!do_op(val1, val2, frame_slot(fp, slot_r, ajla_flat_option_t), &err))) {
		barrier_aliasing();
		goto fail_oom_3;
	}
	barrier_aliasing();
	if (val1 == &s1)
		mpint_free(&s1);
	if (val2 == &s2)
		mpint_free(&s2);

	return POINTER_FOLLOW_THUNK_GO;

fail_oom_3:
	if (val2 == &s2)
		mpint_free(&s2);
fail_oom_2:
	if (val1 == &s1)
		mpint_free(&s1);
fail_oom_1:
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Load a long-integer constant embedded in the instruction stream:
 * a 32-bit word count follows the opcode, then the mpint payload.
 * Stores a longint (or an exception on failure) into 'slot' and returns
 * the number of code words consumed (2 for the count + the payload).
 */
ip_t attr_hot_fastcall ipret_int_ldc_long(frame_s *fp, frame_t slot, const code_t *ip)
{
	ajla_error_t err;

	uint32_t n_words_32;
	ip_t n_words;
	struct data *d;

	n_words_32 = get_unaligned_32(ip);
	n_words = (ip_t)n_words_32;
	ajla_assert(n_words == n_words_32, (file_line, "ipret_int_ldc_long: n_words overflow: %lu != %lu", (unsigned long)n_words_32, (unsigned long)n_words));

	d = data_alloc_longint_mayfail((n_words + sizeof(code_t) - 1) / sizeof(code_t), &err pass_file_line);
	if (unlikely(!d))
		goto fail;

	if (unlikely(!mpint_import_from_code(&da(d,longint)->mp, ip + 2, n_words, &err))) {
		data_dereference(d);
		goto fail;
	}

	frame_set_pointer(fp, slot, pointer_data(d));

	return 2 + n_words;

fail:
	frame_set_pointer(fp, slot, pointer_error(err, fp, ip pass_file_line));

	return 2 + n_words;
}
/*
 * Promote a fixed-width integer (signed when !uns) to a longint data
 * object; returns a data pointer, or an exception pointer on allocation
 * failure.  The extra bit of capacity (+ uns) covers the unsigned range.
 */
pointer_t attr_fastcall convert_fixed_to_mpint(uintbig_t val, bool uns)
{
	ajla_error_t err;
	struct data *d;

	d = data_alloc_longint_mayfail(sizeof(uintbig_t) * 8 + uns, &err pass_file_line);
	if (unlikely(!d))
		goto fail;
	if (unlikely(!cat(mpint_set_from_,TYPE_INT_MAX)(&da(d,longint)->mp, (intbig_t)val, uns, &err))) {
		goto fail_deref;
	}
	return pointer_data(d);

fail_deref:
	data_dereference(d);
fail:
	return pointer_error(err, NULL, NULL pass_file_line);
}
/*
 * Convert the flat real value in src_slot to a longint data object.
 * A zero-sized longint is allocated first and its mpint re-initialized
 * from the real value by the per-type X-macro case.  Returns a data
 * pointer, or an exception pointer on failure.
 */
pointer_t attr_fastcall convert_real_to_mpint(frame_s *fp, frame_t src_slot, const struct type *src_type)
{
	unsigned char attr_unused *src_ptr;
	ajla_error_t err;
	struct data *d;

	d = data_alloc_longint_mayfail(0, &err pass_file_line);
	if (unlikely(!d))
		goto fail;
	/* Discard the placeholder mpint; the macro below re-creates it. */
	mpint_free(&da(d,longint)->mp);

#define re(n, rtype, ntype, pack, unpack)				\
	case TYPE_TAG_real + n: {					\
		if (unlikely(!cat(mpint_init_from_,rtype)(&da(d,longint)->mp, cast_ptr(rtype *, src_ptr), &err))) {\
			data_free_r1(d);				\
			goto fail;					\
		}							\
		break;							\
	}

	barrier_aliasing();
	src_ptr = frame_var(fp, src_slot);
	switch (src_type->tag) {
		for_all_real(re, for_all_empty)
		default:
			internal(file_line, "convert_real_to_mpint: invalid type %u", src_type->tag);
	}
	barrier_aliasing();
	return pointer_data(d);

#undef re

fail:
	barrier_aliasing();
	return pointer_error(err, NULL, NULL pass_file_line);
}
/*
 * Write the value of mpint 'mp' into the flat real slot dest_slot,
 * dispatching on the destination real type via the for_all_real X-macro.
 */
static attr_noinline void convert_mpint_to_real(frame_s *fp, frame_t dest_slot, const struct type *dest_type, const mpint_t attr_unused *mp)
{
	unsigned char attr_unused *dest_ptr;

#define re(n, rtype, ntype, pack, unpack)				\
	case TYPE_TAG_real + n:						\
		cat(mpint_export_to_,rtype)(mp, cast_ptr(rtype *, dest_ptr));\
		break;

	barrier_aliasing();
	dest_ptr = frame_var(fp, dest_slot);
	switch (dest_type->tag) {
		for_all_real(re, for_all_empty)
		default:
			internal(file_line, "convert_mpint_to_real: invalid type %u", dest_type->tag);
	}
	barrier_aliasing();

#undef re
}
/*
 * Slow path for numeric conversion between src_slot and dest_slot.
 * Handles: default-int -> fixed (mpint_export, DOESNT_FIT exception when
 * out of range), default-int -> other int width, default-int -> real, and
 * any int -> default-int.  Thunks defer to ipret_op_build_thunk; plain
 * unboxing progress asks for a fast-path retry; any other combination is
 * an interpreter bug (internal()).
 */
void * attr_hot_fastcall thunk_convert(frame_s *fp, const code_t *ip, frame_t src_slot, frame_t dest_slot, unsigned strict_flag)
{
	int converted;
	const struct type *src_type;
	const struct type *dest_type;
	ajla_error_t err;
	struct data *d;

	if (unlikely(src_slot == dest_slot))
		return POINTER_FOLLOW_THUNK_GO;

	src_type = frame_get_type_of_local(fp, src_slot);
	dest_type = frame_get_type_of_local(fp, dest_slot);

	converted = ipret_unbox_value(fp, src_type, src_slot);
	if (unlikely(converted == UNBOX_THUNK)) {
		return ipret_op_build_thunk(fp, ip, src_slot, NO_FRAME_T, dest_slot, strict_flag);
	}
	if (converted == UNBOX_DID_SOMETHING) {
		return POINTER_FOLLOW_THUNK_RETRY;
	}

	if (type_is_equal(dest_type, type_get_int(INT_DEFAULT_N))) {
		/* Widening to the default integer type. */
		if (likely(TYPE_TAG_IS_INT(src_type->tag))) {
			if (likely(converted == UNBOX_LONGINT)) {
				goto convert_longint;
			}
		}
	} else {
		/* Narrowing: source must be the default integer type. */
		if (unlikely(!type_is_equal(src_type, type_get_int(INT_DEFAULT_N))))
			goto int_err;
		if (TYPE_TAG_IS_FIXED(dest_type->tag)) {
			if (likely(converted == UNBOX_LONGINT)) {
				bool res;
				if (!TYPE_TAG_FIXED_IS_UNSIGNED(dest_type->tag))
					res = mpint_export(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
				else
					res = mpint_export_unsigned(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
				if (unlikely(!res))
					frame_set_pointer(fp, dest_slot, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_DOESNT_FIT), fp, ip pass_file_line));
				return POINTER_FOLLOW_THUNK_GO;
			}
		} else if (TYPE_TAG_IS_INT(dest_type->tag)) {
			if (likely(converted == UNBOX_LONGINT)) {
				goto convert_longint;
			}
		} else if (likely(TYPE_TAG_IS_REAL(dest_type->tag))) {
			if (likely(converted == UNBOX_LONGINT)) {
				convert_mpint_to_real(fp, dest_slot, dest_type, &da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp);
				return POINTER_FOLLOW_THUNK_GO;
			}
		}
	}
	goto int_err;

convert_longint:
	d = pointer_get_data(*frame_pointer(fp, src_slot));
	/* Fits flat: the export wrote the value; otherwise keep it boxed. */
	if (mpint_export(&da(d,longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_INT(dest_type->tag), &err)) {
		return POINTER_FOLLOW_THUNK_GO;
	}
	pointer_copy_owned(fp, src_slot, dest_slot);
	return POINTER_FOLLOW_THUNK_GO;

int_err:
	internal(file_line, "thunk_convert: invalid conversion %u->%u (%d)", src_type->tag, dest_type->tag, converted);
	return POINTER_FOLLOW_THUNK_RETRY;
}
900 static bool attr_hot_fastcall ipret_unbox_bool(frame_s *fp, frame_t slot)
902 if (frame_test_flag(fp, slot)) {
903 pointer_t ptr = *frame_pointer(fp, slot);
904 if (pointer_is_thunk(ptr))
905 return true;
906 barrier_aliasing();
907 *frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(pointer_get_data(ptr),option)->option;
908 barrier_aliasing();
909 frame_clear_flag(fp, slot);
910 pointer_dereference(ptr);
912 return false;
/*
 * Slow path for boolean operators with possibly-thunked operands.
 * Exploits short-circuiting: a known operand value may decide and/or/<=/<
 * without evaluating the other side.  Operand values use 2 as the
 * "unknown (thunk)" marker.  When both operands are thunks of a
 * short-circuiting operator, FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL is set so
 * a single exception does not fail the whole expression prematurely.
 */
void * attr_hot_fastcall thunk_bool_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	code_t code;
	ajla_flat_option_t val1, val2, result;

	code = *ip;
	code %= OPCODE_MODE_MULT;
	code = (code - OPCODE_BOOL_OP) / OPCODE_BOOL_OP_MULT;

	val1 = val2 = 2;	/* 2 == unknown */
	if (!ipret_unbox_bool(fp, slot_1)) {
		barrier_aliasing();
		val1 = *frame_slot(fp, slot_1, ajla_flat_option_t);
		barrier_aliasing();
		switch (code) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less_equal:
				if (!val1) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less:
				if (val1) {
					result = 1;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_not:
				result = val1 ^ 1;
				goto have_result;
		}
	}
	if (slot_2 != NO_FRAME_T && !ipret_unbox_bool(fp, slot_2)) {
		barrier_aliasing();
		val2 = *frame_slot(fp, slot_2, ajla_flat_option_t);
		barrier_aliasing();
		switch (code) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less:
				if (!val2) {
					result = 0;
					goto have_result;
				}
				break;
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less_equal:
				if (val2) {
					result = 1;
					goto have_result;
				}
				break;
		}
	}
	if (!((val1 | val2) & 2)) {
		/* Both values known: let the fast path compute the result.
		 * The disabled branch would compute it here instead. */
#if 1
		return POINTER_FOLLOW_THUNK_RETRY;
#else
		switch (code) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less:
				result = 1;
				goto have_result;
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less_equal:
				result = 0;
				goto have_result;
			case OPCODE_BOOL_OP_equal:
				result = val1 ^ val2 ^ 1;
				goto have_result;
			case OPCODE_BOOL_OP_not_equal:
				result = val1 ^ val2;
				goto have_result;
		}
		internal(file_line, "thunk_bool_operator: invalid opcode: %04x -> %x", *ip, code);
#endif
	}
	if (val1 & val2 & 2) {
		/* Both sides are thunks of a short-circuiting operator. */
		switch (code) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_less_equal:
				strict_flag |= FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL;
		}
	}

	return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

have_result:
	frame_free(fp, slot_r);
	barrier_aliasing();
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	barrier_aliasing();
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Resolve a boolean condition slot before a conditional jump: wait for the
 * thunk if necessary (PF_WAIT), propagate exceptions, otherwise flatten the
 * option value into the slot and ask the caller to retry the jump.
 */
void * attr_hot_fastcall thunk_bool_jump(frame_s *fp, const code_t *ip, frame_t slot)
{
	pointer_t *thunk = frame_pointer(fp, slot);
	struct data *data;

	pointer_follow(thunk, true, data, PF_WAIT, fp, ip,
		return ex_,
		return POINTER_FOLLOW_THUNK_EXCEPTION
	);

	barrier_aliasing();
	*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(data,option)->option;
	barrier_aliasing();
	frame_clear_flag(fp, slot);
	data_dereference(data);
	return POINTER_FOLLOW_THUNK_RETRY;
}
/*
 * Copy a variable between frames (possibly the same frame).  Boxed source:
 * transfer the pointer (stealing the reference when deref).  Flat source:
 * memcpy when the destination is also flat, otherwise box the flat value
 * with flat_to_data.  The destination slot must not already own a pointer.
 */
void attr_fastcall ipret_copy_variable(frame_s *src_fp, frame_t src_slot, frame_s *dst_fp, frame_t dst_slot, bool deref)
{
	pointer_t ptr;
	const struct type *src_type;
	ajla_assert(!frame_test_flag(dst_fp, dst_slot), (file_line, "ipret_copy_variable: flag already set for destination slot %"PRIuMAX"", (uintmax_t)dst_slot));
	src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		ptr = frame_get_pointer_reference(src_fp, src_slot, deref);
	} else {
		const struct type *dst_type = frame_get_type_of_local(dst_fp, dst_slot);
		if (likely(TYPE_IS_FLAT(dst_type))) {
			ajla_assert(type_is_equal(src_type, dst_type), (file_line, "ipret_copy_variable: copying between different types (%u,%u,%u) -> (%u,%u,%u)", src_type->tag, src_type->size, src_type->align, dst_type->tag, dst_type->size, dst_type->align));
			memcpy_fast(frame_var(dst_fp, dst_slot), frame_var(src_fp, src_slot), dst_type->size);
			return;
		} else {
			/* Flat value into a boxed slot: box it. */
			ptr = flat_to_data(src_type, frame_var(src_fp, src_slot));
		}
	}
	frame_set_pointer(dst_fp, dst_slot, ptr);
}
1053 pointer_t ipret_copy_variable_to_pointer(frame_s *src_fp, frame_t src_slot, bool deref)
1055 const struct type *src_type = frame_get_type_of_local(src_fp, src_slot);
1056 if (!frame_variable_is_flat(src_fp, src_slot)) {
1057 return frame_get_pointer_reference(src_fp, src_slot, deref);
1058 } else {
1059 return flat_to_data(src_type, frame_var(src_fp, src_slot));
/*
 * Context handed to the cache comparators (saved_cache_compare,
 * cache_entry_compare): the evaluated call arguments with their counts,
 * plus an error slot a comparator fills in when it cannot complete
 * (e.g. out of memory) so the caller can detect the failure afterwards.
 */
struct data_compare_context {
	struct ipret_call_cache_arg *arguments;	/* evaluated arguments of the call being looked up */
	arg_t n_arguments;			/* number of entries in arguments[] */
	arg_t n_return_values;			/* return values stored per cache entry */
	ajla_error_t err;			/* set by a comparator on failure; checked by callers */
};
1071 static int saved_cache_compare(struct data *saved_cache, size_t idx, struct data_compare_context *ctx)
1073 size_t ptr_idx = idx * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values);
1074 arg_t ai;
1075 for (ai = 0; ai < ctx->n_arguments; ai++) {
1076 int c;
1077 c = data_compare(da(saved_cache,saved_cache)->pointers[ptr_idx + ai], ctx->arguments[ai].ptr, &ctx->err);
1078 if (c)
1079 return c;
1081 return 0;
/*
 * Binary-search the function's loaded (saved) cache for an entry whose
 * arguments equal those in `ctx`.
 *
 * Returns a pointer to the entry's return values (the pointers stored
 * right after its n_arguments argument pointers), or NULL when no entry
 * matches.  If a comparison fails (e.g. OOM), saved_cache_compare records
 * it in ctx->err; the caller must check that as well.
 */
static pointer_t *saved_cache_find(struct data *function, struct data_compare_context *ctx)
{
	struct data *saved_cache = da(function,function)->loaded_cache;
	size_t n_entries = da(saved_cache,saved_cache)->n_entries;
	size_t result;
	int cmp;
	/*debug("searching: %s, %zu", da(function,function)->function_name, n_entries);*/
	binary_search(size_t, n_entries, result, !(cmp = saved_cache_compare(saved_cache, result, ctx)), cmp < 0, return NULL);
	/*debug("found it: %s, %zu", da(function,function)->function_name, result);*/
	/* skip over the argument pointers to the return-value pointers */
	return &da(saved_cache,saved_cache)->pointers[result * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values) + (size_t)ctx->n_arguments];
}
/*
 * Tree comparator for the in-memory call cache: compare the arguments of
 * the cache entry `e1` against the context (`e2` is a pointer-as-integer
 * to a struct data_compare_context).
 *
 * Returns -1/1 for ordering.  On DATA_COMPARE_OOM it records the error
 * in ctx->err and returns 0 ("equal") so the tree search terminates; the
 * caller is expected to notice ctx->err afterwards.  Any other nonzero
 * result from data_compare is an internal error.
 */
static int cache_entry_compare(const struct tree_entry *e1, uintptr_t e2)
{
	struct cache_entry *c1 = get_struct(e1, struct cache_entry, entry);
	struct data_compare_context *ctx = cast_ptr(struct data_compare_context *, num_to_ptr(e2));
	arg_t ai;
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		int c;
		c = data_compare(c1->arguments[ai], ctx->arguments[ai].ptr, MEM_DONT_TRY_TO_FREE);
		if (c == -1 || c == 1)
			return c;
		if (c == DATA_COMPARE_OOM) {
			ctx->err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
			return 0;
		}
		if (unlikely(c))
			internal(file_line, "cache_entry_compare: data_compare returned %d", c);
	}
	return 0;
}
/*
 * Completion callback for one return value of a cached call.
 *
 * Stores the evaluated result pointer into the cache_entry_return and
 * decrements the entry's pending counter under the entry's address lock.
 * When the last pending result arrives, wake_up_wait_list releases the
 * waiters (and takes over the mutex, which is why there is no unlock on
 * that branch); otherwise the lock is simply dropped.
 */
static void cache_evaluated(void *cookie, pointer_t ptr)
{
	struct cache_entry_return *ret = cookie;
	struct cache_entry *c;
	pointer_reference_owned(ptr);
	ret->ptr = ptr;
	c = ret->ce;
	address_lock(c, DEPTH_THUNK);
	/*debug("cache evaluated: %p, pending %u", c, c->n_pending);*/
	if (likely(!--c->n_pending)) {
		/* last result: wake waiters; the mutex is passed to wake_up_wait_list */
		wake_up_wait_list(&c->wait_list, address_get_mutex(c, DEPTH_THUNK), true);
	} else {
		address_unlock(c, DEPTH_THUNK);
	}
}
/*
 * Perform a function call through the function's result cache.
 *
 * All arguments are deeply evaluated first.  The lookup then goes through
 * two layers: the previously saved cache (loaded_cache, searched with
 * saved_cache_find) and the in-memory tree of cache entries keyed by the
 * argument values.  On a miss a new cache entry is allocated and the call
 * is submitted for evaluation; the current execution control is parked on
 * the entry's wait list until all return values are ready.
 *
 * Locking: the function's address lock is taken for read when rwmutex is
 * supported, otherwise for write; `wr_lock` tracks which one is held so
 * the matching unlock is used.  Tree insertion requires upgrading to the
 * write lock, which is done by release-and-retry (`goto again`).
 *
 * Ownership: `arguments` and `return_values` are freed before returning;
 * argument pointers marked need_free_ptr are dereferenced at ret1.
 */
void * attr_fastcall ipret_call_cache(frame_s *fp, const code_t *ip, pointer_t *direct_function, struct ipret_call_cache_arg *arguments, frame_t *return_values, frame_t free_fn_slot)
{
	struct thunk *thunk = NULL;
	struct data_compare_context ctx;
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct cache_entry *c = NULL;	/* avoid warning */
	void *ex;
	arg_t ai;
	bool wr_lock;
	struct thunk **results;
	struct data *function_reference;
	struct data *function = pointer_get_data(*direct_function);
	arg_t n_arguments = da(function,function)->n_arguments;
	arg_t n_return_values = da(function,function)->n_return_values;
	bool save = *ip % OPCODE_MODE_MULT == OPCODE_CALL_SAVE || *ip % OPCODE_MODE_MULT == OPCODE_CALL_INDIRECT_SAVE;

	ctx.err.error_class = EC_NONE;

	for (ai = 0; ai < n_arguments; ai++)
		arguments[ai].need_free_ptr = false;
	/* deeply evaluate every argument; flat values are boxed first */
	for (ai = 0; ai < n_arguments; ai++) {
		struct function_argument *f_arg = arguments[ai].f_arg;
		if (unlikely(f_arg != NULL)) {
			if (f_arg->tag == TYPE_TAG_unknown) {
				ex = pointer_deep_eval(&f_arg->u.ptr, fp, ip, &thunk);
				arguments[ai].ptr = pointer_reference(&f_arg->u.ptr);
				arguments[ai].need_free_ptr = true;
			} else {
				arguments[ai].ptr = flat_to_data(type_get_from_tag(f_arg->tag), f_arg->u.slot);
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			}
		} else {
			frame_t slot = arguments[ai].slot;
			if (!frame_variable_is_flat(fp, slot)) {
				ex = frame_pointer_deep_eval(fp, ip, slot, &thunk);
				arguments[ai].ptr = *frame_pointer(fp, slot);
			} else {
				arguments[ai].ptr = flat_to_data(frame_get_type_of_local(fp, slot), frame_var(fp, slot));
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			}
		}
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			/* an argument is an exception: take the lock (ret_c unlocks it)
			   and propagate the exception to all return values */
			if (!rwmutex_supported) {
				wr_lock = true;
				address_write_lock(function);
			} else {
				wr_lock = false;
				address_read_lock(function);
			}
			goto ret_c;
		}
		if (ex != POINTER_FOLLOW_THUNK_GO)
			goto ret1;
	}

	ctx.arguments = arguments;
	ctx.n_arguments = n_arguments;
	ctx.n_return_values = n_return_values;

	/* first layer: the saved cache loaded with the function */
	if (da(function,function)->loaded_cache) {
		pointer_t *results;
		/*debug("loaded cache: %s", da(function,function)->function_name);*/
		ctx.err.error_class = EC_NONE;
		results = saved_cache_find(function, &ctx);
		if (results || unlikely(ctx.err.error_class != EC_NONE)) {
			/* hit (or lookup error): consume argument slots and fill returns */
			for (ai = 0; ai < n_arguments; ai++) {
				if (arguments[ai].deref) {
					frame_t slot = arguments[ai].slot;
					if (frame_test_and_clear_flag(fp, slot))
						pointer_dereference(*frame_pointer(fp, slot));
					*frame_pointer(fp, slot) = pointer_empty();
				}
			}
			if (unlikely(free_fn_slot != NO_FRAME_T)) {
				frame_free_and_clear(fp, free_fn_slot);
			}
			if (unlikely(ctx.err.error_class != EC_NONE)) {
				for (ai = 0; ai < n_return_values; ai++) {
					frame_set_pointer(fp, return_values[ai], pointer_error(ctx.err, NULL, NULL pass_file_line));
				}
			} else {
				for (ai = 0; ai < n_return_values; ai++) {
					pointer_t ptr = pointer_reference(&results[ai]);
					frame_set_pointer(fp, return_values[ai], ptr);
				}
			}
			ex = POINTER_FOLLOW_THUNK_GO;
			goto ret1;
		}
	}

	if (!rwmutex_supported) {
		wr_lock = true;
		address_write_lock(function);
	} else {
		wr_lock = false;
		address_read_lock(function);
	}

again:
	/* second layer: the in-memory cache tree */
	ctx.err.error_class = EC_NONE;
	e = tree_find_for_insert(&da(function,function)->cache, cache_entry_compare, ptr_to_num(&ctx), &ins);
	if (e) {
		if (unlikely(ctx.err.error_class != EC_NONE)) {
			/* comparator failed (OOM); drop the lock, try to trim caches and retry */
			if (!wr_lock)
				address_read_unlock(function);
			else
				address_write_unlock(function);
			if (ctx.err.error_type == AJLA_ERROR_OUT_OF_MEMORY && mem_trim_cache()) {
				if (!wr_lock)
					address_read_lock(function);
				else
					address_write_lock(function);
				goto again;
			}
			wr_lock = false;
			address_read_lock(function);
			goto ret_c;
		}
		c = get_struct(e, struct cache_entry, entry);
		address_lock(c, DEPTH_THUNK);
		goto have_c;
	}
	if (!wr_lock) {
		/* entry not found: upgrade to the write lock and search again */
		address_read_unlock(function);
		wr_lock = true;
		address_write_lock(function);
		goto again;
	}

	/* miss under the write lock: build a new cache entry */
	c = struct_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry, arguments, n_arguments, MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c))
		goto oom1;
	c->save = save;
	c->returns = mem_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry_return *, 0, 0, n_return_values, sizeof(struct cache_entry_return), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c->returns))
		goto oom2;
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = NULL;
	}
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = function_evaluate_prepare(MEM_DONT_TRY_TO_FREE);
		if (unlikely(!c->returns[ai].ex))
			goto oom3;
	}
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(arguments[ai].ptr);
		c->arguments[ai] = arguments[ai].ptr;
	}
	results = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, n_return_values, sizeof(struct thunk *), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!results))
		goto oom3;
	if (!(function_reference = data_alloc_function_reference_mayfail(n_arguments, MEM_DONT_TRY_TO_FREE pass_file_line)))
		goto oom4;
	da(function_reference,function_reference)->is_indirect = false;
	da(function_reference,function_reference)->u.direct = direct_function;
	if (unlikely(!thunk_alloc_function_call(pointer_data(function_reference), n_return_values, results, MEM_DONT_TRY_TO_FREE))) {
		data_free_r1(function_reference);
		/* unwind partially-built entry in reverse order of construction */
oom4:
		mem_free(results);
oom3:
		for (ai = 0; ai < n_return_values; ai++) {
			if (c->returns[ai].ex)
				execution_control_free(c->returns[ai].ex);
		}
		mem_free(c->returns);
oom2:
		mem_free(c);
oom1:
		address_write_unlock(function);
		if (mem_trim_cache()) {
			address_write_lock(function);
			goto again;
		}
		address_write_lock(function);
		ctx.err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
		goto ret_c;
	}
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(c->arguments[ai]);
		data_fill_function_reference(function_reference, ai, c->arguments[ai]);
	}
	address_lock(c, DEPTH_THUNK);
	/*debug("evaluaring: %p", c);*/
	c->n_pending = n_return_values;
	list_init(&c->wait_list);
	/* submit each return value for evaluation; cache_evaluated collects them */
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ce = c;
		function_evaluate_submit(c->returns[ai].ex, pointer_thunk(results[ai]), cache_evaluated, &c->returns[ai]);
	}
	mem_free(results);

	tree_insert_after_find(&c->entry, &ins);

have_c:
	if (!c->save && unlikely(save))
		c->save = true;
	if (c->n_pending) {
		/* results not ready yet: park this execution control on the entry */
		struct execution_control *exx;
		/*debug("waiting on %p, pending %u", c, c->n_pending);*/
		exx = frame_execution_control(fp);
		exx->wait[0].mutex_to_lock = address_get_mutex(c, DEPTH_THUNK);
		list_add(&c->wait_list, &exx->wait[0].wait_entry);
		address_unlock(c, DEPTH_THUNK);
		pointer_follow_wait(fp, ip);
		ex = POINTER_FOLLOW_THUNK_EXIT;
		goto ret2;
	}
	address_unlock(c, DEPTH_THUNK);

ret_c:
	/* consume argument slots that the caller asked us to dereference */
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].deref) {
			frame_t slot = arguments[ai].slot;
			if (frame_test_and_clear_flag(fp, slot))
				pointer_dereference(*frame_pointer(fp, slot));
			*frame_pointer(fp, slot) = pointer_empty();
		}
	}
	if (unlikely(free_fn_slot != NO_FRAME_T)) {
		frame_free_and_clear(fp, free_fn_slot);
	}
	if (likely(!thunk) && unlikely(ctx.err.error_class != EC_NONE))
		thunk = thunk_alloc_exception_error(ctx.err, NULL, NULL, NULL pass_file_line);
	/* distribute either the cached results or the exception to all returns */
	for (ai = 0; ai < n_return_values; ai++) {
		pointer_t ptr;
		if (likely(!thunk)) {
			ptr = pointer_reference(&c->returns[ai].ptr);
		} else {
			if (ai)
				thunk_reference(thunk);
			ptr = pointer_thunk(thunk);
		}
		frame_set_pointer(fp, return_values[ai], ptr);
	}
	ex = POINTER_FOLLOW_THUNK_GO;

ret2:
	if (likely(!wr_lock))
		address_read_unlock(function);
	else
		address_write_unlock(function);

ret1:
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].need_free_ptr) {
			pointer_dereference(arguments[ai].ptr);
		}
	}
	mem_free(arguments);
	mem_free(return_values);
	return ex;
}
/*
 * Slow path of ipret_get_index: the slot holds a negative flat integer,
 * or a pointer (thunk, boxed flat integer, or long integer).
 *
 * Negative indices either set *is_negative (when the caller provided it)
 * or produce an AJLA_ERROR_NEGATIVE_INDEX exception in *thunk.  A boxed
 * flat integer is unboxed into the slot and the function loops; a long
 * integer is converted with index_from_mp_.  An unfinished thunk causes
 * a wait (ex_ returned); an exception thunk is propagated via *thunk.
 */
static attr_noinline void * ipret_get_index_complicated(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
again:
	if (likely(!frame_test_flag(fp_slot, slot))) {
		int_default_t in;
		barrier_aliasing();
		in = *frame_slot(fp_slot, slot, int_default_t);
		barrier_aliasing();
		if (unlikely(in < 0)) {
negative:
			if (!is_negative) {
				*thunk = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NEGATIVE_INDEX), fp, ip pass_position);
				return POINTER_FOLLOW_THUNK_EXCEPTION;
			}
			/* caller handles negative indices itself; idx is not set here */
			*is_negative = true;
			return POINTER_FOLLOW_THUNK_GO;
		}
		index_from_int_(idx, in pass_position);
	} else {
		pointer_t *ptr = frame_pointer(fp_slot, slot);
		struct data *d;

		pointer_follow(ptr, true, d, PF_WAIT, fp, ip,
			return ex_,
			thunk_reference(thunk_);
			*thunk = pointer_thunk(thunk_);
			return POINTER_FOLLOW_THUNK_EXCEPTION
		);
		if (da_tag(d) == DATA_TAG_flat) {
			/* boxed flat integer: unbox it into the slot and retry the fast path */
			ipret_unbox_value(fp_slot, type_get_int(INT_DEFAULT_N), slot);
			goto again;
		}
		if (unlikely(mpint_negative(&da(d,longint)->mp))) {
			goto negative;
		}
		index_from_mp_(idx, &da(d,longint)->mp pass_position);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Read an array index from a frame slot into *idx.
 *
 * Fast path: the slot holds a flat non-negative int_default_t, converted
 * directly with index_from_int_.  Negative values and pointer slots are
 * delegated to ipret_get_index_complicated (note the goto jumps across
 * the if/else into its call).  See that function for the semantics of
 * is_negative and thunk.
 */
void * attr_hot_fastcall ipret_get_index(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
	if (likely(fp == fp_slot))
		ajla_assert(frame_get_type_of_local(fp_slot, slot)->tag == type_get_int(INT_DEFAULT_N)->tag, (file_line, "ipret_get_index: invalid type %u", (unsigned)frame_get_type_of_local(fp_slot, slot)->tag));
	if (likely(!frame_test_flag(fp_slot, slot))) {
		int_default_t in;
		barrier_aliasing();
		in = *frame_slot(fp_slot, slot, int_default_t);
		barrier_aliasing();
		if (unlikely(in < 0))
			goto complicated;
		index_from_int_(idx, in pass_position);
	} else {
complicated:
		return ipret_get_index_complicated(fp, ip, fp_slot, slot, is_negative, idx, thunk pass_position);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
1450 void * attr_hot_fastcall ipret_record_load_create_thunk(frame_s *fp, const code_t *ip, frame_t record, frame_t record_slot, frame_t result_slot)
1452 pointer_t *fn_ptr;
1453 void *ex;
1454 struct data *function_reference;
1455 struct thunk *result;
1457 ex = pcode_find_record_option_load_function(PCODE_FUNCTION_RECORD_LOAD, record_slot, fp, ip, &fn_ptr);
1459 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1460 return ex;
1462 result = build_thunk(fn_ptr, 1, &function_reference);
1463 ipret_fill_function_reference_from_slot(function_reference, 0, fp, record, false);
1465 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1467 return POINTER_FOLLOW_THUNK_GO;
1470 void * attr_hot_fastcall ipret_option_load_create_thunk(frame_s *fp, const code_t *ip, frame_t option, frame_t option_idx, frame_t result_slot)
1472 pointer_t *fn_ptr;
1473 void *ex;
1474 struct data *function_reference;
1475 struct thunk *result;
1477 ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_LOAD, option_idx, fp, ip, &fn_ptr);
1479 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1480 return ex;
1482 result = build_thunk(fn_ptr, 1, &function_reference);
1483 ipret_fill_function_reference_from_slot(function_reference, 0, fp, option, false);
1485 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1487 return POINTER_FOLLOW_THUNK_GO;
/*
 * Test whether the (thunked) option value in slot_1 has ordinal `option`,
 * storing the boolean result into slot_r.
 *
 * If the thunk in slot_1 has meanwhile finished, the caller retries; if
 * it is an exception, the exception is copied to slot_r.  Otherwise a
 * deferred test is built: a generated pcode OPTION_TEST function applied
 * to slot_1, stored as a thunk in slot_r.
 */
void * attr_hot_fastcall thunk_option_test(frame_s *fp, const code_t *ip, frame_t slot_1, ajla_option_t option, frame_t slot_r)
{
	pointer_t *fn_ptr;
	void *ex;
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
		break
	);

	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_TEST, option, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		return ex;

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Get the ordinal of the (thunked) option value in slot_1 into slot_r.
 *
 * Same structure as thunk_option_test: retry if the thunk has finished,
 * propagate an exception, otherwise build a deferred call to the
 * generated option-ord pcode function with slot_1 as the argument.
 */
void * attr_hot_fastcall thunk_option_ord(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
{
	pointer_t *fn_ptr;
	void *ex;
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
		break
	);

	ex = pcode_find_option_ord_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		return ex;

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
1543 void * attr_hot_fastcall ipret_array_load_create_thunk(frame_s *fp, const code_t *ip, frame_t array, frame_t index, frame_t result_slot)
1545 pointer_t *fn_ptr;
1546 void *ex;
1547 struct data *function_reference;
1548 struct thunk *result;
1550 ex = pcode_find_array_load_function(fp, ip, &fn_ptr);
1552 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1553 return ex;
1555 result = build_thunk(fn_ptr, 2, &function_reference);
1556 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array, false);
1557 ipret_fill_function_reference_from_slot(function_reference, 1, fp, index, false);
1559 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1561 return POINTER_FOLLOW_THUNK_GO;
1564 static attr_noinline void *array_len_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t result_slot)
1566 pointer_t *fn_ptr;
1567 void *ex;
1568 struct data *function_reference;
1569 struct thunk *result;
1571 ex = pcode_find_array_len_function(fp, ip, &fn_ptr);
1573 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1574 return ex;
1576 result = build_thunk(fn_ptr, 1, &function_reference);
1577 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
1579 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1581 return POINTER_FOLLOW_THUNK_GO;
1584 static attr_noinline void *array_len_greater_than_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t length_slot, frame_t result_slot)
1586 pointer_t *fn_ptr;
1587 void *ex;
1588 struct data *function_reference;
1589 struct thunk *result;
1591 ex = pcode_find_array_len_greater_than_function(fp, ip, &fn_ptr);
1593 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1594 return ex;
1596 result = build_thunk(fn_ptr, 2, &function_reference);
1597 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
1598 ipret_fill_function_reference_from_slot(function_reference, 1, fp, length_slot, false);
1600 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1602 return POINTER_FOLLOW_THUNK_GO;
1605 static attr_noinline void *array_sub_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t end_slot, frame_t result_slot, unsigned flags)
1607 pointer_t *fn_ptr;
1608 void *ex;
1609 struct data *function_reference;
1610 struct thunk *result;
1612 ex = pcode_find_array_sub_function(fp, ip, &fn_ptr);
1614 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1615 return ex;
1617 result = build_thunk(fn_ptr, 3, &function_reference);
1618 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
1619 ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
1620 ipret_fill_function_reference_from_slot(function_reference, 2, fp, end_slot, false);
1622 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1624 return POINTER_FOLLOW_THUNK_GO;
1627 static attr_noinline void *array_skip_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t result_slot, unsigned flags)
1629 pointer_t *fn_ptr;
1630 void *ex;
1631 struct data *function_reference;
1632 struct thunk *result;
1634 ex = pcode_find_array_skip_function(fp, ip, &fn_ptr);
1636 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
1637 return ex;
1639 result = build_thunk(fn_ptr, 2, &function_reference);
1640 ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
1641 ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
1643 frame_set_pointer(fp, result_slot, pointer_thunk(result));
1645 return POINTER_FOLLOW_THUNK_GO;
/*
 * Collapse any finished thunks / incomplete-array chains in the slot so
 * callers see as much evaluated array as possible.
 *
 * Returns true when the slot still holds something unfinished (an
 * unevaluated thunk or an incomplete array that was collapsed), false
 * when the value is flat or a plain complete array.
 */
static bool array_resolve_thunk(frame_s *fp, frame_t slot)
{
	pointer_t *ptr;
	if (unlikely(frame_variable_is_flat(fp, slot)))
		return false;
retry:
	ptr = frame_pointer(fp, slot);
	if (likely(frame_test_flag(fp, slot))) {
		if (unlikely(pointer_is_thunk(*ptr))) {
			if (thunk_is_finished(pointer_get_thunk(*ptr))) {
				/* replace the finished thunk with its result and re-examine */
				pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
				goto retry;
			}
			return true;
		}
	}
	if (da_tag(pointer_get_data(*ptr)) == DATA_TAG_array_incomplete) {
		/* ensure the slot owns a reference before collapsing in place */
		if (unlikely(!frame_test_and_set_flag(fp, slot)))
			data_reference(pointer_get_data(*ptr));
		array_incomplete_collapse(ptr);
		return true;
	}
	return false;
}
/*
 * Walk an (possibly incomplete) array chain until *idx elements have been
 * accounted for.
 *
 * On success, *result is the pointer to the segment containing the final
 * position and *idx is reduced to the offset within that chain position.
 * *can_modify tracks the deepest chain link that may be written in place;
 * it is cleared when a non-writable incomplete node is crossed.
 *
 * Returns POINTER_FOLLOW_THUNK_RETRY when the array is shorter than *idx
 * (index out of range — NOT an actual retry), an execution control when a
 * thunk must be waited on (strict mode), or THUNK_EXCEPTION with the
 * exception stored in *result.
 */
static void *array_walk(frame_s *fp, const code_t *ip, pointer_t *ptr, array_index_t *idx, unsigned flags, pointer_t *result, pointer_t **can_modify)
{
	struct data *a;
	array_index_t this_len;

	*can_modify = ptr;

again:
	pointer_follow(ptr, false, a, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		return ex_,
		thunk_reference(thunk_);
		*result = pointer_thunk(thunk_);
		return POINTER_FOLLOW_THUNK_EXCEPTION
	);

	if (unlikely(index_eq_int(*idx, 0))) {
		*result = *ptr;
		return POINTER_FOLLOW_THUNK_GO;
	}

	if (unlikely(da_tag(a) == DATA_TAG_array_incomplete)) {
		if (!data_is_writable(a))
			*can_modify = NULL;
		this_len = array_len(pointer_get_data(da(a,array_incomplete)->first));
		if (!index_ge_index(this_len, *idx)) {
			/* index reaches past this segment: consume it and follow the chain */
			index_sub(idx, this_len);
			index_free(&this_len);
			ptr = &da(a,array_incomplete)->next;
			if (*can_modify)
				*can_modify = ptr;
			goto again;
		}
		index_free(&this_len);
		*result = pointer_data(a);
	} else {
		this_len = array_len(a);
		if (unlikely(!index_ge_index(this_len, *idx))) {
			index_free(&this_len);
			return POINTER_FOLLOW_THUNK_RETRY;	/* this means index out of range, not a retry */
		}
		index_free(&this_len);
		*result = pointer_data(a);
	}
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Compute the length of the array in slot_a into slot_r.
 *
 * Flat arrays have a static length from the type definition.  Otherwise
 * the incomplete-array chain is walked, summing segment lengths.  With
 * OPCODE_OP_FLAG_STRICT unfinished thunks are waited on; without it a
 * deferred length thunk is created instead.  The result is stored flat
 * when it fits in int_default_t, as a long integer otherwise.
 */
void * attr_hot_fastcall ipret_array_len(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, unsigned flags)
{
	const struct type *type;
	pointer_t *ptr;
	array_index_t idx_len;
	ajla_error_t err;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)), (file_line, "ipret_array_len: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		/* flat array: length is fixed by the type */
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		barrier_aliasing();
		*frame_slot(fp, slot_r, int_default_t) = flat_def->n_elements;
		barrier_aliasing();
		return POINTER_FOLLOW_THUNK_GO;
	}

	index_from_int(&idx_len, 0);
	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* sum the lengths of all segments in the (possibly incomplete) chain */
	while (1) {
		struct data *array_data;
		struct data *this_ptr;
		array_index_t this_len;

		pointer_follow(ptr, false, array_data, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
			index_free(&idx_len);
			if (!(flags & OPCODE_OP_FLAG_STRICT)) {
				/* lazy mode: defer the whole computation as a thunk */
				ex_ = array_len_create_thunk(fp, ip, slot_a, slot_r);
			}
			return ex_,
			index_free(&idx_len);
			thunk_reference(thunk_);
			frame_set_pointer(fp, slot_r, pointer_thunk(thunk_));
			return POINTER_FOLLOW_THUNK_GO
		);

		if (da_tag(array_data) == DATA_TAG_array_incomplete)
			this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		else
			this_ptr = array_data;

		this_len = array_len(this_ptr);

		if (unlikely(!index_add_(&idx_len, this_len, &err pass_file_line))) {
			index_free(&this_len);
			goto array_len_error;
		}
		index_free(&this_len);

		if (da_tag(array_data) == DATA_TAG_array_incomplete) {
			ptr = &da(array_data,array_incomplete)->next;
			continue;
		}

		break;
	}

	if (likely(!index_is_mp(idx_len))) {
		/* fits in a machine integer: store flat */
		int_default_t len = index_to_int(idx_len);
		index_free(&idx_len);
		barrier_aliasing();
		*frame_slot(fp, slot_r, int_default_t) = len;
		barrier_aliasing();
	} else {
		/* huge length: box it as a long integer */
		struct data *d;
		d = data_alloc_longint_mayfail(0, &err pass_file_line);
		if (unlikely(!d)) {
			index_free(&idx_len);
array_len_error:
			frame_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
		} else {
			mpint_free(&da(d,longint)->mp);
			index_free_get_mp(&idx_len, &da(d,longint)->mp);
			frame_set_pointer(fp, slot_r, pointer_data(d));
		}
	}

	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Store into slot_r whether the array in slot_a has more than slot_l
 * elements, without forcing more of the array than necessary.
 *
 * A negative requested length always yields true.  For flat arrays the
 * answer comes from the type.  Otherwise array_walk is asked to reach
 * element slot_l+1: success means "greater", RETRY (out of range) means
 * "not greater".  Without OPCODE_OP_FLAG_STRICT a deferred thunk is
 * created when the walk hits an unevaluated part.
 */
void * attr_hot_fastcall ipret_array_len_greater_than(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_l, unsigned flags)
{
	const struct type *type;
	bool neg = false;
	int result = 1;
	pointer_t *ptr;
	void *ex;
	array_index_t remaining_length;
	pointer_t res_ptr;
	pointer_t *can_modify;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_flat_option()), (file_line, "ipret_array_len_greater_than: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len_greater_than: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	ex = ipret_get_index(fp, ip, fp, slot_l, &neg, &remaining_length, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto err;
		return ex;
	}
	if (unlikely(neg)) {
		/* any length is greater than a negative number.
		   NOTE(review): on this path ipret_get_index does not appear to set
		   remaining_length, yet ret_result frees it — verify index_free is
		   safe on an unset index here. */
		result = 1;
		goto ret_result;
	}

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(remaining_length, flat_def->n_elements))
			result = 0;
		goto ret_result;
	}

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* try to reach element slot_l + 1 */
	index_add_int(&remaining_length, 1);
	ex = array_walk(fp, ip, ptr, &remaining_length, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			/* out of range: the array is not longer than slot_l */
			result = 0;
			goto ret_result;
		}
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto err_free;
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_len_greater_than_create_thunk(fp, ip, slot_a, slot_l, slot_r);
		index_free(&remaining_length);
		return ex;
	}

ret_result:
	barrier_aliasing();
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	barrier_aliasing();
	index_free(&remaining_length);
	return POINTER_FOLLOW_THUNK_GO;

err_free:
	index_free(&remaining_length);
err:
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Slice the array in slot_a to [slot_start, slot_end) and store the
 * result into slot_r.
 *
 * Flat arrays are sliced with a bounds check and memcpy.  Boxed arrays
 * are first probed with array_walk to verify slot_end is reachable (and
 * to learn whether segments may be reused in place), then the chain is
 * traversed segment by segment: each overlapping piece is extracted with
 * array_sub and joined onto the accumulator `acc`.  With
 * OPCODE_FLAG_FREE_ARGUMENT the source may be consumed destructively
 * (can_modify); the consumed slot is cleaned up in the shared epilogue.
 * Without OPCODE_OP_FLAG_STRICT, hitting an unevaluated part defers the
 * slice via array_sub_create_thunk.
 */
void * attr_hot_fastcall ipret_array_sub(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, frame_t slot_end, unsigned flags)
{
	array_index_t start, end, end_tmp;
	pointer_t res_ptr;
	pointer_t *can_modify;
	void *ex;
	pointer_t *ptr;
	struct data *acc = NULL;	/* accumulator of joined slice segments */

	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_sub: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto except;
		return ex;
	}
	ex = ipret_get_index(fp, ip, fp, slot_end, NULL, &end, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto except_start;
		index_free(&start);
		return ex;
	}
	if (!index_ge_index(end, start)) {
		/* end < start is an invalid range */
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line);
		goto except_end;
	}

	if (frame_variable_is_flat(fp, slot_a)) {
		/* flat source: bounds-check against the static length and memcpy */
		struct data *d;
		ajla_error_t err;
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(end, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto except_end;
		}
		st = index_to_int(start);
		len = index_to_int(end) - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
		if (unlikely(!d)) {
			res_ptr = pointer_error(err, fp, ip pass_file_line);
			goto except_end;
		}
		memcpy(da_array_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);
		goto except_end;	/* success path also exits via the shared epilogue */
	}

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* probe whether slot_end is reachable before mutating anything */
	index_copy(&end_tmp, end);
	ex = array_walk(fp, ip, ptr, &end_tmp, flags, &res_ptr, &can_modify);
	index_free(&end_tmp);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto except_end;
		}
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto except_end;
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_sub_create_thunk(fp, ip, slot_a, slot_start, slot_end, slot_r, flags);
		index_free(&start);
		index_free(&end);
		return ex;
	}

	/* destructive reuse only if we own the sole reference through the slot */
	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
		can_modify = NULL;

	while (1) {
		struct data *array_data;
		struct data *this_ptr;
		array_index_t this_len;

#if 0
		if (pointer_is_thunk(*ptr)) {
			struct stack_trace st;
			stack_trace_init(&st);
			stack_trace_capture(&st, fp, ip, 20);
			stack_trace_print(&st);
			stack_trace_free(&st);
		}
#endif
		array_data = pointer_get_data(*ptr);

		if (da_tag(array_data) == DATA_TAG_array_incomplete)
			this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		else
			this_ptr = array_data;

		this_len = array_len(this_ptr);

		if (!index_ge_index(this_len, start)) {
			/* slice starts past this segment: skip it entirely */
			index_sub(&start, this_len);
			index_sub(&end, this_len);
			index_free(&this_len);
		} else {
			bool done = false;
			array_index_t this_step;
			ajla_error_t err;
			struct data *t;

			if (can_modify) {
				/* detach the segment so array_sub may reuse it in place */
				if (da_tag(array_data) == DATA_TAG_array_incomplete)
					da(array_data,array_incomplete)->first = pointer_empty();
				else
					*ptr = pointer_empty();
			}

			if (!index_ge_index(this_len, end)) {
				index_sub3(&this_step, this_len, start);
			} else {
				index_sub3(&this_step, end, start);
				done = true;
			}
			/*debug("start %lu, end %lu, this_len %lu, this_step %lu", start, end, this_len, this_step);*/
			index_free(&this_len);
			index_sub(&end, this_step);
			index_sub(&end, start);
			t = array_sub(this_ptr, start, this_step, can_modify != NULL, &err);
			index_from_int(&start, 0);	/* subsequent segments are taken from their beginning */

			if (unlikely(!t)) {
				res_ptr = pointer_error(err, fp, ip pass_file_line);
				goto except_end;
			}

			if (!acc) {
				acc = t;
			} else {
				acc = array_join(acc, t, &err);
				if (unlikely(!acc)) {
					res_ptr = pointer_error(err, fp, ip pass_file_line);
					goto except_end;
				}
			}

			if (done) {
				res_ptr = pointer_data(acc);
				acc = NULL;	/* ownership moved to res_ptr */
				break;
			}
		}

		if (unlikely(da_tag(array_data) != DATA_TAG_array_incomplete)) {
			/* chain ended before the slice was complete */
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			break;
		}
		ptr = &da(array_data,array_incomplete)->next;
	}

	/* shared epilogue for success and failure: free indices, drop a partial
	   accumulator, clean up a consumed source slot, store the result */
except_end:
	index_free(&end);
except_start:
	index_free(&start);
except:
	if (acc)
		data_dereference(acc);
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		else
			frame_free_and_clear(fp, slot_a);
	}
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Skip the first "start" elements of the array in slot_a and store the
 * remaining tail in slot_r.  Returns POINTER_FOLLOW_THUNK_GO on completion
 * (including the exception case, where an error pointer is stored), or a
 * pointer-follow control value if evaluation must be suspended.
 */
void * attr_hot_fastcall ipret_array_skip(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, unsigned flags)
{
	array_index_t start, len;
	pointer_t res_ptr;
	pointer_t *can_modify;
	void *ex;
	pointer_t *ptr;
	struct data *a, *ta, *ts;
	ajla_error_t err;
	bool deref;

	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_skip: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	/* Evaluate the start index; on exception, res_ptr already holds the
	   error pointer and we only have to store it into slot_r. */
	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
			goto ret;
		return ex;
	}

	/* Fast path: the source is a flat (non-pointer) array stored directly
	   in the frame — copy the tail into a freshly allocated flat array. */
	if (frame_variable_is_flat(fp, slot_a)) {
		struct data *d;
		ajla_error_t err;
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		/* start may be equal to n_elements (empty tail), but not greater. */
		if (index_ge_int(start, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto ret_free_start;
		}
		st = index_to_int(start);
		len = flat_def->n_elements - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
		if (unlikely(!d)) {
			res_ptr = pointer_error(err, fp, ip pass_file_line);
			goto ret_free_start;
		}
		memcpy(da_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);
		goto ret_free_start;
	}

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	}

	/* Walk the (possibly incomplete) array chain until the segment that
	   contains "start"; on success res_ptr/can_modify describe it. */
	ex = array_walk(fp, ip, ptr, &start, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			/* The walk ran past the end of the array. */
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto ret_free_start;
		}
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto ret_free_start;
		/* Not strict: build a thunk that redoes the skip lazily. */
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_skip_create_thunk(fp, ip, slot_a, slot_start, slot_r, flags);
		index_free(&start);
		return ex;
	}

	/* Skipping zero elements inside this segment: just take a reference. */
	if (unlikely(index_eq_int(start, 0))) {
		pointer_reference_owned(res_ptr);
		goto ret_free_start;
	}

	/* In-place modification is only allowed when we own the argument. */
	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
		can_modify = NULL;

	a = pointer_get_data(res_ptr);
	if (da_tag(a) == DATA_TAG_array_incomplete) {
		ta = pointer_get_data(da(a,array_incomplete)->first);
	} else {
		ta = a;
	}

	len = array_len(ta);
	index_sub(&len, start);
	/* Skipping the whole first segment of an incomplete array: the result
	   is simply the continuation. */
	if (unlikely(index_eq_int(len, 0)) && da_tag(a) == DATA_TAG_array_incomplete) {
		res_ptr = pointer_reference(&da(a,array_incomplete)->next);
		index_free(&len);
		goto ret_free_start;
	}
	deref = false;
	if (can_modify) {
		*can_modify = pointer_empty();
		deref = true;
	}
	/* array_sub consumes the start and len indices — presumably; do not
	   free them on the paths below (TODO confirm against array.h). */
	ts = array_sub(ta, start, len, deref, &err);
	if (unlikely(!ts)) {
		res_ptr = pointer_error(err, fp, ip pass_file_line);
		goto ret;
	}

	if (a != ta) {
		/* a is the incomplete wrapper, ta its first segment. */
		if (deref) {
			/* We own a: splice the shortened segment in place. */
			da(a,array_incomplete)->first = pointer_data(ts);
			res_ptr = pointer_data(a);
		} else {
			/* Shared: build a fresh incomplete node around ts. */
			struct data *inc;
			pointer_t next = pointer_reference(&da(a,array_incomplete)->next);
			inc = data_alloc_array_incomplete(ts, next, &err pass_file_line);
			if (unlikely(!inc)) {
				data_dereference(ts);
				pointer_dereference(next);
				res_ptr = pointer_error(err, fp, ip pass_file_line);
				goto ret;
			}
			res_ptr = pointer_data(inc);
		}
	} else {
		res_ptr = pointer_data(ts);
	}
	goto ret;

ret_free_start:
	index_free(&start);
ret:
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		/* The slot may have been emptied by *can_modify above. */
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		else
			frame_free_and_clear(fp, slot_a);
	}
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
/*
 * Append the array ptr_2 to the array ptr_1 and store the result in *ptr_r.
 * Both ptr_1 and ptr_2 are owned references and are consumed.  fn_ptr, when
 * non-NULL, is the pcode "array append" function used to build a deferred
 * append when ptr_1 is a thunk or an incomplete array.
 */
static void attr_hot ipret_array_append_pointers(frame_s *fp, const code_t *ip, pointer_t *ptr_r, pointer_t ptr_1, pointer_t ptr_2, pointer_t *fn_ptr)
{
	ajla_error_t err;
	struct data *d;

	if (unlikely(fn_ptr != NULL)) {
		if (unlikely(pointer_is_thunk(ptr_1))) {
			struct thunk *result;
			struct data *function_reference;

			/* An exception on the left side propagates directly. */
			if (pointer_is_thunk(ptr_1) && thunk_tag_volatile(pointer_get_thunk(ptr_1)) == THUNK_TAG_EXCEPTION) {
				*ptr_r = ptr_1;
				pointer_dereference(ptr_2);
				return;
			}

			/* Defer the whole append: fn(ptr_1, ptr_2). */
			result = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, ptr_1);
			data_fill_function_reference(function_reference, 1, ptr_2);
			*ptr_r = pointer_thunk(result);
			return;
		} else if (likely(da_tag(pointer_get_data(ptr_1)) == DATA_TAG_array_incomplete)) {
			struct data *first;
			pointer_t last;
			struct thunk *thunk;
			struct data *function_reference, *result;

			/* Keep the known prefix and append lazily to the tail:
			   result = incomplete(first, fn(last, ptr_2)). */
			array_incomplete_decompose(pointer_get_data(ptr_1), &first, &last);

			thunk = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, last);
			data_fill_function_reference(function_reference, 1, ptr_2);

			result = data_alloc_array_incomplete(first, pointer_thunk(thunk), &err pass_file_line);
			if (unlikely(!result)) {
				data_dereference(first);
				pointer_dereference(pointer_thunk(thunk));
				*ptr_r = pointer_error(err, fp, ip pass_file_line);
			} else {
				*ptr_r = pointer_data(result);
			}
			return;
		}
	}

	/* Appending to an empty array: the result is the right operand. */
	if (unlikely(array_is_empty(pointer_get_data(ptr_1)))) {
		*ptr_r = ptr_2;
		pointer_dereference(ptr_1);
		return;
	}

	/* Right side not yet materialized (thunk or incomplete): wrap ptr_1 as
	   the complete prefix of an incomplete array. */
	if (unlikely(pointer_is_thunk(ptr_2)) || unlikely(da_tag(pointer_get_data(ptr_2)) == DATA_TAG_array_incomplete)) {
		struct data *result;
		result = data_alloc_array_incomplete(pointer_get_data(ptr_1), ptr_2, &err pass_file_line);
		if (unlikely(!result)) {
			pointer_dereference(ptr_1);
			pointer_dereference(ptr_2);
			*ptr_r = pointer_error(err, fp, ip pass_file_line);
		} else {
			*ptr_r = pointer_data(result);
			/* Merge adjacent complete segments if possible. */
			if (!pointer_is_thunk(ptr_2))
				array_incomplete_collapse(ptr_r);
		}
		return;
	}

	/* Both sides fully evaluated: join them eagerly.
	   array_join consumes both data references — presumably; TODO confirm. */
	d = array_join(pointer_get_data(ptr_1), pointer_get_data(ptr_2), &err);
	if (!d)
		*ptr_r = pointer_error(err, fp, ip pass_file_line);
	else
		*ptr_r = pointer_data(d);
}
2252 void * attr_hot_fastcall ipret_array_append(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
2254 pointer_t *fn_ptr = NULL;
2255 pointer_t ptr_1, ptr_2, *ptr_r;
2257 if (unlikely(array_resolve_thunk(fp, slot_1))) {
2258 void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
2259 if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
2260 return ex;
2262 array_resolve_thunk(fp, slot_2);
2264 ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
2265 ptr_2 = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);
2267 ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
2269 frame_set_flag(fp, slot_r);
2270 ptr_r = frame_pointer(fp, slot_r);
2272 ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);
2274 return POINTER_FOLLOW_THUNK_GO;
/*
 * Fast path for appending one flat element to an array: mutate the array
 * data in place when we exclusively own a writable flat array with spare
 * capacity.  Any condition that breaks the fast path falls back to the
 * generic ipret_array_append_one().
 */
void * attr_hot_fastcall ipret_array_append_one_flat(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	const int minimum_size = 16;
	ajla_error_t sink;
	pointer_t ptr;
	struct data *data;
	const struct type *type;
	/* The fast path consumes slot_1 in place, so we must own it. */
	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT)))
		goto fallback;
	if (unlikely(!frame_variable_is_flat(fp, slot_2)))
		goto fallback;
	if (unlikely(!frame_test_flag(fp, slot_1)))
		goto fallback;
	ptr = *frame_pointer(fp, slot_1);
	if (unlikely(pointer_is_thunk(ptr)))
		goto fallback;
	data = pointer_get_data(ptr);
	if (unlikely(da_tag(data) != DATA_TAG_array_flat)) {
		/* An empty pointer array can be replaced by a fresh flat array
		   typed after the appended element. */
		if (likely(da_tag(data) == DATA_TAG_array_pointers) && likely(!da(data,array_pointers)->n_used_entries)) {
			type = frame_get_type_of_local(fp, slot_2);
			data = data_alloc_array_flat_mayfail(type, minimum_size, 0, false, &sink pass_file_line);
			if (unlikely(!data))
				goto fallback;
			pointer_dereference(ptr);
			goto do_copy;
		}
		goto fallback;
	}
	if (unlikely(!data_is_writable(data)))
		goto fallback;
	/* Array full: grow by doubling (at least minimum_size), guarding
	   against int_default_t overflow of the new size. */
	if (unlikely(da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries)) {
		struct data *new_data;
		int_default_t new_size = (uint_default_t)da(data,array_flat)->n_used_entries * 2;
		new_size = maximum(new_size, minimum_size);
		if (unlikely(new_size < 0) ||
		    unlikely(new_size <= da(data,array_flat)->n_used_entries))
			goto fallback;
		type = da(data,array_flat)->type;
		new_data = data_alloc_array_flat_mayfail(type, new_size, da(data,array_flat)->n_used_entries, false, &sink pass_file_line);
		if (unlikely(!new_data))
			goto fallback;
		memcpy(da_array_flat(new_data), da_array_flat(data), da(data,array_flat)->n_used_entries * type->size);
		/* r1: we hold the only reference, so free without atomics. */
		data_free_r1(data);
		data = new_data;
		goto do_copy;	/* redundant — falls through to the label anyway */
	}
do_copy:
	type = da(data,array_flat)->type;
	memcpy_fast(da_array_flat(data) + (size_t)da(data,array_flat)->n_used_entries * type->size, frame_var(fp, slot_2), type->size);
	da(data,array_flat)->n_used_entries++;

	/* The reference formerly in slot_1 now lives in slot_r. */
	frame_clear_flag(fp, slot_1);
	*frame_pointer(fp, slot_1) = pointer_empty();
	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);

	return POINTER_FOLLOW_THUNK_GO;
fallback:
	return ipret_array_append_one(fp, ip, slot_r, slot_1, slot_2, flags);
}
/*
 * Append the single element in slot_2 to the array in slot_1, storing the
 * result in slot_r.  Tries to push the element into an exclusively owned
 * pointer array in place; otherwise wraps the element in a one-element
 * array and delegates to ipret_array_append_pointers().
 */
void * attr_hot_fastcall ipret_array_append_one(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	ajla_error_t err;
	pointer_t *fn_ptr = NULL;
	pointer_t ptr_1, ptr_2, ptr_e, *ptr_r;
	struct data *data;

	/* Unresolved thunks in slot_1 require the pcode append function so the
	   final append can be deferred. */
	if (unlikely(array_resolve_thunk(fp, slot_1))) {
		void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
		if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
			return ex;
	}

	ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);

	/* A flat element becomes a one-element flat array immediately. */
	if (frame_variable_is_flat(fp, slot_2)) {
		const struct type *type = frame_get_type_of_local(fp, slot_2);
		data = data_alloc_array_flat_mayfail(type, 1, 1, false, &err pass_file_line);
		if (unlikely(!data))
			goto no_flat;
		memcpy_fast(da_array_flat(data), frame_var(fp, slot_2), type->size);
		ptr_2 = pointer_data(data);
		goto have_data;
	}

no_flat:
	ptr_e = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);

	/* In-place fast path: slot_1 holds an exclusively writable pointer
	   array — push ptr_e directly. */
	if (unlikely(pointer_is_thunk(ptr_1)))
		goto fallback;
	data = pointer_get_data(ptr_1);
	if (unlikely(da_tag(data) != DATA_TAG_array_pointers))
		goto fallback;
	if (unlikely(!data_is_writable(data)))
		goto fallback;
	if (unlikely(da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries)) {
		struct data *new_data;
		size_t new_size = (size_t)da(data,array_pointers)->n_used_entries * 2;
		new_size = maximum(new_size, 16);
		/* size_t overflow guard. */
		if (unlikely(new_size <= (size_t)da(data,array_pointers)->n_used_entries))
			goto fallback;
		new_data = data_alloc_array_pointers_mayfail(new_size, da(data,array_pointers)->n_used_entries, &err pass_file_line);
		if (unlikely(!new_data))
			goto fallback;
		memcpy(da(new_data,array_pointers)->pointer, da(data,array_pointers)->pointer, da(data,array_pointers)->n_used_entries * sizeof(pointer_t));
		data_free_r1(data);
		data = new_data;
	}

	da(data,array_pointers)->pointer[da(data,array_pointers)->n_used_entries++] = ptr_e;

	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);

	return POINTER_FOLLOW_THUNK_GO;

fallback:
	/* Wrap the element in a one-entry pointer array and do a full append. */
	data = data_alloc_array_pointers_mayfail(1, 1, &err pass_file_line);
	if (likely(data != NULL)) {
		da(data,array_pointers)->pointer[0] = ptr_e;
		ptr_2 = pointer_data(data);
	} else {
		pointer_dereference(ptr_e);
		ptr_2 = pointer_error(err, fp, ip pass_file_line);
	}

have_data:
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append_one: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	frame_set_flag(fp, slot_r);
	ptr_r = frame_pointer(fp, slot_r);

	ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);

	return POINTER_FOLLOW_THUNK_GO;
}
2413 static int_default_t get_array_pointers(unsigned char *flat, const struct type attr_unused *type, int_default_t attr_unused n_elements, pointer_t *ptr, void *context)
2415 struct data *na = context;
2416 if (unlikely(flat != NULL))
2417 return 0;
2418 ajla_assert(da(na,array_pointers)->n_used_entries < da(na,array_pointers)->n_allocated_entries, (file_line, "get_array_pointers: array overrun"));
2419 da(na,array_pointers)->pointer[da(na,array_pointers)->n_used_entries++] = pointer_reference(ptr);
2420 return 1;
/*
 * array_onstack_iterate callback: copy a run of flat elements into the flat
 * array passed as context.  Returns the number of elements consumed, or 0
 * to stop when the iterator delivers a pointer entry instead.
 */
static int_default_t get_array_flat(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t attr_unused *ptr, void *context)
{
	size_t size, i;
	struct data *na = context;
	unsigned char *dest;
	if (unlikely(!flat))
		return 0;
	ajla_assert(da(na,array_flat)->n_used_entries + n_elements <= da(na,array_flat)->n_allocated_entries, (file_line, "get_array_flat: array overrun"));
	size = (size_t)n_elements * type->size;
	dest = da_array_flat(na) + type->size * (size_t)da(na,array_flat)->n_used_entries;
	/* Byte-wise copy that skips zero bytes — the destination was allocated
	   cleared (the caller passes clear=true to data_alloc_array_flat_mayfail),
	   so skipping zeros is equivalent to memcpy; presumably done to reduce
	   writes — TODO confirm the intent. */
	for (i = 0; i < size; i++, flat++, dest++) {
		if (*flat != 0)
			*dest = *flat;
	}
	da(na,array_flat)->n_used_entries += n_elements;
	return n_elements;
}
2441 static int_default_t get_array_type(unsigned char *flat, const struct type *type, int_default_t attr_unused n_elements, pointer_t attr_unused *ptr, void *context)
2443 if (flat)
2444 *cast_ptr(const struct type **, context) = type;
2445 return 0;
/*
 * Flatten the (possibly segmented) array in slot_1 into a single contiguous
 * array in slot_r — a flat array when all elements are flat, a pointer array
 * otherwise.  If the destination type is a flat array type of matching size,
 * the result is stored directly in the frame.  Falls back to a plain copy
 * ("do_nothing") whenever flattening is impossible or not worthwhile.
 */
void * attr_fastcall ipret_array_flatten(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, unsigned flags)
{
	ajla_error_t sink;
	pointer_t *ptr;
	struct data *data, *na;
	array_index_t len_long;
	int_default_t len;
	bool success;
	const struct type *flat_type;
	const struct type *array_type = frame_get_type_of_local(fp, slot_r);

	if (frame_variable_is_flat(fp, slot_1))
		goto do_nothing;

	/* Force evaluation of the whole chain of incomplete segments; a thunk
	   that is not ready suspends us (PF_WAIT). */
	ptr = frame_pointer(fp, slot_1);
next_ptr:
	pointer_follow(ptr, false, data, PF_WAIT, fp, ip,
		return ex_,
		goto do_nothing
	);
	if (unlikely(da_tag(data) == DATA_TAG_array_incomplete)) {
		ptr = &da(data,array_incomplete)->next;
		goto next_ptr;
	}

	array_resolve_thunk(fp, slot_1);

	data = pointer_get_data(*frame_pointer(fp, slot_1));
	/* Already a fully used flat array: either move it into a flat frame
	   slot or leave it as-is. */
	if (da_tag(data) == DATA_TAG_array_flat && da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries) {
		if (array_type->tag == TYPE_TAG_flat_array) {
			pointer_t p = frame_get_pointer_reference(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
			na = pointer_get_data(p);
			goto try_to_flatten;
		}
		goto do_nothing;
	}
	/* A fully used, non-resized pointer array is already as compact as it
	   gets. */
	if (da_tag(data) == DATA_TAG_array_pointers && da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries && da(data,array_pointers)->pointer == da(data,array_pointers)->pointer_array) {
		goto do_nothing;
	}

	/* Flattening needs the total length to fit in int_default_t. */
	len_long = array_len(data);
	if (unlikely(!index_is_int(len_long))) {
		index_free(&len_long);
		goto do_nothing;
	}
	len = index_to_int(len_long);
	index_free(&len_long);

	/* Probe the first run to decide between a flat and a pointer array. */
	flat_type = NULL;
	index_from_int(&len_long, 0);
	array_onstack_iterate(fp, slot_1, &len_long, get_array_type, &flat_type);
	index_free(&len_long);

	if (!flat_type) {
		na = data_alloc_array_pointers_mayfail(len, 0, &sink pass_file_line);
		if (unlikely(!na))
			goto do_nothing;
		index_from_int(&len_long, 0);
		success = array_onstack_iterate(fp, slot_1, &len_long, get_array_pointers, na);
		index_free(&len_long);
		if (unlikely(!success)) {
			/* Collected entries hold references — drop them all. */
			pointer_dereference(pointer_data(na));
			goto do_nothing;
		}
	} else {
		/* clear=true: get_array_flat relies on a zeroed destination. */
		na = data_alloc_array_flat_mayfail(flat_type, len, 0, true, &sink pass_file_line);
		if (unlikely(!na))
			goto do_nothing;
		index_from_int(&len_long, 0);
		success = array_onstack_iterate(fp, slot_1, &len_long, get_array_flat, na);
		index_free(&len_long);
		if (unlikely(!success)) {
			data_free_r1(na);
			goto do_nothing;
		}
	}

	if (flags & OPCODE_FLAG_FREE_ARGUMENT)
		frame_free_and_clear(fp, slot_1);

try_to_flatten:
	/* If the destination slot is itself a flat array type of exactly this
	   size, copy the data into the frame and drop the heap object. */
	if (array_type->tag == TYPE_TAG_flat_array && da_tag(na) == DATA_TAG_array_flat) {
		struct flat_array_definition *fa = type_def(array_type,flat_array);
		if (fa->n_elements == da(na,array_flat)->n_used_entries) {
			memcpy(frame_var(fp, slot_r), da_array_flat(na), array_type->size);
			pointer_dereference(pointer_data(na));
			return POINTER_FOLLOW_THUNK_GO;
		}
	}

	frame_set_pointer(fp, slot_r, pointer_data(na));

	return POINTER_FOLLOW_THUNK_GO;

do_nothing:
	ipret_copy_variable(fp, slot_1, fp, slot_r, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	return POINTER_FOLLOW_THUNK_GO;
}
2547 void attr_fastcall ipret_prefetch_functions(struct data *function)
2549 frame_t x;
2550 for (x = 0; x < da(function,function)->local_directory_size; x++) {
2551 ajla_error_t sink;
2552 pointer_t *lfnp, lfn;
2553 lfnp = da(function,function)->local_directory[x];
2554 if (pointer_is_thunk(pointer_locked_read(lfnp))) {
2555 struct execution_control *ex;
2556 lfn = pointer_reference(lfnp);
2557 if (unlikely(!pointer_is_thunk(lfn)) || thunk_tag(pointer_get_thunk(lfn)) != THUNK_TAG_FUNCTION_CALL) {
2558 pointer_dereference(lfn);
2559 continue;
2561 ex = function_evaluate_prepare(&sink);
2562 if (likely(ex != NULL))
2563 function_evaluate_submit(ex, lfn, NULL, NULL);
2564 else
2565 pointer_dereference(lfn);
/*
 * Split one execution chain into two: the frames from top_fp down to (and
 * including) high move to a new stack with a new execution control, while
 * low continues on the old stack waiting on blackhole thunks for high's
 * return values.  Returns the new top frame, or NULL if the split could not
 * be performed (the caller then continues unsplit).
 */
static attr_noinline frame_s *ipret_break(frame_s *top_fp, frame_s *high, frame_s *low)
{
	frame_s *fp;
	struct execution_control *high_ex, *low_ex;
	struct data *high_function, *low_function;
	ajla_error_t sink;
	struct thunk *t;
	struct thunk **result;
	const code_t *ip;
	arg_t ia;

	/* Arguments may be borrowed from the caller's frame; once the stacks
	   separate they must hold owned references.  Convert every borrowed,
	   non-empty argument slot in the frames being split off. */
	fp = top_fp;
	do {
		struct data *function = get_frame(fp)->function;
		const struct local_arg *la = da(function,function)->args;
		for (ia = 0; ia < da(function,function)->n_arguments; ia++, la++) {
			frame_t slot;
			pointer_t ptr;
			if (!la->may_be_borrowed)
				continue;
			slot = la->slot;
			ptr = *frame_pointer(fp, slot);
			if (!pointer_is_empty(ptr) && !frame_test_and_set_flag(fp, slot))
				pointer_reference_owned(ptr);
		}
	} while ((fp = frame_up(fp)) != low);

	high_ex = frame_execution_control(high);

	/* Move [top_fp .. high] onto a fresh stack; "high" is updated to its
	   new location. */
	top_fp = stack_split(top_fp, low, &high, &sink);
	if (unlikely(!top_fp))
		goto err0;

	low_ex = execution_control_alloc(&sink);
	if (unlikely(!low_ex))
		goto err1;

	/* The old stack (ending at low) is now driven by low_ex. */
	low_ex->stack = frame_stack_bottom(low);
	low_ex->stack->ex = low_ex;
	low_ex->callback = high_ex->callback;

	/* Hand the externally visible thunk (and its waiters) over to low_ex;
	   high's results will be delivered through fresh blackholes below. */
	t = high_ex->thunk;
	low_ex->thunk = t;
	if (t) {
		address_lock(t, DEPTH_THUNK);
		t->u.function_call.u.execution_control = low_ex;
		list_take(&low_ex->wait_list, &high_ex->wait_list);
		address_unlock(t, DEPTH_THUNK);
	}

	high_ex->stack = frame_stack_bottom(high);
	high_ex->stack->ex = high_ex;

	high_function = get_frame(high)->function;
	result = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, da(high_function,function)->n_return_values, sizeof(struct thunk *), &sink);
	if (unlikely(!result))
		goto err2;

	if (unlikely(!thunk_alloc_blackhole(high_ex, da(get_frame(high)->function,function)->n_return_values, result, &sink)))
		goto err3;

	/* low resumes at high's return site: store one blackhole thunk into
	   each return-value slot encoded after the call instruction. */
	low_function = get_frame(low)->function;
	ip = da(low_function,function)->code + get_frame(high)->previous_ip;
	low_ex->current_frame = low;

	ia = 0;
	do {
		frame_t dst_slot;

		dst_slot = get_max_param(ip, 0);
		frame_set_pointer(low, dst_slot, pointer_thunk(result[ia]));

		ip += max_param_size(1) + 1;
	} while (++ia < da(high_function,function)->n_return_values);

	low_ex->current_ip = frame_ip(low, ip);

	mem_free(result);

	/*get_frame(low)->timestamp++;*/

	return top_fp;

err3:
	mem_free(result);
err2:
	/* NOTE(review): freeing high_ex here looks suspicious — high_ex is the
	   pre-existing execution control that the caller keeps using after a
	   NULL return, while low_ex is the one freshly allocated above; verify
	   whether this should be mem_free(low_ex). */
	mem_free(high_ex);
err1:
	stack_free(frame_stack_bottom(top_fp));
err0:
	return NULL;
}
/*
 * Walk the frame chain from fp upwards and split off breakable sections
 * (see ipret_break) so other workers can run the lower parts in parallel.
 * waiting != 0 means we are called for a chain that is blocked; in that
 * case NULL may be returned and the caller must not continue executing.
 * *something_breakable reports whether any frame could still be split later.
 * Returns the execution control to continue with, or NULL.
 */
static void * attr_hot_fastcall ipret_break_the_chain(frame_s *fp, const code_t *ip, int waiting, bool *something_breakable)
{
	frame_s *top_fp = fp;
	frame_s *prev_fp;
	struct execution_control *ex;
	/* Bump the tick on the current frame; the old value is compared with
	   the parents' timestamps to find long-running call boundaries. */
	timestamp_t t = get_frame(fp)->timestamp++;

	if (!waiting) {
		struct data *top_fn = get_frame(fp)->function;
		if (unlikely(profiling)) {
			profile_counter_t profiling_counter;
			profiling_counter = load_relaxed(&da(top_fn,function)->profiling_counter);
			profiling_counter += profile_sample();
			store_relaxed(&da(top_fn,function)->profiling_counter, profiling_counter);
		}
	}

	*something_breakable = false;

	/* Atomic sections must not be torn apart. */
	if (unlikely(frame_execution_control(fp)->atomic != 0))
		goto no_break;

	while (1) {
		prev_fp = frame_up(fp);
		if (frame_is_top(prev_fp))
			break;
		if (get_frame(fp)->mode == CALL_MODE_STRICT)
			goto skip_this;
		/* Break at spark calls, or at calls whose parent has not run
		   for more than break_ticks (unless strict calls are forced). */
		if (get_frame(fp)->mode == CALL_MODE_SPARK || (likely(!ipret_strict_calls) && (timestamp_t)(t - get_frame(prev_fp)->timestamp) > break_ticks)) {
			struct execution_control *low_ex, *high_ex;
			frame_s *new_fp;
			/*debug("break: %s - %s (%u - %u - %u)", da(get_frame(prev_fp)->function,function)->function_name, da(get_frame(fp)->function,function)->function_name, t, get_frame(prev_fp)->timestamp, get_frame(fp)->timestamp);*/
			/*debug("break %"PRIuMAX"", (uintmax_t)++break_count);*/
			new_fp = ipret_break(top_fp, fp, prev_fp);
			if (unlikely(!new_fp))
				break;
			low_ex = frame_execution_control(prev_fp);
			if (waiting > 0) {
				/* The split-off top part stays blocked; we take
				   over the freshly runnable lower part instead. */
				high_ex = frame_execution_control(new_fp);
				high_ex->current_frame = new_fp;
#if 0
				task_submit(low_ex, true);
				return NULL;
#else
				waiting = -1;
				goto cont_with_low_ex;
#endif
			}
#if 0
			task_submit(low_ex, true);
			top_fp = new_fp;
			break;
#else
			/* Submit the split-off top part to another worker and
			   continue scanning from the lower part. */
			high_ex = frame_execution_control(new_fp);
			high_ex->current_frame = new_fp;
			high_ex->current_ip = frame_ip(new_fp, ip);
			task_submit(high_ex, true);
cont_with_low_ex:
			prev_fp = top_fp = low_ex->current_frame;
			ip = da(get_frame(top_fp)->function,function)->code + low_ex->current_ip;
			/*t = get_frame(top_fp)->timestamp;*/
#endif
		} else {
			*something_breakable = true;
		}
skip_this:
		fp = prev_fp;
	}
no_break:
	if (waiting > 0)
		return NULL;
	ex = frame_execution_control(top_fp);
	ex->current_frame = top_fp;
	ex->current_ip = frame_ip(top_fp, ip);
	return ex;
}
2742 bool attr_fastcall ipret_break_waiting_chain(frame_s *fp, ip_t ip)
2744 bool something_breakable;
2745 struct execution_control *ex;
2747 ex = ipret_break_the_chain(fp, da(get_frame(fp)->function,function)->code + ip, 1, &something_breakable);
2748 if (ex)
2749 task_submit(ex, true);
2751 return something_breakable;
2754 void * attr_hot_fastcall ipret_tick(frame_s *fp, const code_t *ip)
2756 bool sink;
2757 struct execution_control *ex;
2759 waiting_list_break();
2761 ex = ipret_break_the_chain(fp, ip, 0, &sink);
2763 return task_schedule(ex);
2766 #endif