/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
shared_var bool ipret_strict_calls shared_init(false);
shared_var bool ipret_is_privileged shared_init(false);
shared_var bool ipret_compile shared_init(false);

static const timestamp_t break_ticks = 1;
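/*
 * eval_both: force evaluation of up to two thunked slots. From the calls
 * below it appears to register each thunk with the current execution
 * control (PF_PREPARE0/PF_PREPARE1) and then suspend the frame via
 * pointer_follow_wait(); NO_FRAME_T marks a slot that needs no evaluation.
 */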
void eval_both(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2)
	struct execution_control *ex = frame_execution_control(fp);
	if (slot_1 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_1)) {
			pointer_t *ptr = frame_pointer(fp, slot_1);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE0, fp, ip,
				SUBMIT_EX(ex_); goto brk1,
			execution_control_acquire(ex);
	if (slot_2 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_2)) {
			pointer_t *ptr = frame_pointer(fp, slot_2);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE1, fp, ip,
				SUBMIT_EX(ex_); goto brk2,
			execution_control_acquire(ex);
	pointer_follow_wait(fp, ip);
static void pointer_copy_owned(frame_s *fp, frame_t src_slot, frame_t dest_slot)
	if (dest_slot == src_slot)
	ptr = *frame_pointer(fp, src_slot);
	frame_free_and_set_pointer(fp, dest_slot, ptr);
	pointer_reference_owned(ptr);
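/*
 * Fill curried argument 'a' of a function_reference from a frame slot:
 * flat variables are copied by value, anything else is passed as a
 * (possibly dereferenced) pointer reference taken from the slot.
 */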
void attr_hot_fastcall ipret_fill_function_reference_from_slot(struct data *function_reference, arg_t a, frame_s *fp, frame_t slot, bool deref)
	const struct type *type;
	if (unlikely(!function_reference)) {
		frame_free_and_clear(fp, slot);
	ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "ipret_fill_function_reference_from_slot: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));
	if (frame_variable_is_flat(fp, slot)) {
		type = frame_get_type_of_local(fp, slot);
		data_fill_function_reference_flat(function_reference, a, type, frame_var(fp, slot));
	ptr = frame_get_pointer_reference(fp, slot, deref);
	data_fill_function_reference(function_reference, a, ptr);
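/*
 * build_thunk: allocate a function_reference with n_arguments curried
 * slots pointing directly at *fn_ptr and wrap it in a single-result
 * function-call thunk; on failure the reference is dropped,
 * *function_reference is cleared and an exception thunk is returned.
 */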
static struct thunk *build_thunk(pointer_t *fn_ptr, arg_t n_arguments, struct data **function_reference)
	struct thunk *result;
	*function_reference = data_alloc_function_reference_mayfail(n_arguments, &err pass_file_line);
	if (unlikely(!*function_reference))
	da(*function_reference,function_reference)->is_indirect = false;
	da(*function_reference,function_reference)->u.direct = fn_ptr;
	if (unlikely(!thunk_alloc_function_call(pointer_data(*function_reference), 1, &result, &err))) {
		data_dereference(*function_reference);
	*function_reference = NULL;
	return thunk_alloc_exception_error(err, NULL, NULL, NULL pass_file_line);
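/*
 * ipret_op_build_thunk: called when an operand of an operator opcode is
 * still a thunk. Depending on the strictness flags it either forces
 * evaluation (eval_both), propagates an already-failed operand, or looks
 * up the pcode implementation of the operator and builds a lazy
 * application thunk over the operand slots, storing it into slot_r.
 */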
static void *ipret_op_build_thunk(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
	struct data *function_reference;
	struct thunk *result;
	frame_t slot_1_eval = NO_FRAME_T;
	frame_t slot_2_eval = NO_FRAME_T;
	code = *ip % OPCODE_MODE_MULT;
	if (code == OPCODE_IS_EXCEPTION)
		strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
	if (code >= OPCODE_FIXED_OP + zero && code < OPCODE_FIXED_OP + OPCODE_FIXED_TYPE_MULT * TYPE_FIXED_N) {
		code_t op = (code - OPCODE_FIXED_OP) % OPCODE_FIXED_TYPE_MULT;
		if (op >= OPCODE_FIXED_OP_C && op < OPCODE_FIXED_OP_UNARY) {
			code -= OPCODE_FIXED_OP_C;
	if (code >= OPCODE_INT_OP && code < OPCODE_INT_OP + OPCODE_INT_TYPE_MULT * TYPE_INT_N) {
		code_t op = (code - OPCODE_INT_OP) % OPCODE_INT_TYPE_MULT;
		if (op >= OPCODE_INT_OP_C && op < OPCODE_INT_OP_UNARY) {
			code -= OPCODE_INT_OP_C;
	if (code >= OPCODE_REAL_OP && code < OPCODE_REAL_OP + OPCODE_REAL_TYPE_MULT * TYPE_REAL_N) {
		code_t op = (code - OPCODE_REAL_OP) % OPCODE_REAL_TYPE_MULT;
		if (op == OPCODE_REAL_OP_is_exception || op == OPCODE_REAL_OP_is_exception_alt1 || op == OPCODE_REAL_OP_is_exception_alt2)
			strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
	if (frame_test_flag(fp, slot_1) && pointer_is_thunk(*frame_pointer(fp, slot_1))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
			return POINTER_FOLLOW_THUNK_RETRY,
			if (strict_flag & FLAG_TESTING_FOR_EXCEPTION) {
				frame_free(fp, slot_r);
				*frame_slot(fp, slot_r, ajla_flat_option_t) = 1;
				return POINTER_FOLLOW_THUNK_GO;
			if (!(strict_flag & FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL)) {
				pointer_copy_owned(fp, slot_1, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			strict_flag |= FLAG_FIRST_EXCEPTION;
			slot_1_eval = slot_1; break
	if (slot_2 != NO_FRAME_T && !frame_t_is_const(slot_2) && frame_test_flag(fp, slot_2) && pointer_is_thunk(*frame_pointer(fp, slot_2))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_2),
			return POINTER_FOLLOW_THUNK_RETRY,
			if ((strict_flag & (FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL | FLAG_FIRST_EXCEPTION)) != FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL) {
				pointer_copy_owned(fp, slot_2, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			slot_2_eval = slot_2; break
	if (strict_flag & OPCODE_OP_FLAG_STRICT) {
		if (slot_1_eval != NO_FRAME_T || slot_2_eval != NO_FRAME_T) {
			eval_both(fp, ip, slot_1_eval, slot_2_eval);
			return POINTER_FOLLOW_THUNK_EXIT;
		return POINTER_FOLLOW_THUNK_RETRY;
	if (slot_2 == NO_FRAME_T) {
		flags |= PCODE_FIND_OP_UNARY;
		if (type_is_equal(frame_get_type_of_local(fp, slot_1), type_get_int(INT_DEFAULT_N)) &&
		    !type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)))
			flags |= PCODE_CONVERT_FROM_INT;
	if (code == OPCODE_IS_EXCEPTION)
		ex = pcode_find_is_exception(fp, ip, &fn_ptr);
	else if (code == OPCODE_EXCEPTION_CLASS || code == OPCODE_EXCEPTION_TYPE || code == OPCODE_EXCEPTION_AUX)
		ex = pcode_find_get_exception(code - OPCODE_EXCEPTION_CLASS, fp, ip, &fn_ptr);
		ex = pcode_find_op_function(frame_get_type_of_local(fp, slot_1), frame_get_type_of_local(fp, slot_r), code, flags, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, slot_2 != NO_FRAME_T ? 2 : 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
	if (slot_2 != NO_FRAME_T) {
		const struct type *type = frame_get_type_of_local(fp, slot_1);
		if (!frame_t_is_const(slot_2)) {
			ipret_fill_function_reference_from_slot(function_reference, 1, fp, slot_2, false);
			int32_t c = frame_t_get_const(slot_2);
#define f(n, s, u, sz, bits) \
			s cat(int_val_,bits);
			for_all_int(f, for_all_empty)
#define f(n, s, u, sz, bits) \
			s cat(sfixed_val_,bits); \
			u cat(ufixed_val_,bits);
			unsigned char flat[1 << (TYPE_INT_N - 1)];
#define f(n, s, u, sz, bits) \
			case TYPE_TAG_integer + n: \
				un.cat(int_val_,bits) = c; \
				if (unlikely(c != un.cat(int_val_,bits)))\
			case TYPE_TAG_fixed + 2 * n + TYPE_TAG_fixed_signed:\
				un.cat(sfixed_val_,bits) = c; \
				if (unlikely(c != un.cat(sfixed_val_,bits)))\
					internal(file_line, "ipret_op_build_thunk: invalid constant %ld for type %u", (long)c, type->tag);\
			case TYPE_TAG_fixed + 2 * n + TYPE_TAG_fixed_unsigned:\
				un.cat(ufixed_val_,bits) = c; \
				if (unlikely(c < 0) || unlikely((u)c != un.cat(ufixed_val_,bits)))\
					internal(file_line, "ipret_op_build_thunk: invalid constant %ld for type %u", (long)c, type->tag);\
				internal(file_line, "ipret_op_build_thunk: invalid type tag %u", type->tag);
			d = data_alloc_flat_mayfail(type->tag, un.flat, type->size, &err pass_file_line);
				data_fill_function_reference(function_reference, 1, pointer_error(err, NULL, NULL pass_file_line));
				data_fill_function_reference(function_reference, 1, pointer_data(d));
			d = data_alloc_longint_mayfail(32, &err pass_file_line);
				data_fill_function_reference(function_reference, 1, pointer_error(err, NULL, NULL pass_file_line));
			int32_t c = frame_t_get_const(slot_2);
			mpint_import_from_variable(&da(d,longint)->mp, int32_t, c);
			data_fill_function_reference(function_reference, 1, pointer_data(d));
	frame_free_and_set_pointer(fp, slot_r, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
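/*
 * ipret_unbox_value returns a small bitmask: UNBOX_THUNK if the slot still
 * holds an unevaluated thunk, UNBOX_DID_SOMETHING if a flat value was
 * copied out of a data block into the slot, UNBOX_LONGINT if the slot
 * references a long integer that cannot be flattened.
 */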
#define UNBOX_THUNK 1
#define UNBOX_DID_SOMETHING 2
#define UNBOX_LONGINT 4
static int attr_hot_fastcall ipret_unbox_value(frame_s *fp, const struct type *type, frame_t slot)
	ajla_assert(TYPE_IS_FLAT(type), (file_line, "ipret_unbox_value: non-flat type %u", type->tag));
	if (frame_test_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (pointer_is_thunk(ptr))
		if (da_tag(pointer_get_data(ptr)) == DATA_TAG_longint) {
			ajla_assert(TYPE_TAG_IS_INT(type->tag), (file_line, "ipret_unbox_value: unexpected longint, type %u", type->tag));
			return UNBOX_LONGINT;
		memcpy_fast(frame_var(fp, slot), da_flat(pointer_get_data(ptr)), type->size);
		frame_clear_flag(fp, slot);
		pointer_dereference(ptr);
		return UNBOX_DID_SOMETHING;
static bool test_and_copy_nan(frame_s attr_unused *fp, const code_t attr_unused *ip, unsigned char type_tag, frame_t attr_unused slot, frame_t attr_unused slot_r)
#define f(n, t, nt, pack, unpack) \
	case TYPE_TAG_real + n: { \
		barrier_aliasing(); \
		val = *frame_slot(fp, slot, t); \
		barrier_aliasing(); \
		if (unlikely(cat(isnan_,t)(val))) { \
			if (type_tag == frame_get_type_of_local(fp, slot_r)->tag) {\
				barrier_aliasing(); \
				*frame_slot(fp, slot_r, t) = val;\
				barrier_aliasing(); \
			frame_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NAN), fp, ip pass_file_line));\
	for_all_real(f, for_all_empty)
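/*
 * thunk_fixed_operator: slow path for fixed-point/real opcodes whose
 * operands were not flat. NaN operands short-circuit to a copied result
 * (or an AJLA_ERROR_NAN exception), remaining thunks are handed to
 * ipret_op_build_thunk, and POINTER_FOLLOW_THUNK_RETRY asks the caller to
 * re-run the flat fast path.
 */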
void * attr_hot_fastcall thunk_fixed_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
	const struct type *type;
	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert((TYPE_TAG_IS_FIXED(type->tag) || TYPE_TAG_IS_REAL(type->tag)) &&
		    (slot_2 == NO_FRAME_T || frame_t_is_const(slot_2) || frame_get_type_of_local(fp, slot_2) == type),
		    (file_line, "thunk_fixed_operator: invalid types on opcode %04x: %u, %u, %u",
		    slot_2 == NO_FRAME_T || frame_t_is_const(slot_2) ? type->tag : frame_get_type_of_local(fp, slot_2)->tag,
		    frame_get_type_of_local(fp, slot_r)->tag));
	converted = ipret_unbox_value(fp, type, slot_1);
	if (!frame_test_flag(fp, slot_1) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_1, slot_r)))
		return POINTER_FOLLOW_THUNK_GO;
	if (slot_2 != NO_FRAME_T && !frame_t_is_const(slot_2)) {
		converted |= ipret_unbox_value(fp, type, slot_2);
		if (!frame_test_flag(fp, slot_2) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_2, slot_r)))
			return POINTER_FOLLOW_THUNK_GO;
	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
	return POINTER_FOLLOW_THUNK_RETRY;
void * attr_hot_fastcall is_thunk_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag)
	ajla_flat_option_t value;
	pointer_t *ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		struct data *d = pointer_get_data(*ptr);
		if (da_tag(d) == DATA_TAG_flat) {
			value = data_is_nan(da(d,flat)->data_type, da_flat(d));
	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,
		value = 1; goto return_val,
	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, ajla_flat_option_t) = value;
	return POINTER_FOLLOW_THUNK_GO;
	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
void * attr_hot_fastcall thunk_get_param(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, unsigned mode)
	ajla_assert(slot_r == slot_1 || !frame_test_flag(fp, slot_r), (file_line, "thunk_get_param: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	if (unlikely(!frame_test_flag(fp, slot_1))) {
		const struct type *type;
		type = frame_get_type_of_local(fp, slot_1);
		if (likely(data_is_nan(type->tag, frame_var(fp, slot_1)))) {
			result = AJLA_ERROR_NAN;
			internal(file_line, "thunk_get_param: invalid mode %u", mode);
	ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		struct data *data = pointer_get_data(*ptr);
		if (likely(da_tag(data) == DATA_TAG_flat)) {
			if (likely(data_is_nan(da(data,flat)->data_type, da_flat(data)))) {
	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,
	ex = pointer_get_thunk(*ptr);
		result = ex->u.exception.err.error_class;
		result = ex->u.exception.err.error_type;
		result = ex->u.exception.err.error_aux;
		internal(file_line, "thunk_get_param: invalid mode %u", mode);
	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, int_default_t) = result;
	return POINTER_FOLLOW_THUNK_GO;
	frame_free_and_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
int_default_t ipret_system_property(int_default_t idx)
	int_default_t result;
	case SystemProperty_OS:
		result = SystemProperty_OS_DOS;
#elif defined(OS_OS2)
		result = SystemProperty_OS_OS2;
#elif defined(OS_CYGWIN)
		result = SystemProperty_OS_Cygwin;
#elif defined(OS_WIN32)
		result = SystemProperty_OS_Windows;
		result = SystemProperty_OS_Posix;
#if defined(OS_DOS) || defined(OS_OS2) || defined(OS_WIN32)
	case SystemProperty_Charset:
		result = os_charset();
#if defined(OS_WIN32)
	case SystemProperty_Charset_Console:
		result = os_charset_console();
	case SystemProperty_Fixed:
	case SystemProperty_Real:
	case SystemProperty_Privileged:
		result = ipret_is_privileged;
	case SystemProperty_Compile:
		result = ipret_compile;
void * attr_hot_fastcall ipret_get_system_property(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
	int_default_t idx, result;
	pointer_t result_ptr;
	ex = ipret_get_index(fp, ip, fp, slot_1, NULL, &idx_l, &result_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			frame_free_and_set_pointer(fp, slot_r, result_ptr);
			return POINTER_FOLLOW_THUNK_GO;
	idx = index_to_int(idx_l);
	result = ipret_system_property(idx);
	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, int_default_t) = result;
	return POINTER_FOLLOW_THUNK_GO;
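/*
 * Helpers for long (arbitrary-precision) integer arithmetic: int_to_mpint
 * widens a flat integer of the given width index, int_get_mpint returns
 * either the existing longint referenced by a slot or an mpint initialized
 * in *storage, and int_allocate_result tries to reuse a writable longint
 * already owned by the destination slot before allocating a new one.
 */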
static bool int_to_mpint(mpint_t *m, const unsigned char *ptr, unsigned intx, ajla_error_t *err)
#define f(n, s, u, sz, bits) \
		barrier_aliasing(); \
		ret = cat(mpint_init_from_,s)(m, *cast_ptr(const s *, ptr), err);\
		barrier_aliasing(); \
	for_all_int(f, for_all_empty)
	internal(file_line, "int_to_mpint: invalid type %d", intx);
static mpint_t * attr_hot_fastcall int_get_mpint(frame_s *fp, frame_t slot, mpint_t *storage, unsigned intx, ajla_error_t *err)
	if (frame_t_is_const(slot)) {
		if (unlikely(!mpint_init_from_int32_t(storage, frame_t_get_const(slot), err)))
	if (frame_test_flag(fp, slot)) {
		struct data *d = pointer_get_data(*frame_pointer(fp, slot));
		if (likely(da_tag(d) == DATA_TAG_longint))
			return &da(d,longint)->mp;
	flat = frame_var(fp, slot);
	if (unlikely(!int_to_mpint(storage, flat, intx, err)))
static struct data * attr_hot_fastcall int_allocate_result(frame_s *fp, frame_t slot, unsigned long bits, pointer_t *to_free, ajla_error_t *err)
	*to_free = pointer_empty();
	if (frame_test_and_set_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (!pointer_is_thunk(ptr)) {
			d = pointer_get_data(ptr);
			if (da_tag(d) == DATA_TAG_longint && data_is_writable(d))
	d = data_alloc_longint_mayfail(bits, err pass_file_line);
		frame_clear_flag(fp, slot);
	*frame_pointer(fp, slot) = pointer_data(d);
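/*
 * thunk_int_binary_operator: integer opcode fallback. Both operands are
 * unboxed; if a thunk remains, a lazy thunk is built instead. Otherwise
 * the operands are converted to mpints, do_op computes into a longint
 * result, and mpint_export tries to narrow the result back into the flat
 * slot, keeping the longint only when it does not fit.
 */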
void * attr_hot_fastcall thunk_int_binary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, mpint_t *res, ajla_error_t *err))
	const struct type *type;
	mpint_t *val1, *val2;
	type = frame_get_type_of_local(fp, slot_1);
	if (!frame_t_is_const(slot_2)) {
		ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
			    frame_get_type_of_local(fp, slot_2) == type &&
			    frame_get_type_of_local(fp, slot_r) == type,
			    (file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u, %u, %u",
			    frame_get_type_of_local(fp, slot_2)->tag,
			    frame_get_type_of_local(fp, slot_r)->tag));
		ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
			    frame_get_type_of_local(fp, slot_r) == type,
			    (file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u, %u",
			    frame_get_type_of_local(fp, slot_r)->tag));
	converted |= ipret_unbox_value(fp, type, slot_1);
	if (!frame_t_is_const(slot_2))
		converted |= ipret_unbox_value(fp, type, slot_2);
	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;
	intx = TYPE_TAG_IDX_INT(type->tag);
	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
	if (unlikely(!(result = int_allocate_result(fp, slot_r, maximum(mpint_estimate_bits(val1), mpint_estimate_bits(val2)), &to_free, &err))))
	if (unlikely(!do_op(val1, val2, &da(result,longint)->mp, &err)))
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	return POINTER_FOLLOW_THUNK_GO;
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall thunk_int_binary_logical_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, ajla_flat_option_t *res, ajla_error_t *err))
	const struct type *type;
	mpint_t *val1, *val2;
	type = frame_get_type_of_local(fp, slot_1);
	if (!frame_t_is_const(slot_2)) {
		ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
			    frame_get_type_of_local(fp, slot_2) == type &&
			    frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
			    (file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u, %u",
			    frame_get_type_of_local(fp, slot_2)->tag,
			    frame_get_type_of_local(fp, slot_r)->tag));
		ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
			    frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
			    (file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u",
			    frame_get_type_of_local(fp, slot_r)->tag));
	converted |= ipret_unbox_value(fp, type, slot_1);
	if (!frame_t_is_const(slot_2))
		converted |= ipret_unbox_value(fp, type, slot_2);
	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;
	intx = TYPE_TAG_IDX_INT(type->tag);
	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
	if (unlikely(!do_op(val1, val2, frame_slot(fp, slot_r, ajla_flat_option_t), &err))) {
	return POINTER_FOLLOW_THUNK_GO;
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall thunk_int_unary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, mpint_t *res, ajla_error_t *err))
	const struct type *type;
	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		    frame_get_type_of_local(fp, slot_r) == type,
		    (file_line, "thunk_int_unary_operator: invalid types on opcode %04x: %u, %u",
		    frame_get_type_of_local(fp, slot_r)->tag));
	converted |= ipret_unbox_value(fp, type, slot_1);
	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;
	intx = TYPE_TAG_IDX_INT(type->tag);
	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(result = int_allocate_result(fp, slot_r, mpint_estimate_bits(val1), &to_free, &err))))
	if (unlikely(!do_op(val1, &da(result,longint)->mp, &err)))
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	return POINTER_FOLLOW_THUNK_GO;
	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);
	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
ip_t attr_hot_fastcall ipret_int_ldc_long(frame_s *fp, frame_t slot, const code_t *ip)
	n_words_32 = get_unaligned_32(ip);
	n_words = (ip_t)n_words_32;
	ajla_assert(n_words == n_words_32, (file_line, "ipret_int_ldc_long: n_words overflow: %lu != %lu", (unsigned long)n_words_32, (unsigned long)n_words));
	d = data_alloc_longint_mayfail((n_words + sizeof(code_t) - 1) / sizeof(code_t), &err pass_file_line);
	if (unlikely(!mpint_import_from_code(&da(d,longint)->mp, ip + 2, n_words, &err))) {
	frame_set_pointer(fp, slot, pointer_data(d));
	frame_set_pointer(fp, slot, pointer_error(err, fp, ip pass_file_line));
pointer_t attr_fastcall convert_fixed_to_mpint(uintbig_t val, bool uns)
	d = data_alloc_longint_mayfail(sizeof(uintbig_t) * 8 + uns, &err pass_file_line);
	if (unlikely(!cat(mpint_set_from_,TYPE_INT_MAX)(&da(d,longint)->mp, (intbig_t)val, uns, &err))) {
	return pointer_data(d);
	return pointer_error(err, NULL, NULL pass_file_line);
pointer_t attr_fastcall convert_real_to_mpint(frame_s *fp, frame_t src_slot, const struct type *src_type)
	unsigned char attr_unused *src_ptr;
	d = data_alloc_longint_mayfail(0, &err pass_file_line);
	mpint_free(&da(d,longint)->mp);
#define re(n, rtype, ntype, pack, unpack) \
	case TYPE_TAG_real + n: { \
		if (unlikely(!cat(mpint_init_from_,rtype)(&da(d,longint)->mp, cast_ptr(rtype *, src_ptr), &err))) {\
	src_ptr = frame_var(fp, src_slot);
	switch (src_type->tag) {
		for_all_real(re, for_all_empty)
			internal(file_line, "convert_real_to_mpint: invalid type %u", src_type->tag);
	return pointer_data(d);
	return pointer_error(err, NULL, NULL pass_file_line);
static attr_noinline void convert_mpint_to_real(frame_s *fp, frame_t dest_slot, const struct type *dest_type, const mpint_t attr_unused *mp)
	unsigned char attr_unused *dest_ptr;
#define re(n, rtype, ntype, pack, unpack) \
	case TYPE_TAG_real + n: \
		cat(mpint_export_to_,rtype)(mp, cast_ptr(rtype *, dest_ptr));\
	dest_ptr = frame_var(fp, dest_slot);
	switch (dest_type->tag) {
		for_all_real(re, for_all_empty)
			internal(file_line, "convert_mpint_to_real: invalid type %u", dest_type->tag);
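/*
 * thunk_convert: conversion fallback between integer widths, fixed-point
 * types and reals when the source slot was not flat, typically because it
 * references a longint; values that do not fit the destination produce an
 * AJLA_ERROR_DOESNT_FIT exception.
 */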
void * attr_hot_fastcall thunk_convert(frame_s *fp, const code_t *ip, frame_t src_slot, frame_t dest_slot, unsigned strict_flag)
	const struct type *src_type;
	const struct type *dest_type;
	if (unlikely(src_slot == dest_slot))
		return POINTER_FOLLOW_THUNK_GO;
	src_type = frame_get_type_of_local(fp, src_slot);
	dest_type = frame_get_type_of_local(fp, dest_slot);
	converted = ipret_unbox_value(fp, src_type, src_slot);
	if (unlikely(converted == UNBOX_THUNK)) {
		return ipret_op_build_thunk(fp, ip, src_slot, NO_FRAME_T, dest_slot, strict_flag);
	if (converted == UNBOX_DID_SOMETHING) {
		return POINTER_FOLLOW_THUNK_RETRY;
	if (type_is_equal(dest_type, type_get_int(INT_DEFAULT_N))) {
		if (likely(TYPE_TAG_IS_INT(src_type->tag))) {
			if (likely(converted == UNBOX_LONGINT)) {
				goto convert_longint;
	if (unlikely(!type_is_equal(src_type, type_get_int(INT_DEFAULT_N))))
	if (TYPE_TAG_IS_FIXED(dest_type->tag)) {
		if (likely(converted == UNBOX_LONGINT)) {
			if (!TYPE_TAG_FIXED_IS_UNSIGNED(dest_type->tag))
				res = mpint_export(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
				res = mpint_export_unsigned(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
			frame_set_pointer(fp, dest_slot, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_DOESNT_FIT), fp, ip pass_file_line));
			return POINTER_FOLLOW_THUNK_GO;
	} else if (TYPE_TAG_IS_INT(dest_type->tag)) {
		if (likely(converted == UNBOX_LONGINT)) {
			goto convert_longint;
	} else if (likely(TYPE_TAG_IS_REAL(dest_type->tag))) {
		if (likely(converted == UNBOX_LONGINT)) {
			convert_mpint_to_real(fp, dest_slot, dest_type, &da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp);
			return POINTER_FOLLOW_THUNK_GO;
	d = pointer_get_data(*frame_pointer(fp, src_slot));
	if (mpint_export(&da(d,longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_INT(dest_type->tag), &err)) {
		return POINTER_FOLLOW_THUNK_GO;
	pointer_copy_owned(fp, src_slot, dest_slot);
	return POINTER_FOLLOW_THUNK_GO;
	internal(file_line, "thunk_convert: invalid conversion %u->%u (%d)", src_type->tag, dest_type->tag, converted);
	return POINTER_FOLLOW_THUNK_RETRY;
static bool attr_hot_fastcall ipret_unbox_bool(frame_s *fp, frame_t slot)
	if (frame_test_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (pointer_is_thunk(ptr))
		*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(pointer_get_data(ptr),option)->option;
		frame_clear_flag(fp, slot);
		pointer_dereference(ptr);
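/*
 * thunk_bool_operator: judging by the (val1 | val2) & 2 tests below, val1
 * and val2 apparently carry bit 2 set when the corresponding slot is still
 * a thunk, which lets the operator short-circuit where one known operand
 * already determines the result; otherwise both operands must be forced
 * (FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL is set for the non-short-circuiting
 * operators).
 */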
void * attr_hot_fastcall thunk_bool_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
	ajla_flat_option_t val1, val2, result;
	code %= OPCODE_MODE_MULT;
	code = (code - OPCODE_BOOL_OP) / OPCODE_BOOL_OP_MULT;
	if (!ipret_unbox_bool(fp, slot_1)) {
		val1 = *frame_slot(fp, slot_1, ajla_flat_option_t);
		case OPCODE_BOOL_OP_less:
		case OPCODE_BOOL_OP_less_equal:
		case OPCODE_BOOL_OP_and:
		case OPCODE_BOOL_OP_greater:
		case OPCODE_BOOL_OP_or:
		case OPCODE_BOOL_OP_greater_equal:
		case OPCODE_BOOL_OP_not:
	if (slot_2 != NO_FRAME_T && !ipret_unbox_bool(fp, slot_2)) {
		val2 = *frame_slot(fp, slot_2, ajla_flat_option_t);
		case OPCODE_BOOL_OP_less:
		case OPCODE_BOOL_OP_and:
		case OPCODE_BOOL_OP_less_equal:
		case OPCODE_BOOL_OP_or:
		case OPCODE_BOOL_OP_greater:
		case OPCODE_BOOL_OP_greater_equal:
	if (!((val1 | val2) & 2)) {
		return POINTER_FOLLOW_THUNK_RETRY;
	if (val1 & val2 & 2) {
		case OPCODE_BOOL_OP_and:
		case OPCODE_BOOL_OP_or:
		case OPCODE_BOOL_OP_less:
		case OPCODE_BOOL_OP_less_equal:
		case OPCODE_BOOL_OP_greater:
		case OPCODE_BOOL_OP_greater_equal:
			strict_flag |= FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL;
	return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);
	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall thunk_bool_jump(frame_s *fp, const code_t *ip, frame_t slot)
	pointer_t *thunk = frame_pointer(fp, slot);
	pointer_follow(thunk, true, data, PF_WAIT, fp, ip,
		return POINTER_FOLLOW_THUNK_EXCEPTION
	*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(data,option)->option;
	frame_clear_flag(fp, slot);
	data_dereference(data);
	return POINTER_FOLLOW_THUNK_RETRY;
void attr_fastcall ipret_copy_variable(frame_s *src_fp, frame_t src_slot, frame_s *dst_fp, frame_t dst_slot, bool deref)
	const struct type *src_type;
	ajla_assert(!frame_test_flag(dst_fp, dst_slot), (file_line, "ipret_copy_variable: flag already set for destination slot %"PRIuMAX"", (uintmax_t)dst_slot));
	src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		ptr = frame_get_pointer_reference(src_fp, src_slot, deref);
		const struct type *dst_type = frame_get_type_of_local(dst_fp, dst_slot);
		if (likely(TYPE_IS_FLAT(dst_type))) {
			ajla_assert(type_is_equal(src_type, dst_type), (file_line, "ipret_copy_variable: copying between different types (%u,%u,%u) -> (%u,%u,%u)", src_type->tag, src_type->size, src_type->align, dst_type->tag, dst_type->size, dst_type->align));
			memcpy_fast(frame_var(dst_fp, dst_slot), frame_var(src_fp, src_slot), dst_type->size);
		ptr = flat_to_data(src_type, frame_var(src_fp, src_slot));
	frame_set_pointer(dst_fp, dst_slot, ptr);
pointer_t ipret_copy_variable_to_pointer(frame_s *src_fp, frame_t src_slot, bool deref)
	const struct type *src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		return frame_get_pointer_reference(src_fp, src_slot, deref);
	return flat_to_data(src_type, frame_var(src_fp, src_slot));
struct data_compare_context {
	struct ipret_call_cache_arg *arguments;
	arg_t n_return_values;

static int saved_cache_compare(struct data *saved_cache, size_t idx, struct data_compare_context *ctx)
	size_t ptr_idx = idx * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values);
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		c = data_compare(da(saved_cache,saved_cache)->pointers[ptr_idx + ai], ctx->arguments[ai].ptr, &ctx->err);

static pointer_t *saved_cache_find(struct data *function, struct data_compare_context *ctx)
	struct data *saved_cache = da(function,function)->loaded_cache;
	size_t n_entries = da(saved_cache,saved_cache)->n_entries;
	/*debug("searching: %s, %zu", da(function,function)->function_name, n_entries);*/
	binary_search(size_t, n_entries, result, !(cmp = saved_cache_compare(saved_cache, result, ctx)), cmp < 0, return NULL);
	/*debug("found it: %s, %zu", da(function,function)->function_name, result);*/
	return &da(saved_cache,saved_cache)->pointers[result * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values) + (size_t)ctx->n_arguments];
static int cache_entry_compare(const struct tree_entry *e1, uintptr_t e2)
	struct cache_entry *c1 = get_struct(e1, struct cache_entry, entry);
	struct data_compare_context *ctx = cast_ptr(struct data_compare_context *, num_to_ptr(e2));
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		c = data_compare(c1->arguments[ai], ctx->arguments[ai].ptr, MEM_DONT_TRY_TO_FREE);
		if (c == -1 || c == 1)
		if (c == DATA_COMPARE_OOM) {
			ctx->err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
		internal(file_line, "cache_entry_compare: data_compare returned %d", c);
static void cache_evaluated(void *cookie, pointer_t ptr)
	struct cache_entry_return *ret = cookie;
	struct cache_entry *c;
	pointer_reference_owned(ptr);
	address_lock(c, DEPTH_THUNK);
	/*debug("cache evaluated: %p, pending %u", c, c->n_pending);*/
	if (likely(!--c->n_pending)) {
		wake_up_wait_list(&c->wait_list, address_get_mutex(c, DEPTH_THUNK), true);
	address_unlock(c, DEPTH_THUNK);
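/*
 * ipret_call_cache: memoized function call. Arguments are deep-evaluated,
 * then looked up first in the function's loaded (saved) cache and then in
 * the in-memory cache tree; on a miss a new cache_entry is created and the
 * call is submitted for asynchronous evaluation, with the current frame
 * parked on the entry's wait list until all return values are ready.
 */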
void * attr_fastcall ipret_call_cache(frame_s *fp, const code_t *ip, pointer_t *direct_function, struct ipret_call_cache_arg *arguments, frame_t *return_values, frame_t free_fn_slot)
	struct thunk *thunk = NULL;
	struct data_compare_context ctx;
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct cache_entry *c = NULL;	/* avoid warning */
	struct thunk **results;
	struct data *function_reference;
	struct data *function = pointer_get_data(*direct_function);
	arg_t n_arguments = da(function,function)->n_arguments;
	arg_t n_return_values = da(function,function)->n_return_values;
	bool save = *ip % OPCODE_MODE_MULT == OPCODE_CALL_SAVE || *ip % OPCODE_MODE_MULT == OPCODE_CALL_INDIRECT_SAVE;
	ctx.err.error_class = EC_NONE;
	for (ai = 0; ai < n_arguments; ai++)
		arguments[ai].need_free_ptr = false;
	for (ai = 0; ai < n_arguments; ai++) {
		struct function_argument *f_arg = arguments[ai].f_arg;
		if (unlikely(f_arg != NULL)) {
			if (f_arg->tag == TYPE_TAG_unknown) {
				ex = pointer_deep_eval(&f_arg->u.ptr, fp, ip, &thunk);
				arguments[ai].ptr = pointer_reference(&f_arg->u.ptr);
				arguments[ai].need_free_ptr = true;
				arguments[ai].ptr = flat_to_data(type_get_from_tag(f_arg->tag), f_arg->u.slot);
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			frame_t slot = arguments[ai].slot;
			if (!frame_variable_is_flat(fp, slot)) {
				ex = frame_pointer_deep_eval(fp, ip, slot, &thunk);
				arguments[ai].ptr = *frame_pointer(fp, slot);
				arguments[ai].ptr = flat_to_data(frame_get_type_of_local(fp, slot), frame_var(fp, slot));
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
	if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
		if (!rwmutex_supported) {
			address_write_lock(function);
			address_read_lock(function);
	if (ex != POINTER_FOLLOW_THUNK_GO)
	ctx.arguments = arguments;
	ctx.n_arguments = n_arguments;
	ctx.n_return_values = n_return_values;
	if (da(function,function)->loaded_cache) {
		/*debug("loaded cache: %s", da(function,function)->function_name);*/
		ctx.err.error_class = EC_NONE;
		results = saved_cache_find(function, &ctx);
		if (results || unlikely(ctx.err.error_class != EC_NONE)) {
			for (ai = 0; ai < n_arguments; ai++) {
				if (arguments[ai].deref) {
					frame_t slot = arguments[ai].slot;
					if (frame_test_and_clear_flag(fp, slot))
						pointer_dereference(*frame_pointer(fp, slot));
					*frame_pointer(fp, slot) = pointer_empty();
			if (unlikely(free_fn_slot != NO_FRAME_T)) {
				frame_free_and_clear(fp, free_fn_slot);
			if (unlikely(ctx.err.error_class != EC_NONE)) {
				for (ai = 0; ai < n_return_values; ai++) {
					frame_set_pointer(fp, return_values[ai], pointer_error(ctx.err, NULL, NULL pass_file_line));
			for (ai = 0; ai < n_return_values; ai++) {
				pointer_t ptr = pointer_reference(&results[ai]);
				frame_set_pointer(fp, return_values[ai], ptr);
			ex = POINTER_FOLLOW_THUNK_GO;
	if (!rwmutex_supported) {
		address_write_lock(function);
		address_read_lock(function);
	ctx.err.error_class = EC_NONE;
	e = tree_find_for_insert(&da(function,function)->cache, cache_entry_compare, ptr_to_num(&ctx), &ins);
	if (unlikely(ctx.err.error_class != EC_NONE)) {
		address_read_unlock(function);
		address_write_unlock(function);
		if (ctx.err.error_type == AJLA_ERROR_OUT_OF_MEMORY && mem_trim_cache()) {
			address_read_lock(function);
			address_write_lock(function);
		address_read_lock(function);
	c = get_struct(e, struct cache_entry, entry);
	address_lock(c, DEPTH_THUNK);
	address_read_unlock(function);
	address_write_lock(function);
	c = struct_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry, arguments, n_arguments, MEM_DONT_TRY_TO_FREE);
	c->returns = mem_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry_return *, 0, 0, n_return_values, sizeof(struct cache_entry_return), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c->returns))
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = NULL;
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = function_evaluate_prepare(MEM_DONT_TRY_TO_FREE);
		if (unlikely(!c->returns[ai].ex))
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(arguments[ai].ptr);
		c->arguments[ai] = arguments[ai].ptr;
	results = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, n_return_values, sizeof(struct thunk *), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!results))
	if (!(function_reference = data_alloc_function_reference_mayfail(n_arguments, MEM_DONT_TRY_TO_FREE pass_file_line)))
	da(function_reference,function_reference)->is_indirect = false;
	da(function_reference,function_reference)->u.direct = direct_function;
	if (unlikely(!thunk_alloc_function_call(pointer_data(function_reference), n_return_values, results, MEM_DONT_TRY_TO_FREE))) {
		data_free_r1(function_reference);
	for (ai = 0; ai < n_return_values; ai++) {
		if (c->returns[ai].ex)
			execution_control_free(c->returns[ai].ex);
	mem_free(c->returns);
	address_write_unlock(function);
	if (mem_trim_cache()) {
		address_write_lock(function);
	address_write_lock(function);
	ctx.err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(c->arguments[ai]);
		data_fill_function_reference(function_reference, ai, c->arguments[ai]);
	address_lock(c, DEPTH_THUNK);
	/*debug("evaluating: %p", c);*/
	c->n_pending = n_return_values;
	list_init(&c->wait_list);
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ce = c;
		function_evaluate_submit(c->returns[ai].ex, pointer_thunk(results[ai]), cache_evaluated, &c->returns[ai]);
	tree_insert_after_find(&c->entry, &ins);
	if (!c->save && unlikely(save))
	struct execution_control *exx;
	/*debug("waiting on %p, pending %u", c, c->n_pending);*/
	exx = frame_execution_control(fp);
	exx->wait[0].mutex_to_lock = address_get_mutex(c, DEPTH_THUNK);
	list_add(&c->wait_list, &exx->wait[0].wait_entry);
	address_unlock(c, DEPTH_THUNK);
	pointer_follow_wait(fp, ip);
	ex = POINTER_FOLLOW_THUNK_EXIT;
	address_unlock(c, DEPTH_THUNK);
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].deref) {
			frame_t slot = arguments[ai].slot;
			if (frame_test_and_clear_flag(fp, slot))
				pointer_dereference(*frame_pointer(fp, slot));
			*frame_pointer(fp, slot) = pointer_empty();
	if (unlikely(free_fn_slot != NO_FRAME_T)) {
		frame_free_and_clear(fp, free_fn_slot);
	if (likely(!thunk) && unlikely(ctx.err.error_class != EC_NONE))
		thunk = thunk_alloc_exception_error(ctx.err, NULL, NULL, NULL pass_file_line);
	for (ai = 0; ai < n_return_values; ai++) {
		if (likely(!thunk)) {
			ptr = pointer_reference(&c->returns[ai].ptr);
			thunk_reference(thunk);
			ptr = pointer_thunk(thunk);
		frame_set_pointer(fp, return_values[ai], ptr);
	ex = POINTER_FOLLOW_THUNK_GO;
	if (likely(!wr_lock))
		address_read_unlock(function);
		address_write_unlock(function);
	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].need_free_ptr) {
			pointer_dereference(arguments[ai].ptr);
	mem_free(arguments);
	mem_free(return_values);
static attr_noinline void * ipret_get_index_complicated(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
	if (likely(!frame_test_flag(fp_slot, slot))) {
		in = *frame_slot(fp_slot, slot, int_default_t);
		if (unlikely(in < 0)) {
			*thunk = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NEGATIVE_INDEX), fp, ip pass_position);
			return POINTER_FOLLOW_THUNK_EXCEPTION;
			*is_negative = true;
			return POINTER_FOLLOW_THUNK_GO;
		index_from_int_(idx, in pass_position);
		pointer_t *ptr = frame_pointer(fp_slot, slot);
		pointer_follow(ptr, true, d, PF_WAIT, fp, ip,
			thunk_reference(thunk_);
			*thunk = pointer_thunk(thunk_);
			return POINTER_FOLLOW_THUNK_EXCEPTION;
		if (da_tag(d) == DATA_TAG_flat) {
			ipret_unbox_value(fp_slot, type_get_int(INT_DEFAULT_N), slot);
		if (unlikely(mpint_negative(&da(d,longint)->mp))) {
		index_from_mp_(idx, &da(d,longint)->mp pass_position);
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_get_index(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
	if (likely(fp == fp_slot))
		ajla_assert(frame_get_type_of_local(fp_slot, slot)->tag == type_get_int(INT_DEFAULT_N)->tag, (file_line, "ipret_get_index: invalid type %u", (unsigned)frame_get_type_of_local(fp_slot, slot)->tag));
	if (likely(!frame_test_flag(fp_slot, slot))) {
		in = *frame_slot(fp_slot, slot, int_default_t);
		if (unlikely(in < 0))
		index_from_int_(idx, in pass_position);
		return ipret_get_index_complicated(fp, ip, fp_slot, slot, is_negative, idx, thunk pass_position);
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_record_load_create_thunk(frame_s *fp, const code_t *ip, frame_t record, frame_t record_slot, frame_t result_slot)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_RECORD_LOAD, record_slot, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, record, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_option_load_create_thunk(frame_s *fp, const code_t *ip, frame_t option, frame_t option_idx, frame_t result_slot)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_LOAD, option_idx, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, option, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall thunk_option_test(frame_s *fp, const code_t *ip, frame_t slot_1, ajla_option_t option, frame_t slot_r)
	struct data *function_reference;
	struct thunk *result;
	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_TEST, option, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
	frame_set_pointer(fp, slot_r, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall thunk_option_ord(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
	struct data *function_reference;
	struct thunk *result;
	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,
	ex = pcode_find_option_ord_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
	frame_set_pointer(fp, slot_r, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_array_load_create_thunk(frame_s *fp, const code_t *ip, frame_t array, frame_t index, frame_t result_slot)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_array_load_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array, false);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, index, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
static attr_noinline void *array_len_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t result_slot)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_array_len_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
static attr_noinline void *array_len_greater_than_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t length_slot, frame_t result_slot)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_array_len_greater_than_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, length_slot, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
static attr_noinline void *array_sub_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t end_slot, frame_t result_slot, unsigned flags)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_array_sub_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 3, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
	ipret_fill_function_reference_from_slot(function_reference, 2, fp, end_slot, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
static attr_noinline void *array_skip_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t result_slot, unsigned flags)
	struct data *function_reference;
	struct thunk *result;
	ex = pcode_find_array_skip_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
	frame_set_pointer(fp, result_slot, pointer_thunk(result));
	return POINTER_FOLLOW_THUNK_GO;
static bool array_resolve_thunk(frame_s *fp, frame_t slot)
	if (unlikely(frame_variable_is_flat(fp, slot)))
	ptr = frame_pointer(fp, slot);
	if (likely(frame_test_flag(fp, slot))) {
		if (unlikely(pointer_is_thunk(*ptr))) {
			if (thunk_is_finished(pointer_get_thunk(*ptr))) {
				pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
	if (da_tag(pointer_get_data(*ptr)) == DATA_TAG_array_incomplete) {
		if (unlikely(!frame_test_and_set_flag(fp, slot)))
			data_reference(pointer_get_data(*ptr));
		array_incomplete_collapse(ptr);
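/*
 * array_walk: walk a chain of DATA_TAG_array_incomplete segments until the
 * segment containing index *idx is found, subtracting each segment's
 * length from *idx along the way. POINTER_FOLLOW_THUNK_RETRY is reused to
 * signal "index out of range, not a retry" (see the comment below), and an
 * exception thunk encountered on the way is returned through *result.
 */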
static void *array_walk(frame_s *fp, const code_t *ip, pointer_t *ptr, array_index_t *idx, unsigned flags, pointer_t *result, pointer_t **can_modify)
	array_index_t this_len;
	pointer_follow(ptr, false, a, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		thunk_reference(thunk_);
		*result = pointer_thunk(thunk_);
		return POINTER_FOLLOW_THUNK_EXCEPTION
	if (unlikely(index_eq_int(*idx, 0))) {
		return POINTER_FOLLOW_THUNK_GO;
	if (unlikely(da_tag(a) == DATA_TAG_array_incomplete)) {
		if (!data_is_writable(a))
		this_len = array_len(pointer_get_data(da(a,array_incomplete)->first));
		if (!index_ge_index(this_len, *idx)) {
			index_sub(idx, this_len);
			index_free(&this_len);
			ptr = &da(a,array_incomplete)->next;
		index_free(&this_len);
		*result = pointer_data(a);
	this_len = array_len(a);
	if (unlikely(!index_ge_index(this_len, *idx))) {
		index_free(&this_len);
		return POINTER_FOLLOW_THUNK_RETRY;	/* this means index out of range, not a retry */
	index_free(&this_len);
	*result = pointer_data(a);
	return POINTER_FOLLOW_THUNK_GO;
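/*
 * ipret_array_len: sum the lengths of all segments of a possibly
 * incomplete array into slot_r; the count falls back to a longint when it
 * does not fit int_default_t, and a lazy array_len thunk is built when the
 * array is not yet evaluated and the opcode is not strict.
 */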
void * attr_hot_fastcall ipret_array_len(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, unsigned flags)
	const struct type *type;
	array_index_t idx_len;
	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)), (file_line, "ipret_array_len: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		*frame_slot(fp, slot_r, int_default_t) = flat_def->n_elements;
		return POINTER_FOLLOW_THUNK_GO;
	index_from_int(&idx_len, 0);
	ptr = frame_pointer(fp, slot_a);
	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	struct data *array_data;
	struct data *this_ptr;
	array_index_t this_len;
	pointer_follow(ptr, false, array_data, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		index_free(&idx_len);
		if (!(flags & OPCODE_OP_FLAG_STRICT)) {
			ex_ = array_len_create_thunk(fp, ip, slot_a, slot_r);
		index_free(&idx_len);
		thunk_reference(thunk_);
		frame_set_pointer(fp, slot_r, pointer_thunk(thunk_));
		return POINTER_FOLLOW_THUNK_GO
	if (da_tag(array_data) == DATA_TAG_array_incomplete)
		this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		this_ptr = array_data;
	this_len = array_len(this_ptr);
	if (unlikely(!index_add_(&idx_len, this_len, &err pass_file_line))) {
		index_free(&this_len);
		goto array_len_error;
	index_free(&this_len);
	if (da_tag(array_data) == DATA_TAG_array_incomplete) {
		ptr = &da(array_data,array_incomplete)->next;
	if (likely(!index_is_mp(idx_len))) {
		int_default_t len = index_to_int(idx_len);
		index_free(&idx_len);
		*frame_slot(fp, slot_r, int_default_t) = len;
	d = data_alloc_longint_mayfail(0, &err pass_file_line);
		index_free(&idx_len);
		frame_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	mpint_free(&da(d,longint)->mp);
	index_free_get_mp(&idx_len, &da(d,longint)->mp);
	frame_set_pointer(fp, slot_r, pointer_data(d));
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_array_len_greater_than(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_l, unsigned flags)
	const struct type *type;
	array_index_t remaining_length;
	pointer_t *can_modify;
	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_flat_option()), (file_line, "ipret_array_len_greater_than: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len_greater_than: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	ex = ipret_get_index(fp, ip, fp, slot_l, &neg, &remaining_length, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
	if (unlikely(neg)) {
	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(remaining_length, flat_def->n_elements))
	ptr = frame_pointer(fp, slot_a);
	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	index_add_int(&remaining_length, 1);
	ex = array_walk(fp, ip, ptr, &remaining_length, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_len_greater_than_create_thunk(fp, ip, slot_a, slot_l, slot_r);
		index_free(&remaining_length);
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;
	index_free(&remaining_length);
	return POINTER_FOLLOW_THUNK_GO;
	index_free(&remaining_length);
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
void * attr_hot_fastcall ipret_array_sub(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, frame_t slot_end, unsigned flags)
{
	array_index_t start, end, end_tmp;
	...
	pointer_t *can_modify;
	...
	struct data *acc = NULL;
	...
	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_sub: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	...
	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
		...
	ex = ipret_get_index(fp, ip, fp, slot_end, NULL, &end, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
		...
	if (!index_ge_index(end, start)) {
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line);
	...
	if (frame_variable_is_flat(fp, slot_a)) {
		...
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(end, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
		...
		st = index_to_int(start);
		len = index_to_int(end) - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
		...
		res_ptr = pointer_error(err, fp, ip pass_file_line);
		...
		memcpy(da_array_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);
	...
	ptr = frame_pointer(fp, slot_a);
	...
	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	...
	index_copy(&end_tmp, end);
	ex = array_walk(fp, ip, ptr, &end_tmp, flags, &res_ptr, &can_modify);
	index_free(&end_tmp);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
		...
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
		...
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_sub_create_thunk(fp, ip, slot_a, slot_start, slot_end, slot_r, flags);
	...
	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
	...
	struct data *array_data;
	struct data *this_ptr;
	array_index_t this_len;
	...
	if (pointer_is_thunk(*ptr)) {
		struct stack_trace st;
		stack_trace_init(&st);
		stack_trace_capture(&st, fp, ip, 20);
		stack_trace_print(&st);
		stack_trace_free(&st);
	...
	array_data = pointer_get_data(*ptr);
	...
	if (da_tag(array_data) == DATA_TAG_array_incomplete)
		this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
	else
		this_ptr = array_data;
	...
	this_len = array_len(this_ptr);
	...
	if (!index_ge_index(this_len, start)) {
		index_sub(&start, this_len);
		index_sub(&end, this_len);
		index_free(&this_len);
	...
	array_index_t this_step;
	...
	if (da_tag(array_data) == DATA_TAG_array_incomplete)
		da(array_data,array_incomplete)->first = pointer_empty();
	else
		*ptr = pointer_empty();
	...
	if (!index_ge_index(this_len, end)) {
		index_sub3(&this_step, this_len, start);
	...
	index_sub3(&this_step, end, start);
	...
	/*debug("start %lu, end %lu, this_len %lu, this_step %lu", start, end, this_len, this_step);*/
	index_free(&this_len);
	index_sub(&end, this_step);
	index_sub(&end, start);
	t = array_sub(this_ptr, start, this_step, can_modify != NULL, &err);
	index_from_int(&start, 0);
	...
	res_ptr = pointer_error(err, fp, ip pass_file_line);
	...
	acc = array_join(acc, t, &err);
	if (unlikely(!acc)) {
		res_ptr = pointer_error(err, fp, ip pass_file_line);
	...
	res_ptr = pointer_data(acc);
	...
	if (unlikely(da_tag(array_data) != DATA_TAG_array_incomplete)) {
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
	...
	ptr = &da(array_data,array_incomplete)->next;
	...
	data_dereference(acc);
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		else
			frame_free_and_clear(fp, slot_a);
	...
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
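
/*
 * ipret_array_skip: drop the first slot_start elements of the array in
 * slot_a and store the remainder in slot_r.  The flat case is a single
 * memcpy into a freshly allocated flat array; otherwise the array is walked
 * and trimmed with array_sub, re-chaining incomplete arrays as needed.
 */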
void * attr_hot_fastcall ipret_array_skip(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, unsigned flags)
{
	array_index_t start, len;
	...
	pointer_t *can_modify;
	...
	struct data *a, *ta, *ts;
	...
	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_skip: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	...
	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
		...
	if (frame_variable_is_flat(fp, slot_a)) {
		...
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(start, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto ret_free_start;
		...
		st = index_to_int(start);
		len = flat_def->n_elements - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
		...
		res_ptr = pointer_error(err, fp, ip pass_file_line);
		goto ret_free_start;
		...
		memcpy(da_array_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);
		goto ret_free_start;
	...
	ptr = frame_pointer(fp, slot_a);
	...
	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);
	...
	ex = array_walk(fp, ip, ptr, &start, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
			goto ret_free_start;
		...
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
			goto ret_free_start;
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_skip_create_thunk(fp, ip, slot_a, slot_start, slot_r, flags);
	...
	if (unlikely(index_eq_int(start, 0))) {
		pointer_reference_owned(res_ptr);
		goto ret_free_start;
	...
	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))
	...
	a = pointer_get_data(res_ptr);
	if (da_tag(a) == DATA_TAG_array_incomplete) {
		ta = pointer_get_data(da(a,array_incomplete)->first);
	...
	len = array_len(ta);
	index_sub(&len, start);
	if (unlikely(index_eq_int(len, 0)) && da_tag(a) == DATA_TAG_array_incomplete) {
		res_ptr = pointer_reference(&da(a,array_incomplete)->next);
		...
		goto ret_free_start;
	...
	*can_modify = pointer_empty();
	...
	ts = array_sub(ta, start, len, deref, &err);
	if (unlikely(!ts)) {
		res_ptr = pointer_error(err, fp, ip pass_file_line);
	...
	da(a,array_incomplete)->first = pointer_data(ts);
	res_ptr = pointer_data(a);
	...
	pointer_t next = pointer_reference(&da(a,array_incomplete)->next);
	inc = data_alloc_array_incomplete(ts, next, &err pass_file_line);
	if (unlikely(!inc)) {
		data_dereference(ts);
		pointer_dereference(next);
		res_ptr = pointer_error(err, fp, ip pass_file_line);
	...
	res_ptr = pointer_data(inc);
	...
	res_ptr = pointer_data(ts);
	...
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		else
			frame_free_and_clear(fp, slot_a);
	...
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
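
/*
 * ipret_array_append_pointers: append the array referenced by ptr_2 to the
 * one referenced by ptr_1, storing the resulting pointer in *ptr_r.
 * Thunks and incomplete arrays are handled by building a deferred call to
 * the pcode append function (fn_ptr); fully evaluated arrays are joined
 * directly with array_join.
 */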
static void attr_hot ipret_array_append_pointers(frame_s *fp, const code_t *ip, pointer_t *ptr_r, pointer_t ptr_1, pointer_t ptr_2, pointer_t *fn_ptr)
{
	...
	if (unlikely(fn_ptr != NULL)) {
		if (unlikely(pointer_is_thunk(ptr_1))) {
			struct thunk *result;
			struct data *function_reference;
			...
			if (pointer_is_thunk(ptr_1) && thunk_tag_volatile(pointer_get_thunk(ptr_1)) == THUNK_TAG_EXCEPTION) {
				...
				pointer_dereference(ptr_2);
				...
			result = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, ptr_1);
			data_fill_function_reference(function_reference, 1, ptr_2);
			*ptr_r = pointer_thunk(result);
			...
		} else if (likely(da_tag(pointer_get_data(ptr_1)) == DATA_TAG_array_incomplete)) {
			...
			struct thunk *thunk;
			struct data *function_reference, *result;
			...
			array_incomplete_decompose(pointer_get_data(ptr_1), &first, &last);
			...
			thunk = build_thunk(fn_ptr, 2, &function_reference);
			data_fill_function_reference(function_reference, 0, last);
			data_fill_function_reference(function_reference, 1, ptr_2);
			...
			result = data_alloc_array_incomplete(first, pointer_thunk(thunk), &err pass_file_line);
			if (unlikely(!result)) {
				data_dereference(first);
				pointer_dereference(pointer_thunk(thunk));
				*ptr_r = pointer_error(err, fp, ip pass_file_line);
			...
			*ptr_r = pointer_data(result);
			...
	if (unlikely(array_is_empty(pointer_get_data(ptr_1)))) {
		...
		pointer_dereference(ptr_1);
		...
	if (unlikely(pointer_is_thunk(ptr_2)) || unlikely(da_tag(pointer_get_data(ptr_2)) == DATA_TAG_array_incomplete)) {
		struct data *result;
		result = data_alloc_array_incomplete(pointer_get_data(ptr_1), ptr_2, &err pass_file_line);
		if (unlikely(!result)) {
			pointer_dereference(ptr_1);
			pointer_dereference(ptr_2);
			*ptr_r = pointer_error(err, fp, ip pass_file_line);
		...
		*ptr_r = pointer_data(result);
		if (!pointer_is_thunk(ptr_2))
			array_incomplete_collapse(ptr_r);
	...
	d = array_join(pointer_get_data(ptr_1), pointer_get_data(ptr_2), &err);
	...
	*ptr_r = pointer_error(err, fp, ip pass_file_line);
	...
	*ptr_r = pointer_data(d);
}
void * attr_hot_fastcall ipret_array_append(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	pointer_t *fn_ptr = NULL;
	pointer_t ptr_1, ptr_2, *ptr_r;
	...
	if (unlikely(array_resolve_thunk(fp, slot_1))) {
		void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
		if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		...
	array_resolve_thunk(fp, slot_2);
	...
	ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	ptr_2 = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);
	...
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	...
	frame_set_flag(fp, slot_r);
	ptr_r = frame_pointer(fp, slot_r);
	...
	ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);
	...
	return POINTER_FOLLOW_THUNK_GO;
}
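
/*
 * ipret_array_append_one_flat: fast path for appending one flat element
 * (slot_2) to a flat array owned by slot_1.  If the element can be stored
 * in place (or after growing the flat array), the updated array is moved to
 * slot_r; otherwise the code falls back to ipret_array_append_one.
 */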
void * attr_hot_fastcall ipret_array_append_one_flat(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	const int minimum_size = 16;
	...
	const struct type *type;
	if (unlikely(!(flags & OPCODE_FLAG_FREE_ARGUMENT)))
	...
	if (unlikely(!frame_variable_is_flat(fp, slot_2)))
	...
	if (unlikely(!frame_test_flag(fp, slot_1)))
	...
	ptr = *frame_pointer(fp, slot_1);
	if (unlikely(pointer_is_thunk(ptr)))
	...
	data = pointer_get_data(ptr);
	if (unlikely(da_tag(data) != DATA_TAG_array_flat)) {
		if (likely(da_tag(data) == DATA_TAG_array_pointers) && likely(!da(data,array_pointers)->n_used_entries)) {
			type = frame_get_type_of_local(fp, slot_2);
			data = data_alloc_array_flat_mayfail(type, minimum_size, 0, false, &sink pass_file_line);
			if (unlikely(!data))
			...
			pointer_dereference(ptr);
		...
	if (unlikely(!data_is_writable(data)))
	...
	if (unlikely(da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries)) {
		struct data *new_data;
		int_default_t new_size = (uint_default_t)da(data,array_flat)->n_used_entries * 2;
		new_size = maximum(new_size, minimum_size);
		if (unlikely(new_size < 0) || unlikely(new_size <= da(data,array_flat)->n_used_entries))
		...
		type = da(data,array_flat)->type;
		new_data = data_alloc_array_flat_mayfail(type, new_size, da(data,array_flat)->n_used_entries, false, &sink pass_file_line);
		if (unlikely(!new_data))
		...
		memcpy(da_array_flat(new_data), da_array_flat(data), da(data,array_flat)->n_used_entries * type->size);
	...
	type = da(data,array_flat)->type;
	memcpy_fast(da_array_flat(data) + (size_t)da(data,array_flat)->n_used_entries * type->size, frame_var(fp, slot_2), type->size);
	da(data,array_flat)->n_used_entries++;
	...
	frame_clear_flag(fp, slot_1);
	*frame_pointer(fp, slot_1) = pointer_empty();
	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);
	...
	return POINTER_FOLLOW_THUNK_GO;
	...
	return ipret_array_append_one(fp, ip, slot_r, slot_1, slot_2, flags);
}
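
/*
 * ipret_array_append_one: append a single element (slot_2) to the array in
 * slot_1 and store the result in slot_r.  A writable DATA_TAG_array_pointers
 * array is extended in place; otherwise the element is wrapped in a
 * one-element array and handed to ipret_array_append_pointers.
 */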
void * attr_hot_fastcall ipret_array_append_one(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, frame_t slot_2, unsigned flags)
{
	...
	pointer_t *fn_ptr = NULL;
	pointer_t ptr_1, ptr_2, ptr_e, *ptr_r;
	...
	if (unlikely(array_resolve_thunk(fp, slot_1))) {
		void *ex = pcode_find_array_append_function(fp, ip, &fn_ptr);
		if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))
		...
	ptr_1 = ipret_copy_variable_to_pointer(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	...
	if (frame_variable_is_flat(fp, slot_2)) {
		const struct type *type = frame_get_type_of_local(fp, slot_2);
		data = data_alloc_array_flat_mayfail(type, 1, 1, false, &err pass_file_line);
		if (unlikely(!data))
		...
		memcpy_fast(da_array_flat(data), frame_var(fp, slot_2), type->size);
		ptr_2 = pointer_data(data);
	...
	ptr_e = ipret_copy_variable_to_pointer(fp, slot_2, (flags & OPCODE_FLAG_FREE_ARGUMENT_2) != 0);
	...
	if (unlikely(pointer_is_thunk(ptr_1)))
	...
	data = pointer_get_data(ptr_1);
	if (unlikely(da_tag(data) != DATA_TAG_array_pointers))
	...
	if (unlikely(!data_is_writable(data)))
	...
	if (unlikely(da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries)) {
		struct data *new_data;
		size_t new_size = (size_t)da(data,array_pointers)->n_used_entries * 2;
		new_size = maximum(new_size, 16);
		if (unlikely(new_size <= (size_t)da(data,array_pointers)->n_used_entries))
		...
		new_data = data_alloc_array_pointers_mayfail(new_size, da(data,array_pointers)->n_used_entries, &err pass_file_line);
		if (unlikely(!new_data))
		...
		memcpy(da(new_data,array_pointers)->pointer, da(data,array_pointers)->pointer, da(data,array_pointers)->n_used_entries * sizeof(pointer_t));
	...
	da(data,array_pointers)->pointer[da(data,array_pointers)->n_used_entries++] = ptr_e;
	...
	frame_set_flag(fp, slot_r);
	*frame_pointer(fp, slot_r) = pointer_data(data);
	...
	return POINTER_FOLLOW_THUNK_GO;
	...
	data = data_alloc_array_pointers_mayfail(1, 1, &err pass_file_line);
	if (likely(data != NULL)) {
		da(data,array_pointers)->pointer[0] = ptr_e;
		ptr_2 = pointer_data(data);
	...
	pointer_dereference(ptr_e);
	ptr_2 = pointer_error(err, fp, ip pass_file_line);
	...
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_append_one: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));
	frame_set_flag(fp, slot_r);
	ptr_r = frame_pointer(fp, slot_r);
	...
	ipret_array_append_pointers(fp, ip, ptr_r, ptr_1, ptr_2, fn_ptr);
	...
	return POINTER_FOLLOW_THUNK_GO;
}
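
/*
 * Callbacks for array_onstack_iterate, used by ipret_array_flatten below:
 * get_array_pointers collects element pointers into the
 * DATA_TAG_array_pointers array passed as the context.
 */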
static int_default_t get_array_pointers(unsigned char *flat, const struct type attr_unused *type, int_default_t attr_unused n_elements, pointer_t *ptr, void *context)
{
	struct data *na = context;
	if (unlikely(flat != NULL))
	...
	ajla_assert(da(na,array_pointers)->n_used_entries < da(na,array_pointers)->n_allocated_entries, (file_line, "get_array_pointers: array overrun"));
	da(na,array_pointers)->pointer[da(na,array_pointers)->n_used_entries++] = pointer_reference(ptr);
	...
}
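
/* get_array_flat: copy flat element data into the flat array passed as the context. */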
static int_default_t get_array_flat(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t attr_unused *ptr, void *context)
{
	...
	struct data *na = context;
	unsigned char *dest;
	if (unlikely(!flat))
	...
	ajla_assert(da(na,array_flat)->n_used_entries + n_elements <= da(na,array_flat)->n_allocated_entries, (file_line, "get_array_flat: array overrun"));
	size = (size_t)n_elements * type->size;
	dest = da_array_flat(na) + type->size * (size_t)da(na,array_flat)->n_used_entries;
	for (i = 0; i < size; i++, flat++, dest++) {
		...
	da(na,array_flat)->n_used_entries += n_elements;
	...
}
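
/* get_array_type: record the element type of the iterated array into the context pointer. */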
static int_default_t get_array_type(unsigned char *flat, const struct type *type, int_default_t attr_unused n_elements, pointer_t attr_unused *ptr, void *context)
{
	...
	*cast_ptr(const struct type **, context) = type;
	...
}
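
/*
 * ipret_array_flatten: convert the possibly fragmented array in slot_1 into
 * a single contiguous array (flat if the element type allows it) and store
 * it in slot_r, copying directly into the frame when slot_r has a flat
 * array type of matching size.
 */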
void * attr_fastcall ipret_array_flatten(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_1, unsigned flags)
{
	...
	struct data *data, *na;
	array_index_t len_long;
	...
	const struct type *flat_type;
	const struct type *array_type = frame_get_type_of_local(fp, slot_r);
	...
	if (frame_variable_is_flat(fp, slot_1))
	...
	ptr = frame_pointer(fp, slot_1);
	...
	pointer_follow(ptr, false, data, PF_WAIT, fp, ip,
		...
	if (unlikely(da_tag(data) == DATA_TAG_array_incomplete)) {
		ptr = &da(data,array_incomplete)->next;
	...
	array_resolve_thunk(fp, slot_1);
	...
	data = pointer_get_data(*frame_pointer(fp, slot_1));
	if (da_tag(data) == DATA_TAG_array_flat && da(data,array_flat)->n_used_entries == da(data,array_flat)->n_allocated_entries) {
		if (array_type->tag == TYPE_TAG_flat_array) {
			pointer_t p = frame_get_pointer_reference(fp, slot_1, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
			na = pointer_get_data(p);
			goto try_to_flatten;
		...
	if (da_tag(data) == DATA_TAG_array_pointers && da(data,array_pointers)->n_used_entries == da(data,array_pointers)->n_allocated_entries && da(data,array_pointers)->pointer == da(data,array_pointers)->pointer_array) {
		...
	len_long = array_len(data);
	if (unlikely(!index_is_int(len_long))) {
		index_free(&len_long);
		...
	len = index_to_int(len_long);
	index_free(&len_long);
	...
	index_from_int(&len_long, 0);
	array_onstack_iterate(fp, slot_1, &len_long, get_array_type, &flat_type);
	index_free(&len_long);
	...
	na = data_alloc_array_pointers_mayfail(len, 0, &sink pass_file_line);
	...
	index_from_int(&len_long, 0);
	success = array_onstack_iterate(fp, slot_1, &len_long, get_array_pointers, na);
	index_free(&len_long);
	if (unlikely(!success)) {
		pointer_dereference(pointer_data(na));
	...
	na = data_alloc_array_flat_mayfail(flat_type, len, 0, true, &sink pass_file_line);
	...
	index_from_int(&len_long, 0);
	success = array_onstack_iterate(fp, slot_1, &len_long, get_array_flat, na);
	index_free(&len_long);
	if (unlikely(!success)) {
	...
	if (flags & OPCODE_FLAG_FREE_ARGUMENT)
		frame_free_and_clear(fp, slot_1);
	...
	if (array_type->tag == TYPE_TAG_flat_array && da_tag(na) == DATA_TAG_array_flat) {
		struct flat_array_definition *fa = type_def(array_type,flat_array);
		if (fa->n_elements == da(na,array_flat)->n_used_entries) {
			memcpy(frame_var(fp, slot_r), da_array_flat(na), array_type->size);
			pointer_dereference(pointer_data(na));
			return POINTER_FOLLOW_THUNK_GO;
	...
	frame_set_pointer(fp, slot_r, pointer_data(na));
	...
	return POINTER_FOLLOW_THUNK_GO;
	...
	ipret_copy_variable(fp, slot_1, fp, slot_r, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	return POINTER_FOLLOW_THUNK_GO;
}
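
/*
 * ipret_prefetch_functions: walk the local function directory of "function"
 * and submit background evaluation of any entries that are still
 * unevaluated function-call thunks.
 */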
void attr_fastcall ipret_prefetch_functions(struct data *function)
{
	...
	for (x = 0; x < da(function,function)->local_directory_size; x++) {
		...
		pointer_t *lfnp, lfn;
		lfnp = da(function,function)->local_directory[x];
		if (pointer_is_thunk(pointer_locked_read(lfnp))) {
			struct execution_control *ex;
			lfn = pointer_reference(lfnp);
			if (unlikely(!pointer_is_thunk(lfn)) || thunk_tag(pointer_get_thunk(lfn)) != THUNK_TAG_FUNCTION_CALL) {
				pointer_dereference(lfn);
				...
			ex = function_evaluate_prepare(&sink);
			if (likely(ex != NULL))
				function_evaluate_submit(ex, lfn, NULL, NULL);
			else
				pointer_dereference(lfn);
	...
}
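
/*
 * ipret_break: split the stack between the frames "high" and "low" so the
 * two parts can run under separate execution controls.  Borrowed arguments
 * are re-referenced, blackhole thunks are allocated for the return values
 * of "high" and stored into the corresponding slots of "low", and the new
 * execution control for the lower part takes over the existing wait list.
 */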
static attr_noinline frame_s *ipret_break(frame_s *top_fp, frame_s *high, frame_s *low)
{
	...
	struct execution_control *high_ex, *low_ex;
	struct data *high_function, *low_function;
	...
	struct thunk **result;
	...
	struct data *function = get_frame(fp)->function;
	const struct local_arg *la = da(function,function)->args;
	for (ia = 0; ia < da(function,function)->n_arguments; ia++, la++) {
		...
		if (!la->may_be_borrowed)
		...
		ptr = *frame_pointer(fp, slot);
		if (!pointer_is_empty(ptr) && !frame_test_and_set_flag(fp, slot))
			pointer_reference_owned(ptr);
	...
	} while ((fp = frame_up(fp)) != low);
	...
	high_ex = frame_execution_control(high);
	...
	top_fp = stack_split(top_fp, low, &high, &sink);
	if (unlikely(!top_fp))
	...
	low_ex = execution_control_alloc(&sink);
	if (unlikely(!low_ex))
	...
	low_ex->stack = frame_stack_bottom(low);
	low_ex->stack->ex = low_ex;
	low_ex->callback = high_ex->callback;
	...
	address_lock(t, DEPTH_THUNK);
	t->u.function_call.u.execution_control = low_ex;
	list_take(&low_ex->wait_list, &high_ex->wait_list);
	address_unlock(t, DEPTH_THUNK);
	...
	high_ex->stack = frame_stack_bottom(high);
	high_ex->stack->ex = high_ex;
	...
	high_function = get_frame(high)->function;
	result = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, da(high_function,function)->n_return_values, sizeof(struct thunk *), &sink);
	if (unlikely(!result))
	...
	if (unlikely(!thunk_alloc_blackhole(high_ex, da(get_frame(high)->function,function)->n_return_values, result, &sink)))
	...
	low_function = get_frame(low)->function;
	ip = da(low_function,function)->code + get_frame(high)->previous_ip;
	low_ex->current_frame = low;
	...
	dst_slot = get_max_param(ip, 0);
	frame_set_pointer(low, dst_slot, pointer_thunk(result[ia]));
	...
	ip += max_param_size(1) + 1;
	} while (++ia < da(high_function,function)->n_return_values);
	...
	low_ex->current_ip = frame_ip(low, ip);
	...
	/*get_frame(low)->timestamp++;*/
	...
	stack_free(frame_stack_bottom(top_fp));
	...
}
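
/*
 * ipret_break_the_chain: walk up the call chain from fp and, when a caller
 * frame is old enough (or the callee runs in spark mode), break the stack
 * there with ipret_break so the caller can be resumed as a separate task.
 * Returns the execution control for the (possibly new) top frame.
 */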
static void * attr_hot_fastcall ipret_break_the_chain(frame_s *fp, const code_t *ip, int waiting, bool *something_breakable)
{
	frame_s *top_fp = fp;
	...
	struct execution_control *ex;
	timestamp_t t = get_frame(fp)->timestamp++;
	...
	struct data *top_fn = get_frame(fp)->function;
	if (unlikely(profiling)) {
		profile_counter_t profiling_counter;
		profiling_counter = load_relaxed(&da(top_fn,function)->profiling_counter);
		profiling_counter += profile_sample();
		store_relaxed(&da(top_fn,function)->profiling_counter, profiling_counter);
	...
	*something_breakable = false;
	...
	if (unlikely(frame_execution_control(fp)->atomic != 0))
	...
	prev_fp = frame_up(fp);
	if (frame_is_top(prev_fp))
	...
	if (get_frame(fp)->mode == CALL_MODE_STRICT)
	...
	if (get_frame(fp)->mode == CALL_MODE_SPARK || (likely(!ipret_strict_calls) && (timestamp_t)(t - get_frame(prev_fp)->timestamp) > break_ticks)) {
		struct execution_control *low_ex, *high_ex;
		...
		/*debug("break: %s - %s (%u - %u - %u)", da(get_frame(prev_fp)->function,function)->function_name, da(get_frame(fp)->function,function)->function_name, t, get_frame(prev_fp)->timestamp, get_frame(fp)->timestamp);*/
		/*debug("break %"PRIuMAX"", (uintmax_t)++break_count);*/
		new_fp = ipret_break(top_fp, fp, prev_fp);
		if (unlikely(!new_fp))
		...
		low_ex = frame_execution_control(prev_fp);
		...
		high_ex = frame_execution_control(new_fp);
		high_ex->current_frame = new_fp;
		...
		task_submit(low_ex, true);
		...
		goto cont_with_low_ex;
		...
		task_submit(low_ex, true);
		...
		high_ex = frame_execution_control(new_fp);
		high_ex->current_frame = new_fp;
		high_ex->current_ip = frame_ip(new_fp, ip);
		task_submit(high_ex, true);
		...
		prev_fp = top_fp = low_ex->current_frame;
		ip = da(get_frame(top_fp)->function,function)->code + low_ex->current_ip;
		/*t = get_frame(top_fp)->timestamp;*/
	...
	*something_breakable = true;
	...
	ex = frame_execution_control(top_fp);
	ex->current_frame = top_fp;
	ex->current_ip = frame_ip(top_fp, ip);
	...
}
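
/*
 * ipret_break_waiting_chain: break the chain for a frame that is waiting,
 * submit the resulting execution control, and report whether the chain
 * still contains breakable frames.
 */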
bool attr_fastcall ipret_break_waiting_chain(frame_s *fp, ip_t ip)
{
	bool something_breakable;
	struct execution_control *ex;
	...
	ex = ipret_break_the_chain(fp, da(get_frame(fp)->function,function)->code + ip, 1, &something_breakable);
	...
	task_submit(ex, true);
	...
	return something_breakable;
}
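
/*
 * ipret_tick: handle an interpreter tick: break the current call chain and
 * reschedule the resulting execution control.
 */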
void * attr_hot_fastcall ipret_tick(frame_s *fp, const code_t *ip)
{
	...
	struct execution_control *ex;
	...
	waiting_list_break();
	...
	ex = ipret_break_the_chain(fp, ip, 0, &sink);
	...
	return task_schedule(ex);