/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
shared_var bool ipret_strict_calls shared_init(false);
shared_var bool ipret_is_privileged shared_init(false);
shared_var bool ipret_compile shared_init(false);

static const timestamp_t break_ticks = 1;
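
/*
 * Force evaluation of the thunks referenced by up to two slots (either slot
 * may be NO_FRAME_T).  Each non-flat slot is followed with PF_PREPARE0 or
 * PF_PREPARE1, the execution control is acquired for every submitted
 * evaluation, and the caller finally parks itself with pointer_follow_wait().
 */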
void eval_both(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2)
{
	struct execution_control *ex = frame_execution_control(fp);
	if (slot_1 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_1)) {
			pointer_t *ptr = frame_pointer(fp, slot_1);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE0, fp, ip,
				SUBMIT_EX(ex_); goto brk1,
			execution_control_acquire(ex);
	if (slot_2 != NO_FRAME_T) {
		if (!frame_variable_is_flat(fp, slot_2)) {
			pointer_t *ptr = frame_pointer(fp, slot_2);
			struct data attr_unused *result;
			pointer_follow(ptr, true, result, PF_PREPARE1, fp, ip,
				SUBMIT_EX(ex_); goto brk2,
			execution_control_acquire(ex);
	pointer_follow_wait(fp, ip);
}
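
/*
 * Copy the owned pointer from src_slot to dest_slot within one frame, taking
 * an extra reference so that both slots own the pointer afterwards.
 */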
static void pointer_copy_owned(frame_s *fp, frame_t src_slot, frame_t dest_slot)
{
	if (dest_slot == src_slot)
	ptr = *frame_pointer(fp, src_slot);
	frame_free_and_set_pointer(fp, dest_slot, ptr);
	pointer_reference_owned(ptr);
}
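
/*
 * Store the value of a frame slot into curried-argument position 'a' of a
 * function reference.  Flat slots are copied by value; non-flat slots are
 * stored as a (possibly dereferenced) pointer reference.  A NULL
 * function_reference indicates a failed allocation earlier on, in which case
 * the slot is only freed.
 */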
void attr_hot_fastcall ipret_fill_function_reference_from_slot(struct data *function_reference, arg_t a, frame_s *fp, frame_t slot, bool deref)
{
	const struct type *type;

	if (unlikely(!function_reference)) {
		frame_free_and_clear(fp, slot);

	ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "ipret_fill_function_reference_from_slot: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));

	if (frame_variable_is_flat(fp, slot)) {
		type = frame_get_type_of_local(fp, slot);
		data_fill_function_reference_flat(function_reference, a, type, frame_var(fp, slot));
	ptr = frame_get_pointer_reference(fp, slot, deref);
	data_fill_function_reference(function_reference, a, ptr);
}
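
/*
 * Allocate a function reference with n_arguments curried arguments pointing
 * directly at *fn_ptr and wrap it in a single-result function-call thunk.
 * On allocation failure an exception thunk is returned instead and
 * *function_reference is set to NULL.
 */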
static struct thunk *build_thunk(pointer_t *fn_ptr, arg_t n_arguments, struct data **function_reference)
{
	struct thunk *result;

	*function_reference = data_alloc_function_reference_mayfail(n_arguments, &err pass_file_line);
	if (unlikely(!*function_reference))
	da(*function_reference,function_reference)->is_indirect = false;
	da(*function_reference,function_reference)->u.direct = fn_ptr;

	if (unlikely(!thunk_alloc_function_call(pointer_data(*function_reference), 1, &result, &err))) {
		data_dereference(*function_reference);

	*function_reference = NULL;
	return thunk_alloc_exception_error(err, NULL, NULL, NULL pass_file_line);
}
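
/*
 * Slow path of the arithmetic and comparison opcodes when an operand is a
 * thunk.  Exceptions in the operands are propagated (or turned into a flat
 * result when the opcode only tests for an exception), strict operations
 * force evaluation via eval_both(), and otherwise a thunk calling the pcode
 * operator function is built over the operand slots and stored into slot_r.
 */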
static void *ipret_op_build_thunk(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	struct data *function_reference;
	struct thunk *result;
	frame_t slot_1_eval = NO_FRAME_T;
	frame_t slot_2_eval = NO_FRAME_T;

	code = *ip % OPCODE_MODE_MULT;
	if (code == OPCODE_IS_EXCEPTION)
		strict_flag |= FLAG_TESTING_FOR_EXCEPTION;
	if (code >= OPCODE_REAL_OP && code < OPCODE_REAL_OP + OPCODE_REAL_TYPE_MULT * TYPE_REAL_N) {
		code_t op = (code - OPCODE_REAL_OP) % OPCODE_REAL_TYPE_MULT;
		if (op == OPCODE_REAL_OP_is_exception || op == OPCODE_REAL_OP_is_exception_alt1 || op == OPCODE_REAL_OP_is_exception_alt2)
			strict_flag |= FLAG_TESTING_FOR_EXCEPTION;

	if (frame_test_flag(fp, slot_1) && pointer_is_thunk(*frame_pointer(fp, slot_1))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
			return POINTER_FOLLOW_THUNK_RETRY,
			if (strict_flag & FLAG_TESTING_FOR_EXCEPTION) {
				frame_free(fp, slot_r);
				*frame_slot(fp, slot_r, ajla_flat_option_t) = 1;
				return POINTER_FOLLOW_THUNK_GO;
			if (!(strict_flag & FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL)) {
				pointer_copy_owned(fp, slot_1, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			strict_flag |= FLAG_FIRST_EXCEPTION;
			slot_1_eval = slot_1; break

	if (slot_2 != NO_FRAME_T && frame_test_flag(fp, slot_2) && pointer_is_thunk(*frame_pointer(fp, slot_2))) {
		pointer_follow_thunk_noeval(frame_pointer(fp, slot_2),
			return POINTER_FOLLOW_THUNK_RETRY,
			if ((strict_flag & (FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL | FLAG_FIRST_EXCEPTION)) != FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL) {
				pointer_copy_owned(fp, slot_2, slot_r);
				return POINTER_FOLLOW_THUNK_GO;
			slot_2_eval = slot_2; break

	if (strict_flag & OPCODE_OP_FLAG_STRICT) {
		if (slot_1_eval != NO_FRAME_T || slot_2_eval != NO_FRAME_T) {
			eval_both(fp, ip, slot_1_eval, slot_2_eval);
			return POINTER_FOLLOW_THUNK_EXIT;
		return POINTER_FOLLOW_THUNK_RETRY;

	if (slot_2 == NO_FRAME_T) {
		flags |= PCODE_FIND_OP_UNARY;
		if (type_is_equal(frame_get_type_of_local(fp, slot_1), type_get_int(INT_DEFAULT_N)) &&
		    !type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)))
			flags |= PCODE_CONVERT_FROM_INT;

	if (code == OPCODE_IS_EXCEPTION)
		ex = pcode_find_is_exception(fp, ip, &fn_ptr);
	else if (code == OPCODE_EXCEPTION_CLASS || code == OPCODE_EXCEPTION_TYPE || code == OPCODE_EXCEPTION_AUX)
		ex = pcode_find_get_exception(code - OPCODE_EXCEPTION_CLASS, fp, ip, &fn_ptr);
		ex = pcode_find_op_function(frame_get_type_of_local(fp, slot_1), frame_get_type_of_local(fp, slot_r), code, flags, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, slot_2 != NO_FRAME_T ? 2 : 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);
	if (slot_2 != NO_FRAME_T)
		ipret_fill_function_reference_from_slot(function_reference, 1, fp, slot_2, false);

	frame_free_and_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
#define UNBOX_THUNK		1
#define UNBOX_DID_SOMETHING	2
#define UNBOX_LONGINT		4
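
/*
 * Try to turn a boxed flat value back into the flat slot representation.
 * Returns UNBOX_THUNK when the slot still holds a thunk, UNBOX_LONGINT when
 * it holds a long integer, and UNBOX_DID_SOMETHING when the boxed flat value
 * was copied into the slot and the reference dropped; callers OR the results
 * for both operands together.
 */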
static int attr_hot_fastcall ipret_unbox_value(frame_s *fp, const struct type *type, frame_t slot)
{
	ajla_assert(TYPE_IS_FLAT(type), (file_line, "ipret_unbox_value: non-flat type %u", type->tag));
	if (frame_test_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (pointer_is_thunk(ptr))
		if (da_tag(pointer_get_data(ptr)) == DATA_TAG_longint) {
			ajla_assert(TYPE_TAG_IS_INT(type->tag), (file_line, "ipret_unbox_value: unexpected longint, type %u", type->tag));
			return UNBOX_LONGINT;
		memcpy_fast(frame_var(fp, slot), da_flat(pointer_get_data(ptr)), type->size);
		frame_clear_flag(fp, slot);
		pointer_dereference(ptr);
		return UNBOX_DID_SOMETHING;
}
static bool test_and_copy_nan(frame_s attr_unused *fp, const code_t attr_unused *ip, unsigned char type_tag, frame_t attr_unused slot, frame_t attr_unused slot_r)
{
#define f(n, t, nt, pack, unpack) \
	case TYPE_TAG_real + n: { \
		barrier_aliasing(); \
		val = *frame_slot(fp, slot, t); \
		barrier_aliasing(); \
		if (unlikely(cat(isnan_,t)(val))) { \
			if (type_tag == frame_get_type_of_local(fp, slot_r)->tag) {\
				barrier_aliasing(); \
				*frame_slot(fp, slot_r, t) = val;\
				barrier_aliasing(); \
				frame_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NAN), fp, ip pass_file_line));\
	for_all_real(f, for_all_empty)
}
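
/*
 * Handler for fixed-point and real operators whose operands are not plain
 * flat values.  The operands are unboxed if possible; NaN operands are copied
 * (or converted to an AJLA_ERROR_NAN exception) directly into the result
 * slot.  If a thunk remains, the operation is deferred via
 * ipret_op_build_thunk(); otherwise the interpreter retries the flat path.
 */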
void * attr_hot_fastcall thunk_fixed_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	const struct type *type;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert((TYPE_TAG_IS_FIXED(type->tag) || TYPE_TAG_IS_REAL(type->tag)) &&
		(slot_2 == NO_FRAME_T || frame_get_type_of_local(fp, slot_2) == type),
		(file_line, "thunk_fixed_operator: invalid types on opcode %04x: %u, %u, %u",
		slot_2 == NO_FRAME_T ? type->tag : frame_get_type_of_local(fp, slot_2)->tag,
		frame_get_type_of_local(fp, slot_r)->tag));

	converted = ipret_unbox_value(fp, type, slot_1);
	if (!frame_test_flag(fp, slot_1) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_1, slot_r)))
		return POINTER_FOLLOW_THUNK_GO;
	if (slot_2 != NO_FRAME_T) {
		converted |= ipret_unbox_value(fp, type, slot_2);
		if (!frame_test_flag(fp, slot_2) && unlikely(test_and_copy_nan(fp, ip, type->tag, slot_2, slot_r)))
			return POINTER_FOLLOW_THUNK_GO;

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	return POINTER_FOLLOW_THUNK_RETRY;
}
void * attr_hot_fastcall is_thunk_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag)
{
	ajla_flat_option_t value;
	pointer_t *ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		struct data *d = pointer_get_data(*ptr);
		if (da_tag(d) == DATA_TAG_flat) {
			value = data_is_nan(da(d,flat)->data_type, da_flat(d));

	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,
		value = 1; goto return_val,

	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, ajla_flat_option_t) = value;
	return POINTER_FOLLOW_THUNK_GO;

	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
}
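
/*
 * Extract an exception parameter (error class, type or aux, selected by
 * 'mode') from the value in slot_1 and store it into slot_r as int_default_t.
 * Flat NaN values report AJLA_ERROR_NAN, non-exception data yields an
 * AJLA_ERROR_INVALID_OPERATION error, and unevaluated thunks are deferred via
 * ipret_op_build_thunk().
 */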
void * attr_hot_fastcall thunk_get_param(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, unsigned mode)
{
	ajla_assert(slot_r == slot_1 || !frame_test_flag(fp, slot_r), (file_line, "thunk_get_param: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	if (unlikely(!frame_test_flag(fp, slot_1))) {
		const struct type *type;
		type = frame_get_type_of_local(fp, slot_1);
		if (likely(data_is_nan(type->tag, frame_var(fp, slot_1)))) {
				result = AJLA_ERROR_NAN;
				internal(file_line, "thunk_get_param: invalid mode %u", mode);

	ptr = frame_pointer(fp, slot_1);
	if (!pointer_is_thunk(*ptr)) {
		struct data *data = pointer_get_data(*ptr);
		if (likely(da_tag(data) == DATA_TAG_flat)) {
			if (likely(data_is_nan(da(data,flat)->data_type, da_flat(data)))) {

	pointer_follow_thunk_noeval(ptr,
		return POINTER_FOLLOW_THUNK_RETRY,

	ex = pointer_get_thunk(*ptr);
			result = ex->u.exception.err.error_class;
			result = ex->u.exception.err.error_type;
			result = ex->u.exception.err.error_aux;
			internal(file_line, "thunk_get_param: invalid mode %u", mode);

	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, int_default_t) = result;
	return POINTER_FOLLOW_THUNK_GO;

	frame_free_and_set_pointer(fp, slot_r, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;

	return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);
}
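
/*
 * Return the value of a SystemProperty_* query: the operating-system flavour,
 * the (console) charset on DOS/OS/2/Windows, and the Fixed, Real, Privileged
 * and Compile properties.
 */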
int_default_t ipret_system_property(int_default_t idx)
{
	int_default_t result;

	case SystemProperty_OS:
#if defined(OS_DOS)
		result = SystemProperty_OS_DOS;
#elif defined(OS_OS2)
		result = SystemProperty_OS_OS2;
#elif defined(OS_CYGWIN)
		result = SystemProperty_OS_Cygwin;
#elif defined(OS_WIN32)
		result = SystemProperty_OS_Windows;
#else
		result = SystemProperty_OS_Posix;
#endif
#if defined(OS_DOS) || defined(OS_OS2) || defined(OS_WIN32)
	case SystemProperty_Charset:
		result = os_charset();
#endif
#if defined(OS_WIN32)
	case SystemProperty_Charset_Console:
		result = os_charset_console();
#endif
	case SystemProperty_Fixed:

	case SystemProperty_Real:

	case SystemProperty_Privileged:
		result = ipret_is_privileged;

	case SystemProperty_Compile:
		result = ipret_compile;
}

void * attr_hot_fastcall ipret_get_system_property(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
{
	int_default_t idx, result;
	pointer_t result_ptr;

	ex = ipret_get_index(fp, ip, fp, slot_1, NULL, &idx_l, &result_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			frame_free_and_set_pointer(fp, slot_r, result_ptr);
			return POINTER_FOLLOW_THUNK_GO;

	idx = index_to_int(idx_l);

	result = ipret_system_property(idx);

	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, int_default_t) = result;

	return POINTER_FOLLOW_THUNK_GO;
}
static bool int_to_mpint(mpint_t *m, const unsigned char *ptr, unsigned intx, ajla_error_t *err)
{
#define f(n, s, u, sz, bits) \
		barrier_aliasing(); \
		ret = cat(mpint_init_from_,s)(m, *cast_ptr(const s *, ptr), err);\
		barrier_aliasing(); \
	for_all_int(f, for_all_empty)
	internal(file_line, "int_to_mpint: invalid type %d", intx);
}
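
/*
 * Return a pointer to an mpint view of the integer in 'slot': either the
 * mpint stored in a boxed longint, or 'storage' initialized from the flat
 * value via int_to_mpint().
 */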
static mpint_t * attr_hot_fastcall int_get_mpint(frame_s *fp, frame_t slot, mpint_t *storage, unsigned intx, ajla_error_t *err)
{
	if (frame_test_flag(fp, slot)) {
		struct data *d = pointer_get_data(*frame_pointer(fp, slot));
		if (likely(da_tag(d) == DATA_TAG_longint))
			return &da(d,longint)->mp;

	flat = frame_var(fp, slot);

	if (unlikely(!int_to_mpint(storage, flat, intx, err)))
}
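
/*
 * Prepare a longint data block for the result slot.  A writable longint
 * already owned by the slot is reused (the previous pointer is handed back
 * through *to_free); otherwise a fresh longint sized by the bit estimate is
 * allocated and stored into the slot.
 */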
static struct data * attr_hot_fastcall int_allocate_result(frame_s *fp, frame_t slot, unsigned long bits, pointer_t *to_free, ajla_error_t *err)
{
	*to_free = pointer_empty();

	if (frame_test_and_set_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (!pointer_is_thunk(ptr)) {
			d = pointer_get_data(ptr);
			if (da_tag(d) == DATA_TAG_longint && data_is_writable(d))

	d = data_alloc_longint_mayfail(bits, err pass_file_line);
		frame_clear_flag(fp, slot);
	*frame_pointer(fp, slot) = pointer_data(d);
}
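
/*
 * Slow path of binary integer operators: operands may be thunks or long
 * integers.  Thunks are deferred via ipret_op_build_thunk(); otherwise both
 * operands are converted to mpint, do_op() computes the result, and the
 * result is exported back to the flat slot when it fits or kept as a longint.
 */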
void * attr_hot_fastcall thunk_int_binary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, mpint_t *res, ajla_error_t *err))
{
	const struct type *type;
	mpint_t *val1, *val2;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		frame_get_type_of_local(fp, slot_2) == type &&
		frame_get_type_of_local(fp, slot_r) == type,
		(file_line, "thunk_int_binary_operator: invalid types on opcode %04x: %u, %u, %u",
		frame_get_type_of_local(fp, slot_2)->tag,
		frame_get_type_of_local(fp, slot_r)->tag));

	converted |= ipret_unbox_value(fp, type, slot_1);
	converted |= ipret_unbox_value(fp, type, slot_2);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))
	if (unlikely(!(result = int_allocate_result(fp, slot_r, maximum(mpint_estimate_bits(val1), mpint_estimate_bits(val2)), &to_free, &err))))
	if (unlikely(!do_op(val1, val2, &da(result,longint)->mp, &err)))

	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	return POINTER_FOLLOW_THUNK_GO;

	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall thunk_int_unary_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, mpint_t *res, ajla_error_t *err))
{
	const struct type *type;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		frame_get_type_of_local(fp, slot_r) == type,
		(file_line, "thunk_int_unary_operator: invalid types on opcode %04x: %u, %u",
		frame_get_type_of_local(fp, slot_r)->tag));

	converted |= ipret_unbox_value(fp, type, slot_1);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, NO_FRAME_T, slot_r, strict_flag);

	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(result = int_allocate_result(fp, slot_r, mpint_estimate_bits(val1), &to_free, &err))))
	if (unlikely(!do_op(val1, &da(result,longint)->mp, &err)))

	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	if (mpint_export(&da(result,longint)->mp, frame_var(fp, slot_r), intx, &err)) {
		frame_clear_flag(fp, slot_r);
		data_dereference(result);
	return POINTER_FOLLOW_THUNK_GO;

	if (!pointer_is_empty(to_free))
		pointer_dereference(to_free);

	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall thunk_int_binary_logical_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag, bool (attr_fastcall *do_op)(const mpint_t *op1, const mpint_t *op2, ajla_flat_option_t *res, ajla_error_t *err))
{
	const struct type *type;
	mpint_t *val1, *val2;

	type = frame_get_type_of_local(fp, slot_1);
	ajla_assert(TYPE_TAG_IS_INT(type->tag) &&
		frame_get_type_of_local(fp, slot_2) == type &&
		frame_get_type_of_local(fp, slot_r)->tag == TYPE_TAG_flat_option,
		(file_line, "thunk_int_binary_logical_operator: invalid types on opcode %04x: %u, %u, %u",
		frame_get_type_of_local(fp, slot_2)->tag,
		frame_get_type_of_local(fp, slot_r)->tag));

	converted |= ipret_unbox_value(fp, type, slot_1);
	converted |= ipret_unbox_value(fp, type, slot_2);

	if (converted & UNBOX_THUNK)
		return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	if (converted == UNBOX_DID_SOMETHING)
		return POINTER_FOLLOW_THUNK_RETRY;

	intx = TYPE_TAG_IDX_INT(type->tag);

	if (unlikely(!(val1 = int_get_mpint(fp, slot_1, &s1, intx, &err))))
	if (unlikely(!(val2 = int_get_mpint(fp, slot_2, &s2, intx, &err))))

	if (unlikely(!do_op(val1, val2, frame_slot(fp, slot_r, ajla_flat_option_t), &err))) {

	return POINTER_FOLLOW_THUNK_GO;

	frame_free_and_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	return POINTER_FOLLOW_THUNK_GO;
}
ip_t attr_hot_fastcall ipret_int_ldc_long(frame_s *fp, frame_t slot, const code_t *ip)
{
	n_words_32 = get_unaligned_32(ip);
	n_words = (ip_t)n_words_32;
	ajla_assert(n_words == n_words_32, (file_line, "ipret_int_ldc_long: n_words overflow: %lu != %lu", (unsigned long)n_words_32, (unsigned long)n_words));

	d = data_alloc_longint_mayfail((n_words + sizeof(code_t) - 1) / sizeof(code_t), &err pass_file_line);

	if (unlikely(!mpint_import_from_code(&da(d,longint)->mp, ip + 2, n_words, &err))) {

	frame_set_pointer(fp, slot, pointer_data(d));

	frame_set_pointer(fp, slot, pointer_error(err, fp, ip pass_file_line));
}
pointer_t attr_fastcall convert_fixed_to_mpint(uintbig_t val, bool uns)
{
	d = data_alloc_longint_mayfail(sizeof(uintbig_t) * 8 + uns, &err pass_file_line);

	if (unlikely(!cat(mpint_set_from_,TYPE_INT_MAX)(&da(d,longint)->mp, (intbig_t)val, uns, &err))) {

	return pointer_data(d);

	return pointer_error(err, NULL, NULL pass_file_line);
}
pointer_t attr_fastcall convert_real_to_mpint(frame_s *fp, frame_t src_slot, const struct type *src_type)
{
	unsigned char attr_unused *src_ptr;

	d = data_alloc_longint_mayfail(0, &err pass_file_line);
	mpint_free(&da(d,longint)->mp);

#define re(n, rtype, ntype, pack, unpack) \
	case TYPE_TAG_real + n: { \
		if (unlikely(!cat(mpint_init_from_,rtype)(&da(d,longint)->mp, cast_ptr(rtype *, src_ptr), &err))) {\

	src_ptr = frame_var(fp, src_slot);
	switch (src_type->tag) {
		for_all_real(re, for_all_empty)
		internal(file_line, "convert_real_to_mpint: invalid type %u", src_type->tag);

	return pointer_data(d);

	return pointer_error(err, NULL, NULL pass_file_line);
}
static attr_noinline void convert_mpint_to_real(frame_s *fp, frame_t dest_slot, const struct type *dest_type, const mpint_t attr_unused *mp)
{
	unsigned char attr_unused *dest_ptr;

#define re(n, rtype, ntype, pack, unpack) \
	case TYPE_TAG_real + n: \
		cat(mpint_export_to_,rtype)(mp, cast_ptr(rtype *, dest_ptr));\

	dest_ptr = frame_var(fp, dest_slot);
	switch (dest_type->tag) {
		for_all_real(re, for_all_empty)
		internal(file_line, "convert_mpint_to_real: invalid type %u", dest_type->tag);
}
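
/*
 * Slow path of the convert opcode: the source is a thunk or a long integer.
 * Long integers are exported to the destination's fixed, integer or real
 * representation (raising AJLA_ERROR_DOESNT_FIT when they do not fit);
 * remaining thunks are deferred via ipret_op_build_thunk().
 */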
void * attr_hot_fastcall thunk_convert(frame_s *fp, const code_t *ip, frame_t src_slot, frame_t dest_slot, unsigned strict_flag)
{
	const struct type *src_type;
	const struct type *dest_type;

	if (unlikely(src_slot == dest_slot))
		return POINTER_FOLLOW_THUNK_GO;

	src_type = frame_get_type_of_local(fp, src_slot);
	dest_type = frame_get_type_of_local(fp, dest_slot);

	converted = ipret_unbox_value(fp, src_type, src_slot);
	if (unlikely(converted == UNBOX_THUNK)) {
		return ipret_op_build_thunk(fp, ip, src_slot, NO_FRAME_T, dest_slot, strict_flag);
	if (converted == UNBOX_DID_SOMETHING) {
		return POINTER_FOLLOW_THUNK_RETRY;

	if (type_is_equal(dest_type, type_get_int(INT_DEFAULT_N))) {
		if (likely(TYPE_TAG_IS_INT(src_type->tag))) {
			if (likely(converted == UNBOX_LONGINT)) {
				goto convert_longint;

	if (unlikely(!type_is_equal(src_type, type_get_int(INT_DEFAULT_N))))

	if (TYPE_TAG_IS_FIXED(dest_type->tag)) {
		if (likely(converted == UNBOX_LONGINT)) {
			if (!TYPE_TAG_FIXED_IS_UNSIGNED(dest_type->tag))
				res = mpint_export(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
				res = mpint_export_unsigned(&da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_FIXED(dest_type->tag) >> 1, &err);
			frame_set_pointer(fp, dest_slot, pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_DOESNT_FIT), fp, ip pass_file_line));
			return POINTER_FOLLOW_THUNK_GO;
	} else if (TYPE_TAG_IS_INT(dest_type->tag)) {
		if (likely(converted == UNBOX_LONGINT)) {
			goto convert_longint;
	} else if (likely(TYPE_TAG_IS_REAL(dest_type->tag))) {
		if (likely(converted == UNBOX_LONGINT)) {
			convert_mpint_to_real(fp, dest_slot, dest_type, &da(pointer_get_data(*frame_pointer(fp, src_slot)), longint)->mp);
			return POINTER_FOLLOW_THUNK_GO;

	d = pointer_get_data(*frame_pointer(fp, src_slot));
	if (mpint_export(&da(d,longint)->mp, frame_var(fp, dest_slot), TYPE_TAG_IDX_INT(dest_type->tag), &err)) {
		return POINTER_FOLLOW_THUNK_GO;
	pointer_copy_owned(fp, src_slot, dest_slot);
	return POINTER_FOLLOW_THUNK_GO;

	internal(file_line, "thunk_convert: invalid conversion %u->%u (%d)", src_type->tag, dest_type->tag, converted);
	return POINTER_FOLLOW_THUNK_RETRY;
}
static bool attr_hot_fastcall ipret_unbox_bool(frame_s *fp, frame_t slot)
{
	if (frame_test_flag(fp, slot)) {
		pointer_t ptr = *frame_pointer(fp, slot);
		if (pointer_is_thunk(ptr))
		*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(pointer_get_data(ptr),option)->option;
		frame_clear_flag(fp, slot);
		pointer_dereference(ptr);
}
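
/*
 * Slow path of boolean operators.  Operands are unboxed when possible; the
 * value 2 marks an operand that is still a thunk.  With both operands flat
 * the result is computed directly, otherwise the operation is deferred via
 * ipret_op_build_thunk(), and when both operands are thunks the
 * short-circuiting operators set FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL.
 */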
void * attr_hot_fastcall thunk_bool_operator(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_2, frame_t slot_r, unsigned strict_flag)
{
	ajla_flat_option_t val1, val2, result;

	code %= OPCODE_MODE_MULT;
	code = (code - OPCODE_BOOL_OP) / OPCODE_BOOL_OP_MULT;

	if (!ipret_unbox_bool(fp, slot_1)) {
		val1 = *frame_slot(fp, slot_1, ajla_flat_option_t);
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less_equal:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_not:

	if (slot_2 != NO_FRAME_T && !ipret_unbox_bool(fp, slot_2)) {
		val2 = *frame_slot(fp, slot_2, ajla_flat_option_t);
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less_equal:

	if (!((val1 | val2) & 2)) {
		return POINTER_FOLLOW_THUNK_RETRY;
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less_equal:
			case OPCODE_BOOL_OP_equal:
				result = val1 ^ val2 ^ 1;
			case OPCODE_BOOL_OP_not_equal:
				result = val1 ^ val2;
				internal(file_line, "thunk_bool_operator: invalid opcode: %04x -> %x", *ip, code);

	if (val1 & val2 & 2) {
			case OPCODE_BOOL_OP_and:
			case OPCODE_BOOL_OP_or:
			case OPCODE_BOOL_OP_less:
			case OPCODE_BOOL_OP_less_equal:
				strict_flag |= FLAG_NEED_BOTH_EXCEPTIONS_TO_FAIL;

	return ipret_op_build_thunk(fp, ip, slot_1, slot_2, slot_r, strict_flag);

	frame_free(fp, slot_r);
	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;

	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall thunk_bool_jump(frame_s *fp, const code_t *ip, frame_t slot)
{
	pointer_t *thunk = frame_pointer(fp, slot);

	pointer_follow(thunk, true, data, PF_WAIT, fp, ip,
		return POINTER_FOLLOW_THUNK_EXCEPTION
	*frame_slot(fp, slot, ajla_flat_option_t) = (ajla_flat_option_t)da(data,option)->option;
	frame_clear_flag(fp, slot);
	data_dereference(data);
	return POINTER_FOLLOW_THUNK_RETRY;
}
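
/*
 * Copy a variable between two frames.  Non-flat sources are copied as a
 * pointer reference (optionally dereferencing the source); flat sources are
 * copied byte-wise when the destination is flat too, or boxed with
 * flat_to_data() when it is not.
 */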
void attr_fastcall ipret_copy_variable(frame_s *src_fp, frame_t src_slot, frame_s *dst_fp, frame_t dst_slot, bool deref)
{
	const struct type *src_type;
	ajla_assert(!frame_test_flag(dst_fp, dst_slot), (file_line, "ipret_copy_variable: flag already set for destination slot %"PRIuMAX"", (uintmax_t)dst_slot));
	src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		ptr = frame_get_pointer_reference(src_fp, src_slot, deref);
		const struct type *dst_type = frame_get_type_of_local(dst_fp, dst_slot);
		if (likely(TYPE_IS_FLAT(dst_type))) {
			ajla_assert(type_is_equal(src_type, dst_type), (file_line, "ipret_copy_variable: copying between different types (%u,%u,%u) -> (%u,%u,%u)", src_type->tag, src_type->size, src_type->align, dst_type->tag, dst_type->size, dst_type->align));
			memcpy_fast(frame_var(dst_fp, dst_slot), frame_var(src_fp, src_slot), dst_type->size);
		ptr = flat_to_data(src_type, frame_var(src_fp, src_slot));

	frame_set_pointer(dst_fp, dst_slot, ptr);
}
pointer_t ipret_copy_variable_to_pointer(frame_s *src_fp, frame_t src_slot, bool deref)
{
	const struct type *src_type = frame_get_type_of_local(src_fp, src_slot);
	if (!frame_variable_is_flat(src_fp, src_slot)) {
		return frame_get_pointer_reference(src_fp, src_slot, deref);
	return flat_to_data(src_type, frame_var(src_fp, src_slot));
}
struct data_compare_context {
	struct ipret_call_cache_arg *arguments;
	arg_t n_arguments;
	arg_t n_return_values;
	ajla_error_t err;
};

static int saved_cache_compare(struct data *saved_cache, size_t idx, struct data_compare_context *ctx)
{
	size_t ptr_idx = idx * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values);
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		c = data_compare(da(saved_cache,saved_cache)->pointers[ptr_idx + ai], ctx->arguments[ai].ptr, &ctx->err);
}

static pointer_t *saved_cache_find(struct data *function, struct data_compare_context *ctx)
{
	struct data *saved_cache = da(function,function)->loaded_cache;
	size_t n_entries = da(saved_cache,saved_cache)->n_entries;
	/*debug("searching: %s, %zu", da(function,function)->function_name, n_entries);*/
	binary_search(size_t, n_entries, result, !(cmp = saved_cache_compare(saved_cache, result, ctx)), cmp < 0, return NULL);
	/*debug("found it: %s, %zu", da(function,function)->function_name, result);*/
	return &da(saved_cache,saved_cache)->pointers[result * ((size_t)ctx->n_arguments + (size_t)ctx->n_return_values) + (size_t)ctx->n_arguments];
}

static int cache_entry_compare(const struct tree_entry *e1, uintptr_t e2)
{
	struct cache_entry *c1 = get_struct(e1, struct cache_entry, entry);
	struct data_compare_context *ctx = cast_ptr(struct data_compare_context *, num_to_ptr(e2));
	for (ai = 0; ai < ctx->n_arguments; ai++) {
		c = data_compare(c1->arguments[ai], ctx->arguments[ai].ptr, MEM_DONT_TRY_TO_FREE);
		if (c == -1 || c == 1)
		if (c == DATA_COMPARE_OOM) {
			ctx->err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
		internal(file_line, "cache_entry_compare: data_compare returned %d", c);
}

static void cache_evaluated(void *cookie, pointer_t ptr)
{
	struct cache_entry_return *ret = cookie;
	struct cache_entry *c;
	pointer_reference_owned(ptr);
	address_lock(c, DEPTH_THUNK);
	/*debug("cache evaluated: %p, pending %u", c, c->n_pending);*/
	if (likely(!--c->n_pending)) {
		wake_up_wait_list(&c->wait_list, address_get_mutex(c, DEPTH_THUNK), true);
	address_unlock(c, DEPTH_THUNK);
}
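
/*
 * Implementation of saved/cached calls (OPCODE_CALL_SAVE and
 * OPCODE_CALL_INDIRECT_SAVE).  The arguments are deeply evaluated and looked
 * up first in the function's loaded_cache and then in its in-memory cache
 * tree; a hit fills the return slots from the cached pointers.  On a miss a
 * new cache entry is created and the call is submitted for evaluation, with
 * the caller either waiting for the pending results or receiving them
 * directly.
 */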
void * attr_fastcall ipret_call_cache(frame_s *fp, const code_t *ip, pointer_t *direct_function, struct ipret_call_cache_arg *arguments, frame_t *return_values, frame_t free_fn_slot)
{
	struct thunk *thunk = NULL;
	struct data_compare_context ctx;
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct cache_entry *c = NULL;	/* avoid warning */
	struct thunk **results;
	struct data *function_reference;
	struct data *function = pointer_get_data(*direct_function);
	arg_t n_arguments = da(function,function)->n_arguments;
	arg_t n_return_values = da(function,function)->n_return_values;
	bool save = *ip % OPCODE_MODE_MULT == OPCODE_CALL_SAVE || *ip % OPCODE_MODE_MULT == OPCODE_CALL_INDIRECT_SAVE;

	ctx.err.error_class = EC_NONE;

	for (ai = 0; ai < n_arguments; ai++)
		arguments[ai].need_free_ptr = false;
	for (ai = 0; ai < n_arguments; ai++) {
		struct function_argument *f_arg = arguments[ai].f_arg;
		if (unlikely(f_arg != NULL)) {
			if (f_arg->tag == TYPE_TAG_unknown) {
				ex = pointer_deep_eval(&f_arg->u.ptr, fp, ip, &thunk);
				arguments[ai].ptr = pointer_reference(&f_arg->u.ptr);
				arguments[ai].need_free_ptr = true;
				arguments[ai].ptr = flat_to_data(type_get_from_tag(f_arg->tag), f_arg->u.slot);
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
			frame_t slot = arguments[ai].slot;
			if (!frame_variable_is_flat(fp, slot)) {
				ex = frame_pointer_deep_eval(fp, ip, slot, &thunk);
				arguments[ai].ptr = *frame_pointer(fp, slot);
				arguments[ai].ptr = flat_to_data(frame_get_type_of_local(fp, slot), frame_var(fp, slot));
				ex = pointer_deep_eval(&arguments[ai].ptr, fp, ip, &thunk);
				arguments[ai].need_free_ptr = true;
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION) {
			if (!rwmutex_supported) {
				address_write_lock(function);
				address_read_lock(function);
		if (ex != POINTER_FOLLOW_THUNK_GO)

	ctx.arguments = arguments;
	ctx.n_arguments = n_arguments;
	ctx.n_return_values = n_return_values;

	if (da(function,function)->loaded_cache) {
		/*debug("loaded cache: %s", da(function,function)->function_name);*/
		ctx.err.error_class = EC_NONE;
		results = saved_cache_find(function, &ctx);
		if (results || unlikely(ctx.err.error_class != EC_NONE)) {
			for (ai = 0; ai < n_arguments; ai++) {
				if (arguments[ai].deref) {
					frame_t slot = arguments[ai].slot;
					if (frame_test_and_clear_flag(fp, slot))
						pointer_dereference(*frame_pointer(fp, slot));
					*frame_pointer(fp, slot) = pointer_empty();
			if (unlikely(free_fn_slot != NO_FRAME_T)) {
				frame_free_and_clear(fp, free_fn_slot);
			if (unlikely(ctx.err.error_class != EC_NONE)) {
				for (ai = 0; ai < n_return_values; ai++) {
					frame_set_pointer(fp, return_values[ai], pointer_error(ctx.err, NULL, NULL pass_file_line));
			for (ai = 0; ai < n_return_values; ai++) {
				pointer_t ptr = pointer_reference(&results[ai]);
				frame_set_pointer(fp, return_values[ai], ptr);
			ex = POINTER_FOLLOW_THUNK_GO;

	if (!rwmutex_supported) {
		address_write_lock(function);
		address_read_lock(function);

	ctx.err.error_class = EC_NONE;
	e = tree_find_for_insert(&da(function,function)->cache, cache_entry_compare, ptr_to_num(&ctx), &ins);

	if (unlikely(ctx.err.error_class != EC_NONE)) {
		address_read_unlock(function);
		address_write_unlock(function);
		if (ctx.err.error_type == AJLA_ERROR_OUT_OF_MEMORY && mem_trim_cache()) {
			address_read_lock(function);
			address_write_lock(function);
		address_read_lock(function);

	c = get_struct(e, struct cache_entry, entry);
	address_lock(c, DEPTH_THUNK);

	address_read_unlock(function);
	address_write_lock(function);

	c = struct_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry, arguments, n_arguments, MEM_DONT_TRY_TO_FREE);
	c->returns = mem_alloc_array_mayfail(mem_alloc_mayfail, struct cache_entry_return *, 0, 0, n_return_values, sizeof(struct cache_entry_return), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!c->returns))
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = NULL;
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ex = function_evaluate_prepare(MEM_DONT_TRY_TO_FREE);
		if (unlikely(!c->returns[ai].ex))
	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(arguments[ai].ptr);
		c->arguments[ai] = arguments[ai].ptr;
	results = mem_alloc_array_mayfail(mem_alloc_mayfail, struct thunk **, 0, 0, n_return_values, sizeof(struct thunk *), MEM_DONT_TRY_TO_FREE);
	if (unlikely(!results))
	if (!(function_reference = data_alloc_function_reference_mayfail(n_arguments, MEM_DONT_TRY_TO_FREE pass_file_line)))
	da(function_reference,function_reference)->is_indirect = false;
	da(function_reference,function_reference)->u.direct = direct_function;
	if (unlikely(!thunk_alloc_function_call(pointer_data(function_reference), n_return_values, results, MEM_DONT_TRY_TO_FREE))) {
		data_free_r1(function_reference);

	for (ai = 0; ai < n_return_values; ai++) {
		if (c->returns[ai].ex)
			execution_control_free(c->returns[ai].ex);
	mem_free(c->returns);

	address_write_unlock(function);
	if (mem_trim_cache()) {
		address_write_lock(function);
	address_write_lock(function);
	ctx.err = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);

	for (ai = 0; ai < n_arguments; ai++) {
		pointer_reference_owned(c->arguments[ai]);
		data_fill_function_reference(function_reference, ai, c->arguments[ai]);
	address_lock(c, DEPTH_THUNK);
	/*debug("evaluating: %p", c);*/
	c->n_pending = n_return_values;
	list_init(&c->wait_list);
	for (ai = 0; ai < n_return_values; ai++) {
		c->returns[ai].ce = c;
		function_evaluate_submit(c->returns[ai].ex, pointer_thunk(results[ai]), cache_evaluated, &c->returns[ai]);
	tree_insert_after_find(&c->entry, &ins);

	if (!c->save && unlikely(save))

	struct execution_control *exx;
	/*debug("waiting on %p, pending %u", c, c->n_pending);*/
	exx = frame_execution_control(fp);
	exx->wait[0].mutex_to_lock = address_get_mutex(c, DEPTH_THUNK);
	list_add(&c->wait_list, &exx->wait[0].wait_entry);
	address_unlock(c, DEPTH_THUNK);
	pointer_follow_wait(fp, ip);
	ex = POINTER_FOLLOW_THUNK_EXIT;

	address_unlock(c, DEPTH_THUNK);

	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].deref) {
			frame_t slot = arguments[ai].slot;
			if (frame_test_and_clear_flag(fp, slot))
				pointer_dereference(*frame_pointer(fp, slot));
			*frame_pointer(fp, slot) = pointer_empty();
	if (unlikely(free_fn_slot != NO_FRAME_T)) {
		frame_free_and_clear(fp, free_fn_slot);
	if (likely(!thunk) && unlikely(ctx.err.error_class != EC_NONE))
		thunk = thunk_alloc_exception_error(ctx.err, NULL, NULL, NULL pass_file_line);
	for (ai = 0; ai < n_return_values; ai++) {
		if (likely(!thunk)) {
			ptr = pointer_reference(&c->returns[ai].ptr);
			thunk_reference(thunk);
			ptr = pointer_thunk(thunk);
		frame_set_pointer(fp, return_values[ai], ptr);
	ex = POINTER_FOLLOW_THUNK_GO;

	if (likely(!wr_lock))
		address_read_unlock(function);
		address_write_unlock(function);

	for (ai = 0; ai < n_arguments; ai++) {
		if (arguments[ai].need_free_ptr) {
			pointer_dereference(arguments[ai].ptr);
	mem_free(arguments);
	mem_free(return_values);
}
static attr_noinline void * ipret_get_index_complicated(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
	if (likely(!frame_test_flag(fp_slot, slot))) {
		in = *frame_slot(fp_slot, slot, int_default_t);
		if (unlikely(in < 0)) {
			*thunk = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_NEGATIVE_INDEX), fp, ip pass_position);
			return POINTER_FOLLOW_THUNK_EXCEPTION;
			*is_negative = true;
			return POINTER_FOLLOW_THUNK_GO;
		index_from_int_(idx, in pass_position);
		pointer_t *ptr = frame_pointer(fp_slot, slot);
		pointer_follow(ptr, true, d, PF_WAIT, fp, ip,
			thunk_reference(thunk_);
			*thunk = pointer_thunk(thunk_);
			return POINTER_FOLLOW_THUNK_EXCEPTION;
		if (da_tag(d) == DATA_TAG_flat) {
			ipret_unbox_value(fp_slot, type_get_int(INT_DEFAULT_N), slot);
		if (unlikely(mpint_negative(&da(d,longint)->mp))) {
		index_from_mp_(idx, &da(d,longint)->mp pass_position);
	return POINTER_FOLLOW_THUNK_GO;
}
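
/*
 * Fetch an array index from a slot into an array_index_t.  The fast path
 * reads a non-negative flat int_default_t; everything else (negative values,
 * thunks, long integers) is handled by ipret_get_index_complicated().
 */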
void * attr_hot_fastcall ipret_get_index(frame_s *fp, const code_t *ip, frame_s *fp_slot, frame_t slot, bool *is_negative, array_index_t *idx, pointer_t *thunk argument_position)
{
	if (likely(fp == fp_slot))
		ajla_assert(frame_get_type_of_local(fp_slot, slot)->tag == type_get_int(INT_DEFAULT_N)->tag, (file_line, "ipret_get_index: invalid type %u", (unsigned)frame_get_type_of_local(fp_slot, slot)->tag));
	if (likely(!frame_test_flag(fp_slot, slot))) {
		in = *frame_slot(fp_slot, slot, int_default_t);
		if (unlikely(in < 0))
		index_from_int_(idx, in pass_position);
		return ipret_get_index_complicated(fp, ip, fp_slot, slot, is_negative, idx, thunk pass_position);
	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall ipret_record_load_create_thunk(frame_s *fp, const code_t *ip, frame_t record, frame_t record_slot, frame_t result_slot)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_RECORD_LOAD, record_slot, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, record, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

void * attr_hot_fastcall ipret_option_load_create_thunk(frame_s *fp, const code_t *ip, frame_t option, frame_t option_idx, frame_t result_slot)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_LOAD, option_idx, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, option, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall thunk_option_test(frame_s *fp, const code_t *ip, frame_t slot_1, ajla_option_t option, frame_t slot_r)
{
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,

	ex = pcode_find_record_option_load_function(PCODE_FUNCTION_OPTION_TEST, option, fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

void * attr_hot_fastcall thunk_option_ord(frame_s *fp, const code_t *ip, frame_t slot_1, frame_t slot_r)
{
	struct data *function_reference;
	struct thunk *result;

	pointer_follow_thunk_noeval(frame_pointer(fp, slot_1),
		return POINTER_FOLLOW_THUNK_RETRY,
		pointer_copy_owned(fp, slot_1, slot_r);
		return POINTER_FOLLOW_THUNK_GO,

	ex = pcode_find_option_ord_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, slot_1, false);

	frame_set_pointer(fp, slot_r, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall ipret_array_load_create_thunk(frame_s *fp, const code_t *ip, frame_t array, frame_t index, frame_t result_slot)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_array_load_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array, false);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, index, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

static attr_noinline void *array_len_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t result_slot)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_array_len_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 1, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

static attr_noinline void *array_len_greater_than_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t length_slot, frame_t result_slot)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_array_len_greater_than_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, false);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, length_slot, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

static attr_noinline void *array_sub_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t end_slot, frame_t result_slot, unsigned flags)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_array_sub_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 3, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);
	ipret_fill_function_reference_from_slot(function_reference, 2, fp, end_slot, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}

static attr_noinline void *array_skip_create_thunk(frame_s *fp, const code_t *ip, frame_t array_slot, frame_t start_slot, frame_t result_slot, unsigned flags)
{
	struct data *function_reference;
	struct thunk *result;

	ex = pcode_find_array_skip_function(fp, ip, &fn_ptr);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_RETRY))

	result = build_thunk(fn_ptr, 2, &function_reference);
	ipret_fill_function_reference_from_slot(function_reference, 0, fp, array_slot, (flags & OPCODE_FLAG_FREE_ARGUMENT) != 0);
	ipret_fill_function_reference_from_slot(function_reference, 1, fp, start_slot, false);

	frame_set_pointer(fp, result_slot, pointer_thunk(result));

	return POINTER_FOLLOW_THUNK_GO;
}
static bool array_resolve_thunk(frame_s *fp, frame_t slot)
{
	if (unlikely(frame_variable_is_flat(fp, slot)))
	ptr = frame_pointer(fp, slot);
	if (likely(frame_test_flag(fp, slot))) {
		if (unlikely(pointer_is_thunk(*ptr))) {
			if (thunk_is_finished(pointer_get_thunk(*ptr))) {
				pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
	if (da_tag(pointer_get_data(*ptr)) == DATA_TAG_array_incomplete) {
		if (unlikely(!frame_test_and_set_flag(fp, slot)))
			data_reference(pointer_get_data(*ptr));
		array_incomplete_collapse(ptr);
}
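
/*
 * Walk a (possibly incomplete) array chain until the segment containing *idx
 * is reached, decrementing *idx by the length of each skipped segment.
 * Returns GO with *result set to the reached segment, RETRY when the index is
 * out of range, and propagates the pointer_follow() result while a thunk is
 * still pending.
 */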
static void *array_walk(frame_s *fp, const code_t *ip, pointer_t *ptr, array_index_t *idx, unsigned flags, pointer_t *result, pointer_t **can_modify)
{
	array_index_t this_len;

	pointer_follow(ptr, false, a, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		thunk_reference(thunk_);
		*result = pointer_thunk(thunk_);
		return POINTER_FOLLOW_THUNK_EXCEPTION
	if (unlikely(index_eq_int(*idx, 0))) {
		return POINTER_FOLLOW_THUNK_GO;

	if (unlikely(da_tag(a) == DATA_TAG_array_incomplete)) {
		if (!data_is_writable(a))
		this_len = array_len(pointer_get_data(da(a,array_incomplete)->first));
		if (!index_ge_index(this_len, *idx)) {
			index_sub(idx, this_len);
			index_free(&this_len);
			ptr = &da(a,array_incomplete)->next;
		index_free(&this_len);
		*result = pointer_data(a);

	this_len = array_len(a);
	if (unlikely(!index_ge_index(this_len, *idx))) {
		index_free(&this_len);
		return POINTER_FOLLOW_THUNK_RETRY;	/* this means index out of range, not a retry */
	index_free(&this_len);
	*result = pointer_data(a);
	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall ipret_array_len(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, unsigned flags)
{
	const struct type *type;
	array_index_t idx_len;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_int(INT_DEFAULT_N)), (file_line, "ipret_array_len: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		*frame_slot(fp, slot_r, int_default_t) = flat_def->n_elements;
		return POINTER_FOLLOW_THUNK_GO;

	index_from_int(&idx_len, 0);
	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);

	struct data *array_data;
	struct data *this_ptr;
	array_index_t this_len;

	pointer_follow(ptr, false, array_data, flags & OPCODE_OP_FLAG_STRICT ? PF_WAIT : PF_NOEVAL, fp, ip,
		index_free(&idx_len);
		if (!(flags & OPCODE_OP_FLAG_STRICT)) {
			ex_ = array_len_create_thunk(fp, ip, slot_a, slot_r);
		index_free(&idx_len);
		thunk_reference(thunk_);
		frame_set_pointer(fp, slot_r, pointer_thunk(thunk_));
		return POINTER_FOLLOW_THUNK_GO

	if (da_tag(array_data) == DATA_TAG_array_incomplete)
		this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		this_ptr = array_data;

	this_len = array_len(this_ptr);

	if (unlikely(!index_add_(&idx_len, this_len, &err pass_file_line))) {
		index_free(&this_len);
		goto array_len_error;
	index_free(&this_len);

	if (da_tag(array_data) == DATA_TAG_array_incomplete) {
		ptr = &da(array_data,array_incomplete)->next;

	if (likely(!index_is_mp(idx_len))) {
		int_default_t len = index_to_int(idx_len);
		index_free(&idx_len);
		*frame_slot(fp, slot_r, int_default_t) = len;

	d = data_alloc_longint_mayfail(0, &err pass_file_line);
		index_free(&idx_len);
		frame_set_pointer(fp, slot_r, pointer_error(err, fp, ip pass_file_line));
	mpint_free(&da(d,longint)->mp);
	index_free_get_mp(&idx_len, &da(d,longint)->mp);
	frame_set_pointer(fp, slot_r, pointer_data(d));

	return POINTER_FOLLOW_THUNK_GO;
}
void * attr_hot_fastcall ipret_array_len_greater_than(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_l, unsigned flags)
{
	const struct type *type;
	array_index_t remaining_length;
	pointer_t *can_modify;

	ajla_assert(type_is_equal(frame_get_type_of_local(fp, slot_r), type_get_flat_option()), (file_line, "ipret_array_len_greater_than: invalid index type %u", frame_get_type_of_local(fp, slot_r)->tag));
	ajla_assert(!frame_test_flag(fp, slot_r), (file_line, "ipret_array_len_greater_than: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	ex = ipret_get_index(fp, ip, fp, slot_l, &neg, &remaining_length, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)

	if (unlikely(neg)) {

	type = frame_get_type_of_local(fp, slot_a);
	if (unlikely(TYPE_IS_FLAT(type))) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(remaining_length, flat_def->n_elements))

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);

	index_add_int(&remaining_length, 1);
	ex = array_walk(fp, ip, ptr, &remaining_length, flags, &res_ptr, &can_modify);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_len_greater_than_create_thunk(fp, ip, slot_a, slot_l, slot_r);
		index_free(&remaining_length);

	*frame_slot(fp, slot_r, ajla_flat_option_t) = result;

	index_free(&remaining_length);
	return POINTER_FOLLOW_THUNK_GO;

	index_free(&remaining_length);

	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
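
/*
 * Implementation of the array_sub opcode: take the subarray [start, end) of
 * slot_a into slot_r.  Flat arrays are copied directly; other arrays are
 * walked segment by segment, the relevant parts extracted with array_sub()
 * and joined with array_join().  Out-of-range indices and failed allocations
 * produce error pointers, and non-strict evaluation of a thunked array is
 * deferred via array_sub_create_thunk().
 */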
void * attr_hot_fastcall ipret_array_sub(frame_s *fp, const code_t *ip, frame_t slot_r, frame_t slot_a, frame_t slot_start, frame_t slot_end, unsigned flags)
{
	array_index_t start, end, end_tmp;
	pointer_t *can_modify;
	struct data *acc = NULL;

	ajla_assert(flags & OPCODE_FLAG_FREE_ARGUMENT || !frame_test_flag(fp, slot_r), (file_line, "ipret_array_sub: flag already set for destination slot %"PRIuMAX"", (uintmax_t)slot_r));

	ex = ipret_get_index(fp, ip, fp, slot_start, NULL, &start, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)
	ex = ipret_get_index(fp, ip, fp, slot_end, NULL, &end, &res_ptr pass_file_line);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (ex == POINTER_FOLLOW_THUNK_EXCEPTION)

	if (!index_ge_index(end, start)) {
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INVALID_OPERATION), fp, ip pass_file_line);

	if (frame_variable_is_flat(fp, slot_a)) {
		int_default_t st, len;
		const struct type *type = frame_get_type_of_local(fp, slot_a);
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		if (index_ge_int(end, flat_def->n_elements + 1)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
		st = index_to_int(start);
		len = index_to_int(end) - st;
		d = data_alloc_array_flat_mayfail(flat_def->base, len, len, false, &err pass_file_line);
			res_ptr = pointer_error(err, fp, ip pass_file_line);
		memcpy(da_array_flat(d), frame_var(fp, slot_a) + st * flat_def->base->size, len * flat_def->base->size);
		res_ptr = pointer_data(d);

	ptr = frame_pointer(fp, slot_a);

	if (flags & OPCODE_OP_FLAG_STRICT) {
		array_resolve_thunk(fp, slot_a);

	index_copy(&end_tmp, end);
	ex = array_walk(fp, ip, ptr, &end_tmp, flags, &res_ptr, &can_modify);
	index_free(&end_tmp);
	if (unlikely(ex != POINTER_FOLLOW_THUNK_GO)) {
		if (likely(ex == POINTER_FOLLOW_THUNK_RETRY)) {
			res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
		if (unlikely(ex == POINTER_FOLLOW_THUNK_EXCEPTION))
		if (!(flags & OPCODE_OP_FLAG_STRICT))
			ex = array_sub_create_thunk(fp, ip, slot_a, slot_start, slot_end, slot_r, flags);

	if (!(flags & OPCODE_FLAG_FREE_ARGUMENT) || !frame_test_flag(fp, slot_a))

	struct data *array_data;
	struct data *this_ptr;
	array_index_t this_len;

	if (pointer_is_thunk(*ptr)) {
		struct stack_trace st;
		stack_trace_init(&st);
		stack_trace_capture(&st, fp, ip, 20);
		stack_trace_print(&st);
		stack_trace_free(&st);
	array_data = pointer_get_data(*ptr);

	if (da_tag(array_data) == DATA_TAG_array_incomplete)
		this_ptr = pointer_get_data(da(array_data,array_incomplete)->first);
		this_ptr = array_data;

	this_len = array_len(this_ptr);

	if (!index_ge_index(this_len, start)) {
		index_sub(&start, this_len);
		index_sub(&end, this_len);
		index_free(&this_len);
		array_index_t this_step;

		if (da_tag(array_data) == DATA_TAG_array_incomplete)
			da(array_data,array_incomplete)->first = pointer_empty();
			*ptr = pointer_empty();

		if (!index_ge_index(this_len, end)) {
			index_sub3(&this_step, this_len, start);
			index_sub3(&this_step, end, start);
		/*debug("start %lu, end %lu, this_len %lu, this_step %lu", start, end, this_len, this_step);*/
		index_free(&this_len);
		index_sub(&end, this_step);
		index_sub(&end, start);
		t = array_sub(this_ptr, start, this_step, can_modify != NULL, &err);
		index_from_int(&start, 0);
			res_ptr = pointer_error(err, fp, ip pass_file_line);
		acc = array_join(acc, t, &err);
		if (unlikely(!acc)) {
			res_ptr = pointer_error(err, fp, ip pass_file_line);
		res_ptr = pointer_data(acc);

	if (unlikely(da_tag(array_data) != DATA_TAG_array_incomplete)) {
		res_ptr = pointer_error(error_ajla(EC_SYNC, AJLA_ERROR_INDEX_OUT_OF_RANGE), fp, ip pass_file_line);
	ptr = &da(array_data,array_incomplete)->next;

	data_dereference(acc);
	if (flags & OPCODE_FLAG_FREE_ARGUMENT) {
		if (pointer_is_empty(*frame_pointer(fp, slot_a)))
			frame_clear_flag(fp, slot_a);
		frame_free_and_clear(fp, slot_a);
	frame_set_pointer(fp, slot_r, res_ptr);
	return POINTER_FOLLOW_THUNK_GO;
}
2050 void * attr_hot_fastcall
ipret_array_skip(frame_s
*fp
, const code_t
*ip
, frame_t slot_r
, frame_t slot_a
, frame_t slot_start
, unsigned flags
)
2052 array_index_t start
, len
;
2054 pointer_t
*can_modify
;
2057 struct data
*a
, *ta
, *ts
;
2061 ajla_assert(flags
& OPCODE_FLAG_FREE_ARGUMENT
|| !frame_test_flag(fp
, slot_r
), (file_line
, "ipret_array_skip: flag already set for destination slot %"PRIuMAX
"", (uintmax_t)slot_r
));
2063 ex
= ipret_get_index(fp
, ip
, fp
, slot_start
, NULL
, &start
, &res_ptr pass_file_line
);
2064 if (unlikely(ex
!= POINTER_FOLLOW_THUNK_GO
)) {
2065 if (ex
== POINTER_FOLLOW_THUNK_EXCEPTION
)
2070 if (frame_variable_is_flat(fp
, slot_a
)) {
2073 int_default_t st
, len
;
2074 const struct type
*type
= frame_get_type_of_local(fp
, slot_a
);
2075 const struct flat_array_definition
*flat_def
= type_def(type
,flat_array
);
2076 if (index_ge_int(start
, flat_def
->n_elements
+ 1)) {
2077 res_ptr
= pointer_error(error_ajla(EC_SYNC
, AJLA_ERROR_INDEX_OUT_OF_RANGE
), fp
, ip pass_file_line
);
2078 goto ret_free_start
;
2080 st
= index_to_int(start
);
2081 len
= flat_def
->n_elements
- st
;
2082 d
= data_alloc_array_flat_mayfail(flat_def
->base
, len
, len
, false, &err pass_file_line
);
2084 res_ptr
= pointer_error(err
, fp
, ip pass_file_line
);
2085 goto ret_free_start
;
2087 memcpy(da_flat(d
), frame_var(fp
, slot_a
) + st
* flat_def
->base
->size
, len
* flat_def
->base
->size
);
2088 res_ptr
= pointer_data(d
);
2089 goto ret_free_start
;
2092 ptr
= frame_pointer(fp
, slot_a
);
2094 if (flags
& OPCODE_OP_FLAG_STRICT
) {
2095 array_resolve_thunk(fp
, slot_a
);
2098 ex
= array_walk(fp
, ip
, ptr
, &start
, flags
, &res_ptr
, &can_modify
);
2099 if (unlikely(ex
!= POINTER_FOLLOW_THUNK_GO
)) {
2100 if (likely(ex
== POINTER_FOLLOW_THUNK_RETRY
)) {
2101 res_ptr
= pointer_error(error_ajla(EC_SYNC
, AJLA_ERROR_INDEX_OUT_OF_RANGE
), fp
, ip pass_file_line
);
2102 goto ret_free_start
;
2104 if (unlikely(ex
== POINTER_FOLLOW_THUNK_EXCEPTION
))
2105 goto ret_free_start
;
2106 if (!(flags
& OPCODE_OP_FLAG_STRICT
))
2107 ex
= array_skip_create_thunk(fp
, ip
, slot_a
, slot_start
, slot_r
, flags
);
2112 if (unlikely(index_eq_int(start
, 0))) {
2113 pointer_reference_owned(res_ptr
);
2114 goto ret_free_start
;
2117 if (!(flags
& OPCODE_FLAG_FREE_ARGUMENT
) || !frame_test_flag(fp
, slot_a
))
2120 a
= pointer_get_data(res_ptr
);
2121 if (da_tag(a
) == DATA_TAG_array_incomplete
) {
2122 ta
= pointer_get_data(da(a
,array_incomplete
)->first
);
2127 len
= array_len(ta
);
2128 index_sub(&len
, start
);
2129 if (unlikely(index_eq_int(len
, 0)) && da_tag(a
) == DATA_TAG_array_incomplete
) {
2130 res_ptr
= pointer_reference(&da(a
,array_incomplete
)->next
);
2132 goto ret_free_start
;
2136 *can_modify
= pointer_empty();
2139 ts
= array_sub(ta
, start
, len
, deref
, &err
);
2140 if (unlikely(!ts
)) {
2141 res_ptr
= pointer_error(err
, fp
, ip pass_file_line
);
2147 da(a
,array_incomplete
)->first
= pointer_data(ts
);
2148 res_ptr
= pointer_data(a
);
2151 pointer_t next
= pointer_reference(&da(a
,array_incomplete
)->next
);
2152 inc
= data_alloc_array_incomplete(ts
, next
, &err pass_file_line
);
2153 if (unlikely(!inc
)) {
2154 data_dereference(ts
);
2155 pointer_dereference(next
);
2156 res_ptr
= pointer_error(err
, fp
, ip pass_file_line
);
2159 res_ptr
= pointer_data(inc
);
2162 res_ptr
= pointer_data(ts
);
2169 if (flags
& OPCODE_FLAG_FREE_ARGUMENT
) {
2170 if (pointer_is_empty(*frame_pointer(fp
, slot_a
)))
2171 frame_clear_flag(fp
, slot_a
);
2173 frame_free_and_clear(fp
, slot_a
);
2175 frame_set_pointer(fp
, slot_r
, res_ptr
);
2176 return POINTER_FOLLOW_THUNK_GO
;
2179 static void attr_hot
ipret_array_append_pointers(frame_s
*fp
, const code_t
*ip
, pointer_t
*ptr_r
, pointer_t ptr_1
, pointer_t ptr_2
, pointer_t
*fn_ptr
)
2184 if (unlikely(fn_ptr
!= NULL
)) {
2185 if (unlikely(pointer_is_thunk(ptr_1
))) {
2186 struct thunk
*result
;
2187 struct data
*function_reference
;
2189 if (pointer_is_thunk(ptr_1
) && thunk_tag_volatile(pointer_get_thunk(ptr_1
)) == THUNK_TAG_EXCEPTION
) {
2191 pointer_dereference(ptr_2
);
2195 result
= build_thunk(fn_ptr
, 2, &function_reference
);
2196 data_fill_function_reference(function_reference
, 0, ptr_1
);
2197 data_fill_function_reference(function_reference
, 1, ptr_2
);
2198 *ptr_r
= pointer_thunk(result
);
2200 } else if (likely(da_tag(pointer_get_data(ptr_1
)) == DATA_TAG_array_incomplete
)) {
2203 struct thunk
*thunk
;
2204 struct data
*function_reference
, *result
;
2206 array_incomplete_decompose(pointer_get_data(ptr_1
), &first
, &last
);
2208 thunk
= build_thunk(fn_ptr
, 2, &function_reference
);
2209 data_fill_function_reference(function_reference
, 0, last
);
2210 data_fill_function_reference(function_reference
, 1, ptr_2
);
2212 result
= data_alloc_array_incomplete(first
, pointer_thunk(thunk
), &err pass_file_line
);
2213 if (unlikely(!result
)) {
2214 data_dereference(first
);
2215 pointer_dereference(pointer_thunk(thunk
));
2216 *ptr_r
= pointer_error(err
, fp
, ip pass_file_line
);
2218 *ptr_r
= pointer_data(result
);
2224 if (unlikely(array_is_empty(pointer_get_data(ptr_1
)))) {
2226 pointer_dereference(ptr_1
);
2230 if (unlikely(pointer_is_thunk(ptr_2
)) || unlikely(da_tag(pointer_get_data(ptr_2
)) == DATA_TAG_array_incomplete
)) {
2231 struct data
*result
;
2232 result
= data_alloc_array_incomplete(pointer_get_data(ptr_1
), ptr_2
, &err pass_file_line
);
2233 if (unlikely(!result
)) {
2234 pointer_dereference(ptr_1
);
2235 pointer_dereference(ptr_2
);
2236 *ptr_r
= pointer_error(err
, fp
, ip pass_file_line
);
2238 *ptr_r
= pointer_data(result
);
2239 if (!pointer_is_thunk(ptr_2
))
2240 array_incomplete_collapse(ptr_r
);
2245 d
= array_join(pointer_get_data(ptr_1
), pointer_get_data(ptr_2
), &err
);
2247 *ptr_r
= pointer_error(err
, fp
, ip pass_file_line
);
2249 *ptr_r
= pointer_data(d
);
2252 void * attr_hot_fastcall
ipret_array_append(frame_s
*fp
, const code_t
*ip
, frame_t slot_r
, frame_t slot_1
, frame_t slot_2
, unsigned flags
)
2254 pointer_t
*fn_ptr
= NULL
;
2255 pointer_t ptr_1
, ptr_2
, *ptr_r
;
2257 if (unlikely(array_resolve_thunk(fp
, slot_1
))) {
2258 void *ex
= pcode_find_array_append_function(fp
, ip
, &fn_ptr
);
2259 if (unlikely(ex
!= POINTER_FOLLOW_THUNK_RETRY
))
2262 array_resolve_thunk(fp
, slot_2
);
2264 ptr_1
= ipret_copy_variable_to_pointer(fp
, slot_1
, (flags
& OPCODE_FLAG_FREE_ARGUMENT
) != 0);
2265 ptr_2
= ipret_copy_variable_to_pointer(fp
, slot_2
, (flags
& OPCODE_FLAG_FREE_ARGUMENT_2
) != 0);
2267 ajla_assert(!frame_test_flag(fp
, slot_r
), (file_line
, "ipret_array_append: flag already set for destination slot %"PRIuMAX
"", (uintmax_t)slot_r
));
2269 frame_set_flag(fp
, slot_r
);
2270 ptr_r
= frame_pointer(fp
, slot_r
);
2272 ipret_array_append_pointers(fp
, ip
, ptr_r
, ptr_1
, ptr_2
, fn_ptr
);
2274 return POINTER_FOLLOW_THUNK_GO
;
2277 void * attr_hot_fastcall
ipret_array_append_one_flat(frame_s
*fp
, const code_t
*ip
, frame_t slot_r
, frame_t slot_1
, frame_t slot_2
, unsigned flags
)
2279 const int minimum_size
= 16;
2283 const struct type
*type
;
2284 if (unlikely(!(flags
& OPCODE_FLAG_FREE_ARGUMENT
)))
2286 if (unlikely(!frame_variable_is_flat(fp
, slot_2
)))
2288 if (unlikely(!frame_test_flag(fp
, slot_1
)))
2290 ptr
= *frame_pointer(fp
, slot_1
);
2291 if (unlikely(pointer_is_thunk(ptr
)))
2293 data
= pointer_get_data(ptr
);
2294 if (unlikely(da_tag(data
) != DATA_TAG_array_flat
)) {
2295 if (likely(da_tag(data
) == DATA_TAG_array_pointers
) && likely(!da(data
,array_pointers
)->n_used_entries
)) {
2296 type
= frame_get_type_of_local(fp
, slot_2
);
2297 data
= data_alloc_array_flat_mayfail(type
, minimum_size
, 0, false, &sink pass_file_line
);
2298 if (unlikely(!data
))
2300 pointer_dereference(ptr
);
2305 if (unlikely(!data_is_writable(data
)))
2307 if (unlikely(da(data
,array_flat
)->n_used_entries
== da(data
,array_flat
)->n_allocated_entries
)) {
2308 struct data
*new_data
;
2309 int_default_t new_size
= (uint_default_t
)da(data
,array_flat
)->n_used_entries
* 2;
2310 new_size
= maximum(new_size
, minimum_size
);
2311 if (unlikely(new_size
< 0) ||
2312 unlikely(new_size
<= da(data
,array_flat
)->n_used_entries
))
2314 type
= da(data
,array_flat
)->type
;
2315 new_data
= data_alloc_array_flat_mayfail(type
, new_size
, da(data
,array_flat
)->n_used_entries
, false, &sink pass_file_line
);
2316 if (unlikely(!new_data
))
2318 memcpy(da_array_flat(new_data
), da_array_flat(data
), da(data
,array_flat
)->n_used_entries
* type
->size
);
2324 type
= da(data
,array_flat
)->type
;
2325 memcpy_fast(da_array_flat(data
) + (size_t)da(data
,array_flat
)->n_used_entries
* type
->size
, frame_var(fp
, slot_2
), type
->size
);
2326 da(data
,array_flat
)->n_used_entries
++;
2328 frame_clear_flag(fp
, slot_1
);
2329 *frame_pointer(fp
, slot_1
) = pointer_empty();
2330 frame_set_flag(fp
, slot_r
);
2331 *frame_pointer(fp
, slot_r
) = pointer_data(data
);
2333 return POINTER_FOLLOW_THUNK_GO
;
2335 return ipret_array_append_one(fp
, ip
, slot_r
, slot_1
, slot_2
, flags
);
2338 void * attr_hot_fastcall
ipret_array_append_one(frame_s
*fp
, const code_t
*ip
, frame_t slot_r
, frame_t slot_1
, frame_t slot_2
, unsigned flags
)
2341 pointer_t
*fn_ptr
= NULL
;
2342 pointer_t ptr_1
, ptr_2
, ptr_e
, *ptr_r
;
2345 if (unlikely(array_resolve_thunk(fp
, slot_1
))) {
2346 void *ex
= pcode_find_array_append_function(fp
, ip
, &fn_ptr
);
2347 if (unlikely(ex
!= POINTER_FOLLOW_THUNK_RETRY
))
2351 ptr_1
= ipret_copy_variable_to_pointer(fp
, slot_1
, (flags
& OPCODE_FLAG_FREE_ARGUMENT
) != 0);
2353 if (frame_variable_is_flat(fp
, slot_2
)) {
2354 const struct type
*type
= frame_get_type_of_local(fp
, slot_2
);
2355 data
= data_alloc_array_flat_mayfail(type
, 1, 1, false, &err pass_file_line
);
2356 if (unlikely(!data
))
2358 memcpy_fast(da_array_flat(data
), frame_var(fp
, slot_2
), type
->size
);
2359 ptr_2
= pointer_data(data
);
2364 ptr_e
= ipret_copy_variable_to_pointer(fp
, slot_2
, (flags
& OPCODE_FLAG_FREE_ARGUMENT_2
) != 0);
2366 if (unlikely(pointer_is_thunk(ptr_1
)))
2368 data
= pointer_get_data(ptr_1
);
2369 if (unlikely(da_tag(data
) != DATA_TAG_array_pointers
))
2371 if (unlikely(!data_is_writable(data
)))
2373 if (unlikely(da(data
,array_pointers
)->n_used_entries
== da(data
,array_pointers
)->n_allocated_entries
)) {
2374 struct data
*new_data
;
2375 size_t new_size
= (size_t)da(data
,array_pointers
)->n_used_entries
* 2;
2376 new_size
= maximum(new_size
, 16);
2377 if (unlikely(new_size
<= (size_t)da(data
,array_pointers
)->n_used_entries
))
2379 new_data
= data_alloc_array_pointers_mayfail(new_size
, da(data
,array_pointers
)->n_used_entries
, &err pass_file_line
);
2380 if (unlikely(!new_data
))
2382 memcpy(da(new_data
,array_pointers
)->pointer
, da(data
,array_pointers
)->pointer
, da(data
,array_pointers
)->n_used_entries
* sizeof(pointer_t
));
2386 da(data
,array_pointers
)->pointer
[da(data
,array_pointers
)->n_used_entries
++] = ptr_e
;
2388 frame_set_flag(fp
, slot_r
);
2389 *frame_pointer(fp
, slot_r
) = pointer_data(data
);
2391 return POINTER_FOLLOW_THUNK_GO
;
2394 data
= data_alloc_array_pointers_mayfail(1, 1, &err pass_file_line
);
2395 if (likely(data
!= NULL
)) {
2396 da(data
,array_pointers
)->pointer
[0] = ptr_e
;
2397 ptr_2
= pointer_data(data
);
2399 pointer_dereference(ptr_e
);
2400 ptr_2
= pointer_error(err
, fp
, ip pass_file_line
);
2404 ajla_assert(!frame_test_flag(fp
, slot_r
), (file_line
, "ipret_array_append_one: flag already set for destination slot %"PRIuMAX
"", (uintmax_t)slot_r
));
2405 frame_set_flag(fp
, slot_r
);
2406 ptr_r
= frame_pointer(fp
, slot_r
);
2408 ipret_array_append_pointers(fp
, ip
, ptr_r
, ptr_1
, ptr_2
, fn_ptr
);
2410 return POINTER_FOLLOW_THUNK_GO
;
2413 static int_default_t
get_array_pointers(unsigned char *flat
, const struct type attr_unused
*type
, int_default_t attr_unused n_elements
, pointer_t
*ptr
, void *context
)
2415 struct data
*na
= context
;
2416 if (unlikely(flat
!= NULL
))
2418 ajla_assert(da(na
,array_pointers
)->n_used_entries
< da(na
,array_pointers
)->n_allocated_entries
, (file_line
, "get_array_pointers: array overrun"));
2419 da(na
,array_pointers
)->pointer
[da(na
,array_pointers
)->n_used_entries
++] = pointer_reference(ptr
);
2423 static int_default_t
get_array_flat(unsigned char *flat
, const struct type
*type
, int_default_t n_elements
, pointer_t attr_unused
*ptr
, void *context
)
2426 struct data
*na
= context
;
2427 unsigned char *dest
;
2428 if (unlikely(!flat
))
2430 ajla_assert(da(na
,array_flat
)->n_used_entries
+ n_elements
<= da(na
,array_flat
)->n_allocated_entries
, (file_line
, "get_array_flat: array overrun"));
2431 size
= (size_t)n_elements
* type
->size
;
2432 dest
= da_array_flat(na
) + type
->size
* (size_t)da(na
,array_flat
)->n_used_entries
;
2433 for (i
= 0; i
< size
; i
++, flat
++, dest
++) {
2437 da(na
,array_flat
)->n_used_entries
+= n_elements
;
2441 static int_default_t
get_array_type(unsigned char *flat
, const struct type
*type
, int_default_t attr_unused n_elements
, pointer_t attr_unused
*ptr
, void *context
)
2444 *cast_ptr(const struct type
**, context
) = type
;
2448 void * attr_fastcall
ipret_array_flatten(frame_s
*fp
, const code_t
*ip
, frame_t slot_r
, frame_t slot_1
, unsigned flags
)
2452 struct data
*data
, *na
;
2453 array_index_t len_long
;
2456 const struct type
*flat_type
;
2457 const struct type
*array_type
= frame_get_type_of_local(fp
, slot_r
);
2459 if (frame_variable_is_flat(fp
, slot_1
))
2462 ptr
= frame_pointer(fp
, slot_1
);
2464 pointer_follow(ptr
, false, data
, PF_WAIT
, fp
, ip
,
2468 if (unlikely(da_tag(data
) == DATA_TAG_array_incomplete
)) {
2469 ptr
= &da(data
,array_incomplete
)->next
;
2473 array_resolve_thunk(fp
, slot_1
);
2475 data
= pointer_get_data(*frame_pointer(fp
, slot_1
));
2476 if (da_tag(data
) == DATA_TAG_array_flat
&& da(data
,array_flat
)->n_used_entries
== da(data
,array_flat
)->n_allocated_entries
) {
2477 if (array_type
->tag
== TYPE_TAG_flat_array
) {
2478 pointer_t p
= frame_get_pointer_reference(fp
, slot_1
, (flags
& OPCODE_FLAG_FREE_ARGUMENT
) != 0);
2479 na
= pointer_get_data(p
);
2480 goto try_to_flatten
;
2484 if (da_tag(data
) == DATA_TAG_array_pointers
&& da(data
,array_pointers
)->n_used_entries
== da(data
,array_pointers
)->n_allocated_entries
&& da(data
,array_pointers
)->pointer
== da(data
,array_pointers
)->pointer_array
) {
2488 len_long
= array_len(data
);
2489 if (unlikely(!index_is_int(len_long
))) {
2490 index_free(&len_long
);
2493 len
= index_to_int(len_long
);
2494 index_free(&len_long
);
2497 index_from_int(&len_long
, 0);
2498 array_onstack_iterate(fp
, slot_1
, &len_long
, get_array_type
, &flat_type
);
2499 index_free(&len_long
);
2502 na
= data_alloc_array_pointers_mayfail(len
, 0, &sink pass_file_line
);
2505 index_from_int(&len_long
, 0);
2506 success
= array_onstack_iterate(fp
, slot_1
, &len_long
, get_array_pointers
, na
);
2507 index_free(&len_long
);
2508 if (unlikely(!success
)) {
2509 pointer_dereference(pointer_data(na
));
2513 na
= data_alloc_array_flat_mayfail(flat_type
, len
, 0, true, &sink pass_file_line
);
2516 index_from_int(&len_long
, 0);
2517 success
= array_onstack_iterate(fp
, slot_1
, &len_long
, get_array_flat
, na
);
2518 index_free(&len_long
);
2519 if (unlikely(!success
)) {
2525 if (flags
& OPCODE_FLAG_FREE_ARGUMENT
)
2526 frame_free_and_clear(fp
, slot_1
);
2529 if (array_type
->tag
== TYPE_TAG_flat_array
&& da_tag(na
) == DATA_TAG_array_flat
) {
2530 struct flat_array_definition
*fa
= type_def(array_type
,flat_array
);
2531 if (fa
->n_elements
== da(na
,array_flat
)->n_used_entries
) {
2532 memcpy(frame_var(fp
, slot_r
), da_array_flat(na
), array_type
->size
);
2533 pointer_dereference(pointer_data(na
));
2534 return POINTER_FOLLOW_THUNK_GO
;
2538 frame_set_pointer(fp
, slot_r
, pointer_data(na
));
2540 return POINTER_FOLLOW_THUNK_GO
;
2543 ipret_copy_variable(fp
, slot_1
, fp
, slot_r
, (flags
& OPCODE_FLAG_FREE_ARGUMENT
) != 0);
2544 return POINTER_FOLLOW_THUNK_GO
;
2547 void attr_fastcall
ipret_prefetch_functions(struct data
*function
)
2550 for (x
= 0; x
< da(function
,function
)->local_directory_size
; x
++) {
2552 pointer_t
*lfnp
, lfn
;
2553 lfnp
= da(function
,function
)->local_directory
[x
];
2554 if (pointer_is_thunk(pointer_locked_read(lfnp
))) {
2555 struct execution_control
*ex
;
2556 lfn
= pointer_reference(lfnp
);
2557 if (unlikely(!pointer_is_thunk(lfn
)) || thunk_tag(pointer_get_thunk(lfn
)) != THUNK_TAG_FUNCTION_CALL
) {
2558 pointer_dereference(lfn
);
2561 ex
= function_evaluate_prepare(&sink
);
2562 if (likely(ex
!= NULL
))
2563 function_evaluate_submit(ex
, lfn
, NULL
, NULL
);
2565 pointer_dereference(lfn
);
2571 static attr_noinline frame_s
*ipret_break(frame_s
*top_fp
, frame_s
*high
, frame_s
*low
)
2574 struct execution_control
*high_ex
, *low_ex
;
2575 struct data
*high_function
, *low_function
;
2578 struct thunk
**result
;
2584 struct data
*function
= get_frame(fp
)->function
;
2585 const struct local_arg
*la
= da(function
,function
)->args
;
2586 for (ia
= 0; ia
< da(function
,function
)->n_arguments
; ia
++, la
++) {
2589 if (!la
->may_be_borrowed
)
2592 ptr
= *frame_pointer(fp
, slot
);
2593 if (!pointer_is_empty(ptr
) && !frame_test_and_set_flag(fp
, slot
))
2594 pointer_reference_owned(ptr
);
2596 } while ((fp
= frame_up(fp
)) != low
);
2598 high_ex
= frame_execution_control(high
);
2600 top_fp
= stack_split(top_fp
, low
, &high
, &sink
);
2601 if (unlikely(!top_fp
))
2604 low_ex
= execution_control_alloc(&sink
);
2605 if (unlikely(!low_ex
))
2608 low_ex
->stack
= frame_stack_bottom(low
);
2609 low_ex
->stack
->ex
= low_ex
;
2610 low_ex
->callback
= high_ex
->callback
;
2615 address_lock(t
, DEPTH_THUNK
);
2616 t
->u
.function_call
.u
.execution_control
= low_ex
;
2617 list_take(&low_ex
->wait_list
, &high_ex
->wait_list
);
2618 address_unlock(t
, DEPTH_THUNK
);
2621 high_ex
->stack
= frame_stack_bottom(high
);
2622 high_ex
->stack
->ex
= high_ex
;
2624 high_function
= get_frame(high
)->function
;
2625 result
= mem_alloc_array_mayfail(mem_alloc_mayfail
, struct thunk
**, 0, 0, da(high_function
,function
)->n_return_values
, sizeof(struct thunk
*), &sink
);
2626 if (unlikely(!result
))
2629 if (unlikely(!thunk_alloc_blackhole(high_ex
, da(get_frame(high
)->function
,function
)->n_return_values
, result
, &sink
)))
2632 low_function
= get_frame(low
)->function
;
2633 ip
= da(low_function
,function
)->code
+ get_frame(high
)->previous_ip
;
2634 low_ex
->current_frame
= low
;
2640 dst_slot
= get_max_param(ip
, 0);
2641 frame_set_pointer(low
, dst_slot
, pointer_thunk(result
[ia
]));
2643 ip
+= max_param_size(1) + 1;
2644 } while (++ia
< da(high_function
,function
)->n_return_values
);
2646 low_ex
->current_ip
= frame_ip(low
, ip
);
2650 /*get_frame(low)->timestamp++;*/
2659 stack_free(frame_stack_bottom(top_fp
));
2664 static void * attr_hot_fastcall
ipret_break_the_chain(frame_s
*fp
, const code_t
*ip
, int waiting
, bool *something_breakable
)
2666 frame_s
*top_fp
= fp
;
2668 struct execution_control
*ex
;
2669 timestamp_t t
= get_frame(fp
)->timestamp
++;
2672 struct data
*top_fn
= get_frame(fp
)->function
;
2673 if (unlikely(profiling
)) {
2674 profile_counter_t profiling_counter
;
2675 profiling_counter
= load_relaxed(&da(top_fn
,function
)->profiling_counter
);
2676 profiling_counter
+= profile_sample();
2677 store_relaxed(&da(top_fn
,function
)->profiling_counter
, profiling_counter
);
2681 *something_breakable
= false;
2683 if (unlikely(frame_execution_control(fp
)->atomic
!= 0))
2687 prev_fp
= frame_up(fp
);
2688 if (frame_is_top(prev_fp
))
2690 if (get_frame(fp
)->mode
== CALL_MODE_STRICT
)
2692 if (get_frame(fp
)->mode
== CALL_MODE_SPARK
|| (likely(!ipret_strict_calls
) && (timestamp_t
)(t
- get_frame(prev_fp
)->timestamp
) > break_ticks
)) {
2693 struct execution_control
*low_ex
, *high_ex
;
2695 /*debug("break: %s - %s (%u - %u - %u)", da(get_frame(prev_fp)->function,function)->function_name, da(get_frame(fp)->function,function)->function_name, t, get_frame(prev_fp)->timestamp, get_frame(fp)->timestamp);*/
2696 /*debug("break %"PRIuMAX"", (uintmax_t)++break_count);*/
2697 new_fp
= ipret_break(top_fp
, fp
, prev_fp
);
2698 if (unlikely(!new_fp
))
2700 low_ex
= frame_execution_control(prev_fp
);
2702 high_ex
= frame_execution_control(new_fp
);
2703 high_ex
->current_frame
= new_fp
;
2705 task_submit(low_ex
, true);
2709 goto cont_with_low_ex
;
2713 task_submit(low_ex
, true);
2717 high_ex
= frame_execution_control(new_fp
);
2718 high_ex
->current_frame
= new_fp
;
2719 high_ex
->current_ip
= frame_ip(new_fp
, ip
);
2720 task_submit(high_ex
, true);
2722 prev_fp
= top_fp
= low_ex
->current_frame
;
2723 ip
= da(get_frame(top_fp
)->function
,function
)->code
+ low_ex
->current_ip
;
2724 /*t = get_frame(top_fp)->timestamp;*/
2727 *something_breakable
= true;
2736 ex
= frame_execution_control(top_fp
);
2737 ex
->current_frame
= top_fp
;
2738 ex
->current_ip
= frame_ip(top_fp
, ip
);
2742 bool attr_fastcall
ipret_break_waiting_chain(frame_s
*fp
, ip_t ip
)
2744 bool something_breakable
;
2745 struct execution_control
*ex
;
2747 ex
= ipret_break_the_chain(fp
, da(get_frame(fp
)->function
,function
)->code
+ ip
, 1, &something_breakable
);
2749 task_submit(ex
, true);
2751 return something_breakable
;
2754 void * attr_hot_fastcall
ipret_tick(frame_s
*fp
, const code_t
*ip
)
2757 struct execution_control
*ex
;
2759 waiting_list_break();
2761 ex
= ipret_break_the_chain(fp
, ip
, 0, &sink
);
2763 return task_schedule(ex
);