/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
atomic_type uchar_efficient_t trace_enabled = 0;

#ifdef HAVE_CODEGEN_TRAPS
static rwmutex_t traps_lock;
static struct tree traps_tree;

static refcount_t n_dereferenced;
static struct stack_bottom *stack_alloc_space(size_t needed_size, bool leaf, ajla_error_t *mayfail)
	const size_t additional_space = SIZEOF_STACK_BOTTOM + SIZEOF_FRAME_STRUCT;
	size_t test_size, extra_space;
	struct stack_bottom *stack;
	struct frame_struct *stack_end;

	if (unlikely(needed_size + additional_space < additional_space) ||
	    unlikely(needed_size / slot_size >= sign_bit(stack_size_t))) {
		fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "stack allocation size overflow");

	test_size = STACK_INITIAL_SIZE;
	if (unlikely(test_size < additional_space))
	while (test_size - additional_space < needed_size) {
		size_t new_test_size = test_size * 2;
		if (unlikely(new_test_size <= test_size))
		test_size = new_test_size;

	extra_space = round_down(test_size - additional_space - needed_size, frame_align);
	slots = (extra_space + needed_size) / slot_size;
	if (unlikely(slots >= sign_bit(stack_size_t)))

	stack = mem_align_mayfail(struct stack_bottom *, extra_space + needed_size + additional_space, frame_align, &sink);
	if (unlikely(!stack)) {
		slots = (extra_space + needed_size) / slot_size;
		stack = mem_align_mayfail(struct stack_bottom *, needed_size + additional_space, frame_align, mayfail);

	stack_end = cast_ptr(struct frame_struct *, cast_ptr(char *, stack) + SIZEOF_STACK_BOTTOM + extra_space + needed_size);
	stack_end->function = NULL;
	stack_end->available_slots = stack->useable_slots = (stack_size_t)slots;
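
/*
 * A sketch of the resulting layout, inferred from the code above (added, not
 * part of the original source):
 *
 *	| struct stack_bottom | extra_space + needed_size (frame slots) | sentinel frame_struct |
 *
 * The topmost frame_struct acts as a sentinel with function == NULL, and both
 * it and the stack_bottom record the number of useable slots.
 */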
frame_s * attr_fastcall stack_alloc(struct execution_control *ex, struct data *function, ajla_error_t *mayfail)
	struct stack_bottom *stack;
	char *stack_start, *stack_end;

	stack = stack_alloc_space(function_frame_size(function), da(function,function)->leaf, mayfail);
	if (unlikely(!stack))
	if ((stack->ex = ex))
	stack_start = cast_ptr(char *, stack) + SIZEOF_STACK_BOTTOM;
	stack_end = stack_start + stack->useable_slots * slot_size;
	frame = ptr_frame(cast_ptr(struct frame_struct *, stack_end - function_frame_size(function)));
	get_frame(frame)->available_slots = stack->useable_slots - da(function,function)->frame_slots;
	get_frame(frame)->function = function;

frame_s * attr_fastcall stack_expand(frame_s *fp, struct data *function, ajla_error_t *mayfail)
	struct stack_bottom *old_stack, *new_stack;
	size_t new_size, old_stack_size;
	char *old_stack_end, *new_stack_end;

	old_stack = frame_stack_bottom(fp);
	new_size = (old_stack->useable_slots - get_frame(fp)->available_slots) * slot_size + function_frame_size(function);
	if (unlikely(new_size < function_frame_size(function))) {
		fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "stack allocation size overflow");

	new_stack = stack_alloc_space(new_size, false, mayfail);
	if (unlikely(!new_stack))

	old_stack_end = cast_ptr(char *, old_stack) + SIZEOF_STACK_BOTTOM + old_stack->useable_slots * slot_size;
	old_stack_size = old_stack_end - cast_ptr(char *, get_frame(fp));
	new_stack_end = cast_ptr(char *, new_stack) + SIZEOF_STACK_BOTTOM + new_stack->useable_slots * slot_size;
	(void)memcpy(new_stack_end - old_stack_size, old_stack_end - old_stack_size, old_stack_size);
	(new_stack->ex = old_stack->ex)->stack = new_stack;
	stack_free(old_stack);

	new_fp = ptr_frame(cast_ptr(const struct frame_struct *, new_stack_end - old_stack_size - function_frame_size(function)));
	get_frame(new_fp)->function = function;

		get_frame(fp)->available_slots = (stack_size_t)((size_t)(cast_ptr(char *, get_frame(fp)) - cast_ptr(char *, new_stack) - SIZEOF_STACK_BOTTOM) / slot_size);
	} while (!frame_is_top(fp));
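
/*
 * Note (added): stack_expand measures the live portion of the old stack as
 * old_stack_size (from the current frame up to the old stack end), copies it
 * to the end of the newly allocated stack, frees the old stack, and the loop
 * above then walks every copied frame and recomputes available_slots relative
 * to the new stack_bottom until frame_is_top() reports the sentinel frame.
 */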
frame_s * attr_fastcall stack_split(frame_s *from_fp, frame_s *to_fp, frame_s **high, ajla_error_t *mayfail)
	struct stack_bottom *new_stack;
	frame_s *fp, *new_fp;
	size_t new_stack_size = cast_ptr(char *, to_fp) - cast_ptr(char *, from_fp);

	new_stack = stack_alloc_space(new_stack_size, false, mayfail);
	if (unlikely(!new_stack))

	new_stack_end = cast_ptr(char *, new_stack) + SIZEOF_STACK_BOTTOM + new_stack->useable_slots * slot_size;
	new_fp = ptr_frame(memcpy(new_stack_end - new_stack_size, get_frame(from_fp), new_stack_size));

		get_frame(fp)->available_slots = (stack_size_t)((size_t)(cast_ptr(char *, get_frame(fp)) - cast_ptr(char *, new_stack) - SIZEOF_STACK_BOTTOM) / slot_size);
	} while (!frame_is_top(fp));

static void frame_cleanup(frame_s *fp)
	const struct data *function = get_frame(fp)->function;

	for (l = MIN_USEABLE_SLOT; l < function_n_variables(function); l++) {
		if (!frame_test_flag(fp, l))
		pointer_dereference(*frame_pointer(fp, l));

void stack_trace_init(struct stack_trace *st)

void stack_trace_free(struct stack_trace *st)

bool stack_trace_get_location(struct data *function, ip_t ip_rel, struct stack_trace_entry *result)
	struct line_position *lp, *my_lp;

	lp = da(function,function)->lp;
	n_lp = da(function,function)->lp_size;
	binary_search(code_t, da(function,function)->lp_size, idx, false, idx + 1 >= n_lp ? false : lp[idx + 1].ip < ip_rel, break);
	result->module_designator = da(function,function)->module_designator;
	result->function_name = da(function,function)->function_name;
	result->line = my_lp->line;

void stack_trace_capture(struct stack_trace *st, frame_s *fp, const code_t *ip, unsigned max_depth)
	struct data *function;
	ip_t ip_rel, previous_ip;
	struct stack_trace_entry ste;

	if (!array_init_mayfail(struct stack_trace_entry, &st->trace, &st->trace_n, &sink))

		function = get_frame(fp)->function;
		ip_rel = ip - da(function,function)->code;
		if (unlikely(!stack_trace_get_location(function, ip_rel, &ste)))
			goto skip_this_frame;
		if (unlikely(!array_add_mayfail(struct stack_trace_entry, &st->trace, &st->trace_n, ste, NULL, &sink))) {

		previous_ip = get_frame(fp)->previous_ip;
		if (!frame_is_top(fp)) {
			ip = da(get_frame(fp)->function,function)->code + previous_ip;

	array_finish(struct stack_trace_entry, &st->trace, &st->trace_n);
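
/*
 * Note (added): the capture loop above walks the frames from fp towards the
 * sentinel, maps each frame's instruction offset back to a source position
 * via stack_trace_get_location(), and appends the entries to st->trace.
 * Allocation failures go to &sink, so capturing a trace is best-effort and
 * never fails the caller.
 */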
char * attr_cold stack_trace_string(struct stack_trace *st, ajla_error_t *err)
	if (unlikely(!array_init_mayfail(char, &msg, &msg_l, err)))

	for (t = 0; t < st->trace_n; t++) {
		struct stack_trace_entry *ste = &st->trace[t];
		if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ' ', NULL, err)))
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, ste->module_designator->path, ste->module_designator->path_len, NULL, err)))
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, " : ", 3, NULL, err)))
		xl = strlen(ste->function_name);
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, ste->function_name, xl, NULL, err)))
		if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ':', NULL, err)))
		str_add_unsigned(&b, NULL, ste->line & 0xffffffffU, 10);
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, buffer, xl, NULL, err)))
		if (unlikely(!array_add_mayfail(char, &msg, &msg_l, '\n', NULL, err)))

	if (unlikely(!array_add_mayfail(char, &msg, &msg_l, 0, NULL, err)))

void attr_cold stack_trace_print(struct stack_trace *st)
	char *m = stack_trace_string(st, &sink);

	stderr_msg("stack trace:");
	m[strlen(m) - 1] = 0;


/*********************
 * OBJECT ALLOCATION *
 *********************/

/* !!! TODO: make it return pointer_t */
struct data * attr_fastcall data_alloc_flat_mayfail(type_tag_t type, const unsigned char *flat, size_t size, ajla_error_t *mayfail argument_position)
	struct data *d = data_align(flat, data_flat_offset + size, scalar_align, mayfail);

	mem_set_position(data_untag(d) pass_position);
	da(d,flat)->data_type = type;
	memcpy_fast(da_flat(d), flat, size);

struct data * attr_fastcall data_alloc_longint_mayfail(unsigned long bits, ajla_error_t *mayfail argument_position)
	struct data *d = data_alloc(longint, mayfail);

	mem_set_position(data_untag(d) pass_position);
	if (unlikely(!mpint_alloc_mayfail(&da(d,longint)->mp, bits, mayfail))) {

struct data * attr_fastcall data_alloc_record_mayfail(const struct record_definition *def, ajla_error_t *mayfail argument_position)
	struct data *d = data_align(record, data_record_offset + def->n_slots * slot_size, def->alignment, mayfail);

	mem_set_position(data_untag(d) pass_position);
	da(d,record)->definition = &def->type;

struct data * attr_fastcall data_alloc_option_mayfail(ajla_error_t *mayfail argument_position)
	struct data *d = data_alloc(option, mayfail);

	mem_set_position(data_untag(d) pass_position);

struct data * attr_fastcall data_alloc_array_flat_mayfail(const struct type *type, int_default_t n_allocated, int_default_t n_used, bool clear, ajla_error_t *mayfail argument_position)
	ajla_assert(TYPE_IS_FLAT(type), (caller_file_line, "data_alloc_array_flat_mayfail: type is not flat, tag %u", type->tag));
	ajla_assert((n_allocated | n_used) >= 0, (caller_file_line, "data_alloc_array_flat_mayfail: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)n_allocated, (intmax_t)n_used));
#if defined(HAVE_BUILTIN_ADD_SUB_OVERFLOW) && defined(HAVE_BUILTIN_MUL_OVERFLOW) && !defined(UNUSUAL)
	if (unlikely(__builtin_mul_overflow((uint_default_t)n_allocated, type->size, &size)))
	if (unlikely(__builtin_add_overflow(size, data_array_offset, &size)))
	size = (uint_default_t)n_allocated * (size_t)type->size;
	if (unlikely((size_t)type->size + uzero >= 0x100) || unlikely((uint_default_t)n_allocated + (size_t)uzero >= sign_bit(size_t) / 0x100)) {
		if (unlikely(size / type->size != (uint_default_t)n_allocated))
		if ((size_t)(size + data_array_offset) < size)
	size += data_array_offset;
	d = data_align(array_flat, size, scalar_align, mayfail);
	d = data_calign(array_flat, size, scalar_align, mayfail);
	mem_set_position(data_untag(d) pass_position);
	da(d,array_flat)->type = type;
	da(d,array_flat)->n_allocated_entries = n_allocated;
	da(d,array_flat)->n_used_entries = n_used;

	fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "flat array allocation size overflow");
struct data * attr_fastcall data_alloc_array_slice_mayfail(struct data *base, unsigned char *data, int_default_t start, int_default_t len, ajla_error_t *mayfail argument_position)
	const struct type *t;

	d = data_alloc(array_slice, mayfail);
	mem_set_position(data_untag(d) pass_position);
	data_reference(base);
	t = da(base,array_flat)->type;
	da(d,array_slice)->type = t;
	da(d,array_slice)->reference = pointer_data(base);
	da(d,array_slice)->flat_data_minus_data_array_offset = data - data_array_offset + start * t->size;
	da(d,array_slice)->n_entries = len;

struct data * attr_fastcall data_alloc_array_pointers_mayfail(int_default_t n_allocated, int_default_t n_used, ajla_error_t *mayfail argument_position)
	d = data_alloc_flexible(array_pointers, pointer_array, n_allocated, mayfail);
	mem_set_position(data_untag(d) pass_position);
	da(d,array_pointers)->pointer = da(d,array_pointers)->pointer_array;
	da(d,array_pointers)->n_allocated_entries = n_allocated;
	da(d,array_pointers)->n_used_entries = n_used;

struct data * attr_fastcall data_alloc_array_same_mayfail(array_index_t n_entries, ajla_error_t *mayfail argument_position)
	struct data *d = data_alloc(array_same, mayfail);
		index_free(&n_entries);
	mem_set_position(data_untag(d) pass_position);
	da(d,array_same)->n_entries = n_entries;

struct data * attr_fastcall data_alloc_array_incomplete(struct data *first, pointer_t next, ajla_error_t *mayfail argument_position)
	ajla_assert(!array_is_empty(first), (caller_file_line, "data_alloc_array_incomplete: the first array is empty"));
	d = data_alloc(array_incomplete, mayfail);
	mem_set_position(data_untag(d) pass_position);
	da(d,array_incomplete)->first = pointer_data(first);
	da(d,array_incomplete)->next = next;

struct data * attr_fastcall data_alloc_function_reference_mayfail(arg_t n_curried_arguments, ajla_error_t *mayfail argument_position)
	arg_t alloc_size = n_curried_arguments;

	d = data_alloc_flexible(function_reference, arguments, alloc_size, mayfail);
	mem_set_position(data_untag(d) pass_position);
	da(d,function_reference)->n_curried_arguments = n_curried_arguments;

void attr_fastcall data_fill_function_reference(struct data *function_reference, arg_t a, pointer_t ptr)
	if (unlikely(!function_reference)) {
		pointer_dereference(ptr);

	ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "data_fill_function_reference: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));

	da(function_reference,function_reference)->arguments[a].tag = TYPE_TAG_unknown;
	da(function_reference,function_reference)->arguments[a].u.ptr = ptr;

void attr_fastcall data_fill_function_reference_flat(struct data *function_reference, arg_t a, const struct type *type, const unsigned char *data)
	if (unlikely(!function_reference))

	if (type->size <= slot_size && TYPE_TAG_IS_BUILTIN(type->tag)) {
		da(function_reference,function_reference)->arguments[a].tag = type->tag;
		memcpy_fast(da(function_reference,function_reference)->arguments[a].u.slot, data, type->size);
		pointer_t ptr = flat_to_data(type, data);
		data_fill_function_reference(function_reference, a, ptr);
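
/*
 * Note (added): a curried argument is stored in one of two forms.  A builtin
 * flat value that fits into a slot is copied inline and tagged with its type
 * tag; anything else is boxed through flat_to_data() (or passed in already
 * boxed) and stored with TYPE_TAG_unknown, which is how the consumers later
 * in this file distinguish the two cases.
 */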
struct data * attr_fastcall data_alloc_resource_mayfail(size_t size, void (*close)(struct data *), ajla_error_t *mayfail argument_position)
	struct data *d = data_calign(resource, data_resource_offset + size, scalar_align, mayfail);

	mem_set_position(data_untag(d) pass_position);
	da(d,resource)->close = close;

static inline void thunk_init_refcount_tag(struct thunk *t, tag_t tag)
	refcount_init(&t->refcount_);
	refcount_init_tag(&t->refcount_, tag);

static attr_always_inline struct thunk *thunk_alloc_exception_mayfail(ajla_error_t err, ajla_error_t *mayfail argument_position)
	/*debug("thunk_alloc_exception(%s) at %s", error_decode(err), position_arg);*/
	thunk = mem_alloc_compressed_mayfail(struct thunk *, partial_sizeof(struct thunk, u.exception), mayfail);
	if (unlikely(!thunk))
	mem_set_position(thunk pass_position);
	thunk = thunk_pointer_tag(thunk);
	thunk_init_refcount_tag(thunk, THUNK_TAG_EXCEPTION);
	thunk->u.exception.err = err;
	thunk->u.exception.msg = NULL;
	stack_trace_init(&thunk->u.exception.tr);

static pointer_t out_of_memory_thunk;
pointer_t *out_of_memory_ptr = &out_of_memory_thunk;

struct thunk * attr_fastcall thunk_alloc_exception_error(ajla_error_t err, char *msg, frame_s *fp, const code_t *ip argument_position)
	/*debug("thunk_alloc_exception_error: %d, %d @ %p", err.error_type, err.error_code, __builtin_return_address(0));*/
	thunk = thunk_alloc_exception_mayfail(err, &sink pass_position);
	if (unlikely(!thunk)) {
		pointer_reference_owned(out_of_memory_thunk);
		return pointer_get_thunk(out_of_memory_thunk);

	thunk->u.exception.msg = str_dup(msg, -1, &sink);
	stack_trace_capture(&thunk->u.exception.tr, fp, ip, -1);
	/*debug("err: %d, %d, %d", err.error_class, err.error_type, err.error_aux);
	stack_trace_print(&thunk->u.exception.tr);*/

pointer_t attr_fastcall pointer_error(ajla_error_t err, frame_s *fp, const code_t *ip argument_position)
	struct thunk *thunk = thunk_alloc_exception_error(err, NULL, fp, ip pass_position);
	return pointer_thunk(thunk);

char * attr_cold thunk_exception_string(struct thunk *thunk, ajla_error_t *err)
	ajla_assert_lo(thunk_tag(thunk) == THUNK_TAG_EXCEPTION, (file_line, "thunk_exception_string: invalid thunk tag %u", thunk_tag(thunk)));
	if (unlikely(!array_init_mayfail(char, &msg, &msg_l, err)))
	m = error_decode(thunk->u.exception.err);
	if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, m, ml, NULL, err)))
	if (thunk->u.exception.msg && *thunk->u.exception.msg) {
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, " (", 2, NULL, err)))
		xl = strlen(thunk->u.exception.msg);
		if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, thunk->u.exception.msg, xl, NULL, err)))
		if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ')', NULL, err)))

	if (unlikely(!array_add_mayfail(char, &msg, &msg_l, 0, NULL, err)))

char * attr_cold thunk_exception_payload(struct thunk *thunk, ajla_error_t *err)
	ajla_assert_lo(thunk_tag(thunk) == THUNK_TAG_EXCEPTION, (file_line, "thunk_exception_payload: invalid thunk tag %u", thunk_tag(thunk)));
	m = thunk->u.exception.msg;
	return str_dup(m, -1, err);

void attr_cold thunk_exception_print(struct thunk *thunk)
#if defined(DEBUG_ERROR) && defined(DEBUG_TRACK_FILE_LINE)
	stderr_msg("error at %s", thunk->u.exception.err.position);
	m = thunk_exception_string(thunk, NULL);
	stderr_msg("exception: %s", m);
	stack_trace_print(&thunk->u.exception.tr);

static struct thunk * attr_fastcall thunk_alloc_struct(tag_t tag, arg_t n_return_values, ajla_error_t *mayfail)
	/* not needed because of ARG_LIMIT
	if (!struct_check_overflow(struct thunk, u.function_call.results, n_return_values, mayfail))
	*/
	s = partial_sizeof_array(struct thunk, u.function_call.results, n_return_values);
	t = mem_alloc_compressed_mayfail(struct thunk *, s, mayfail);
	t = thunk_pointer_tag(t);
	thunk_init_refcount_tag(t, tag);

static bool attr_fastcall thunk_alloc_result(struct thunk *t, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
	if (n_return_values == 1) {
	} else for (ia = 0; ia < n_return_values; ia++) {
		thunk_reference_nonatomic(t);
		t->u.function_call.results[ia].wanted = true;
		tm = thunk_alloc_struct(THUNK_TAG_MULTI_RET_REFERENCE, 1, mayfail);
			thunk_free(result[ia]);
		tm->u.multi_ret_reference.thunk = t;
		tm->u.multi_ret_reference.idx = ia;
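
/*
 * Note (added): with a single return value the caller's thunk is used
 * directly.  With multiple return values each result[] entry becomes a
 * THUNK_TAG_MULTI_RET_REFERENCE thunk that records the shared function-call
 * thunk and the index of the result it stands for, and the shared thunk
 * keeps one nonatomic reference per result.
 */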
bool attr_fastcall thunk_alloc_function_call(pointer_t function_reference, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
	t = thunk_alloc_struct(THUNK_TAG_FUNCTION_CALL, n_return_values, mayfail);
	t->u.function_call.u.function_reference = function_reference;
	return thunk_alloc_result(t, n_return_values, result, mayfail);

bool attr_fastcall thunk_alloc_blackhole(struct execution_control *ex, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
	t = thunk_alloc_struct(THUNK_TAG_BLACKHOLE, n_return_values, mayfail);
	t->u.function_call.u.execution_control = ex;
	return thunk_alloc_result(t, n_return_values, result, mayfail);

bool are_there_dereferenced(void)
	return !refcount_is_one(&n_dereferenced);

static void execution_control_unlink(struct execution_control *ex)
	waiting_list_remove(ex);
	for (i = 0; i < N_EXECUTION_CONTROL_WAIT; i++) {
		struct execution_control_wait *w = &ex->wait[i];
		mutex_t *t = w->mutex_to_lock;
		if (unlikely(t != NULL)) {
			list_del(&w->wait_entry);
			w->mutex_to_lock = NULL;

	refcount_set(&ex->wait_state, EXECUTION_CONTROL_NORMAL);

void execution_control_unlink_and_submit(struct execution_control *ex, bool can_allocate_memory)
	execution_control_unlink(ex);
	task_submit(ex, can_allocate_memory);

bool execution_control_acquire(struct execution_control *ex)
	return refcount_xchgcmp(&ex->wait_state, EXECUTION_CONTROL_FIRED, EXECUTION_CONTROL_ARMED);

static struct execution_control *execution_control_acquire_from_thunk(struct thunk *t)
	struct execution_control *ex = t->u.function_call.u.execution_control;
	ajla_assert_lo(ex->thunk == t, (file_line, "execution_control_acquire_from_thunk: pointer mismatch"));
	return execution_control_acquire(ex) ? ex : NULL;

static void *wake_up_wait_list_internal(struct list *wait_list, mutex_t *mutex_to_lock, bool can_allocate_memory)
	struct list ex_to_resume;
	void *ret = POINTER_FOLLOW_THUNK_EXIT;

	list_init(&ex_to_resume);

	while (!list_is_empty(wait_list)) {
		struct execution_control_wait *w = get_struct(wait_list->prev, struct execution_control_wait, wait_entry);
		ajla_assert_lo(w->mutex_to_lock == mutex_to_lock, (file_line, "wake_up_wait_list: mutex_to_lock pointer does not match: %p != %p", w->mutex_to_lock, mutex_to_lock));
		list_del(&w->wait_entry);
		if (likely(refcount_xchgcmp(&w->execution_control->wait_state, EXECUTION_CONTROL_FIRED, EXECUTION_CONTROL_ARMED))) {
			w->mutex_to_lock = NULL;
			list_add(&ex_to_resume, &w->wait_entry);
			list_init(&w->wait_entry);

	mutex_unlock(mutex_to_lock);

	while (!list_is_empty(&ex_to_resume)) {
		struct execution_control_wait *w;
		struct execution_control *new_ex;
		w = get_struct(ex_to_resume.prev, struct execution_control_wait, wait_entry);
		list_del(&w->wait_entry);
		new_ex = w->execution_control;
		if (ret == POINTER_FOLLOW_THUNK_EXIT) {
			execution_control_unlink(new_ex);
			execution_control_unlink_and_submit(new_ex, can_allocate_memory);

void wake_up_wait_list(struct list *wait_list, mutex_t *mutex_to_lock, bool can_allocate_memory)
	void *ex = wake_up_wait_list_internal(wait_list, mutex_to_lock, can_allocate_memory);
	if (ex != POINTER_FOLLOW_THUNK_EXIT)
		task_submit(ex, can_allocate_memory);
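
/*
 * Note (added): the wakeup is two-phase.  Waiters are moved from the shared
 * wait list to the private ex_to_resume list while mutex_to_lock is still
 * held, the mutex is dropped, and only then are the execution controls
 * unlinked and submitted.  wake_up_wait_list_internal() may hand one
 * resumable execution control back to the caller (any value other than
 * POINTER_FOLLOW_THUNK_EXIT) instead of submitting it; wake_up_wait_list()
 * simply submits whatever it gets back.
 */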
void *thunk_terminate(struct thunk *t, arg_t n_return_values)
	struct execution_control *ex;

	address_lock(t, DEPTH_THUNK);
	ex = t->u.function_call.u.execution_control;
		likely(tag == THUNK_TAG_BLACKHOLE) ||
		(tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED && n_return_values > 1) ||
		(tag == THUNK_TAG_BLACKHOLE_DEREFERENCED)
		), (file_line, "thunk_terminate: invalid thunk tag %u (n_return_values %lu)", tag, (unsigned long)n_return_values));
	if (unlikely(tag == THUNK_TAG_BLACKHOLE_DEREFERENCED)) {
		thunk_init_refcount_tag(t, THUNK_TAG_BLACKHOLE_DEREFERENCED);
		goto return_dereference_unused;

	thunk_tag_set(t, tag, THUNK_TAG_RESULT);
#ifdef barrier_write_before_unlock_lock
	barrier_write_before_unlock_lock();

	if (tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
		thunk_reference_nonatomic(t);
return_dereference_unused:
		address_unlock(t, DEPTH_THUNK);
			if (n_return_values == 1 || !t->u.function_call.results[i].wanted) {
				pointer_dereference(t->u.function_call.results[i].ptr);
				pointer_poison(&t->u.function_call.results[i].ptr);
		} while (++i < n_return_values);
		address_lock(t, DEPTH_THUNK);
	thunk_assert_refcount(t);
	if (thunk_dereference_nonatomic(t)) {
		if (unlikely(tag == THUNK_TAG_BLACKHOLE_DEREFERENCED))
			refcount_add(&n_dereferenced, -1);

	ret = wake_up_wait_list_internal(&ex->wait_list, address_get_mutex(t, DEPTH_THUNK), true);
	execution_control_free(ex);

static void thunk_terminate_with_value(struct thunk *t, arg_t n_return_values, pointer_t val)
	for (i = 0; i < n_return_values; i++) {
		pointer_reference_owned(val);
		t->u.function_call.results[i].ptr = val;

	ex = thunk_terminate(t, n_return_values);
	if (ex != POINTER_FOLLOW_THUNK_EXIT)
		task_submit(ex, true);


/*********************
 * EXECUTION CONTROL *
 *********************/

struct execution_control *execution_control_alloc(ajla_error_t *mayfail)
	struct execution_control *ex = mem_alloc_mayfail(struct execution_control *, sizeof(struct execution_control), mayfail);
	list_init(&ex->wait_list);
	refcount_init_val(&ex->wait_state, EXECUTION_CONTROL_NORMAL);
	for (i = 0; i < N_EXECUTION_CONTROL_WAIT; i++) {
		struct execution_control_wait *w = &ex->wait[i];
		w->execution_control = ex;
		w->mutex_to_lock = NULL;

	ex->atomic_interrupted = false;
	task_ex_control_started();

void execution_control_free(struct execution_control *ex)
	stack_free(ex->stack);
	task_ex_control_exited();

void execution_control_terminate(struct execution_control *ex, pointer_t ptr)
	arg_t n_return_values;

	ex->callback(ex->callback_cookie, ptr);
	fp = ex->current_frame;
		n_return_values = da(get_frame(fp)->function,function)->n_return_values;
	} while (!frame_is_top(fp));

	ajla_assert_lo(frame_stack_bottom(fp)->ex == ex, (file_line, "execution_control_terminate: execution control pointer mismatch: %p != %p", frame_stack_bottom(fp)->ex, ex));

	struct thunk *err = thunk_alloc_exception_error(error_ajla(EC_ASYNC, AJLA_ERROR_NOT_SUPPORTED), NULL, NULL, NULL pass_file_line);
	thunk_terminate_with_value(ex->thunk, n_return_values, pointer_thunk(err));

	execution_control_free(ex);


/**********************
 * POINTER OPERATIONS *
 **********************/

struct compare_status {
	void (attr_fastcall *destruct)(struct compare_status *cs);
		struct function_argument **args1;
		struct function_argument **args2;
	} function_reference;

	void *(attr_fastcall *get_sub)(void *data);
	void (attr_fastcall *free_object)(void *data);
	bool (attr_fastcall *deep_eval)(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err);
	int (attr_fastcall *compare)(struct compare_status *cs, struct compare_status *new_cs, bool init);
	bool (attr_fastcall *save)(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l);

static struct data_method data_method_table[TAG_END];
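
/*
 * Note (added): data_method_table is indexed by the data/thunk tag and holds
 * the per-tag callbacks declared above (get_sub, free_object, deep_eval,
 * compare, save); pointer_dereference_() below dispatches through it instead
 * of switching on the tag, and the deep_eval callbacks feed pointer_deep_eval().
 * A hypothetical entry, for illustration only (the real initialization lives
 * elsewhere in this file and may differ):
 *
 *	data_method_table[DATA_TAG_option].get_sub = get_sub_option;
 *	data_method_table[DATA_TAG_option].free_object = free_primitive;
 */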
static void * attr_hot_fastcall no_sub(void attr_unused *data)

static void attr_hot_fastcall free_primitive(void *data)
	struct data *d = cast_cpp(struct data *, data);

static void attr_hot_fastcall free_primitive_thunk(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);

static void attr_hot_fastcall free_none(void attr_unused *data)

static void attr_hot_fastcall free_integer(void *data)
	struct data *d = cast_cpp(struct data *, data);
	mpint_free(&da(d,longint)->mp);

static void attr_hot_fastcall free_array_same(void *data)
	struct data *d = cast_cpp(struct data *, data);
	index_free(&da(d,array_same)->n_entries);

static void attr_hot_fastcall free_resource(void *data)
	struct data *d = cast_cpp(struct data *, data);
	da(d,resource)->close(d);

void free_cache_entry(struct data *d, struct cache_entry *ce)
	for (i = 0; i < da(d,function)->n_arguments; i++)
		pointer_dereference(ce->arguments[i]);
	for (i = 0; i < da(d,function)->n_return_values; i++)
		pointer_dereference(ce->returns[i].ptr);
	mem_free(ce->returns);

static void attr_fastcall free_function(void *data)
	struct data *d = cast_cpp(struct data *, data);
	pointer_dereference(da(d,function)->types_ptr);
	pointer_dereference(da(d,function)->codegen);
	if (unlikely(!da(d,function)->is_saved))
		mem_free(da(d,function)->code);
	mem_free(da(d,function)->local_variables);
	if (unlikely(!da(d,function)->is_saved))
		mem_free(da(d,function)->local_variables_flags);
	if (da(d,function)->args)
		mem_free(da(d,function)->args);
	mem_free(da(d,function)->function_name);
	if (unlikely(!da(d,function)->is_saved) && da(d,function)->lp)
		mem_free(da(d,function)->lp);
	while (unlikely(!tree_is_empty(&da(d,function)->cache))) {
		struct cache_entry *ce = get_struct(tree_any(&da(d,function)->cache), struct cache_entry, entry);
		tree_delete(&ce->entry);
		free_cache_entry(d, ce);

	if (profiling_escapes)
		mem_free(da(d,function)->escape_data);

static void attr_fastcall free_codegen(void *data)
	struct data *d = cast_cpp(struct data *, data);
#ifdef HAVE_CODEGEN_TRAPS
#ifndef DEBUG_CRASH_HANDLER
	if (da(d,codegen)->trap_records_size)
	rwmutex_lock_write(&traps_lock);
	tree_delete(&da(d,codegen)->codegen_tree);
	rwmutex_unlock_write(&traps_lock);

static void * attr_hot_fastcall get_sub_record(void *data)
	struct data *d = cast_cpp(struct data *, data);
	const struct record_definition *def = type_def(da(d,record)->definition,record);
	frame_s *f = da_record_frame(d);

	for (slot = 0; slot < def->n_slots; slot++) {
		/* !!! TODO: test multiple flags at once */
		if (!frame_test_flag(f, slot))
		ptr = frame_pointer(f, slot);
		if (!pointer_is_empty(*ptr))
		frame_clear_flag(f, slot);

static void * attr_hot_fastcall get_sub_option(void *data)
	struct data *d = cast_cpp(struct data *, data);
	ptr = &da(d,option)->pointer;
	if (!pointer_is_empty(*ptr))

static void * attr_hot_fastcall get_sub_array_slice(void *data)
	struct data *d = cast_cpp(struct data *, data);
	pointer_t *ptr = &da(d,array_slice)->reference;
	if (!pointer_is_empty(*ptr))

static void * attr_hot_fastcall get_sub_array_pointers(void *data)
	struct data *d = cast_cpp(struct data *, data);
	int_default_t x = da(d,array_pointers)->n_used_entries;
		pointer_t *ptr = &da(d,array_pointers)->pointer[x];
		if (!pointer_is_empty(*ptr))
		da(d,array_pointers)->n_used_entries = x;

static void * attr_hot_fastcall get_sub_array_same(void *data)
	struct data *d = cast_cpp(struct data *, data);
	pointer_t *ptr = &da(d,array_same)->pointer;
	if (!pointer_is_empty(*ptr))

static void * attr_hot_fastcall get_sub_array_btree(void *data)
	struct data *d = cast_cpp(struct data *, data);
	btree_entries_t x = da(d,array_btree)->n_used_btree_entries;
		pointer_t *ptr = &da(d,array_btree)->btree[x].node;
		if (!pointer_is_empty(*ptr))
		da(d,array_btree)->n_used_btree_entries = x;
		index_free(&da(d,array_btree)->btree[x].end_index);
static void * attr_hot_fastcall get_sub_array_incomplete(void *data)
	struct data *d = cast_cpp(struct data *, data);
	if (!pointer_is_empty(da(d,array_incomplete)->first))
		return &da(d,array_incomplete)->first;
	if (!pointer_is_empty(da(d,array_incomplete)->next))
		return &da(d,array_incomplete)->next;

static void * attr_hot_fastcall get_sub_function_reference(void *data)
	struct data *d = cast_cpp(struct data *, data);
	arg_t ia = da(d,function_reference)->n_curried_arguments;
	ia = da(d,function_reference)->n_curried_arguments;
		if (da(d,function_reference)->arguments[ia].tag == TYPE_TAG_unknown) {
			pointer_t *ptr = &da(d,function_reference)->arguments[ia].u.ptr;
			if (!pointer_is_empty(*ptr))
		da(d,function_reference)->n_curried_arguments = ia;

	if (da(d,function_reference)->is_indirect) {
		prev = &da(d,function_reference)->u.indirect;
		if (!pointer_is_empty(*prev))

static void * attr_hot_fastcall get_sub_function_call(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	address_unlock(t, DEPTH_THUNK);
	if (!pointer_is_empty(t->u.function_call.u.function_reference))
		return &t->u.function_call.u.function_reference;

static void * attr_hot_fastcall get_sub_blackhole(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	struct execution_control *ex;

	refcount_add(&n_dereferenced, 1);
	thunk_tag_set(t, THUNK_TAG_BLACKHOLE, THUNK_TAG_BLACKHOLE_DEREFERENCED);
	ex = execution_control_acquire_from_thunk(t);
	address_unlock(t, DEPTH_THUNK);
		execution_control_unlink_and_submit(ex, true);

static void * attr_hot_fastcall get_sub_blackhole_some_dereferenced(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	struct execution_control *ex;

	refcount_add(&n_dereferenced, 1);
	thunk_tag_set(t, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED, THUNK_TAG_BLACKHOLE_DEREFERENCED);
	ex = execution_control_acquire_from_thunk(t);
	address_unlock(t, DEPTH_THUNK);
		execution_control_unlink_and_submit(ex, true);

static void * attr_cold attr_fastcall get_sub_blackhole_dereferenced(void attr_unused *data)
	internal(file_line, "get_sub_blackhole_dereferenced called");

static void attr_cold attr_fastcall free_blackhole_dereferenced(void attr_unused *data)
	internal(file_line, "free_blackhole_dereferenced called");

static void * attr_hot_fastcall get_sub_result(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	address_unlock(t, DEPTH_THUNK);
	ptr = &t->u.function_call.results[0].ptr;
	if (!pointer_is_empty(*ptr))

static void * attr_hot_fastcall get_sub_exception(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	address_unlock(t, DEPTH_THUNK);

static void * attr_hot_fastcall get_sub_multi_ret_reference(void *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	struct execution_control *ex = NULL;

	address_unlock(t, DEPTH_THUNK);
	mt = t->u.multi_ret_reference.thunk;
	address_lock(mt, DEPTH_THUNK);
	idx = t->u.multi_ret_reference.idx;
	tag = thunk_tag(mt);
	if (tag == THUNK_TAG_FUNCTION_CALL) {
		if (thunk_refcount_is_one_nonatomic(mt)) {
			/* get_sub_function_call unlocks mt */
			pointer_t *ptr = get_sub_function_call(mt);
		(void)thunk_dereference_nonatomic(mt);
		mt->u.function_call.results[idx].wanted = false;
		goto unlock_ret_false;

	if (tag == THUNK_TAG_BLACKHOLE) {
		thunk_tag_set(mt, THUNK_TAG_BLACKHOLE, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED);
		tag = THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED;

	if (tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
		mt->u.function_call.results[idx].wanted = false;
		if (thunk_dereference_nonatomic(mt)) {
			refcount_add(&n_dereferenced, 1);
			thunk_tag_set(mt, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED, THUNK_TAG_BLACKHOLE_DEREFERENCED);
			tag = THUNK_TAG_BLACKHOLE_DEREFERENCED;
			ex = execution_control_acquire_from_thunk(mt);

		goto unlock_ret_false;

	if (likely(tag == THUNK_TAG_RESULT)) {
		pointer_t *ptr = &mt->u.function_call.results[idx].ptr;
		if (!pointer_is_empty(*ptr)) {
			address_unlock(mt, DEPTH_THUNK);
		if (thunk_dereference_nonatomic(mt))
		goto unlock_ret_false;

	internal(file_line, "get_sub_multi_ret_reference: invalid thunk tag %u", tag);

	address_unlock(mt, DEPTH_THUNK);
		execution_control_unlink_and_submit(ex, true);

static void attr_cold attr_fastcall free_exception(void attr_unused *data)
	struct thunk *t = cast_cpp(struct thunk *, data);
	stack_trace_free(&t->u.exception.tr);
	if (t->u.exception.msg)
		mem_free(t->u.exception.msg);

void attr_hot_fastcall pointer_dereference_(pointer_t top_ptr argument_position)
	const struct data_method *m;
	pointer_t current_ptr, backlink, *sub_ptr;

	current_ptr = top_ptr;
	backlink = pointer_mark();

	r = pointer_get_refcount_(current_ptr);
	if (unlikely(refcount_is_read_only(r)))
		goto go_to_backlink;
	if (!refcount_dec_(r, caller_file_line_x))
		goto go_to_backlink;

	p = pointer_get_value_strip_tag_(current_ptr);
	if (unlikely(pointer_is_thunk(current_ptr))) {
		struct thunk *thunk = cast_cpp(struct thunk *, p);
		address_lock(thunk, DEPTH_THUNK);
		tag = thunk_tag(thunk);
		struct data *data = cast_cpp(struct data *, p);

	m = &data_method_table[tag];
	sub_ptr = m->get_sub(p);
		ajla_assert(!pointer_is_empty(*sub_ptr), (file_line, "pointer_dereference_: empty pointer returned from %p", cast_ptr(void *, m)));
		if (!pointer_is_equal(current_ptr, backlink)) {
#if defined(__IBMC__)
			/* a compiler bug */
			pointer_t old_current_ptr = current_ptr;
			current_ptr = *sub_ptr;
			*sub_ptr = backlink;
			backlink = old_current_ptr;
			backlink = *sub_ptr;
			*sub_ptr = pointer_empty();
		goto process_current;

	if (!pointer_is_mark(backlink)) {
		current_ptr = backlink;
		goto process_current;
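
/*
 * Note (added): pointer_dereference_() avoids recursion by pointer reversal.
 * When it descends into a child returned by get_sub(), it overwrites that
 * child slot with the current "backlink" (initially pointer_mark()) and keeps
 * going, so the chain of parents is stored inside the objects being freed and
 * arbitrarily deep structures are torn down in constant C-stack space, in the
 * spirit of Deutsch-Schorr-Waite traversal.
 */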
static inline bool pointer_verify(pointer_t attr_unused *ptr, pointer_t attr_unused val)
	ret = pointer_is_equal(*pointer_volatile(ptr), val);
	pointer_unlock(ptr);

pointer_t attr_hot_fastcall pointer_reference_(pointer_t *ptr argument_position)
#ifdef POINTER_FOLLOW_IS_LOCKLESS
	p = *pointer_volatile(ptr);
	r = pointer_get_refcount_(p);
	if (likely(!pointer_is_thunk(p))) {
		pointer_dependency_barrier();
		if (unlikely(refcount_is_read_only(r)))
		refcount_inc_(r, caller_file_line_x);

		struct thunk *t = pointer_get_thunk(p);
		if (likely(!refcount_is_read_only(r))) {
			address_lock(t, DEPTH_THUNK);
			if (unlikely(!pointer_verify(ptr, p))) {
				address_unlock(t, DEPTH_THUNK);
			if (thunk_is_finished(t)) {
				address_unlock(t, DEPTH_THUNK);
				pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
			refcount_inc_(r, caller_file_line_x);
			address_unlock(t, DEPTH_THUNK);

	r = pointer_get_refcount_(p);
	if (likely(!refcount_is_read_only(r)))
		refcount_inc_(r, caller_file_line_x);
	pointer_unlock(ptr);

void pointer_reference_maybe_(frame_s *fp, frame_t result, pointer_t *ptr, unsigned char flags argument_position)
	if (flags & OPCODE_STRUCT_MAY_BORROW) {
		p = pointer_locked_read(ptr);
		if (likely(!pointer_is_thunk(p))) {
			ajla_assert(!frame_test_flag(fp, result), (file_line, "pointer_reference_maybe_: flag for slot %"PRIuMAX" already set", (uintmax_t)result));
			*frame_pointer(fp, result) = p;

	p = pointer_reference_(ptr pass_position);
	frame_set_pointer(fp, result, p);

void copy_from_function_reference_to_frame(frame_s *new_fp, struct data *ref, arg_t ia, char can_move)
	struct data *function = get_frame(new_fp)->function;
	if (!data_is_writable(ref))
	pi = da(ref,function_reference)->n_curried_arguments;
		frame_t target = da(function,function)->args[--ia].slot;
		type_tag_t tag = da(ref,function_reference)->arguments[pi].tag;
		if (tag != TYPE_TAG_unknown) {
			const struct type *new_type = frame_get_type_of_local(new_fp, target);
			if (TYPE_IS_FLAT(new_type)) {
				ajla_assert_lo(TYPE_TAG_IS_BUILTIN(new_type->tag) && new_type->size <= slot_size, (file_line, "copy_from_function_reference_to_frame: invalid type tag %u,%u,%u", new_type->tag, new_type->size, new_type->align));
				memcpy_fast(frame_var(new_fp, target), da(ref,function_reference)->arguments[pi].u.slot, new_type->size);
				pointer_t ptr_data = flat_to_data(type_get_from_tag(tag), da(ref,function_reference)->arguments[pi].u.slot);
				frame_set_pointer(new_fp, target, ptr_data);
			pointer_t *p = &da(ref,function_reference)->arguments[pi].u.ptr;
				frame_set_pointer(new_fp, target, pointer_reference(p));
				frame_set_pointer(new_fp, target, *p);
				*p = pointer_empty();

		if (!da(ref,function_reference)->is_indirect)
		ref = pointer_get_data(da(ref,function_reference)->u.indirect);

	ajla_assert_lo(!ia, (file_line, "copy_from_function_reference_to_frame: the number of arguments doesn't match: %s, %"PRIuMAX"", da(function,function)->function_name, (uintmax_t)ia));
void * attr_hot_fastcall pointer_follow_thunk_(pointer_t *ptr, void *ex_wait)
	pointer_t *orig_ptr = ptr;
	struct execution_control *new_ex = NULL;
	struct thunk *error_thunk = NULL;

	ajla_assert(ex_wait == POINTER_FOLLOW_THUNK_NOEVAL || ex_wait == POINTER_FOLLOW_THUNK_SPARK || !((struct execution_control_wait *)ex_wait)->mutex_to_lock, (file_line, "pointer_follow_thunk_: execution_control_wait is already waiting on %p", ((struct execution_control_wait *)ex_wait)->mutex_to_lock));

	pv = pointer_locked_read(ptr);
	if (unlikely(!pointer_is_thunk(pv))) {
		ret = POINTER_FOLLOW_THUNK_RETRY;

	t = pointer_get_thunk(pv);
	address_lock(t, DEPTH_THUNK);
	if (unlikely(!pointer_verify(ptr, pointer_thunk(t)))) {
		address_unlock(t, DEPTH_THUNK);
		ret = POINTER_FOLLOW_THUNK_RETRY;

	t_tag = thunk_tag(t);
	if (unlikely(t_tag == THUNK_TAG_EXCEPTION)) {
		if (unlikely(orig_ptr != ptr)) {
			address_unlock(t, DEPTH_THUNK);
			if (unlikely(error_thunk != NULL))
				pointer_dereference(pointer_thunk(error_thunk));
			address_unlock(t, DEPTH_THUNK);
			ret = POINTER_FOLLOW_THUNK_EXCEPTION;

		if (unlikely(error_thunk != NULL)) {
			pointer_locked_write(ptr, pointer_thunk(error_thunk));
			address_unlock(t, DEPTH_THUNK);
			pointer_dereference(pointer_thunk(t));
		ret = unlikely(orig_ptr != ptr) ? POINTER_FOLLOW_THUNK_RETRY : POINTER_FOLLOW_THUNK_EXCEPTION;

	if (t_tag == THUNK_TAG_RESULT) {
		if (thunk_is_writable(t)) {
			*pointer_volatile(ptr) = t->u.function_call.results[0].ptr;
			pointer_unlock(ptr);
			address_unlock(t, DEPTH_THUNK);
			pointer_t px = t->u.function_call.results[0].ptr;
			pointer_reference_owned(px);
			*pointer_volatile(ptr) = px;
			pointer_unlock(ptr);
			address_unlock(t, DEPTH_THUNK);
			pointer_dereference(pointer_thunk(t));
		ret = POINTER_FOLLOW_THUNK_RETRY;

	if (t_tag == THUNK_TAG_MULTI_RET_REFERENCE) {
		struct thunk *mt = t->u.multi_ret_reference.thunk;
		if (unlikely(!address_trylock_second(t, mt, DEPTH_THUNK))) {
			address_unlock(t, DEPTH_THUNK);
			address_lock_two(t, mt, DEPTH_THUNK);
			if (unlikely(!pointer_verify(ptr, pointer_thunk(t))) || unlikely(thunk_tag(t) != THUNK_TAG_MULTI_RET_REFERENCE)) {
				address_unlock_second(t, mt, DEPTH_THUNK);
				address_unlock(t, DEPTH_THUNK);
				ret = POINTER_FOLLOW_THUNK_RETRY;

		mt_tag = thunk_tag(mt);
		if (mt_tag == THUNK_TAG_RESULT) {
			arg_t idx = t->u.multi_ret_reference.idx;
			thunk_tag_set(t, THUNK_TAG_MULTI_RET_REFERENCE, THUNK_TAG_RESULT);
			t->u.function_call.results[0].ptr = mt->u.function_call.results[idx].ptr;
			pointer_poison(&mt->u.function_call.results[idx].ptr);
			if (thunk_dereference_nonatomic(mt)) {
				address_unlock_second(t, mt, DEPTH_THUNK);
				address_unlock(t, DEPTH_THUNK);
				ret = POINTER_FOLLOW_THUNK_RETRY;
			address_unlock_second(t, mt, DEPTH_THUNK);
			goto process_result;

		address_unlock_second(mt, t, DEPTH_THUNK);

	if (ex_wait == POINTER_FOLLOW_THUNK_NOEVAL) {
		/* the user doesn't want to evaluate the thunk */
		ret = POINTER_FOLLOW_THUNK_EXIT;
		address_unlock(t, DEPTH_THUNK);

	if (t_tag == THUNK_TAG_FUNCTION_CALL) {
		ajla_error_t mayfail;
		struct data *top_reference, *function, *function_reference;
		arg_t total_arguments;

		total_arguments = 0;
		pr = &t->u.function_call.u.function_reference;
		pq = pointer_locked_read(pr);
		if (unlikely(pointer_is_thunk(pq))) {
			address_unlock(t, DEPTH_THUNK);

		function_reference = pointer_get_data(pq);
		total_arguments += da(function_reference,function_reference)->n_curried_arguments;
		if (!da(function_reference,function_reference)->is_indirect)
			pr = &da(function_reference,function_reference)->u.indirect;
		pr = da(function_reference,function_reference)->u.direct;
		pq = pointer_locked_read(pr);
		if (unlikely(pointer_is_thunk(pq)))
			goto evaluate_thunk;
		function = pointer_get_data(pq);

		ajla_assert_lo(da(function,function)->n_arguments == total_arguments, (file_line, "pointer_follow_thunk_: the number of arguments does not match: %s: %"PRIuMAX", %"PRIuMAX"", da(function,function)->function_name, (uintmax_t)da(function,function)->n_arguments, (uintmax_t)total_arguments));

		if (likely(!new_ex)) {
			new_ex = execution_control_alloc(MEM_DONT_TRY_TO_FREE);
			if (unlikely(!new_ex)) {
				address_unlock(t, DEPTH_THUNK);
				new_ex = execution_control_alloc(&mayfail);
				if (unlikely(!new_ex)) {
					error_thunk = thunk_alloc_exception_error(mayfail, NULL, NULL, NULL pass_file_line);

		if (likely(ex_wait != POINTER_FOLLOW_THUNK_SPARK)) {
			list_add(&new_ex->wait_list, &((struct execution_control_wait *)ex_wait)->wait_entry);
			((struct execution_control_wait *)ex_wait)->mutex_to_lock = address_get_mutex(t, DEPTH_THUNK);

		top_reference = pointer_get_data(t->u.function_call.u.function_reference);
		t->u.function_call.u.execution_control = new_ex;
		if (da(function,function)->n_return_values == 1 || likely(thunk_refcount_get_nonatomic(t) == da(function,function)->n_return_values))
			thunk_tag_set(t, THUNK_TAG_FUNCTION_CALL, THUNK_TAG_BLACKHOLE);
			thunk_tag_set(t, THUNK_TAG_FUNCTION_CALL, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED);
		address_unlock(t, DEPTH_THUNK);

		if (!(rand() & 127)) {
			mayfail = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
			debug("testing failure");

		new_fp = stack_alloc(new_ex, function, &mayfail);
		if (unlikely(!new_fp)) {
			new_ex->stack = NULL;
			data_dereference(top_reference);
			error_thunk = thunk_alloc_exception_error(mayfail, NULL, NULL, NULL pass_file_line);
			thunk_terminate_with_value(new_ex->thunk, da(function,function)->n_return_values, pointer_thunk(error_thunk));
			ret = POINTER_FOLLOW_THUNK_EXIT;

		new_ex->current_frame = new_fp;
		new_ex->current_ip = 0;

		frame_init(new_fp, function, 0, CALL_MODE_NORMAL);
		copy_from_function_reference_to_frame(new_fp, top_reference, da(function,function)->n_arguments, true);

		data_dereference(top_reference);

	if (t_tag == THUNK_TAG_BLACKHOLE || t_tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
		if (likely(ex_wait != POINTER_FOLLOW_THUNK_SPARK)) {
			list_add(&t->u.function_call.u.execution_control->wait_list, &((struct execution_control_wait *)ex_wait)->wait_entry);
			((struct execution_control_wait *)ex_wait)->mutex_to_lock = address_get_mutex(t, DEPTH_THUNK);

		address_unlock(t, DEPTH_THUNK);
		ret = POINTER_FOLLOW_THUNK_EXIT;

	internal(file_line, "pointer_follow_thunk_: invalid thunk tag %u", t_tag);

	if (unlikely(error_thunk != NULL))
		pointer_dereference(pointer_thunk(error_thunk));
	if (unlikely(new_ex != NULL))
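
/*
 * Note (added): pointer_follow_thunk_() communicates through its return
 * value: POINTER_FOLLOW_THUNK_RETRY (the pointer changed or was collapsed to
 * a result and should be re-read), POINTER_FOLLOW_THUNK_EXCEPTION, or
 * POINTER_FOLLOW_THUNK_EXIT (the caller has to wait).  ex_wait selects the
 * waiting policy: POINTER_FOLLOW_THUNK_NOEVAL never starts evaluation,
 * POINTER_FOLLOW_THUNK_SPARK starts evaluation without registering a waiter,
 * and any other value is treated as a struct execution_control_wait that is
 * queued on the thunk's wait list under the thunk's address mutex.
 */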
void attr_fastcall pointer_resolve_result(pointer_t *ptr)
	if (pointer_is_thunk(*ptr))
		pointer_follow_thunk_noeval(ptr, goto again, break, break);

void attr_fastcall pointer_follow_wait(frame_s *fp, const code_t *ip)
	struct execution_control *ex = frame_execution_control(fp);
	ex->current_frame = fp;
	ex->current_ip = frame_ip(fp, ip);

	waiting_list_add(ex);

	if (unlikely(refcount_dec(&ex->wait_state)))
		execution_control_unlink_and_submit(ex, true);

bool attr_fastcall data_is_nan(type_tag_t type, const unsigned char attr_unused *ptr)
#define f(n, t, nt, pack, unpack)				\
	case TYPE_TAG_real + n: {				\
		barrier_aliasing();				\
		barrier_aliasing();				\
		return cat(isnan_,t)(val);			\
	for_all_real(f, for_all_empty)

pointer_t flat_to_data(const struct type *type, const unsigned char *flat)
	unsigned tag = type->tag;

	if (tag == TYPE_TAG_flat_option) {
		d = data_alloc_option_mayfail(&err pass_file_line);
		da(d,option)->pointer = pointer_empty();
		da(d,option)->option = *cast_ptr(ajla_flat_option_t *, flat);
	} else if (TYPE_TAG_IS_FIXED(tag) || TYPE_TAG_IS_REAL(tag) || TYPE_TAG_IS_INT(tag)) {
		if (unlikely(data_is_nan(tag, flat))) {
			err = error_ajla(EC_SYNC, AJLA_ERROR_NAN);
		d = data_alloc_flat_mayfail(tag, flat, size, &err pass_file_line);
	} else if (tag == TYPE_TAG_flat_record) {
		const struct record_definition *def = type_def(type_def(type,flat_record)->base,record);
		d = data_alloc_record_mayfail(def, &err pass_file_line);
		(void)memset(da_record_frame(d), 0, bitmap_slots(def->n_slots) * slot_size);
		for (ai = 0; ai < def->n_entries; ai++) {
			frame_t slot = record_definition_slot(def,ai);
			flat_size_t flat_offset = type_def(type,flat_record)->entries[slot].flat_offset;
			const struct type *entry_type = def->types[slot];
			const struct type *flat_type = type_def(type,flat_record)->entries[slot].subtype;
			if (TYPE_IS_FLAT(entry_type)) {
				ajla_assert_lo(type_is_equal(entry_type, flat_type), (file_line, "flat_to_data: copying between different types (%u,%u,%u) -> (%u,%u,%u)", flat_type->tag, flat_type->size, flat_type->align, entry_type->tag, entry_type->size, entry_type->align));
				memcpy_fast(frame_var(da_record_frame(d), slot), flat + flat_offset, entry_type->size);
				pointer_t ptr = flat_to_data(flat_type, flat + flat_offset);
				frame_set_pointer(da_record_frame(d), slot, ptr);
	} else if (tag == TYPE_TAG_flat_array) {
		const struct flat_array_definition *flat_def = type_def(type,flat_array);
		ajla_assert(type->size == flat_def->n_elements * flat_def->base->size, (file_line, "flat_to_data: array size mismatch: %"PRIuMAX" != %"PRIuMAX" * %"PRIuMAX"", (uintmax_t)type->size, (uintmax_t)flat_def->n_elements, (uintmax_t)flat_def->base->size));
		d = data_alloc_array_flat_mayfail(flat_def->base, flat_def->n_elements, flat_def->n_elements, false, &err pass_file_line);
		(void)memcpy(da_array_flat(d), flat, type->size);
		internal(file_line, "flat_to_data: unknown type %u", tag);

	return pointer_data(d);

	return pointer_error(err, NULL, NULL pass_file_line);
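
/*
 * Usage sketch (added; hypothetical values, for illustration only):
 *
 *	ajla_flat_option_t opt = 1;
 *	pointer_t p = flat_to_data(type_get_flat_option(), cast_ptr(const unsigned char *, &opt));
 *
 * type_get_flat_option() is assumed here; any flat type descriptor works.
 * On a NaN real or a failed allocation the function returns pointer_error()
 * instead of a data pointer, so callers always receive a usable pointer_t.
 */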
void attr_fastcall struct_clone(pointer_t *ptr)
	struct data *orig, *clone;

	orig = pointer_get_data(*ptr);
	switch (da_tag(orig)) {
		case DATA_TAG_record: {
			const struct record_definition *def;
			frame_t n_slots, slot;

			def = type_def(da(orig,record)->definition,record);
			n_slots = def->n_slots;
			clone = data_alloc_record_mayfail(def, &err pass_file_line);
			if (unlikely(!clone))
			(void)memcpy_slots(cast_ptr(unsigned char *, da_record_frame(clone)), cast_ptr(unsigned char *, da_record_frame(orig)), n_slots);
			for (slot = 0; slot < n_slots; slot++) {
				if (frame_test_flag(da_record_frame(orig), slot))
					*frame_pointer(da_record_frame(clone), slot) = pointer_reference(frame_pointer(da_record_frame(orig), slot));

		case DATA_TAG_option: {
			clone = data_alloc(option, &err);
			if (unlikely(!clone))
			da(clone,option)->option = da(orig,option)->option;
			if (likely(!pointer_is_empty(da(orig,option)->pointer)))
				da(clone,option)->pointer = pointer_reference(&da(orig,option)->pointer);
				da(clone,option)->pointer = pointer_empty();

		case DATA_TAG_array_flat:
		case DATA_TAG_array_slice:
		case DATA_TAG_array_pointers:
		case DATA_TAG_array_same:
		case DATA_TAG_array_btree: {
			if (!array_clone(ptr, &err))

		case DATA_TAG_array_incomplete: {
			pointer_t first = pointer_reference(&da(orig,array_incomplete)->first);
			pointer_t next = pointer_reference(&da(orig,array_incomplete)->next);
			clone = data_alloc_array_incomplete(pointer_get_data(first), next, &err pass_file_line);
			if (unlikely(!clone)) {
				pointer_dereference(first);
				pointer_dereference(next);

			internal(file_line, "struct_clone: invalid data tag %u", da_tag(orig));

	pointer_dereference(*ptr);
	*ptr = pointer_data(clone);

	pointer_dereference(*ptr);
	*ptr = pointer_error(err, NULL, NULL pass_file_line);

static bool attr_fastcall deep_eval_nothing(struct data attr_unused *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t attr_unused *err)

static bool attr_fastcall deep_eval_flat(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
	if (unlikely(data_is_nan(da(d,flat)->data_type, da_flat(d)))) {
		fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");

static bool attr_fastcall deep_eval_record(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
	const struct record_definition *def = type_def(da(d,record)->definition,record);
	frame_s *f = da_record_frame(d);
	frame_t slot = def->n_slots;

		if (!frame_test_flag(f, slot)) {
			const struct type *t = def->types[slot];
			if (unlikely(data_is_nan(t->tag, frame_var(f, slot)))) {
				fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");

		ptr = frame_pointer(f, slot);
		if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))

static bool attr_fastcall deep_eval_option(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
	if (pointer_is_empty(da(d,option)->pointer))
	return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,option)->pointer, NULL, err);

static bool recurse_type(const struct type *type, flat_size_t offset, struct real_pos **rp, size_t *rp_size, ajla_error_t *err)
	if (TYPE_TAG_IS_REAL(type->tag)) {
		if (!array_add_mayfail(struct real_pos, rp, rp_size, p, NULL, err))
	} else if (type->tag == TYPE_TAG_flat_record) {
		const struct flat_record_definition *def = type_def(type, flat_record);
		const struct record_definition *rec_def = type_def(def->base, record);
		for (slot = 0; slot < rec_def->n_slots; slot++) {
			const struct flat_record_definition_entry *frde;
			const struct type *t = rec_def->types[slot];
			frde = &def->entries[slot];
			if (unlikely(!recurse_type(frde->subtype, offset + frde->flat_offset, rp, rp_size, err)))
	} else if (type->tag == TYPE_TAG_flat_array) {
		const struct flat_array_definition *def = type_def(type, flat_array);
		const struct type *base = def->base;
		for (i = 0; i < def->n_elements; i++, offset += base->size) {
			if (unlikely(!recurse_type(base, offset, rp, rp_size, err)))

static bool deep_eval_array_test_nan(const struct type *type, unsigned char *flat_data, int_default_t n_entries, ajla_error_t *err)
	struct real_pos *rp;

	if (TYPE_TAG_IS_FIXED(type->tag) || likely(TYPE_TAG_IS_INT(type->tag)) || type->tag == TYPE_TAG_flat_option)

	if (unlikely(!array_init_mayfail(struct real_pos, &rp, &rp_size, err)))
	if (unlikely(!recurse_type(type, 0, &rp, &rp_size, err)))
	if (likely(!rp_size))

	for (i = 0; i < n_entries; i++, flat_data += type->size) {
			if (unlikely(data_is_nan(rp[j].tag, flat_data + rp[j].pos))) {
				fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");
		} while (unlikely(++j < rp_size));
static bool attr_fastcall deep_eval_array_flat(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
{
	return deep_eval_array_test_nan(da(d,array_flat)->type, da_array_flat(d), da(d,array_flat)->n_used_entries, err);
}
static bool attr_fastcall deep_eval_array_slice(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
{
	return deep_eval_array_test_nan(da(d,array_slice)->type, da(d,array_slice)->flat_data_minus_data_array_offset + data_array_offset, da(d,array_slice)->n_entries, err);
}
static bool attr_fastcall deep_eval_array_pointers(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
{
	int_default_t x = da(d,array_pointers)->n_used_entries;
	while (x--) {
		pointer_t *ptr = &da(d,array_pointers)->pointer[x];
		if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))
			return false;
	}
	return true;
}
static bool attr_fastcall deep_eval_array_same(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
{
	return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_same)->pointer, NULL, err);
}
static bool attr_fastcall deep_eval_array_btree(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
{
	btree_entries_t x = da(d,array_btree)->n_used_btree_entries;
	while (x--) {
		pointer_t *ptr = &da(d,array_btree)->btree[x].node;
		if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))
			return false;
	}
	return true;
}
static bool attr_fastcall deep_eval_array_incomplete(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
{
	return likely(array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_incomplete)->first, NULL, err)) &&
	       likely(array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_incomplete)->next, NULL, err));
}
static bool attr_fastcall deep_eval_function_reference(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
{
	arg_t ia;

	ia = da(d,function_reference)->n_curried_arguments;
	while (ia--) {
		if (da(d,function_reference)->arguments[ia].tag == TYPE_TAG_unknown)
			if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,function_reference)->arguments[ia].u.ptr, NULL, err)))
				return false;
	}

	if (da(d,function_reference)->is_indirect) {
		return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,function_reference)->u.indirect, NULL, err);
	}
	return array_add_mayfail(pointer_t *, data_stack, data_stack_size, da(d,function_reference)->u.direct, NULL, err);
}
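
/*
 * Orientation note: pointer_deep_eval drives the deep_eval methods above
 * using an explicit stack of pointers and a tree of already-processed
 * pointers, so shared sub-structures are visited only once and the recursion
 * depth stays bounded regardless of the shape of the data.
 */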
struct processed_pointer {
	struct tree_entry entry;
	pointer_t *ptr;
};

static int processed_compare(const struct tree_entry *e, uintptr_t v)
{
	struct processed_pointer *p = get_struct(e, struct processed_pointer, entry);
	if (ptr_to_num(p->ptr) < v)
		return -1;
	if (likely(ptr_to_num(p->ptr) > v))
		return 1;
	return 0;
}
void * attr_fastcall pointer_deep_eval(pointer_t *ptr, frame_s *fp, const code_t *ip, struct thunk **thunk)
{
	ajla_error_t err;
	struct data *d;
	tag_t tag;
	void *ret;
	pointer_t **data_stack;
	size_t data_stack_size;

	struct tree processed;

	tree_init(&processed);

	if (unlikely(!array_init_mayfail(pointer_t *, &data_stack, &data_stack_size, &err))) {

		*thunk = pointer_get_thunk(pointer_error(err, NULL, NULL pass_file_line));
		ret = POINTER_FOLLOW_THUNK_EXCEPTION;

	}

	pointer_follow(ptr, false, d, PF_WAIT, fp, ip,

		thunk_reference(thunk_);

		ret = POINTER_FOLLOW_THUNK_EXCEPTION;
	);

	if (unlikely(!data_method_table[tag].deep_eval(d, &data_stack, &data_stack_size, &err)))

	if (data_stack_size) {
		struct tree_insert_position ins;
		struct processed_pointer *pp = mem_alloc_mayfail(struct processed_pointer *, sizeof(struct processed_pointer), &err);

		pp->ptr = ptr;
		if (unlikely(tree_find_for_insert(&processed, processed_compare, ptr_to_num(ptr), &ins) != NULL))
			internal(file_line, "pointer_deep_eval: pointer %p is already in the tree", ptr);
		tree_insert_after_find(&pp->entry, &ins);
	}

	ptr = data_stack[--data_stack_size];
	ajla_assert(!pointer_is_empty(*ptr), (file_line, "pointer_deep_eval: empty pointer, last tag %u", tag));

	if (unlikely(tree_find(&processed, processed_compare, ptr_to_num(ptr)) != NULL)) {
		if (data_stack_size)

	}

	ret = POINTER_FOLLOW_THUNK_GO;

	if (likely(data_stack != NULL))
		mem_free(data_stack);

	while (!tree_is_empty(&processed)) {
		struct processed_pointer *pp = get_struct(tree_any(&processed), struct processed_pointer, entry);
		tree_delete(&pp->entry);
		mem_free(pp);
	}

	return ret;
}
void * attr_fastcall frame_pointer_deep_eval(frame_s *fp, const code_t *ip, frame_t slot, struct thunk **thunk)
{
	if (frame_variable_is_flat(fp, slot)) {
		ajla_error_t err;
		if (unlikely(!deep_eval_array_test_nan(frame_get_type_of_local(fp, slot), frame_var(fp, slot), 1, &err))) {
			*thunk = pointer_get_thunk(pointer_error(err, NULL, NULL pass_file_line));
			return POINTER_FOLLOW_THUNK_EXCEPTION;
		}
		return POINTER_FOLLOW_THUNK_GO;
	}
	return pointer_deep_eval(frame_pointer(fp, slot), fp, ip, thunk);
}
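
/*
 * Orientation note: the mpint_export helpers below convert a long integer to
 * a fixed-width signed or unsigned flat value; the f() macro appears to be
 * instantiated once per supported integer width, and the barrier_aliasing()
 * calls bracket the type-punned store.
 */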
bool attr_fastcall mpint_export(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err)
{
#define f(n, s, u, sz, bits)						\
		barrier_aliasing();					\
		ret = cat(mpint_export_to_,s)(m, cast_ptr(s *, ptr), err);\
		barrier_aliasing();					\

	internal(file_line, "mpint_export: invalid type %d", intx);
}

bool attr_fastcall mpint_export_unsigned(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err)
{
#define f(n, s, u, sz, bits)						\
		barrier_aliasing();					\
		ret = cat(mpint_export_to_,u)(m, cast_ptr(u *, ptr), err);\
		barrier_aliasing();					\

	internal(file_line, "mpint_export_unsigned: invalid type %d", intx);
}
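
/*
 * Orientation note (inferred from the code below): data_compare_numbers
 * normalizes its two operands (flat value, longint or option) to a common
 * representation and returns a negative/zero/positive ordering; longints
 * that fit the requested flat type are exported first so the cheap
 * type_memcmp path can be used, falling back to mpint_less/mpint_equal.
 */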
int data_compare_numbers(type_tag_t tt, unsigned char *flat1, pointer_t ptr1, unsigned char *flat2, pointer_t ptr2)
{
	struct data *d1 = NULL, *d2 = NULL;	/* avoid warning */
	union {
		ajla_flat_option_t opt;
		unsigned char flat[1];
	} u1;
	union {
		ajla_flat_option_t opt;
		unsigned char flat[1];
	} u2;
	ajla_flat_option_t r;
	ajla_error_t exp_err;

	d1 = pointer_get_data(ptr1);

		tt = da(d1,flat)->data_type;
		flat1 = da_flat(d1);

	case DATA_TAG_longint:
		if (tt == TYPE_TAG_unknown) {

			d2 = pointer_get_data(ptr2);
			if (da_tag(d2) == DATA_TAG_flat)
				tt = da(d2,flat)->data_type;

		if (tt != TYPE_TAG_unknown) {
			if (mpint_export(&da(d1,longint)->mp, u1.flat, TYPE_TAG_IDX_INT(tt), &exp_err))

	case DATA_TAG_option:
		tt = TYPE_TAG_flat_option;
		opt1 = da(d1,option)->option;
		if (unlikely(opt1 != (ajla_flat_option_t)opt1))

		internal(file_line, "data_compare_numbers: invalid tag %u", tag1);

	d2 = pointer_get_data(ptr2);

		tt = da(d2,flat)->data_type;
		flat2 = da_flat(d2);

	case DATA_TAG_longint:
		if (tt != TYPE_TAG_unknown) {
			if (mpint_export(&da(d2,longint)->mp, u2.flat, TYPE_TAG_IDX_INT(tt), &exp_err))

	case DATA_TAG_option:
		tt = TYPE_TAG_flat_option;
		opt2 = da(d2,option)->option;
		if (unlikely(opt2 != (ajla_flat_option_t)opt2))

		internal(file_line, "data_compare_numbers: invalid tag %u", tag2);

	if (flat1 && flat2) {
		int c = type_memcmp(flat1, flat2, type_get_from_tag(tt), 1);

	mpint_less(&da(d1,longint)->mp, &da(d2,longint)->mp, &r, NULL);

	mpint_equal(&da(d1,longint)->mp, &da(d2,longint)->mp, &r, NULL);
}
struct array_compare_context {
	unsigned char *flat;
	const struct type *type;
	int_default_t n_elements;
	pointer_t *ptr;
};

static int_default_t array_compare_callback(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t *ptr, void *context)
{
	struct array_compare_context *ac = context;
	ajla_assert_lo(n_elements > 0, (file_line, "array_compare_callback: unexpected thunk"));
	ac->flat = flat;
	ac->type = type;
	ac->n_elements = n_elements;
	ac->ptr = ptr;
	return 0;
}
static void attr_fastcall cs_empty_destruct(struct compare_status attr_unused *cs)
{
}

static int attr_fastcall data_compare_nothing(struct compare_status attr_unused *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
{
	struct data *d1 = pointer_get_data(cs->ptr1);
	struct data *d2 = pointer_get_data(cs->ptr2);
	internal(file_line, "data_compare_nothing: comparing tags %u, %u", da_tag(d1), da_tag(d2));
	return DATA_COMPARE_OOM;
}

static int attr_fastcall data_compare_number(struct compare_status *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
{
	return data_compare_numbers(TYPE_TAG_unknown, NULL, cs->ptr1, NULL, cs->ptr2);
}
static int attr_fastcall data_compare_record(struct compare_status *cs, struct compare_status *new_cs, bool init)
{
	struct data *d1 = pointer_get_data(cs->ptr1);
	struct data *d2 = pointer_get_data(cs->ptr2);
	frame_s *f1 = da_record_frame(d1);
	frame_s *f2 = da_record_frame(d2);
	const struct record_definition *def = type_def(da(d1,record)->definition,record);
	ajla_assert(def->n_slots == type_def(da(d2,record)->definition,record)->n_slots, (file_line, "data_compare_record: mismatched record definition"));
	if (init)
		cs->u.record.ai = 0;
	while (cs->u.record.ai < def->n_entries) {
		frame_t slot = record_definition_slot(def, cs->u.record.ai);
		const struct type *t = def->types[slot];
		if (frame_test_flag(f1, slot) && frame_test_flag(f2, slot)) {
			new_cs->ptr1 = *frame_pointer(f1, slot);
			new_cs->ptr2 = *frame_pointer(f2, slot);

		} else {
			unsigned char *flat1 = !frame_test_flag(f1, slot) ? frame_var(f1, slot) : NULL;
			unsigned char *flat2 = !frame_test_flag(f2, slot) ? frame_var(f2, slot) : NULL;
			pointer_t ptr1 = !frame_test_flag(f1, slot) ? pointer_empty() : *frame_pointer(f1, slot);
			pointer_t ptr2 = !frame_test_flag(f2, slot) ? pointer_empty() : *frame_pointer(f2, slot);
			int c = data_compare_numbers(t->tag, flat1, ptr1, flat2, ptr2);

		}
	}
}
static int attr_fastcall data_compare_option(struct compare_status *cs, struct compare_status *new_cs, bool init)
{
	struct data *d1 = pointer_get_data(cs->ptr1);
	struct data *d2 = pointer_get_data(cs->ptr2);
	pointer_t ptr1, ptr2;
	if (da(d1,option)->option < da(d2,option)->option)
		return -1;
	if (da(d1,option)->option > da(d2,option)->option)
		return 1;
	ptr1 = da(d1,option)->pointer;
	ptr2 = da(d2,option)->pointer;
	ajla_assert(pointer_is_empty(ptr1) == pointer_is_empty(ptr2), (file_line, "data_compare_option: mismatching pointers"));
	if (init && !pointer_is_empty(ptr1)) {
		new_cs->ptr1 = ptr1;
		new_cs->ptr2 = ptr2;

	}
	return 0;
}
static void attr_fastcall cs_array_destruct(struct compare_status *cs)
{
	index_free(&cs->u.array.len);
	index_free(&cs->u.array.idx);
	if (!pointer_is_empty(cs->u.array.p1))
		pointer_dereference(cs->u.array.p1);
	if (!pointer_is_empty(cs->u.array.p2))
		pointer_dereference(cs->u.array.p2);
}
static int attr_fastcall data_compare_array(struct compare_status *cs, struct compare_status *new_cs, bool init)
{
	struct data *d1 = pointer_get_data(cs->ptr1);
	struct data *d2 = pointer_get_data(cs->ptr2);
	if (init) {
		array_index_t len1, len2;
		cs->u.array.p1 = pointer_empty();
		cs->u.array.p2 = pointer_empty();
		len1 = array_len(d1);
		len2 = array_len(d2);
		if (!index_ge_index(len1, len2)) {

		}
		if (!index_ge_index(len2, len1)) {

		}
		cs->u.array.len = len1;
		index_from_int(&cs->u.array.idx, 0);
		cs->destruct = cs_array_destruct;
	}

	while (!index_ge_index(cs->u.array.idx, cs->u.array.len)) {
		pointer_t ptr;
		struct array_compare_context ctx1, ctx2;

		if (!pointer_is_empty(cs->u.array.p1))
			pointer_dereference(cs->u.array.p1), cs->u.array.p1 = pointer_empty();
		if (!pointer_is_empty(cs->u.array.p2))
			pointer_dereference(cs->u.array.p2), cs->u.array.p2 = pointer_empty();

		ptr = pointer_data(d1);
		if (unlikely(array_btree_iterate(&ptr, &cs->u.array.idx, array_compare_callback, &ctx1)))
			internal(file_line, "data_compare_array: iterator unexpectedly succeeded");
		ptr = pointer_data(d2);
		if (unlikely(array_btree_iterate(&ptr, &cs->u.array.idx, array_compare_callback, &ctx2)))
			internal(file_line, "data_compare_array: iterator unexpectedly succeeded");

		if (ctx1.flat && ctx2.flat) {
			int c;
			int_default_t m = minimum(ctx1.n_elements, ctx2.n_elements);
			ajla_assert(ctx1.type->tag == ctx2.type->tag, (file_line, "data_compare_array: array types do not match: %u,%u", ctx1.type->tag, ctx2.type->tag));
			c = type_memcmp(ctx1.flat, ctx2.flat, ctx1.type, m);

			index_add_int(&cs->u.array.idx, m);
		} else {
			struct thunk *thunk;
			if (unlikely(ctx1.flat != NULL)) {
				new_cs->ptr1 = cs->u.array.p1 = flat_to_data(ctx1.type, ctx1.flat);
				if (unlikely(pointer_deep_eval(&cs->u.array.p1, NULL, NULL, &thunk) == POINTER_FOLLOW_THUNK_EXCEPTION)) {
					pointer_dereference(pointer_thunk(thunk));
					return DATA_COMPARE_OOM;
				}
			} else {
				new_cs->ptr1 = *ctx1.ptr;
			}
			if (unlikely(ctx2.flat != NULL)) {
				new_cs->ptr2 = cs->u.array.p2 = flat_to_data(ctx2.type, ctx2.flat);
				if (unlikely(pointer_deep_eval(&cs->u.array.p2, NULL, NULL, &thunk) == POINTER_FOLLOW_THUNK_EXCEPTION)) {
					pointer_dereference(pointer_thunk(thunk));
					return DATA_COMPARE_OOM;
				}
			} else {
				new_cs->ptr2 = *ctx2.ptr;
			}
			index_add_int(&cs->u.array.idx, 1);

		}
	}
	return 0;
}
static void attr_fastcall cs_function_reference_destruct(struct compare_status *cs)
{
	mem_free(cs->u.function_reference.args1);
	mem_free(cs->u.function_reference.args2);
}

static void acquire_function_reference_args(struct data *d, struct function_argument ***args, size_t *n_args, struct data **function)
{
	array_init(struct function_argument *, args, n_args);
	while (1) {
		arg_t ai;
		ai = da(d,function_reference)->n_curried_arguments;
		while (ai--)
			array_add(struct function_argument *, args, n_args, &da(d,function_reference)->arguments[ai]);

		if (!da(d,function_reference)->is_indirect)
			break;
		d = pointer_get_data(da(d,function_reference)->u.indirect);
	}
	*function = pointer_get_data(*da(d,function_reference)->u.direct);
}
static int attr_fastcall data_compare_function_reference(struct compare_status *cs, struct compare_status *new_cs, bool init)
{
	struct data *d1 = pointer_get_data(cs->ptr1);
	struct data *d2 = pointer_get_data(cs->ptr2);
	if (init) {
		size_t l1, l2;
		struct data *fn1, *fn2;
		acquire_function_reference_args(d1, &cs->u.function_reference.args1, &l1, &fn1);
		acquire_function_reference_args(d2, &cs->u.function_reference.args2, &l2, &fn2);
		cs->destruct = cs_function_reference_destruct;
		if (ptr_to_num(fn1) != ptr_to_num(fn2)) {
			/* !!! FIXME: compare function unique id here */
			if (ptr_to_num(fn1) < ptr_to_num(fn2))
				return -1;
			return 1;
		}
		ajla_assert(l1 == l2, (file_line, "data_compare_function_reference: the number of arguments doesn't match: %"PRIuMAX" != %"PRIuMAX"", (uintmax_t)l1, (uintmax_t)l2));
		cs->u.function_reference.l = l1;
	}
	while (cs->u.function_reference.l--) {
		struct function_argument *a1 = cs->u.function_reference.args1[cs->u.function_reference.l];
		struct function_argument *a2 = cs->u.function_reference.args2[cs->u.function_reference.l];
		if (a1->tag == TYPE_TAG_unknown && a2->tag == TYPE_TAG_unknown) {
			new_cs->ptr1 = a1->u.ptr;
			new_cs->ptr2 = a2->u.ptr;

		} else {
			unsigned char *flat1 = a1->tag != TYPE_TAG_unknown ? a1->u.slot : NULL;
			unsigned char *flat2 = a2->tag != TYPE_TAG_unknown ? a2->u.slot : NULL;
			pointer_t ptr1 = a1->tag != TYPE_TAG_unknown ? pointer_empty() : a1->u.ptr;
			pointer_t ptr2 = a2->tag != TYPE_TAG_unknown ? pointer_empty() : a2->u.ptr;
			type_tag_t tt = a1->tag != TYPE_TAG_unknown ? a1->tag : a2->tag;
			int c = data_compare_numbers(tt, flat1, ptr1, flat2, ptr2);

		}
	}
	return 0;
}
static int attr_fastcall data_compare_resource(struct compare_status *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
{
	uintptr_t p1 = ptr_to_num(pointer_get_data(cs->ptr1));
	uintptr_t p2 = ptr_to_num(pointer_get_data(cs->ptr2));
	if (p1 < p2)
		return -1;
	if (p1 > p2)
		return 1;
	return 0;
}
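
/*
 * Orientation note: data_compare compares two heap objects without deep
 * recursion by keeping a growable array of compare_status entries, one per
 * level of the structure; each per-tag compare method either returns a
 * result or sets up the next pair of sub-pointers for the loop to descend
 * into.
 */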
int attr_fastcall data_compare(pointer_t ptr1, pointer_t ptr2, ajla_error_t *mayfail)
{
	struct compare_status *cs;
	size_t cs_len, i;
	struct compare_status ccs;
	struct compare_status *err_ptr;
	int c;

	if (unlikely(!array_init_mayfail(struct compare_status, &cs, &cs_len, mayfail)))
		return DATA_COMPARE_OOM;

	ccs.ptr1 = ptr1;
	ccs.ptr2 = ptr2;
	ccs.tag = da_tag(pointer_get_data(ccs.ptr1));
	ccs.destruct = cs_empty_destruct;
	if (unlikely(!array_add_mayfail(struct compare_status, &cs, &cs_len, ccs, &err_ptr, mayfail))) {

		c = DATA_COMPARE_OOM;

	}

	if (pointer_is_equal(ccs.ptr1, ccs.ptr2)) {

	}

	ajla_assert(data_method_table[cs[cs_len - 1].tag].compare == data_method_table[da_tag(pointer_get_data(ccs.ptr2))].compare, (file_line, "data_compare: mismatching tags: %u, %u", cs[cs_len - 1].tag, da_tag(pointer_get_data(ccs.ptr2))));
	c = data_method_table[cs[cs_len - 1].tag].compare(&cs[cs_len - 1], &ccs, true);

	cs[cs_len - 1].destruct(&cs[cs_len - 1]);

	c = data_method_table[cs[cs_len - 1].tag].compare(&cs[cs_len - 1], &ccs, false);

	for (i = 0; i < cs_len; i++)
		cs[i].destruct(&cs[i]);

	mem_free(cs);
	return c;
}
/**********************
 * DATA SERIALIZATION *
 **********************/
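
/*
 * Orientation note: the save machinery below serializes a data graph by
 * walking it with stack_entry descriptors.  Each stack_entry_type supplies
 * get_ptr/get_properties/fixup callbacks; get_properties reports the
 * object's alignment, size and sub-pointers, and the fixup callbacks rebase
 * embedded pointers by a constant offset after the copy.
 */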
static const struct stack_entry_type save_type;

static void no_fixup_after_copy(void attr_unused *new_ptr)
{
}

static void *save_run_get_ptr(struct stack_entry *ste)
{
	void *p;
	memcpy(&p, ste->ptr, sizeof(void *));
	return p;
}

static bool save_run_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
{
	*align = ste->align;
	*size = ste->size;
	*subptrs = NULL;
	*subptrs_len = 0;
	return true;
}
static void ptr_fixup_sub_ptr(void *loc, uintptr_t offset)
{
#if defined(HAVE_REAL_GNUC) && !GNUC_ATLEAST(3,0,0)	/* EGCS bug */
	*(char **)loc += offset;
#else
	void *p;
	uintptr_t num;
	memcpy(&p, loc, sizeof(void *));
	num = ptr_to_num(p);
	num += offset;
	p = num_to_ptr(num);
	memcpy(loc, &p, sizeof(void *));
#endif
}

static const struct stack_entry_type save_run = {
	save_run_get_ptr,
	save_run_get_properties,
	no_fixup_after_copy,
	ptr_fixup_sub_ptr,
};

static const struct stack_entry_type save_slice = {
	save_run_get_ptr,
	save_run_get_properties,
	no_fixup_after_copy,
	ptr_fixup_sub_ptr,
};
static bool save_type_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
{
	ajla_error_t sink;
	const struct type *t = *cast_ptr(const struct type **, ste->ptr);
	switch (t->tag) {
		case TYPE_TAG_record: {
			struct record_definition *rec = type_def(t,record);
			struct stack_entry *subp;
			size_t i, ii;

			if (unlikely(!((size_t)rec->n_entries + 1)))
				return false;
			subp = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)rec->n_entries + 1, sizeof(struct stack_entry), &sink);
			if (unlikely(!subp))
				return false;
			subp[0].t = &save_run;
			subp[0].ptr = &rec->idx_to_frame;
			subp[0].align = align_of(frame_t);
			subp[0].size = rec->n_entries * sizeof(frame_t);
			ii = 1;
			for (i = 0; i < rec->n_entries; i++) {
				frame_t slot = rec->idx_to_frame[i];
				if (unlikely(slot == NO_FRAME_T))
					continue;
				subp[ii].t = &save_type;
				subp[ii].ptr = &rec->types[slot];
				ii++;
			}
			*subptrs = subp;
			*subptrs_len = ii;
			*align = align_of(struct record_definition);
			*size = offsetof(struct record_definition, types[rec->n_slots]);
			break;
		}
		case TYPE_TAG_flat_record: {
			struct flat_record_definition *def = type_def(t,flat_record);
			struct record_definition *rec = type_def(def->base,record);
			struct stack_entry *subp;
			size_t i, ii;

			if (unlikely(!((size_t)rec->n_entries + 1)))
				return false;
			subp = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)rec->n_entries + 1, sizeof(struct stack_entry), &sink);
			if (unlikely(!subp))
				return false;
			subp[0].t = &save_type;
			subp[0].ptr = &def->base;
			ii = 1;
			for (i = 0; i < rec->n_entries; i++) {
				frame_t slot = rec->idx_to_frame[i];
				if (unlikely(slot == NO_FRAME_T))
					continue;
				subp[ii].t = &save_type;
				subp[ii].ptr = &def->entries[slot].subtype;
				ii++;
			}
			*align = align_of(struct flat_record_definition);
			*size = offsetof(struct flat_record_definition, entries[rec->n_slots]);
			*subptrs = subp;
			*subptrs_len = ii;
			break;
		}
		case TYPE_TAG_flat_array: {
			struct flat_array_definition *def = type_def(t,flat_array);
			struct stack_entry *subp = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
			if (unlikely(!subp))
				return false;
			subp->t = &save_type;
			subp->ptr = &def->base;
			*align = align_of(struct flat_array_definition);
			*size = sizeof(struct flat_array_definition);
			*subptrs = subp;
			*subptrs_len = 1;
			break;
		}
		default:
			TYPE_TAG_VALIDATE(t->tag);
			*align = align_of(struct type);
			*size = sizeof(struct type);
			break;
	}
	return true;
}
static const struct stack_entry_type save_type = {
	save_run_get_ptr,
	save_type_get_properties,
	no_fixup_after_copy,
	ptr_fixup_sub_ptr,
};

static void *save_index_get_ptr(struct stack_entry *ste)
{
	array_index_t *idx = ste->ptr;
	mpint_t *mp = index_get_mp(*idx);
	return mp;
}
static bool save_index_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
{
	ajla_error_t sink;
	mpint_t *mp = save_index_get_ptr(ste);
	struct stack_entry *subp;
	*align = align_of(mpint_t);
	*size = sizeof(mpint_t);
	if (unlikely(!mp->_mp_size)) {
		*subptrs = NULL;
		*subptrs_len = 0;
		return true;
	}
	subp = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
	if (unlikely(!subp))
		return false;
	subp->t = &save_run;
	subp->ptr = &mp->_mp_d;
	subp->align = align_of(mp_limb_t);
	subp->size = (size_t)abs(mp->_mp_size) * sizeof(mp_limb_t);
	*subptrs = subp;
	*subptrs_len = 1;
	return true;
}

static void save_index_fixup_sub_ptr(void *loc, uintptr_t offset)
{
	array_index_t *idx = loc;
	mpint_t *mp = index_get_mp(*idx);
	mp = num_to_ptr(ptr_to_num(mp) + offset);
	index_set_mp(idx, mp);
}

static const struct stack_entry_type save_index = {
	save_index_get_ptr,
	save_index_get_properties,
	no_fixup_after_copy,
	save_index_fixup_sub_ptr,
};
static void *save_pointer_get_ptr(struct stack_entry *ste)
{
	pointer_t *ptr = cast_ptr(pointer_t *, ste->ptr);
	pointer_resolve_result(ptr);
	if (likely(!pointer_is_thunk(*ptr)))
		return data_untag(pointer_get_value_strip_tag_(*ptr));
	else
		return thunk_untag(pointer_get_value_strip_tag_(*ptr));
}

static bool save_pointer_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
{
	return data_save(save_pointer_get_ptr(ste), 0, align, size, subptrs, subptrs_len);
}

static void save_pointer_fixup_after_copy(void *new_ptr)
{
	refcount_set_read_only(da_thunk_refcount(new_ptr));
}

static void save_pointer_fixup_sub_ptr(void *loc, uintptr_t offset)
{
	pointer_t *ptr = loc;
	if (!pointer_is_thunk(*ptr)) {
		uintptr_t num = ptr_to_num(pointer_get_data(*ptr));
		num += offset;
		*ptr = pointer_data(num_to_ptr(num));
	} else {
		uintptr_t num = ptr_to_num(pointer_get_thunk(*ptr));
		num += offset;
		*ptr = pointer_thunk(num_to_ptr(num));
	}
}

static const struct stack_entry_type save_pointer = {
	save_pointer_get_ptr,
	save_pointer_get_properties,
	save_pointer_fixup_after_copy,
	save_pointer_fixup_sub_ptr,
};

static const struct stack_entry_type save_data_saved = {
};
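
/*
 * Orientation note: the save_* functions below are the per-tag "save"
 * methods from data_method_table.  Each one reports the number of bytes to
 * copy for its object and fills *subptrs with the embedded pointers (data
 * pointers, type descriptors, mpint limbs) that have to be serialized as
 * well.
 */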
static bool attr_fastcall no_save(void attr_unused *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t attr_unused *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
{
	return false;
}

static bool attr_fastcall save_flat(void *data, uintptr_t attr_unused offset, size_t *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
{
	struct data *d = data;
	const struct type *t = type_get_from_tag(da(d,flat)->data_type);
	*align = t->align;
	*size = data_flat_offset + t->size;
	return true;
}
static bool attr_fastcall save_longint(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t attr_unused *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	*size = partial_sizeof(struct data, u_.longint);
	if (unlikely(!da(d,longint)->mp._mp_size)) {
		return true;
	}
	*subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[0].t = &save_run;
	(*subptrs)[0].ptr = &da(d,longint)->mp._mp_d;
	(*subptrs)[0].align = align_of(mp_limb_t);
	(*subptrs)[0].size = (size_t)abs(da(d,longint)->mp._mp_size) * sizeof(mp_limb_t);
	return true;
}
static bool attr_fastcall save_record(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	const struct type *t = num_to_ptr(ptr_to_num(da(d,record)->definition) + offset);
	const struct record_definition *def = type_def(t,record);
	frame_s *f;
	frame_t slot;
	char *ch;

	*align = def->alignment;
	*size = data_record_offset + def->n_slots * slot_size;

	if (unlikely(!((size_t)def->n_slots + 1)))
		return false;
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)def->n_slots + 1, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[0].t = &save_type;
	(*subptrs)[0].ptr = &da(d,record)->definition;
	*subptrs_l = 1;

	f = da_record_frame(d);
	slot = def->n_slots;
	while (slot--) {
		if (!frame_test_flag(f, slot))
			continue;
		ch = cast_ptr(char *, frame_pointer(f, slot));
		(*subptrs)[*subptrs_l].t = &save_pointer;
		(*subptrs)[*subptrs_l].ptr = ch;
		(*subptrs_l)++;
	}
	return true;
}
static bool attr_fastcall save_option(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	*size = partial_sizeof(struct data, u_.option);
	if (!pointer_is_empty(da(d,option)->pointer)) {
		*subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
		if (unlikely(!*subptrs))
			return false;
		(*subptrs)[0].t = &save_pointer;
		(*subptrs)[0].ptr = &da(d,option)->pointer;
		*subptrs_l = 1;
	}
	return true;
}
static bool attr_fastcall save_array_flat(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	const struct type *t = num_to_ptr(ptr_to_num(da(d,array_flat)->type) + offset);
	ajla_assert_lo((da(d,array_flat)->n_allocated_entries | da(d,array_flat)->n_used_entries) >= 0, (file_line, "save_array_flat: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)da(d,array_flat)->n_allocated_entries, (intmax_t)da(d,array_flat)->n_used_entries));
	if (da(d,array_flat)->n_allocated_entries != da(d,array_flat)->n_used_entries)
		da(d,array_flat)->n_allocated_entries = da(d,array_flat)->n_used_entries;
	*align = t->align;
	*size = data_array_offset + (size_t)t->size * da(d,array_flat)->n_allocated_entries;
	*subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[0].t = &save_type;
	(*subptrs)[0].ptr = &da(d,array_flat)->type;
	return true;
}
static bool attr_fastcall save_array_slice(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	ajla_assert_lo(da(d,array_slice)->n_entries >= 0, (file_line, "save_array_slice: negative size %"PRIdMAX"", (intmax_t)da(d,array_slice)->n_entries));
	*size = partial_sizeof(struct data, u_.array_slice);
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 3, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[*subptrs_l].t = &save_pointer;
	(*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->reference;
	(*subptrs_l)++;
	if (da(d,array_slice)->n_entries) {
		(*subptrs)[*subptrs_l].t = &save_slice;
		(*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->flat_data_minus_data_array_offset;
		(*subptrs)[*subptrs_l].align = 1;
		(*subptrs)[*subptrs_l].size = 0;
		(*subptrs_l)++;
	}
	(*subptrs)[*subptrs_l].t = &save_type;
	(*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->type;
	(*subptrs_l)++;
	return true;
}
static bool attr_fastcall save_array_pointers(void *data, uintptr_t offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	size_t n = da(d,array_pointers)->n_used_entries;
	pointer_t *ptr = da(d,array_pointers)->pointer;
	ajla_assert_lo((da(d,array_pointers)->n_allocated_entries | da(d,array_pointers)->n_used_entries) >= 0, (file_line, "save_array_pointers: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)da(d,array_pointers)->n_allocated_entries, (intmax_t)da(d,array_pointers)->n_used_entries));

	if (unlikely(ptr != da(d,array_pointers)->pointer_array)) {
		memmove(da(d,array_pointers)->pointer_array, ptr, n * sizeof(pointer_t));
	}
	if (ptr != da(d,array_pointers)->pointer_array)
		da(d,array_pointers)->pointer = da(d,array_pointers)->pointer_array;
	if ((size_t)da(d,array_pointers)->n_allocated_entries != n)
		da(d,array_pointers)->n_allocated_entries = n;
	*size = partial_sizeof_array(struct data, u_.array_pointers.pointer_array, n);
	/*debug("pointers: %zx - %zx", *size, partial_sizeof(struct data, u_.array_pointers.pointer_array[n]));*/
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	*subptrs_l = n;
	while (n--) {
		(*subptrs)[n].t = &save_pointer;
		(*subptrs)[n].ptr = &da(d,array_pointers)->pointer_array[n];
	}
	return true;
}
static void save_array_index(array_index_t *idx, struct stack_entry **subptrs, size_t *subptrs_l)
{
	index_detach_leak(idx);
	if (likely(!index_is_mp(*idx)))
		return;
	(*subptrs)[*subptrs_l].t = &save_index;
	(*subptrs)[*subptrs_l].ptr = idx;
	(*subptrs_l)++;
}
static bool attr_fastcall save_array_same(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	*size = partial_sizeof(struct data, u_.array_same);
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 2, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[0].t = &save_pointer;
	(*subptrs)[0].ptr = &da(d,array_same)->pointer;
	*subptrs_l = 1;
	save_array_index(&da(d,array_same)->n_entries, subptrs, subptrs_l);
	return true;
}
static bool attr_fastcall save_array_btree(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	size_t n = da(d,array_btree)->n_used_btree_entries;
	size_t i;

	if (da(d,array_btree)->n_allocated_btree_entries != n)
		da(d,array_btree)->n_allocated_btree_entries = n;
	*size = partial_sizeof_array(struct data, u_.array_btree.btree, n);
	/*debug("btree: %zx - %zx", *size, partial_sizeof(struct data, u_.array_btree.btree[n]));*/
	if (unlikely(n * 2 < n))
		return false;
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n * 2, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	for (i = 0; i < n; i++) {
		(*subptrs)[*subptrs_l].t = &save_pointer;
		(*subptrs)[*subptrs_l].ptr = &da(d,array_btree)->btree[i].node;
		(*subptrs_l)++;
		save_array_index(&da(d,array_btree)->btree[i].end_index, subptrs, subptrs_l);
	}
	return true;
}
static bool attr_fastcall save_array_incomplete(void attr_unused *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	*size = partial_sizeof(struct data, u_.array_incomplete);
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 2, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	(*subptrs)[0].t = &save_pointer;
	(*subptrs)[0].ptr = &da(d,array_incomplete)->first;
	(*subptrs)[1].t = &save_pointer;
	(*subptrs)[1].ptr = &da(d,array_incomplete)->next;
	*subptrs_l = 2;
	return true;
}
static bool attr_fastcall save_function_types(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	size_t i;

	*size = data_function_types_offset + da(d,function_types)->n_types * sizeof(const struct type *);
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, da(d,function_types)->n_types, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	for (i = 0; i < da(d,function_types)->n_types; i++) {
		(*subptrs)[i].t = &save_type;
		(*subptrs)[i].ptr = &da(d,function_types)->types[i];
	}
	*subptrs_l = da(d,function_types)->n_types;
	return true;
}
static bool attr_fastcall save_saved(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	size_t i;

	*size = da(d,saved)->total_size;
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, da(d,saved)->n_offsets, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	*subptrs_l = da(d,saved)->n_offsets;
	for (i = 0; i < da(d,saved)->n_offsets; i++) {
		(*subptrs)[i].t = &save_data_saved;
		(*subptrs)[i].ptr = cast_ptr(char *, d) + da(d,saved)->offsets[i];
	}
	return true;
}
static bool attr_fastcall save_saved_cache(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct data *d = data;
	size_t i;

	size_t n_pointers = da(d,saved_cache)->n_entries * (da(d,saved_cache)->n_arguments + da(d,saved_cache)->n_return_values);
	*size = offsetof(struct data, u_.saved_cache.pointers[n_pointers]);
	*subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n_pointers, sizeof(struct stack_entry), &sink);
	if (unlikely(!*subptrs))
		return false;
	*subptrs_l = n_pointers;
	for (i = 0; i < n_pointers; i++) {
		(*subptrs)[i].t = &save_pointer;
		(*subptrs)[i].ptr = &da(d,saved_cache)->pointers[i];
	}
	return true;
}
static bool attr_fastcall save_exception(void *data, uintptr_t offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	ajla_error_t sink;
	struct thunk *t = data;
	if (t->u.exception.tr.trace_n) {
		stack_trace_free(&t->u.exception.tr);
		stack_trace_init(&t->u.exception.tr);
	}
	if (t->u.exception.err.error_class == EC_ASYNC)
		return false;
	*size = partial_sizeof(struct thunk, u.exception);
	if (t->u.exception.msg) {
		const char *msg = num_to_ptr(ptr_to_num(t->u.exception.msg) + offset);
		*subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
		if (unlikely(!*subptrs))
			return false;
		(*subptrs)[0].t = &save_run;
		(*subptrs)[0].ptr = &t->u.exception.msg;
		(*subptrs)[0].align = align_of(char);
		(*subptrs)[0].size = strlen(msg) + 1;
		*subptrs_l = 1;
	}
	return true;
}
bool data_save(void *p, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
{
	tag_t tag = da_thunk_tag(p);
	if (tag >= DATA_TAG_START && tag < DATA_TAG_END) {
		p = data_pointer_tag(p, tag);
	} else {
		p = thunk_pointer_tag(p);
	}

	if (unlikely(!data_method_table[tag].save(p, offset, align, size, subptrs, subptrs_l))) {
		debug("failure on tag: %u", tag);
		if (tag == THUNK_TAG_FUNCTION_CALL) {
			struct thunk *t = p;
			pointer_t ref;
			ref = t->u.function_call.u.function_reference;

			if (pointer_is_thunk(ref)) {
				debug("ref is thunk");

			if (!da(pointer_get_data(ref),function_reference)->is_indirect)

			ref = da(pointer_get_data(ref),function_reference)->u.indirect;

			ref = *da(pointer_get_data(ref),function_reference)->u.direct;
			if (pointer_is_thunk(ref)) {
				debug("function not evaluated");

			debug("function: '%s'", da(pointer_get_data(ref),function)->function_name);

		}
		return false;
	}
	*align = maximum(*align, SAVED_DATA_ALIGN);
	return true;
}
bool data_save_init_stack(pointer_t *ptr, struct stack_entry **stk, size_t *stk_l)
{
	ajla_error_t sink;
	struct stack_entry ste;

	if (unlikely(!array_init_mayfail(struct stack_entry, stk, stk_l, &sink)))
		return false;
	ste.t = &save_pointer;
	ste.ptr = ptr;
	ste.align = ste.size = 0;	/* avoid warning */
	if (unlikely(!array_add_mayfail(struct stack_entry, stk, stk_l, ste, NULL, &sink)))
		return false;
	return true;
}
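
/*
 * Orientation note: with HAVE_CODEGEN_TRAPS, generated code regions register
 * themselves in traps_tree keyed by their address range; data_trap_lookup
 * maps a faulting instruction pointer back to its function and
 * binary-searches the trap_records table for the corresponding destination
 * address.
 */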
#ifdef HAVE_CODEGEN_TRAPS
static int data_traps_tree_compare(const struct tree_entry *e, uintptr_t ptr)
{
	const struct data_codegen *dc = get_struct(e, struct data_codegen, codegen_tree);
	uintptr_t base = ptr_to_num(dc->unoptimized_code_base);
	if (ptr < base)
		return -1;
	if (ptr >= base + dc->unoptimized_code_size)
		return 1;
	return 0;
}
void *data_trap_lookup(void *ptr)
{
	size_t res;
	uintptr_t offset;
	struct tree_entry *e;
	const struct data_codegen *dc;
	uintptr_t ptr_num = ptr_to_num(ptr);

	rwmutex_lock_read(&traps_lock);
	e = tree_find(&traps_tree, data_traps_tree_compare, ptr_num);
	if (unlikely(!e))
		internal(file_line, "data_trap_lookup: could not find function for address %p", ptr);
	rwmutex_unlock_read(&traps_lock);
	dc = get_struct(e, struct data_codegen, codegen_tree);

	offset = ptr_num - ptr_to_num(dc->unoptimized_code_base);

	binary_search(size_t, dc->trap_records_size, res, dc->trap_records[res].source_ip == offset, dc->trap_records[res].source_ip < offset,
		internal(file_line, "data_trap_lookup(%s): could not find trap for address %p, offset %"PRIxMAX"", da(dc->function,function)->function_name, ptr, (uintmax_t)offset));

	return cast_ptr(char *, dc->unoptimized_code_base) + dc->trap_records[res].destination_ip;
}
void data_trap_insert(struct data *codegen)
{
	struct tree_insert_position ins;
	struct tree_entry *e;
#ifndef DEBUG_CRASH_HANDLER
	if (!da(codegen,codegen)->trap_records_size)
		return;
#endif
	/*debug("inserting trap for %p, %lx", da(codegen,codegen)->unoptimized_code_base, da(codegen,codegen)->unoptimized_code_size);*/
	rwmutex_lock_write(&traps_lock);
	e = tree_find_for_insert(&traps_tree, data_traps_tree_compare, ptr_to_num(da(codegen,codegen)->unoptimized_code_base), &ins);
	if (unlikely(e != NULL))
		internal(file_line, "data_insert_traps: the requested range is already in the tree");
	tree_insert_after_find(&da(codegen,codegen)->codegen_tree, &ins);
	rwmutex_unlock_write(&traps_lock);
}
#endif
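
/*
 * Orientation note: data_init fills data_method_table with safe defaults for
 * every data and thunk tag and then overrides the free_object, get_sub,
 * deep_eval, compare and save methods for the tags that need them; it also
 * preallocates the shared out-of-memory thunk.
 */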
void name(data_init)(void)
{
	unsigned i;
	struct thunk *oom;

	if (slot_size < sizeof(pointer_t))
		internal(file_line, "data_init: invalid slot size: %lu < %lu", (unsigned long)slot_size, (unsigned long)sizeof(pointer_t));

	refcount_init(&n_dereferenced);

	for (i = DATA_TAG_START; i < DATA_TAG_END; i++) {
		data_method_table[i].get_sub = no_sub;
		data_method_table[i].free_object = free_primitive;
		data_method_table[i].deep_eval = deep_eval_nothing;
		data_method_table[i].compare = data_compare_nothing;
		data_method_table[i].save = no_save;
	}
	for (i = THUNK_TAG_START; i < THUNK_TAG_END; i++) {
		data_method_table[i].free_object = free_primitive_thunk;
		data_method_table[i].save = no_save;
	}

	data_method_table[DATA_TAG_longint].free_object = free_integer;
	data_method_table[DATA_TAG_array_same].free_object = free_array_same;
	data_method_table[DATA_TAG_resource].free_object = free_resource;
	data_method_table[DATA_TAG_function].free_object = free_function;

	data_method_table[DATA_TAG_codegen].free_object = free_codegen;

	data_method_table[DATA_TAG_record].get_sub = get_sub_record;
	data_method_table[DATA_TAG_option].get_sub = get_sub_option;
	data_method_table[DATA_TAG_array_slice].get_sub = get_sub_array_slice;
	data_method_table[DATA_TAG_array_pointers].get_sub = get_sub_array_pointers;
	data_method_table[DATA_TAG_array_same].get_sub = get_sub_array_same;
	data_method_table[DATA_TAG_array_btree].get_sub = get_sub_array_btree;
	data_method_table[DATA_TAG_array_incomplete].get_sub = get_sub_array_incomplete;
	data_method_table[DATA_TAG_function_reference].get_sub = get_sub_function_reference;

	data_method_table[THUNK_TAG_FUNCTION_CALL].get_sub = get_sub_function_call;
	data_method_table[THUNK_TAG_BLACKHOLE].get_sub = get_sub_blackhole;
	data_method_table[THUNK_TAG_BLACKHOLE].free_object = free_none;
	data_method_table[THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED].get_sub = get_sub_blackhole_some_dereferenced;
	data_method_table[THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED].free_object = free_none;
	data_method_table[THUNK_TAG_BLACKHOLE_DEREFERENCED].get_sub = get_sub_blackhole_dereferenced;
	data_method_table[THUNK_TAG_BLACKHOLE_DEREFERENCED].free_object = free_blackhole_dereferenced;
	data_method_table[THUNK_TAG_RESULT].get_sub = get_sub_result;
	data_method_table[THUNK_TAG_MULTI_RET_REFERENCE].get_sub = get_sub_multi_ret_reference;
	data_method_table[THUNK_TAG_EXCEPTION].get_sub = get_sub_exception;
	data_method_table[THUNK_TAG_EXCEPTION].free_object = free_exception;

	data_method_table[DATA_TAG_flat].deep_eval = deep_eval_flat;
	data_method_table[DATA_TAG_record].deep_eval = deep_eval_record;
	data_method_table[DATA_TAG_option].deep_eval = deep_eval_option;
	data_method_table[DATA_TAG_array_flat].deep_eval = deep_eval_array_flat;
	data_method_table[DATA_TAG_array_slice].deep_eval = deep_eval_array_slice;
	data_method_table[DATA_TAG_array_pointers].deep_eval = deep_eval_array_pointers;
	data_method_table[DATA_TAG_array_same].deep_eval = deep_eval_array_same;
	data_method_table[DATA_TAG_array_btree].deep_eval = deep_eval_array_btree;
	data_method_table[DATA_TAG_array_incomplete].deep_eval = deep_eval_array_incomplete;
	data_method_table[DATA_TAG_function_reference].deep_eval = deep_eval_function_reference;

	data_method_table[DATA_TAG_flat].compare = data_compare_number;
	data_method_table[DATA_TAG_longint].compare = data_compare_number;
	data_method_table[DATA_TAG_record].compare = data_compare_record;
	data_method_table[DATA_TAG_option].compare = data_compare_option;
	data_method_table[DATA_TAG_array_flat].compare = data_compare_array;
	data_method_table[DATA_TAG_array_slice].compare = data_compare_array;
	data_method_table[DATA_TAG_array_pointers].compare = data_compare_array;
	data_method_table[DATA_TAG_array_same].compare = data_compare_array;
	data_method_table[DATA_TAG_array_btree].compare = data_compare_array;
	data_method_table[DATA_TAG_array_incomplete].compare = data_compare_array;
	data_method_table[DATA_TAG_function_reference].compare = data_compare_function_reference;
	data_method_table[DATA_TAG_resource].compare = data_compare_resource;

	data_method_table[DATA_TAG_flat].save = save_flat;
	data_method_table[DATA_TAG_longint].save = save_longint;
	data_method_table[DATA_TAG_record].save = save_record;
	data_method_table[DATA_TAG_option].save = save_option;
	data_method_table[DATA_TAG_array_flat].save = save_array_flat;
	data_method_table[DATA_TAG_array_slice].save = save_array_slice;
	data_method_table[DATA_TAG_array_pointers].save = save_array_pointers;
	data_method_table[DATA_TAG_array_same].save = save_array_same;
	data_method_table[DATA_TAG_array_btree].save = save_array_btree;
	data_method_table[DATA_TAG_array_incomplete].save = save_array_incomplete;
	data_method_table[DATA_TAG_function_types].save = save_function_types;
	data_method_table[DATA_TAG_saved].save = save_saved;
	data_method_table[DATA_TAG_saved_cache].save = save_saved_cache;
	data_method_table[THUNK_TAG_EXCEPTION].save = save_exception;

	oom = thunk_alloc_exception_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY), NULL pass_file_line);
	out_of_memory_thunk = pointer_thunk(oom);

#ifdef HAVE_CODEGEN_TRAPS
	rwmutex_init(&traps_lock);
	tree_init(&traps_tree);
#endif
}
void name(data_done)(void)
{
	if (unlikely(!refcount_is_one(&n_dereferenced)))
		internal(file_line, "data_done: n_dereferenced_leaked: %"PRIxMAX"", (uintmax_t)refcount_get_nonatomic(&n_dereferenced));

#ifdef HAVE_CODEGEN_TRAPS
	rwmutex_done(&traps_lock);
	ajla_assert_lo(tree_is_empty(&traps_tree), (file_line, "data_done: traps_tree is not empty"));
#endif

	pointer_dereference(out_of_memory_thunk);
}