1 /*
2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
9 * version.
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
19 #include "ajla.h"
21 #ifndef FILE_OMIT
23 #include "mem_al.h"
24 #include "tree.h"
25 #include "thread.h"
26 #include "task.h"
27 #include "array.h"
28 #include "arrayu.h"
29 #include "codegen.h"
30 #include "save.h"
32 #include "data.h"
35 #ifdef DEBUG_TRACE
36 atomic_type uchar_efficient_t trace_enabled = 0;
37 #endif
39 #ifdef HAVE_CODEGEN_TRAPS
40 static rwmutex_t traps_lock;
41 static struct tree traps_tree;
42 #endif
45 static refcount_t n_dereferenced;
48 /*********
49 * FRAME *
50 *********/
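/*
 * Allocate the memory block that backs an execution stack.  The strategy, as
 * written here: unless the function is a leaf, start from STACK_INITIAL_SIZE
 * and keep doubling until the requested frame fits, so later calls can grow
 * into the slack; if doubling overflows or the large allocation fails, fall
 * back to an exact-sized allocation ("exact:").  The block is laid out as
 * struct stack_bottom, then the slots, then a terminating struct frame_struct
 * whose function pointer is NULL, which frame_is_top() presumably uses to
 * detect the top of the stack.
 */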
52 static struct stack_bottom *stack_alloc_space(size_t needed_size, bool leaf, ajla_error_t *mayfail)
54 const size_t additional_space = SIZEOF_STACK_BOTTOM + SIZEOF_FRAME_STRUCT;
55 size_t test_size, extra_space;
56 size_t slots;
57 ajla_error_t sink;
58 struct stack_bottom *stack;
59 struct frame_struct *stack_end;
61 if (unlikely(needed_size + additional_space < additional_space) ||
62 unlikely(needed_size / slot_size >= sign_bit(stack_size_t))) {
63 fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "stack allocation size overflow");
64 return NULL;
66 if (leaf)
67 goto exact;
68 test_size = STACK_INITIAL_SIZE;
69 if (unlikely(test_size < additional_space))
70 goto exact;
71 while (test_size - additional_space < needed_size) {
72 size_t new_test_size = test_size * 2;
73 if (unlikely(new_test_size <= test_size))
74 goto exact;
75 test_size = new_test_size;
77 extra_space = round_down(test_size - additional_space - needed_size, frame_align);
78 slots = (extra_space + needed_size) / slot_size;
79 if (unlikely(slots >= sign_bit(stack_size_t)))
80 goto exact;
81 stack = mem_align_mayfail(struct stack_bottom *, extra_space + needed_size + additional_space, frame_align, &sink);
82 if (unlikely(!stack)) {
83 exact:
84 extra_space = 0;
85 slots = (extra_space + needed_size) / slot_size;
86 stack = mem_align_mayfail(struct stack_bottom *, needed_size + additional_space, frame_align, mayfail);
87 if (unlikely(!stack))
88 return NULL;
90 stack_end = cast_ptr(struct frame_struct *, cast_ptr(char *, stack) + SIZEOF_STACK_BOTTOM + extra_space + needed_size);
91 stack_end->function = NULL;
92 stack_end->available_slots = stack->useable_slots = (stack_size_t)slots;
93 return stack;
96 frame_s * attr_fastcall stack_alloc(struct execution_control *ex, struct data *function, ajla_error_t *mayfail)
98 struct stack_bottom *stack;
99 char *stack_start, *stack_end;
100 frame_s *frame;
102 stack = stack_alloc_space(function_frame_size(function), da(function,function)->leaf, mayfail);
103 if (unlikely(!stack))
104 return NULL;
106 if ((stack->ex = ex))
107 ex->stack = stack;
108 stack_start = cast_ptr(char *, stack) + SIZEOF_STACK_BOTTOM;
109 stack_end = stack_start + stack->useable_slots * slot_size;
110 frame = ptr_frame(cast_ptr(struct frame_struct *, stack_end - function_frame_size(function)));
111 get_frame(frame)->available_slots = stack->useable_slots - da(function,function)->frame_slots;
112 get_frame(frame)->function = function;
113 return frame;
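/*
 * Grow the stack when a new frame does not fit.  The used part of the old
 * stack (from the current frame up to the old stack end) is copied to the top
 * of a freshly allocated, larger stack, the execution control is repointed at
 * the new stack, the old stack is freed, and available_slots is recomputed
 * for every copied frame, since each frame's distance from the stack bottom
 * has changed.
 */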
116 frame_s * attr_fastcall stack_expand(frame_s *fp, struct data *function, ajla_error_t *mayfail)
118 struct stack_bottom *old_stack, *new_stack;
119 size_t new_size, old_stack_size;
120 char *old_stack_end, *new_stack_end;
121 frame_s *new_fp;
123 old_stack = frame_stack_bottom(fp);
124 new_size = (old_stack->useable_slots - get_frame(fp)->available_slots) * slot_size + function_frame_size(function);
125 if (unlikely(new_size < function_frame_size(function))) {
126 fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "stack allocation size overflow");
127 return NULL;
130 new_stack = stack_alloc_space(new_size, false, mayfail);
131 if (unlikely(!new_stack))
132 return NULL;
134 old_stack_end = cast_ptr(char *, old_stack) + SIZEOF_STACK_BOTTOM + old_stack->useable_slots * slot_size;
135 old_stack_size = old_stack_end - cast_ptr(char *, get_frame(fp));
136 new_stack_end = cast_ptr(char *, new_stack) + SIZEOF_STACK_BOTTOM + new_stack->useable_slots * slot_size;
137 (void)memcpy(new_stack_end - old_stack_size, old_stack_end - old_stack_size, old_stack_size);
138 (new_stack->ex = old_stack->ex)->stack = new_stack;
139 stack_free(old_stack);
141 new_fp = ptr_frame(cast_ptr(const struct frame_struct *, new_stack_end - old_stack_size - function_frame_size(function)));
142 get_frame(new_fp)->function = function;
143 fp = new_fp;
144 do {
145 get_frame(fp)->available_slots = (stack_size_t)((size_t)(cast_ptr(char *, get_frame(fp)) - cast_ptr(char *, new_stack) - SIZEOF_STACK_BOTTOM) / slot_size);
146 fp = frame_up(fp);
147 } while (!frame_is_top(fp));
149 return new_fp;
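/*
 * Copy the frames between from_fp (inclusive) and to_fp (exclusive) into a
 * new stack of exactly that size, fixing up available_slots of every copied
 * frame.  *high is left pointing at the topmost copied frame; the returned
 * new_fp is the copy of from_fp.
 */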
152 frame_s * attr_fastcall stack_split(frame_s *from_fp, frame_s *to_fp, frame_s **high, ajla_error_t *mayfail)
154 struct stack_bottom *new_stack;
155 char *new_stack_end;
156 frame_s *fp, *new_fp;
157 size_t new_stack_size = cast_ptr(char *, to_fp) - cast_ptr(char *, from_fp);
159 new_stack = stack_alloc_space(new_stack_size, false, mayfail);
160 if (unlikely(!new_stack))
161 return NULL;
163 new_stack_end = cast_ptr(char *, new_stack) + SIZEOF_STACK_BOTTOM + new_stack->useable_slots * slot_size;
164 new_fp = ptr_frame(memcpy(new_stack_end - new_stack_size, get_frame(from_fp), new_stack_size));
166 fp = new_fp;
167 do {
168 get_frame(fp)->available_slots = (stack_size_t)((size_t)(cast_ptr(char *, get_frame(fp)) - cast_ptr(char *, new_stack) - SIZEOF_STACK_BOTTOM) / slot_size);
169 *high = fp;
170 fp = frame_up(fp);
171 } while (!frame_is_top(fp));
173 return new_fp;
176 static void frame_cleanup(frame_s *fp)
178 frame_t l;
179 const struct data *function = get_frame(fp)->function;
181 for (l = MIN_USEABLE_SLOT; l < function_n_variables(function); l++) {
182 if (!frame_test_flag(fp, l))
183 continue;
184 pointer_dereference(*frame_pointer(fp, l));
189 /***************
190 * STACK TRACE *
191 ***************/
193 void stack_trace_init(struct stack_trace *st)
195 st->trace = NULL;
196 st->trace_n = 0;
199 void stack_trace_free(struct stack_trace *st)
201 if (st->trace)
202 mem_free(st->trace);
205 bool stack_trace_get_location(struct data *function, ip_t ip_rel, struct stack_trace_entry *result)
207 size_t idx;
208 struct line_position *lp, *my_lp;
209 size_t n_lp;
210 lp = da(function,function)->lp;
211 n_lp = da(function,function)->lp_size;
212 if (!n_lp)
213 return false;
214 binary_search(code_t, da(function,function)->lp_size, idx, false, idx + 1 >= n_lp ? false : lp[idx + 1].ip < ip_rel, break);
215 my_lp = &lp[idx];
216 result->module_designator = da(function,function)->module_designator;
217 result->function_name = da(function,function)->function_name;
218 result->line = my_lp->line;
219 return true;
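/*
 * Walk the stack from fp upwards and record one stack_trace_entry per frame
 * that has line-number information, stopping after max_depth entries or at
 * the top frame.  Allocation failures are deliberately ignored; the trace is
 * simply left shorter (or empty).
 */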
222 void stack_trace_capture(struct stack_trace *st, frame_s *fp, const code_t *ip, unsigned max_depth)
224 struct data *function;
225 ip_t ip_rel, previous_ip;
226 struct stack_trace_entry ste;
227 ajla_error_t sink;
228 if (!fp)
229 return;
230 if (!array_init_mayfail(struct stack_trace_entry, &st->trace, &st->trace_n, &sink))
231 return;
233 go_up:
234 function = get_frame(fp)->function;
235 ip_rel = ip - da(function,function)->code;
236 if (unlikely(!stack_trace_get_location(function, ip_rel, &ste)))
237 goto skip_this_frame;
238 if (unlikely(!array_add_mayfail(struct stack_trace_entry, &st->trace, &st->trace_n, ste, NULL, &sink))) {
239 return;
242 if (!--max_depth)
243 goto ret;
245 skip_this_frame:
246 previous_ip = get_frame(fp)->previous_ip;
247 fp = frame_up(fp);
248 if (!frame_is_top(fp)) {
249 ip = da(get_frame(fp)->function,function)->code + previous_ip;
250 goto go_up;
253 ret:
254 array_finish(struct stack_trace_entry, &st->trace, &st->trace_n);
257 char * attr_cold stack_trace_string(struct stack_trace *st, ajla_error_t *err)
259 char *msg;
260 size_t msg_l;
261 size_t t;
262 if (unlikely(!array_init_mayfail(char, &msg, &msg_l, err)))
263 return NULL;
265 for (t = 0; t < st->trace_n; t++) {
266 size_t xl;
267 char buffer[11];
268 char *b;
269 struct stack_trace_entry *ste = &st->trace[t];
270 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ' ', NULL, err)))
271 return NULL;
272 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, ste->module_designator->path, ste->module_designator->path_len, NULL, err)))
273 return NULL;
274 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, " : ", 3, NULL, err)))
275 return NULL;
276 xl = strlen(ste->function_name);
277 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, ste->function_name, xl, NULL, err)))
278 return NULL;
279 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ':', NULL, err)))
280 return NULL;
281 b = buffer;
282 str_add_unsigned(&b, NULL, ste->line & 0xffffffffU, 10);
283 xl = strlen(buffer);
284 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, buffer, xl, NULL, err)))
285 return NULL;
286 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, '\n', NULL, err)))
287 return NULL;
289 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, 0, NULL, err)))
290 return NULL;
291 return msg;
295 void attr_cold stack_trace_print(struct stack_trace *st)
297 ajla_error_t sink;
298 char *m = stack_trace_string(st, &sink);
299 if (unlikely(!m))
300 return;
301 if (*m) {
302 stderr_msg("stack trace:");
303 m[strlen(m) - 1] = 0;
304 stderr_msg("%s", m);
306 mem_free(m);
310 /*********************
311 * OBJECT ALLOCATION *
312 *********************/
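/*
 * The data_alloc_*_mayfail constructors below share one convention: they
 * return NULL on failure and report the reason through *mayfail, and the
 * argument_position / pass_position macros appear to thread the caller's
 * file and line into the allocator for allocation tracking.  A minimal usage
 * sketch (illustrative only, not taken from a caller in this file):
 *
 *	ajla_error_t err;
 *	struct data *d = data_alloc_option_mayfail(&err pass_file_line);
 *	if (unlikely(!d))
 *		return pointer_error(err, NULL, NULL pass_file_line);
 *	da(d,option)->option = 0;
 *	da(d,option)->pointer = pointer_empty();
 *	return pointer_data(d);
 */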
314 /* !!! TODO: make it return pointer_t */
315 struct data * attr_fastcall data_alloc_flat_mayfail(type_tag_t type, const unsigned char *flat, size_t size, ajla_error_t *mayfail argument_position)
317 struct data *d = data_align(flat, data_flat_offset + size, scalar_align, mayfail);
318 if (unlikely(!d))
319 return NULL;
320 mem_set_position(data_untag(d) pass_position);
321 da(d,flat)->data_type = type;
322 memcpy_fast(da_flat(d), flat, size);
323 return d;
326 struct data * attr_fastcall data_alloc_longint_mayfail(unsigned long bits, ajla_error_t *mayfail argument_position)
328 struct data *d = data_alloc(longint, mayfail);
329 if (unlikely(!d))
330 return NULL;
331 mem_set_position(data_untag(d) pass_position);
332 if (unlikely(!mpint_alloc_mayfail(&da(d,longint)->mp, bits, mayfail))) {
333 data_free_r1(d);
334 return NULL;
336 return d;
339 struct data * attr_fastcall data_alloc_record_mayfail(const struct record_definition *def, ajla_error_t *mayfail argument_position)
341 struct data *d = data_align(record, data_record_offset + def->n_slots * slot_size, def->alignment, mayfail);
342 if (unlikely(!d))
343 return NULL;
344 mem_set_position(data_untag(d) pass_position);
345 da(d,record)->definition = &def->type;
346 return d;
349 struct data * attr_fastcall data_alloc_option_mayfail(ajla_error_t *mayfail argument_position)
351 struct data *d = data_alloc(option, mayfail);
352 if (unlikely(!d))
353 return NULL;
354 mem_set_position(data_untag(d) pass_position);
355 return d;
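/*
 * Flat array allocation has to guard n_allocated * type->size +
 * data_array_offset against overflow: with gcc-style checked arithmetic it
 * uses __builtin_mul_overflow / __builtin_add_overflow, otherwise it falls
 * back to a manual check (a division test plus a wrap-around test on the
 * addition) that is only performed when the operands are large enough to
 * possibly overflow.
 */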
358 struct data * attr_fastcall data_alloc_array_flat_mayfail(const struct type *type, int_default_t n_allocated, int_default_t n_used, bool clear, ajla_error_t *mayfail argument_position)
360 struct data *d;
361 size_t size;
362 ajla_assert(TYPE_IS_FLAT(type), (caller_file_line, "data_alloc_array_flat_mayfail: type is not flat, tag %u", type->tag));
363 ajla_assert((n_allocated | n_used) >= 0, (caller_file_line, "data_alloc_array_flat_mayfail: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)n_allocated, (intmax_t)n_used));
364 #if defined(HAVE_BUILTIN_ADD_SUB_OVERFLOW) && defined(HAVE_BUILTIN_MUL_OVERFLOW) && !defined(UNUSUAL)
365 if (unlikely(__builtin_mul_overflow((uint_default_t)n_allocated, type->size, &size)))
366 goto ovf;
367 if (unlikely(__builtin_add_overflow(size, data_array_offset, &size)))
368 goto ovf;
369 #else
370 size = (uint_default_t)n_allocated * (size_t)type->size;
371 if (unlikely((size_t)type->size + uzero >= 0x100) || unlikely((uint_default_t)n_allocated + (size_t)uzero >= sign_bit(size_t) / 0x100)) {
372 if (unlikely(size / type->size != (uint_default_t)n_allocated))
373 goto ovf;
374 if ((size_t)(size + data_array_offset) < size)
375 goto ovf;
377 size += data_array_offset;
378 #endif
379 if (likely(!clear))
380 d = data_align(array_flat, size, scalar_align, mayfail);
381 else
382 d = data_calign(array_flat, size, scalar_align, mayfail);
383 if (unlikely(!d))
384 return NULL;
385 mem_set_position(data_untag(d) pass_position);
386 da(d,array_flat)->type = type;
387 da(d,array_flat)->n_allocated_entries = n_allocated;
388 da(d,array_flat)->n_used_entries = n_used;
389 return d;
391 ovf:
392 fatal_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), mayfail, "flat array allocation size overflow");
393 return NULL;
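/*
 * A slice keeps its parent flat array alive through 'reference' and stores
 * the element pointer pre-biased by -data_array_offset; presumably this lets
 * the generic flat-array accessors be applied to a slice unchanged.
 */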
396 struct data * attr_fastcall data_alloc_array_slice_mayfail(struct data *base, unsigned char *data, int_default_t start, int_default_t len, ajla_error_t *mayfail argument_position)
398 const struct type *t;
400 struct data *d;
401 d = data_alloc(array_slice, mayfail);
402 if (unlikely(!d))
403 return NULL;
404 mem_set_position(data_untag(d) pass_position);
406 data_reference(base);
408 t = da(base,array_flat)->type;
409 da(d,array_slice)->type = t;
410 da(d,array_slice)->reference = pointer_data(base);
411 da(d,array_slice)->flat_data_minus_data_array_offset = data - data_array_offset + start * t->size;
412 da(d,array_slice)->n_entries = len;
414 return d;
417 struct data * attr_fastcall data_alloc_array_pointers_mayfail(int_default_t n_allocated, int_default_t n_used, ajla_error_t *mayfail argument_position)
419 struct data *d;
420 d = data_alloc_flexible(array_pointers, pointer_array, n_allocated, mayfail);
421 if (unlikely(!d))
422 return NULL;
423 mem_set_position(data_untag(d) pass_position);
424 da(d,array_pointers)->pointer = da(d,array_pointers)->pointer_array;
425 da(d,array_pointers)->n_allocated_entries = n_allocated;
426 da(d,array_pointers)->n_used_entries = n_used;
427 return d;
430 struct data * attr_fastcall data_alloc_array_same_mayfail(array_index_t n_entries, ajla_error_t *mayfail argument_position)
432 struct data *d = data_alloc(array_same, mayfail);
433 if (unlikely(!d)) {
434 index_free(&n_entries);
435 return NULL;
437 mem_set_position(data_untag(d) pass_position);
438 da(d,array_same)->n_entries = n_entries;
439 return d;
442 struct data * attr_fastcall data_alloc_array_incomplete(struct data *first, pointer_t next, ajla_error_t *mayfail argument_position)
444 struct data *d;
445 ajla_assert(!array_is_empty(first), (caller_file_line, "data_alloc_array_incomplete: the first array is empty"));
446 d = data_alloc(array_incomplete, mayfail);
447 if (unlikely(!d))
448 return NULL;
449 mem_set_position(data_untag(d) pass_position);
450 da(d,array_incomplete)->first = pointer_data(first);
451 da(d,array_incomplete)->next = next;
452 return d;
455 struct data * attr_fastcall data_alloc_function_reference_mayfail(arg_t n_curried_arguments, ajla_error_t *mayfail argument_position)
457 struct data *d;
458 arg_t alloc_size = n_curried_arguments;
460 d = data_alloc_flexible(function_reference, arguments, alloc_size, mayfail);
461 if (unlikely(!d))
462 return NULL;
463 mem_set_position(data_untag(d) pass_position);
464 da(d,function_reference)->n_curried_arguments = n_curried_arguments;
466 return d;
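/*
 * Curried arguments are stored into a function_reference either boxed or
 * flat: data_fill_function_reference() takes ownership of an already boxed
 * pointer (tagged TYPE_TAG_unknown), while data_fill_function_reference_flat()
 * keeps small builtin values inline in the argument slot and boxes anything
 * else via flat_to_data().  A NULL function_reference (presumably a failed
 * earlier allocation) just drops the pointer.
 */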
469 void attr_fastcall data_fill_function_reference(struct data *function_reference, arg_t a, pointer_t ptr)
471 if (unlikely(!function_reference)) {
472 pointer_dereference(ptr);
473 return;
476 ajla_assert(a < da(function_reference,function_reference)->n_curried_arguments, (file_line, "data_fill_function_reference: invalid argument %"PRIuMAX" (%"PRIuMAX" arguments)", (uintmax_t)a, (uintmax_t)da(function_reference,function_reference)->n_curried_arguments));
478 da(function_reference,function_reference)->arguments[a].tag = TYPE_TAG_unknown;
479 da(function_reference,function_reference)->arguments[a].u.ptr = ptr;
482 void attr_fastcall data_fill_function_reference_flat(struct data *function_reference, arg_t a, const struct type *type, const unsigned char *data)
484 if (unlikely(!function_reference))
485 return;
487 if (type->size <= slot_size && TYPE_TAG_IS_BUILTIN(type->tag)) {
488 da(function_reference,function_reference)->arguments[a].tag = type->tag;
489 memcpy_fast(da(function_reference,function_reference)->arguments[a].u.slot, data, type->size);
490 } else {
491 pointer_t ptr = flat_to_data(type, data);
492 data_fill_function_reference(function_reference, a, ptr);
496 struct data * attr_fastcall data_alloc_resource_mayfail(size_t size, void (*close)(struct data *), ajla_error_t *mayfail argument_position)
498 struct data *d = data_calign(resource, data_resource_offset + size, scalar_align, mayfail);
499 if (unlikely(!d))
500 return NULL;
501 mem_set_position(data_untag(d) pass_position);
502 da(d,resource)->close = close;
503 return d;
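/*
 * The thunk tag is either kept in a separate field or packed into the
 * reference count, depending on whether REFCOUNT_TAG is available.
 */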
507 static inline void thunk_init_refcount_tag(struct thunk *t, tag_t tag)
509 #ifndef REFCOUNT_TAG
510 t->tag = tag;
511 refcount_init(&t->refcount_);
512 #else
513 refcount_init_tag(&t->refcount_, tag);
514 #endif
517 static attr_always_inline struct thunk *thunk_alloc_exception_mayfail(ajla_error_t err, ajla_error_t *mayfail argument_position)
519 struct thunk *thunk;
520 #if 0
521 if (mayfail)
522 return NULL;
523 #endif
524 /*debug("thunk_alloc_exception(%s) at %s", error_decode(err), position_arg);*/
525 thunk = mem_alloc_compressed_mayfail(struct thunk *, partial_sizeof(struct thunk, u.exception), mayfail);
526 if (unlikely(!thunk))
527 return NULL;
528 mem_set_position(thunk pass_position);
529 thunk = thunk_pointer_tag(thunk);
530 thunk_init_refcount_tag(thunk, THUNK_TAG_EXCEPTION);
531 thunk->u.exception.err = err;
532 thunk->u.exception.msg = NULL;
533 stack_trace_init(&thunk->u.exception.tr);
534 return thunk;
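/*
 * thunk_alloc_exception_error() must not fail: if even the exception thunk
 * cannot be allocated, it hands out an extra reference to the shared
 * out_of_memory_thunk instead.  The message copy and the captured stack
 * trace are both best-effort.
 */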
537 static pointer_t out_of_memory_thunk;
538 pointer_t *out_of_memory_ptr = &out_of_memory_thunk;
540 struct thunk * attr_fastcall thunk_alloc_exception_error(ajla_error_t err, char *msg, frame_s *fp, const code_t *ip argument_position)
542 ajla_error_t sink;
543 struct thunk *thunk;
544 /*debug("thunk_alloc_exception_error: %d, %d @ %p", err.error_type, err.error_code, __builtin_return_address(0));*/
545 thunk = thunk_alloc_exception_mayfail(err, &sink pass_position);
546 if (unlikely(!thunk)) {
547 pointer_reference_owned(out_of_memory_thunk);
548 return pointer_get_thunk(out_of_memory_thunk);
550 if (msg)
551 thunk->u.exception.msg = str_dup(msg, -1, &sink);
552 if (fp) {
553 stack_trace_capture(&thunk->u.exception.tr, fp, ip, -1);
554 /*debug("err: %d, %d, %d", err.error_class, err.error_type, err.error_aux);
555 stack_trace_print(&thunk->u.exception.tr);*/
557 return thunk;
560 pointer_t attr_fastcall pointer_error(ajla_error_t err, frame_s *fp, const code_t *ip argument_position)
562 struct thunk *thunk = thunk_alloc_exception_error(err, NULL, fp, ip pass_position);
563 return pointer_thunk(thunk);
566 char * attr_cold thunk_exception_string(struct thunk *thunk, ajla_error_t *err)
568 const char *m;
569 char *msg;
570 size_t msg_l, ml;
571 ajla_assert_lo(thunk_tag(thunk) == THUNK_TAG_EXCEPTION, (file_line, "thunk_exception_string: invalid thunk tag %u", thunk_tag(thunk)));
572 if (unlikely(!array_init_mayfail(char, &msg, &msg_l, err)))
573 return NULL;
574 m = error_decode(thunk->u.exception.err);
575 ml = strlen(m);
576 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, m, ml, NULL, err)))
577 return NULL;
578 if (thunk->u.exception.msg && *thunk->u.exception.msg) {
579 size_t xl;
580 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, " (", 2, NULL, err)))
581 return NULL;
582 xl = strlen(thunk->u.exception.msg);
583 if (unlikely(!array_add_multiple_mayfail(char, &msg, &msg_l, thunk->u.exception.msg, xl, NULL, err)))
584 return NULL;
585 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, ')', NULL, err)))
586 return NULL;
588 if (unlikely(!array_add_mayfail(char, &msg, &msg_l, 0, NULL, err)))
589 return NULL;
590 return msg;
593 char * attr_cold thunk_exception_payload(struct thunk *thunk, ajla_error_t *err)
595 const char *m;
596 ajla_assert_lo(thunk_tag(thunk) == THUNK_TAG_EXCEPTION, (file_line, "thunk_exception_payload: invalid thunk tag %u", thunk_tag(thunk)));
597 m = thunk->u.exception.msg;
598 if (!m)
599 m = "";
600 return str_dup(m, -1, err);
603 void attr_cold thunk_exception_print(struct thunk *thunk)
605 char *m;
606 #if defined(DEBUG_ERROR) && defined(DEBUG_TRACK_FILE_LINE)
607 stderr_msg("error at %s", thunk->u.exception.err.position);
608 #endif
609 m = thunk_exception_string(thunk, NULL);
610 stderr_msg("exception: %s", m);
611 mem_free(m);
612 stack_trace_print(&thunk->u.exception.tr);
615 static struct thunk * attr_fastcall thunk_alloc_struct(tag_t tag, arg_t n_return_values, ajla_error_t *mayfail)
617 size_t s;
618 struct thunk *t;
620 /* not needed because of ARG_LIMIT
621 if (!struct_check_overflow(struct thunk, u.function_call.results, n_return_values, mayfail))
622 return NULL;
*/
625 s = partial_sizeof_array(struct thunk, u.function_call.results, n_return_values);
627 t = mem_alloc_compressed_mayfail(struct thunk *, s, mayfail);
628 if (unlikely(!t))
629 return NULL;
631 t = thunk_pointer_tag(t);
633 thunk_init_refcount_tag(t, tag);
635 return t;
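/*
 * For a single return value the function-call thunk itself is handed back as
 * the result.  For multiple return values one THUNK_TAG_MULTI_RET_REFERENCE
 * thunk is allocated per result, each holding the shared call thunk and its
 * result index, and the shared thunk ends up with one reference per result.
 * On failure the thunks created so far are freed again.
 */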
638 static bool attr_fastcall thunk_alloc_result(struct thunk *t, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
640 arg_t ia;
642 if (n_return_values == 1) {
643 *result = t;
644 } else for (ia = 0; ia < n_return_values; ia++) {
645 struct thunk *tm;
646 if (ia)
647 thunk_reference_nonatomic(t);
648 t->u.function_call.results[ia].wanted = true;
649 tm = thunk_alloc_struct(THUNK_TAG_MULTI_RET_REFERENCE, 1, mayfail);
650 if (unlikely(!tm)) {
651 while (ia) {
652 ia--;
653 thunk_free(result[ia]);
655 thunk_free(t);
656 return false;
658 tm->u.multi_ret_reference.thunk = t;
659 tm->u.multi_ret_reference.idx = ia;
660 result[ia] = tm;
662 return true;
665 bool attr_fastcall thunk_alloc_function_call(pointer_t function_reference, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
667 struct thunk *t;
669 t = thunk_alloc_struct(THUNK_TAG_FUNCTION_CALL, n_return_values, mayfail);
670 if (unlikely(!t))
671 return false;
673 t->u.function_call.u.function_reference = function_reference;
675 return thunk_alloc_result(t, n_return_values, result, mayfail);
678 bool attr_fastcall thunk_alloc_blackhole(struct execution_control *ex, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail)
680 struct thunk *t;
682 t = thunk_alloc_struct(THUNK_TAG_BLACKHOLE, n_return_values, mayfail);
683 if (unlikely(!t))
684 return false;
686 t->u.function_call.u.execution_control = ex;
687 ex->thunk = t;
689 return thunk_alloc_result(t, n_return_values, result, mayfail);
692 bool are_there_dereferenced(void)
694 return !refcount_is_one(&n_dereferenced);
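/*
 * Detach an execution control from everything it may be waiting on: remove
 * it from the waiting list and from each wait entry's list (under that
 * entry's mutex), then reset wait_state so it can be armed again.
 */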
697 static void execution_control_unlink(struct execution_control *ex)
699 unsigned i;
700 waiting_list_remove(ex);
701 for (i = 0; i < N_EXECUTION_CONTROL_WAIT; i++) {
702 struct execution_control_wait *w = &ex->wait[i];
703 mutex_t *t = w->mutex_to_lock;
704 if (unlikely(t != NULL)) {
705 mutex_lock(t);
706 list_del(&w->wait_entry);
707 w->mutex_to_lock = NULL;
708 mutex_unlock(t);
711 refcount_set(&ex->wait_state, EXECUTION_CONTROL_NORMAL);
714 void execution_control_unlink_and_submit(struct execution_control *ex, bool can_allocate_memory)
716 execution_control_unlink(ex);
717 task_submit(ex, can_allocate_memory);
720 bool execution_control_acquire(struct execution_control *ex)
722 return refcount_xchgcmp(&ex->wait_state, EXECUTION_CONTROL_FIRED, EXECUTION_CONTROL_ARMED);
725 static struct execution_control *execution_control_acquire_from_thunk(struct thunk *t)
727 struct execution_control *ex = t->u.function_call.u.execution_control;
728 ajla_assert_lo(ex->thunk == t, (file_line, "execution_control_acquire_from_thunk: pointer mismatch"));
730 return execution_control_acquire(ex) ? ex : NULL;
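/*
 * Wake everything on a wait list.  Entries whose execution control can be
 * fired (EXECUTION_CONTROL_ARMED -> EXECUTION_CONTROL_FIRED) are moved to a
 * private list while mutex_to_lock is held and only resumed after the mutex
 * has been dropped.  The first resumable execution control is not submitted
 * as a task but returned to the caller, which may run it directly;
 * POINTER_FOLLOW_THUNK_EXIT is returned when there is nothing to run.
 */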
733 static void *wake_up_wait_list_internal(struct list *wait_list, mutex_t *mutex_to_lock, bool can_allocate_memory)
735 struct list ex_to_resume;
736 void *ret = POINTER_FOLLOW_THUNK_EXIT;
738 list_init(&ex_to_resume);
740 while (!list_is_empty(wait_list)) {
741 struct execution_control_wait *w = get_struct(wait_list->prev, struct execution_control_wait, wait_entry);
742 ajla_assert_lo(w->mutex_to_lock == mutex_to_lock, (file_line, "wake_up_wait_list: mutex_to_lock pointer does not match: %p != %p", w->mutex_to_lock, mutex_to_lock));
743 list_del(&w->wait_entry);
744 if (likely(refcount_xchgcmp(&w->execution_control->wait_state, EXECUTION_CONTROL_FIRED, EXECUTION_CONTROL_ARMED))) {
745 w->mutex_to_lock = NULL;
746 list_add(&ex_to_resume, &w->wait_entry);
747 } else {
748 list_init(&w->wait_entry);
751 mutex_unlock(mutex_to_lock);
753 while (!list_is_empty(&ex_to_resume)) {
754 struct execution_control_wait *w;
755 struct execution_control *new_ex;
756 w = get_struct(ex_to_resume.prev, struct execution_control_wait, wait_entry);
757 list_del(&w->wait_entry);
758 new_ex = w->execution_control;
759 if (ret == POINTER_FOLLOW_THUNK_EXIT) {
760 execution_control_unlink(new_ex);
761 ret = new_ex;
762 } else {
763 execution_control_unlink_and_submit(new_ex, can_allocate_memory);
767 return ret;
770 void wake_up_wait_list(struct list *wait_list, mutex_t *mutex_to_lock, bool can_allocate_memory)
772 void *ex = wake_up_wait_list_internal(wait_list, mutex_to_lock, can_allocate_memory);
773 if (ex != POINTER_FOLLOW_THUNK_EXIT)
774 task_submit(ex, can_allocate_memory);
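/*
 * Called when the computation behind a blackhole thunk has finished and its
 * result pointers are filled in.  Unless every reference to the thunk has
 * already been dropped, the tag is switched to THUNK_TAG_RESULT; result slots
 * that are no longer wanted are dereferenced, waiters on the execution
 * control's wait list are woken through wake_up_wait_list_internal(), and the
 * execution control is freed.  Like that helper, this may return an execution
 * control for the caller to run instead of POINTER_FOLLOW_THUNK_EXIT.
 */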
777 void *thunk_terminate(struct thunk *t, arg_t n_return_values)
779 tag_t tag;
780 arg_t i;
781 struct execution_control *ex;
782 void *ret;
784 address_lock(t, DEPTH_THUNK);
785 ex = t->u.function_call.u.execution_control;
786 tag = thunk_tag(t);
787 ajla_assert((
788 likely(tag == THUNK_TAG_BLACKHOLE) ||
789 (tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED && n_return_values > 1) ||
790 (tag == THUNK_TAG_BLACKHOLE_DEREFERENCED)
791 ), (file_line, "thunk_terminate: invalid thunk tag %u (n_return_values %lu)", tag, (unsigned long)n_return_values));
792 if (unlikely(tag == THUNK_TAG_BLACKHOLE_DEREFERENCED)) {
793 thunk_init_refcount_tag(t, THUNK_TAG_BLACKHOLE_DEREFERENCED);
794 goto return_dereference_unused;
796 thunk_tag_set(t, tag, THUNK_TAG_RESULT);
797 #ifdef barrier_write_before_unlock_lock
798 barrier_write_before_unlock_lock();
799 #endif
800 if (tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
801 thunk_reference_nonatomic(t);
802 return_dereference_unused:
803 address_unlock(t, DEPTH_THUNK);
804 i = 0;
805 do {
806 if (n_return_values == 1 || !t->u.function_call.results[i].wanted) {
807 pointer_dereference(t->u.function_call.results[i].ptr);
808 pointer_poison(&t->u.function_call.results[i].ptr);
810 } while (++i < n_return_values);
811 address_lock(t, DEPTH_THUNK);
812 thunk_assert_refcount(t);
813 if (thunk_dereference_nonatomic(t)) {
814 if (unlikely(tag == THUNK_TAG_BLACKHOLE_DEREFERENCED))
815 refcount_add(&n_dereferenced, -1);
816 thunk_free(t);
820 ret = wake_up_wait_list_internal(&ex->wait_list, address_get_mutex(t, DEPTH_THUNK), true);
822 execution_control_free(ex);
824 return ret;
827 static void thunk_terminate_with_value(struct thunk *t, arg_t n_return_values, pointer_t val)
829 arg_t i;
830 void *ex;
831 for (i = 0; i < n_return_values; i++) {
832 if (i)
833 pointer_reference_owned(val);
834 t->u.function_call.results[i].ptr = val;
836 ex = thunk_terminate(t, n_return_values);
837 if (ex != POINTER_FOLLOW_THUNK_EXIT)
838 task_submit(ex, true);
842 /*********************
843 * EXECUTION CONTROL *
844 *********************/
846 struct execution_control *execution_control_alloc(ajla_error_t *mayfail)
848 unsigned i;
849 struct execution_control *ex = mem_alloc_mayfail(struct execution_control *, sizeof(struct execution_control), mayfail);
850 if (unlikely(!ex))
851 return NULL;
853 ex->stack = NULL;
854 ex->callback = NULL;
856 list_init(&ex->wait_list);
857 refcount_init_val(&ex->wait_state, EXECUTION_CONTROL_NORMAL);
858 for (i = 0; i < N_EXECUTION_CONTROL_WAIT; i++) {
859 struct execution_control_wait *w = &ex->wait[i];
860 w->execution_control = ex;
861 w->mutex_to_lock = NULL;
864 ex->atomic = 0;
865 ex->atomic_interrupted = false;
867 task_ex_control_started();
869 return ex;
872 void execution_control_free(struct execution_control *ex)
874 if (ex->stack)
875 stack_free(ex->stack);
877 task_ex_control_exited();
879 mem_free(ex);
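/*
 * Abort an execution control: report the result pointer through the optional
 * callback, run frame_cleanup() on every frame on its stack, and then either
 * terminate the computation's thunk with a "not supported" exception (when
 * ex->thunk is still set) or just free the execution control.
 */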
882 void execution_control_terminate(struct execution_control *ex, pointer_t ptr)
884 arg_t n_return_values;
885 frame_s *fp;
887 if (ex->callback)
888 ex->callback(ex->callback_cookie, ptr);
890 fp = ex->current_frame;
891 do {
892 n_return_values = da(get_frame(fp)->function,function)->n_return_values;
894 frame_cleanup(fp);
896 fp = frame_up(fp);
897 } while (!frame_is_top(fp));
899 ajla_assert_lo(frame_stack_bottom(fp)->ex == ex, (file_line, "execution_control_terminate: execution control pointer mismatch: %p != %p", frame_stack_bottom(fp)->ex, ex));
901 if (ex->thunk) {
902 struct thunk *err = thunk_alloc_exception_error(error_ajla(EC_ASYNC, AJLA_ERROR_NOT_SUPPORTED), NULL, NULL, NULL pass_file_line);
903 thunk_terminate_with_value(ex->thunk, n_return_values, pointer_thunk(err));
904 } else {
905 execution_control_free(ex);
910 /**********************
911 * POINTER OPERATIONS *
912 **********************/
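/*
 * Every data / thunk tag has an entry in data_method_table.  get_sub() and
 * free_object() drive the iterative pointer_dereference_() below: get_sub()
 * returns the next still-live sub-pointer of an object (and may unlock the
 * thunk or trim bookkeeping counters on the way), and free_object() destroys
 * the object once no sub-pointers remain.  deep_eval(), compare() and save()
 * appear to serve the evaluation, comparison and serialization code further
 * down in this file.
 */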
914 struct compare_status {
915 pointer_t ptr1;
916 pointer_t ptr2;
917 tag_t tag;
918 void (attr_fastcall *destruct)(struct compare_status *cs);
919 union {
920 struct {
921 arg_t ai;
922 } record;
923 struct {
924 array_index_t idx;
925 array_index_t len;
926 pointer_t p1;
927 pointer_t p2;
928 } array;
929 struct {
930 size_t l;
931 struct function_argument **args1;
932 struct function_argument **args2;
933 } function_reference;
934 } u;
937 struct data_method {
938 void *(attr_fastcall *get_sub)(void *data);
939 void (attr_fastcall *free_object)(void *data);
940 bool (attr_fastcall *deep_eval)(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err);
941 int (attr_fastcall *compare)(struct compare_status *cs, struct compare_status *new_cs, bool init);
942 bool (attr_fastcall *save)(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l);
945 static struct data_method data_method_table[TAG_END];
947 static void * attr_hot_fastcall no_sub(void attr_unused *data)
949 return NULL;
952 static void attr_hot_fastcall free_primitive(void *data)
954 struct data *d = cast_cpp(struct data *, data);
955 data_free(d);
958 static void attr_hot_fastcall free_primitive_thunk(void *data)
960 struct thunk *t = cast_cpp(struct thunk *, data);
961 thunk_free(t);
964 static void attr_hot_fastcall free_none(void attr_unused *data)
968 static void attr_hot_fastcall free_integer(void *data)
970 struct data *d = cast_cpp(struct data *, data);
971 mpint_free(&da(d,longint)->mp);
972 data_free(d);
975 static void attr_hot_fastcall free_array_same(void *data)
977 struct data *d = cast_cpp(struct data *, data);
978 index_free(&da(d,array_same)->n_entries);
979 data_free(d);
982 static void attr_hot_fastcall free_resource(void *data)
984 struct data *d = cast_cpp(struct data *, data);
985 da(d,resource)->close(d);
986 data_free(d);
989 void free_cache_entry(struct data *d, struct cache_entry *ce)
991 arg_t i;
992 for (i = 0; i < da(d,function)->n_arguments; i++)
993 pointer_dereference(ce->arguments[i]);
994 for (i = 0; i < da(d,function)->n_return_values; i++)
995 pointer_dereference(ce->returns[i].ptr);
996 mem_free(ce->returns);
997 mem_free(ce);
1000 static void attr_fastcall free_function(void *data)
1002 struct data *d = cast_cpp(struct data *, data);
1003 pointer_dereference(da(d,function)->types_ptr);
1004 #ifdef HAVE_CODEGEN
1005 pointer_dereference(da(d,function)->codegen);
1006 #endif
1007 if (unlikely(!da(d,function)->is_saved))
1008 mem_free(da(d,function)->code);
1009 mem_free(da(d,function)->local_variables);
1010 if (unlikely(!da(d,function)->is_saved))
1011 mem_free(da(d,function)->local_variables_flags);
1012 if (da(d,function)->args)
1013 mem_free(da(d,function)->args);
1014 mem_free(da(d,function)->function_name);
1015 if (unlikely(!da(d,function)->is_saved) && da(d,function)->lp)
1016 mem_free(da(d,function)->lp);
1017 while (unlikely(!tree_is_empty(&da(d,function)->cache))) {
1018 struct cache_entry *ce = get_struct(tree_any(&da(d,function)->cache), struct cache_entry, entry);
1019 tree_delete(&ce->entry);
1020 free_cache_entry(d, ce);
1022 if (profiling_escapes)
1023 mem_free(da(d,function)->escape_data);
1024 data_free(d);
1027 #ifdef HAVE_CODEGEN
1028 static void attr_fastcall free_codegen(void *data)
1030 struct data *d = cast_cpp(struct data *, data);
1031 #ifdef HAVE_CODEGEN_TRAPS
1032 #ifndef DEBUG_CRASH_HANDLER
1033 if (da(d,codegen)->trap_records_size)
1034 #endif
1036 rwmutex_lock_write(&traps_lock);
1037 tree_delete(&da(d,codegen)->codegen_tree);
1038 rwmutex_unlock_write(&traps_lock);
1040 #endif
1041 codegen_free(d);
1042 data_free(d);
1044 #endif
1046 static void * attr_hot_fastcall get_sub_record(void *data)
1048 struct data *d = cast_cpp(struct data *, data);
1049 const struct record_definition *def = type_def(da(d,record)->definition,record);
1050 frame_s *f = da_record_frame(d);
1051 frame_t slot;
1052 for (slot = 0; slot < def->n_slots; slot++) {
1053 pointer_t *ptr;
1054 /* !!! TODO: test multiple flags at once */
1055 if (!frame_test_flag(f, slot))
1056 continue;
1057 ptr = frame_pointer(f, slot);
1058 if (!pointer_is_empty(*ptr))
1059 return ptr;
1060 frame_clear_flag(f, slot);
1062 return NULL;
1065 static void * attr_hot_fastcall get_sub_option(void *data)
1067 struct data *d = cast_cpp(struct data *, data);
1068 pointer_t *ptr;
1069 ptr = &da(d,option)->pointer;
1070 if (!pointer_is_empty(*ptr))
1071 return ptr;
1072 return NULL;
1075 static void * attr_hot_fastcall get_sub_array_slice(void *data)
1077 struct data *d = cast_cpp(struct data *, data);
1078 pointer_t *ptr = &da(d,array_slice)->reference;
1079 if (!pointer_is_empty(*ptr))
1080 return ptr;
1081 return NULL;
1084 static void * attr_hot_fastcall get_sub_array_pointers(void *data)
1086 struct data *d = cast_cpp(struct data *, data);
1087 int_default_t x = da(d,array_pointers)->n_used_entries;
1088 while (x--) {
1089 pointer_t *ptr = &da(d,array_pointers)->pointer[x];
1090 if (!pointer_is_empty(*ptr))
1091 return ptr;
1092 da(d,array_pointers)->n_used_entries = x;
1094 return NULL;
1097 static void * attr_hot_fastcall get_sub_array_same(void *data)
1099 struct data *d = cast_cpp(struct data *, data);
1100 pointer_t *ptr = &da(d,array_same)->pointer;
1101 if (!pointer_is_empty(*ptr))
1102 return ptr;
1103 return NULL;
1106 static void * attr_hot_fastcall get_sub_array_btree(void *data)
1108 struct data *d = cast_cpp(struct data *, data);
1109 btree_entries_t x = da(d,array_btree)->n_used_btree_entries;
1110 while (x--) {
1111 pointer_t *ptr = &da(d,array_btree)->btree[x].node;
1112 if (!pointer_is_empty(*ptr))
1113 return ptr;
1114 da(d,array_btree)->n_used_btree_entries = x;
1115 index_free(&da(d,array_btree)->btree[x].end_index);
1117 return NULL;
1120 static void * attr_hot_fastcall get_sub_array_incomplete(void *data)
1122 struct data *d = cast_cpp(struct data *, data);
1123 if (!pointer_is_empty(da(d,array_incomplete)->first))
1124 return &da(d,array_incomplete)->first;
1125 if (!pointer_is_empty(da(d,array_incomplete)->next))
1126 return &da(d,array_incomplete)->next;
1127 return NULL;
1130 static void * attr_hot_fastcall get_sub_function_reference(void *data)
1132 struct data *d = cast_cpp(struct data *, data);
1133 arg_t ia = da(d,function_reference)->n_curried_arguments;
1134 pointer_t *prev;
1136 ia = da(d,function_reference)->n_curried_arguments;
1137 while (ia--) {
1138 if (da(d,function_reference)->arguments[ia].tag == TYPE_TAG_unknown) {
1139 pointer_t *ptr = &da(d,function_reference)->arguments[ia].u.ptr;
1140 if (!pointer_is_empty(*ptr))
1141 return ptr;
1143 da(d,function_reference)->n_curried_arguments = ia;
1146 if (da(d,function_reference)->is_indirect) {
1147 prev = &da(d,function_reference)->u.indirect;
1148 if (!pointer_is_empty(*prev))
1149 return prev;
1152 return NULL;
1155 static void * attr_hot_fastcall get_sub_function_call(void *data)
1157 struct thunk *t = cast_cpp(struct thunk *, data);
1159 address_unlock(t, DEPTH_THUNK);
1161 if (!pointer_is_empty(t->u.function_call.u.function_reference))
1162 return &t->u.function_call.u.function_reference;
1164 return NULL;
1167 static void * attr_hot_fastcall get_sub_blackhole(void *data)
1169 struct thunk *t = cast_cpp(struct thunk *, data);
1170 struct execution_control *ex;
1172 refcount_add(&n_dereferenced, 1);
1173 thunk_tag_set(t, THUNK_TAG_BLACKHOLE, THUNK_TAG_BLACKHOLE_DEREFERENCED);
1174 ex = execution_control_acquire_from_thunk(t);
1175 address_unlock(t, DEPTH_THUNK);
1176 if (ex)
1177 execution_control_unlink_and_submit(ex, true);
1179 return NULL;
1182 static void * attr_hot_fastcall get_sub_blackhole_some_dereferenced(void *data)
1184 struct thunk *t = cast_cpp(struct thunk *, data);
1185 struct execution_control *ex;
1187 refcount_add(&n_dereferenced, 1);
1188 thunk_tag_set(t, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED, THUNK_TAG_BLACKHOLE_DEREFERENCED);
1189 ex = execution_control_acquire_from_thunk(t);
1190 address_unlock(t, DEPTH_THUNK);
1191 if (ex)
1192 execution_control_unlink_and_submit(ex, true);
1194 return NULL;
1197 static void * attr_cold attr_fastcall get_sub_blackhole_dereferenced(void attr_unused *data)
1199 internal(file_line, "get_sub_blackhole_dereferenced called");
1200 return NULL;
1203 static void attr_cold attr_fastcall free_blackhole_dereferenced(void attr_unused *data)
1205 internal(file_line, "free_blackhole_dereferenced called");
1208 static void * attr_hot_fastcall get_sub_result(void *data)
1210 struct thunk *t = cast_cpp(struct thunk *, data);
1211 pointer_t *ptr;
1213 address_unlock(t, DEPTH_THUNK);
1215 ptr = &t->u.function_call.results[0].ptr;
1216 if (!pointer_is_empty(*ptr))
1217 return ptr;
1219 return NULL;
1222 static void * attr_hot_fastcall get_sub_exception(void *data)
1224 struct thunk *t = cast_cpp(struct thunk *, data);
1226 address_unlock(t, DEPTH_THUNK);
1228 return NULL;
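/*
 * Dropping one result of a multi-return call.  Depending on the parent
 * thunk's state, the whole call is dereferenced (call not yet started), the
 * result slot is merely marked unwanted (call running), or the already
 * computed result pointer is handed back to the dereference loop.  When the
 * last result reference goes away, a running call is switched to
 * THUNK_TAG_BLACKHOLE_DEREFERENCED and, if its execution control was parked,
 * that execution control is unlinked and resubmitted.
 */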
1231 static void * attr_hot_fastcall get_sub_multi_ret_reference(void *data)
1233 struct thunk *t = cast_cpp(struct thunk *, data);
1234 struct thunk *mt;
1235 struct execution_control *ex = NULL;
1236 tag_t tag;
1237 arg_t idx;
1239 address_unlock(t, DEPTH_THUNK);
1241 mt = t->u.multi_ret_reference.thunk;
1243 address_lock(mt, DEPTH_THUNK);
1245 idx = t->u.multi_ret_reference.idx;
1247 tag = thunk_tag(mt);
1248 if (tag == THUNK_TAG_FUNCTION_CALL) {
1249 if (thunk_refcount_is_one_nonatomic(mt)) {
1250 /* get_sub_function_call unlocks mt */
1251 pointer_t *ptr = get_sub_function_call(mt);
1252 if (ptr)
1253 return ptr;
1254 thunk_free(mt);
1255 return NULL;
1257 (void)thunk_dereference_nonatomic(mt);
1258 mt->u.function_call.results[idx].wanted = false;
1259 goto unlock_ret_false;
1261 if (tag == THUNK_TAG_BLACKHOLE) {
1262 thunk_tag_set(mt, THUNK_TAG_BLACKHOLE, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED);
1263 tag = THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED;
1265 if (tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
1266 mt->u.function_call.results[idx].wanted = false;
1267 if (thunk_dereference_nonatomic(mt)) {
1268 refcount_add(&n_dereferenced, 1);
1269 thunk_tag_set(mt, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED, THUNK_TAG_BLACKHOLE_DEREFERENCED);
1270 tag = THUNK_TAG_BLACKHOLE_DEREFERENCED;
1271 ex = execution_control_acquire_from_thunk(mt);
1273 goto unlock_ret_false;
1275 if (likely(tag == THUNK_TAG_RESULT)) {
1276 pointer_t *ptr = &mt->u.function_call.results[idx].ptr;
1277 if (!pointer_is_empty(*ptr)) {
1278 address_unlock(mt, DEPTH_THUNK);
1279 return ptr;
1281 if (thunk_dereference_nonatomic(mt))
1282 thunk_free(mt);
1283 goto unlock_ret_false;
1285 internal(file_line, "get_sub_multi_ret_reference: invalid thunk tag %u", tag);
1286 unlock_ret_false:
1287 address_unlock(mt, DEPTH_THUNK);
1288 if (ex)
1289 execution_control_unlink_and_submit(ex, true);
1290 return NULL;
1293 static void attr_cold attr_fastcall free_exception(void attr_unused *data)
1295 struct thunk *t = cast_cpp(struct thunk *, data);
1297 stack_trace_free(&t->u.exception.tr);
1298 if (t->u.exception.msg)
1299 mem_free(t->u.exception.msg);
1300 thunk_free(t);
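/*
 * Dereference a pointer without recursion.  When an object's reference count
 * drops to zero, its sub-pointers are visited through the per-tag get_sub()
 * method; instead of an explicit stack, the slot that held the sub-pointer is
 * reused to store the link back to the previous object ("backlink", pointer
 * reversal), so arbitrarily deep structures are freed in constant additional
 * space.  Read-only (refcount_is_read_only) objects are never freed.
 */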
1304 void attr_hot_fastcall pointer_dereference_(pointer_t top_ptr argument_position)
1306 refcount_t *r;
1307 const struct data_method *m;
1308 void *p;
1309 pointer_t current_ptr, backlink, *sub_ptr;
1310 tag_t tag;
1312 current_ptr = top_ptr;
1313 backlink = pointer_mark();
1315 retry_sub:
1316 r = pointer_get_refcount_(current_ptr);
1317 if (unlikely(refcount_is_read_only(r)))
1318 goto go_to_backlink;
1319 if (!refcount_dec_(r, caller_file_line_x))
1320 goto go_to_backlink;
1322 process_current:
1323 p = pointer_get_value_strip_tag_(current_ptr);
1325 if (unlikely(pointer_is_thunk(current_ptr))) {
1326 struct thunk *thunk = cast_cpp(struct thunk *, p);
1327 address_lock(thunk, DEPTH_THUNK);
1328 tag = thunk_tag(thunk);
1329 } else {
1330 struct data *data = cast_cpp(struct data *, p);
1331 tag = da_tag(data);
1333 m = &data_method_table[tag];
1335 sub_ptr = m->get_sub(p);
1336 if (sub_ptr) {
1337 ajla_assert(!pointer_is_empty(*sub_ptr), (file_line, "pointer_dereference_: empty pointer returned from %p", cast_ptr(void *, m)));
1339 if (!pointer_is_equal(current_ptr, backlink)) {
1340 #if defined(__IBMC__)
1341 /* a compiler bug */
1342 volatile
1343 #endif
1344 pointer_t old_current_ptr = current_ptr;
1345 current_ptr = *sub_ptr;
1346 *sub_ptr = backlink;
1347 backlink = old_current_ptr;
1348 goto retry_sub;
1349 } else {
1350 backlink = *sub_ptr;
1351 *sub_ptr = pointer_empty();
1352 goto process_current;
1356 m->free_object(p);
1358 go_to_backlink:
1359 if (!pointer_is_mark(backlink)) {
1360 current_ptr = backlink;
1361 goto process_current;
1366 static inline bool pointer_verify(pointer_t attr_unused *ptr, pointer_t attr_unused val)
1368 #ifndef THREAD_NONE
1369 bool ret;
1370 pointer_lock(ptr);
1371 ret = pointer_is_equal(*pointer_volatile(ptr), val);
1372 pointer_unlock(ptr);
1373 return ret;
1374 #else
1375 return true;
1376 #endif
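/*
 * Take a counted reference on *ptr.  In the lockless variant
 * (POINTER_FOLLOW_IS_LOCKLESS) data pointers only need an atomic increment;
 * thunk pointers take the thunk's address lock, re-verify that *ptr still
 * points at the same thunk, and collapse already finished thunks through
 * pointer_follow_thunk_() before retrying.  Without lockless following, the
 * whole operation runs under pointer_lock().  Read-only objects are returned
 * without touching the refcount.
 */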
1379 pointer_t attr_hot_fastcall pointer_reference_(pointer_t *ptr argument_position)
1381 #ifdef POINTER_FOLLOW_IS_LOCKLESS
1382 pointer_t p;
1383 refcount_t *r;
1384 retry:
1385 p = *pointer_volatile(ptr);
1386 r = pointer_get_refcount_(p);
1387 if (likely(!pointer_is_thunk(p))) {
1388 pointer_dependency_barrier();
1389 if (unlikely(refcount_is_read_only(r)))
1390 return p;
1391 refcount_inc_(r, caller_file_line_x);
1392 return p;
1393 } else {
1394 struct thunk *t = pointer_get_thunk(p);
1395 if (likely(!refcount_is_read_only(r))) {
1396 address_lock(t, DEPTH_THUNK);
1397 if (unlikely(!pointer_verify(ptr, p))) {
1398 address_unlock(t, DEPTH_THUNK);
1399 goto retry;
1401 if (thunk_is_finished(t)) {
1402 address_unlock(t, DEPTH_THUNK);
1403 pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);
1404 goto retry;
1406 refcount_inc_(r, caller_file_line_x);
1407 address_unlock(t, DEPTH_THUNK);
1409 return p;
1411 #else
1412 pointer_t p;
1413 refcount_t *r;
1414 pointer_lock(ptr);
1415 p = *ptr;
1416 r = pointer_get_refcount_(p);
1417 if (likely(!refcount_is_read_only(r)))
1418 refcount_inc_(r, caller_file_line_x);
1419 pointer_unlock(ptr);
1420 return p;
1421 #endif
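/*
 * When the caller allows borrowing (OPCODE_STRUCT_MAY_BORROW) and the value
 * is plain data, the pointer is copied into the frame slot without taking a
 * reference; the slot's flag stays clear, so frame cleanup will not
 * dereference it.  Otherwise a real reference is taken and recorded with
 * frame_set_pointer().
 */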
1424 void pointer_reference_maybe_(frame_s *fp, frame_t result, pointer_t *ptr, unsigned char flags argument_position)
1426 pointer_t p;
1427 if (flags & OPCODE_STRUCT_MAY_BORROW) {
1428 p = pointer_locked_read(ptr);
1429 if (likely(!pointer_is_thunk(p))) {
1430 ajla_assert(!frame_test_flag(fp, result), (file_line, "pointer_reference_maybe_: flag for slot %"PRIuMAX" already set", (uintmax_t)result));
1431 *frame_pointer(fp, result) = p;
1432 return;
1435 p = pointer_reference_(ptr pass_position);
1436 frame_set_pointer(fp, result, p);
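/*
 * Move the curried arguments of a (possibly chained) function_reference into
 * a freshly initialized frame.  References are walked from the outermost one
 * inwards (u.indirect), arguments from the last slot backwards.  Flat values
 * are copied straight into the variable slot (or boxed if the formal
 * parameter is not flat); boxed values are either referenced or, when
 * can_move is set and the reference is still writable, stolen by clearing
 * the source slot.
 */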
1439 void copy_from_function_reference_to_frame(frame_s *new_fp, struct data *ref, arg_t ia, char can_move)
1441 struct data *function = get_frame(new_fp)->function;
1442 while (1) {
1443 arg_t pi;
1444 if (!data_is_writable(ref))
1445 can_move = 0;
1447 pi = da(ref,function_reference)->n_curried_arguments;
1449 while (pi--) {
1450 frame_t target = da(function,function)->args[--ia].slot;
1451 type_tag_t tag = da(ref,function_reference)->arguments[pi].tag;
1452 if (tag != TYPE_TAG_unknown) {
1453 const struct type *new_type = frame_get_type_of_local(new_fp, target);
1454 if (TYPE_IS_FLAT(new_type)) {
1455 ajla_assert_lo(TYPE_TAG_IS_BUILTIN(new_type->tag) && new_type->size <= slot_size, (file_line, "copy_from_function_reference_to_frame: invalid type tag %u,%u,%u", new_type->tag, new_type->size, new_type->align));
1456 memcpy_fast(frame_var(new_fp, target), da(ref,function_reference)->arguments[pi].u.slot, new_type->size);
1457 } else {
1458 pointer_t ptr_data = flat_to_data(type_get_from_tag(tag), da(ref,function_reference)->arguments[pi].u.slot);
1459 frame_set_pointer(new_fp, target, ptr_data);
1461 } else {
1462 pointer_t *p = &da(ref,function_reference)->arguments[pi].u.ptr;
1463 if (!can_move) {
1464 frame_set_pointer(new_fp, target, pointer_reference(p));
1465 } else {
1466 frame_set_pointer(new_fp, target, *p);
1467 *p = pointer_empty();
1471 if (!da(ref,function_reference)->is_indirect)
1472 break;
1473 ref = pointer_get_data(da(ref,function_reference)->u.indirect);
1476 ajla_assert_lo(!ia, (file_line, "copy_from_function_reference_to_frame: the number of arguments doesn't match: %s, %"PRIuMAX"", da(function,function)->function_name, (uintmax_t)ia));
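/*
 * The central thunk-forcing routine.  Depending on the thunk's tag it:
 *  - propagates exceptions back to the original pointer,
 *  - collapses THUNK_TAG_RESULT thunks into the pointer itself,
 *  - resolves multi-return references against their parent call thunk,
 *  - turns a THUNK_TAG_FUNCTION_CALL into a blackhole, allocates an execution
 *    control plus a frame, copies the curried arguments in and returns the
 *    new execution control for the caller to run,
 *  - parks the caller's execution_control_wait on a blackhole's wait list.
 * ex_wait selects the mode: a wait structure, POINTER_FOLLOW_THUNK_NOEVAL
 * (resolve, but do not start evaluation) or POINTER_FOLLOW_THUNK_SPARK.
 */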
1480 void * attr_hot_fastcall pointer_follow_thunk_(pointer_t *ptr, void *ex_wait)
1482 pointer_t *orig_ptr = ptr;
1483 pointer_t pv;
1484 void *ret;
1485 struct execution_control *new_ex = NULL;
1486 struct thunk *t;
1487 struct thunk *error_thunk = NULL;
1488 tag_t t_tag;
1490 ajla_assert(ex_wait == POINTER_FOLLOW_THUNK_NOEVAL || ex_wait == POINTER_FOLLOW_THUNK_SPARK || !((struct execution_control_wait *)ex_wait)->mutex_to_lock, (file_line, "pointer_follow_thunk_: execution_control_wait is already waiting on %p", ((struct execution_control_wait *)ex_wait)->mutex_to_lock));
1492 retry:
1493 pv = pointer_locked_read(ptr);
1494 if (unlikely(!pointer_is_thunk(pv))) {
1495 ret = POINTER_FOLLOW_THUNK_RETRY;
1496 goto return_ret;
1498 t = pointer_get_thunk(pv);
1499 address_lock(t, DEPTH_THUNK);
1500 if (unlikely(!pointer_verify(ptr, pointer_thunk(t)))) {
1501 address_unlock(t, DEPTH_THUNK);
1502 ret = POINTER_FOLLOW_THUNK_RETRY;
1503 goto return_ret;
1506 t_tag = thunk_tag(t);
1507 if (unlikely(t_tag == THUNK_TAG_EXCEPTION)) {
1508 if (unlikely(orig_ptr != ptr)) {
1509 thunk_reference(t);
1510 address_unlock(t, DEPTH_THUNK);
1511 if (unlikely(error_thunk != NULL))
1512 pointer_dereference(pointer_thunk(error_thunk));
1513 error_thunk = t;
1514 ptr = orig_ptr;
1515 goto retry;
1517 address_unlock(t, DEPTH_THUNK);
1518 ret = POINTER_FOLLOW_THUNK_EXCEPTION;
1519 goto return_ret;
1522 if (unlikely(error_thunk != NULL)) {
1523 pointer_locked_write(ptr, pointer_thunk(error_thunk));
1524 address_unlock(t, DEPTH_THUNK);
1526 pointer_dereference(pointer_thunk(t));
1527 error_thunk = NULL;
1528 ret = unlikely(orig_ptr != ptr) ? POINTER_FOLLOW_THUNK_RETRY : POINTER_FOLLOW_THUNK_EXCEPTION;
1529 goto return_ret;
1532 if (t_tag == THUNK_TAG_RESULT) {
1533 process_result:
1534 pointer_lock(ptr);
1535 if (thunk_is_writable(t)) {
1536 *pointer_volatile(ptr) = t->u.function_call.results[0].ptr;
1537 pointer_unlock(ptr);
1538 address_unlock(t, DEPTH_THUNK);
1540 thunk_free(t);
1541 } else {
1542 pointer_t px = t->u.function_call.results[0].ptr;
1543 pointer_reference_owned(px);
1544 *pointer_volatile(ptr) = px;
1545 pointer_unlock(ptr);
1546 address_unlock(t, DEPTH_THUNK);
1548 pointer_dereference(pointer_thunk(t));
1550 ret = POINTER_FOLLOW_THUNK_RETRY;
1551 goto return_ret;
1554 if (t_tag == THUNK_TAG_MULTI_RET_REFERENCE) {
1555 struct thunk *mt = t->u.multi_ret_reference.thunk;
1556 tag_t mt_tag;
1557 if (unlikely(!address_trylock_second(t, mt, DEPTH_THUNK))) {
1558 address_unlock(t, DEPTH_THUNK);
1559 address_lock_two(t, mt, DEPTH_THUNK);
1560 if (unlikely(!pointer_verify(ptr, pointer_thunk(t))) || unlikely(thunk_tag(t) != THUNK_TAG_MULTI_RET_REFERENCE)) {
1561 address_unlock_second(t, mt, DEPTH_THUNK);
1562 address_unlock(t, DEPTH_THUNK);
1563 ret = POINTER_FOLLOW_THUNK_RETRY;
1564 goto return_ret;
1567 mt_tag = thunk_tag(mt);
1568 if (mt_tag == THUNK_TAG_RESULT) {
1569 arg_t idx = t->u.multi_ret_reference.idx;
1570 thunk_tag_set(t, THUNK_TAG_MULTI_RET_REFERENCE, THUNK_TAG_RESULT);
1571 t->u.function_call.results[0].ptr = mt->u.function_call.results[idx].ptr;
1572 pointer_poison(&mt->u.function_call.results[idx].ptr);
1573 if (thunk_dereference_nonatomic(mt)) {
1574 address_unlock_second(t, mt, DEPTH_THUNK);
1575 address_unlock(t, DEPTH_THUNK);
1576 thunk_free(mt);
1577 ret = POINTER_FOLLOW_THUNK_RETRY;
1578 goto return_ret;
1580 address_unlock_second(t, mt, DEPTH_THUNK);
1581 goto process_result;
1583 address_unlock_second(mt, t, DEPTH_THUNK);
1584 t = mt;
1585 t_tag = mt_tag;
1588 if (ex_wait == POINTER_FOLLOW_THUNK_NOEVAL) {
1589 /* the user doesn't want to evaluate the thunk */
1590 ret = POINTER_FOLLOW_THUNK_EXIT;
1591 address_unlock(t, DEPTH_THUNK);
1592 goto return_ret;
1595 if (t_tag == THUNK_TAG_FUNCTION_CALL) {
1596 ajla_error_t mayfail;
1597 frame_s *new_fp;
1598 struct data *top_reference, *function, *function_reference;
1599 arg_t total_arguments;
1601 pointer_t *pr, pq;
1603 total_arguments = 0;
1604 pr = &t->u.function_call.u.function_reference;
1605 while (1) {
1606 pq = pointer_locked_read(pr);
1607 if (unlikely(pointer_is_thunk(pq))) {
1608 evaluate_thunk:
1609 ptr = pr;
1610 address_unlock(t, DEPTH_THUNK);
1611 goto retry;
1613 function_reference = pointer_get_data(pq);
1614 total_arguments += da(function_reference,function_reference)->n_curried_arguments;
1615 if (!da(function_reference,function_reference)->is_indirect)
1616 break;
1617 pr = &da(function_reference,function_reference)->u.indirect;
1619 pr = da(function_reference,function_reference)->u.direct;
1620 pq = pointer_locked_read(pr);
1621 if (unlikely(pointer_is_thunk(pq)))
1622 goto evaluate_thunk;
1623 function = pointer_get_data(pq);
1625 ajla_assert_lo(da(function,function)->n_arguments == total_arguments, (file_line, "pointer_follow_thunk_: the number of arguments does not match: %s: %"PRIuMAX", %"PRIuMAX"", da(function,function)->function_name, (uintmax_t)da(function,function)->n_arguments, (uintmax_t)total_arguments));
1627 if (likely(!new_ex)) {
1628 new_ex = execution_control_alloc(MEM_DONT_TRY_TO_FREE);
1629 if (unlikely(!new_ex)) {
1630 address_unlock(t, DEPTH_THUNK);
1631 new_ex = execution_control_alloc(&mayfail);
1632 if (unlikely(!new_ex)) {
1633 error_thunk = thunk_alloc_exception_error(mayfail, NULL, NULL, NULL pass_file_line);
1635 goto retry;
1639 new_ex->thunk = t;
1640 if (likely(ex_wait != POINTER_FOLLOW_THUNK_SPARK)) {
1641 list_add(&new_ex->wait_list, &((struct execution_control_wait *)ex_wait)->wait_entry);
1642 ((struct execution_control_wait *)ex_wait)->mutex_to_lock = address_get_mutex(t, DEPTH_THUNK);
1644 top_reference = pointer_get_data(t->u.function_call.u.function_reference);
1645 t->u.function_call.u.execution_control = new_ex;
1646 if (da(function,function)->n_return_values == 1 || likely(thunk_refcount_get_nonatomic(t) == da(function,function)->n_return_values))
1647 thunk_tag_set(t, THUNK_TAG_FUNCTION_CALL, THUNK_TAG_BLACKHOLE);
1648 else
1649 thunk_tag_set(t, THUNK_TAG_FUNCTION_CALL, THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED);
1650 address_unlock(t, DEPTH_THUNK);
1652 #if 0
1653 if (!(rand() & 127)) {
1654 new_fp = NULL;
1655 mayfail = error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY);
1656 debug("testing failure");
1658 else
1659 #endif
1660 new_fp = stack_alloc(new_ex, function, &mayfail);
1661 if (unlikely(!new_fp)) {
1662 new_ex->stack = NULL;
1663 data_dereference(top_reference);
1664 error_thunk = thunk_alloc_exception_error(mayfail, NULL, NULL, NULL pass_file_line);
1665 thunk_terminate_with_value(new_ex->thunk, da(function,function)->n_return_values, pointer_thunk(error_thunk));
1666 error_thunk = NULL;
1667 new_ex = NULL;
1668 ret = POINTER_FOLLOW_THUNK_EXIT;
1669 goto return_ret;
1671 new_ex->current_frame = new_fp;
1672 new_ex->current_ip = 0;
1674 frame_init(new_fp, function, 0, CALL_MODE_NORMAL);
1675 copy_from_function_reference_to_frame(new_fp, top_reference, da(function,function)->n_arguments, true);
1677 data_dereference(top_reference);
1679 ret = new_ex;
1680 new_ex = NULL;
1681 goto return_ret;
1684 if (t_tag == THUNK_TAG_BLACKHOLE || t_tag == THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED) {
1685 if (likely(ex_wait != POINTER_FOLLOW_THUNK_SPARK)) {
1686 list_add(&t->u.function_call.u.execution_control->wait_list, &((struct execution_control_wait *)ex_wait)->wait_entry);
1687 ((struct execution_control_wait *)ex_wait)->mutex_to_lock = address_get_mutex(t, DEPTH_THUNK);
1689 address_unlock(t, DEPTH_THUNK);
1691 ret = POINTER_FOLLOW_THUNK_EXIT;
1692 goto return_ret;
1695 ret = NULL;
1696 internal(file_line, "pointer_follow_thunk_: invalid thunk tag %u", t_tag);
1698 return_ret:
1699 if (unlikely(error_thunk != NULL))
1700 pointer_dereference(pointer_thunk(error_thunk));
1701 if (unlikely(new_ex != NULL))
1702 mem_free(new_ex);
1703 return ret;
1706 void attr_fastcall pointer_resolve_result(pointer_t *ptr)
1708 again:
1709 if (pointer_is_thunk(*ptr))
1710 pointer_follow_thunk_noeval(ptr, goto again, break, break);
1713 void attr_fastcall pointer_follow_wait(frame_s *fp, const code_t *ip)
1715 struct execution_control *ex = frame_execution_control(fp);
1716 ex->current_frame = fp;
1717 ex->current_ip = frame_ip(fp, ip);
1719 waiting_list_add(ex);
1721 if (unlikely(refcount_dec(&ex->wait_state)))
1722 execution_control_unlink_and_submit(ex, true);
1725 bool attr_fastcall data_is_nan(type_tag_t type, const unsigned char attr_unused *ptr)
1727 switch (type) {
1728 #define f(n, t, nt, pack, unpack) \
1729 case TYPE_TAG_real + n: { \
1730 t val; \
1731 barrier_aliasing(); \
1732 val = *(t *)ptr; \
1733 barrier_aliasing(); \
1734 return cat(isnan_,t)(val); \
1736 for_all_real(f, for_all_empty)
1737 #undef f
1739 return false;
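/*
 * Box a flat (unboxed) value into heap data according to its type tag: flat
 * options become option data, scalars become flat data (NaN is turned into an
 * AJLA_ERROR_NAN exception), flat records are expanded entry by entry
 * (recursing for non-flat entry types), and flat arrays become flat array
 * data.  On any allocation failure an error pointer is returned.
 */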
1742 pointer_t flat_to_data(const struct type *type, const unsigned char *flat)
1744 ajla_error_t err;
1746 struct data *d;
1747 unsigned tag = type->tag;
1749 if (tag == TYPE_TAG_flat_option) {
1750 d = data_alloc_option_mayfail(&err pass_file_line);
1751 if (unlikely(!d))
1752 goto fail;
1753 da(d,option)->pointer = pointer_empty();
1754 da(d,option)->option = *cast_ptr(ajla_flat_option_t *, flat);
1755 } else if (TYPE_TAG_IS_FIXED(tag) || TYPE_TAG_IS_REAL(tag) || TYPE_TAG_IS_INT(tag)) {
1756 size_t size;
1757 size = type->size;
1758 if (unlikely(data_is_nan(tag, flat))) {
1759 err = error_ajla(EC_SYNC, AJLA_ERROR_NAN);
1760 goto fail;
1762 d = data_alloc_flat_mayfail(tag, flat, size, &err pass_file_line);
1763 if (unlikely(!d))
1764 goto fail;
1765 } else if (tag == TYPE_TAG_flat_record) {
1766 arg_t ai;
1767 const struct record_definition *def = type_def(type_def(type,flat_record)->base,record);
1768 d = data_alloc_record_mayfail(def, &err pass_file_line);
1769 if (unlikely(!d))
1770 goto fail;
1771 (void)memset(da_record_frame(d), 0, bitmap_slots(def->n_slots) * slot_size);
1772 for (ai = 0; ai < def->n_entries; ai++) {
1773 frame_t slot = record_definition_slot(def,ai);
1774 flat_size_t flat_offset = type_def(type,flat_record)->entries[slot].flat_offset;
1775 const struct type *entry_type = def->types[slot];
1776 const struct type *flat_type = type_def(type,flat_record)->entries[slot].subtype;
1777 if (TYPE_IS_FLAT(entry_type)) {
1778 ajla_assert_lo(type_is_equal(entry_type, flat_type), (file_line, "flat_to_data: copying between different types (%u,%u,%u) -> (%u,%u,%u)", flat_type->tag, flat_type->size, flat_type->align, entry_type->tag, entry_type->size, entry_type->align));
1779 memcpy_fast(frame_var(da_record_frame(d), slot), flat + flat_offset, entry_type->size);
1780 } else {
1781 pointer_t ptr = flat_to_data(flat_type, flat + flat_offset);
1782 frame_set_pointer(da_record_frame(d), slot, ptr);
1785 } else if (tag == TYPE_TAG_flat_array) {
1786 const struct flat_array_definition *flat_def = type_def(type,flat_array);
1787 ajla_assert(type->size == flat_def->n_elements * flat_def->base->size, (file_line, "flat_to_data: array size mismatch: %"PRIuMAX" != %"PRIuMAX" * %"PRIuMAX"", (uintmax_t)type->size, (uintmax_t)flat_def->n_elements, (uintmax_t)flat_def->base->size));
1788 d = data_alloc_array_flat_mayfail(flat_def->base, flat_def->n_elements, flat_def->n_elements, false, &err pass_file_line);
1789 if (unlikely(!d))
1790 goto fail;
1791 (void)memcpy(da_array_flat(d), flat, type->size);
1792 } else {
1793 internal(file_line, "flat_to_data: unknown type %u", tag);
1795 return pointer_data(d);
1797 fail:
1798 return pointer_error(err, NULL, NULL pass_file_line);
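/*
 * struct_clone() replaces *ptr with a one-level copy of the object it
 * points to: records get their frame copied and every pointer slot
 * re-referenced, options copy the tag and reference the payload pointer,
 * plain arrays are delegated to array_clone(), and incomplete arrays
 * reference their `first` and `next` parts.  The original object is
 * dereferenced afterwards; on allocation failure *ptr is replaced with an
 * exception pointer.
 */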
1802 void attr_fastcall struct_clone(pointer_t *ptr)
1804 ajla_error_t err;
1805 struct data *orig, *clone;
1807 orig = pointer_get_data(*ptr);
1808 switch (da_tag(orig)) {
1809 case DATA_TAG_record: {
1810 const struct record_definition *def;
1811 frame_t n_slots, slot;
1813 def = type_def(da(orig,record)->definition,record);
1814 n_slots = def->n_slots;
1815 clone = data_alloc_record_mayfail(def, &err pass_file_line);
1816 if (unlikely(!clone))
1817 goto fail;
1818 (void)memcpy_slots(cast_ptr(unsigned char *, da_record_frame(clone)), cast_ptr(unsigned char *, da_record_frame(orig)), n_slots);
1819 for (slot = 0; slot < n_slots; slot++) {
1820 if (frame_test_flag(da_record_frame(orig), slot))
1821 *frame_pointer(da_record_frame(clone), slot) = pointer_reference(frame_pointer(da_record_frame(orig), slot));
1823 break;
1825 case DATA_TAG_option: {
1826 clone = data_alloc(option, &err);
1827 if (unlikely(!clone))
1828 goto fail;
1829 da(clone,option)->option = da(orig,option)->option;
1830 if (likely(!pointer_is_empty(da(orig,option)->pointer)))
1831 da(clone,option)->pointer = pointer_reference(&da(orig,option)->pointer);
1832 else
1833 da(clone,option)->pointer = pointer_empty();
1834 break;
1836 case DATA_TAG_array_flat:
1837 case DATA_TAG_array_slice:
1838 case DATA_TAG_array_pointers:
1839 case DATA_TAG_array_same:
1840 case DATA_TAG_array_btree: {
1841 if (!array_clone(ptr, &err))
1842 goto fail;
1843 return;
1845 case DATA_TAG_array_incomplete: {
1846 pointer_t first = pointer_reference(&da(orig,array_incomplete)->first);
1847 pointer_t next = pointer_reference(&da(orig,array_incomplete)->next);
1848 clone = data_alloc_array_incomplete(pointer_get_data(first), next, &err pass_file_line);
1849 if (unlikely(!clone)) {
1850 pointer_dereference(first);
1851 pointer_dereference(next);
1852 goto fail;
1854 break;
1856 default:
1857 internal(file_line, "struct_clone: invalid data tag %u", da_tag(orig));
1859 pointer_dereference(*ptr);
1860 *ptr = pointer_data(clone);
1861 return;
1863 fail:
1864 pointer_dereference(*ptr);
1865 *ptr = pointer_error(err, NULL, NULL pass_file_line);
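/*
 * The deep_eval methods below implement the per-tag step of
 * pointer_deep_eval(): each one inspects a single data object, checks any
 * flat payload for NaNs, and appends the object's outgoing pointers to
 * *data_stack so that the iterative walk can visit them later.  Returning
 * false (with *err set) aborts the whole evaluation.
 */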
1869 static bool attr_fastcall deep_eval_nothing(struct data attr_unused *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t attr_unused *err)
1871 return true;
1874 static bool attr_fastcall deep_eval_flat(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
1876 if (unlikely(data_is_nan(da(d,flat)->data_type, da_flat(d)))) {
1877 fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");
1878 return false;
1880 return true;
1883 static bool attr_fastcall deep_eval_record(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
1885 const struct record_definition *def = type_def(da(d,record)->definition,record);
1886 frame_s *f = da_record_frame(d);
1887 frame_t slot = def->n_slots;
1888 while (slot--) {
1889 pointer_t *ptr;
1890 if (!frame_test_flag(f, slot)) {
1891 const struct type *t = def->types[slot];
1892 if (!t)
1893 continue;
1894 if (unlikely(data_is_nan(t->tag, frame_var(f, slot)))) {
1895 fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");
1896 return false;
1898 continue;
1900 ptr = frame_pointer(f, slot);
1901 if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))
1902 return false;
1904 return true;
1907 static bool attr_fastcall deep_eval_option(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
1909 if (pointer_is_empty(da(d,option)->pointer))
1910 return true;
1911 return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,option)->pointer, NULL, err);
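/*
 * NaN checking for flat aggregates: recurse_type() walks a flat type and
 * records a (real tag, byte offset) pair for every real-typed scalar
 * reachable through flat records and flat arrays.
 * deep_eval_array_test_nan() then scans each element of a flat array at
 * those offsets; types that cannot contain reals (fixed, integer,
 * flat_option) are accepted without building the list.
 */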
1914 struct real_pos {
1915 unsigned char tag;
1916 flat_size_t pos;
1919 static bool recurse_type(const struct type *type, flat_size_t offset, struct real_pos **rp, size_t *rp_size, ajla_error_t *err)
1921 if (TYPE_TAG_IS_REAL(type->tag)) {
1922 struct real_pos p;
1923 p.tag = type->tag;
1924 p.pos = offset;
1925 if (!array_add_mayfail(struct real_pos, rp, rp_size, p, NULL, err))
1926 return false;
1927 } else if (type->tag == TYPE_TAG_flat_record) {
1928 const struct flat_record_definition *def = type_def(type, flat_record);
1929 const struct record_definition *rec_def = type_def(def->base, record);
1930 frame_t slot;
1931 for (slot = 0; slot < rec_def->n_slots; slot++) {
1932 const struct flat_record_definition_entry *frde;
1933 const struct type *t = rec_def->types[slot];
1934 if (!t)
1935 continue;
1936 frde = &def->entries[slot];
1937 if (unlikely(!recurse_type(frde->subtype, offset + frde->flat_offset, rp, rp_size, err)))
1938 return false;
1940 } else if (type->tag == TYPE_TAG_flat_array) {
1941 const struct flat_array_definition *def = type_def(type, flat_array);
1942 const struct type *base = def->base;
1943 flat_size_t i;
1944 for (i = 0; i < def->n_elements; i++, offset += base->size) {
1945 if (unlikely(!recurse_type(base, offset, rp, rp_size, err)))
1946 return false;
1949 return true;
1952 static bool deep_eval_array_test_nan(const struct type *type, unsigned char *flat_data, int_default_t n_entries, ajla_error_t *err)
1954 struct real_pos *rp;
1955 size_t rp_size;
1956 int_default_t i;
1958 if (TYPE_TAG_IS_FIXED(type->tag) || likely(TYPE_TAG_IS_INT(type->tag)) || type->tag == TYPE_TAG_flat_option)
1959 return true;
1961 if (unlikely(!array_init_mayfail(struct real_pos, &rp, &rp_size, err)))
1962 return false;
1964 if (unlikely(!recurse_type(type, 0, &rp, &rp_size, err)))
1965 return false;
1967 if (likely(!rp_size))
1968 goto free_ret;
1970 for (i = 0; i < n_entries; i++, flat_data += type->size) {
1971 size_t j;
1972 j = 0;
1973 do {
1974 if (unlikely(data_is_nan(rp[j].tag, flat_data + rp[j].pos))) {
1975 fatal_mayfail(error_ajla(EC_SYNC, AJLA_ERROR_NAN), err, "NaN");
1976 mem_free(rp);
1977 return false;
1979 } while (unlikely(++j < rp_size));
1982 free_ret:
1983 mem_free(rp);
1984 return true;
1987 static bool attr_fastcall deep_eval_array_flat(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
1989 return deep_eval_array_test_nan(da(d,array_flat)->type, da_array_flat(d), da(d,array_flat)->n_used_entries, err);
1992 static bool attr_fastcall deep_eval_array_slice(struct data *d, pointer_t attr_unused ***data_stack, size_t attr_unused *data_stack_size, ajla_error_t *err)
1994 return deep_eval_array_test_nan(da(d,array_slice)->type, da(d,array_slice)->flat_data_minus_data_array_offset + data_array_offset, da(d,array_slice)->n_entries, err);
1997 static bool attr_fastcall deep_eval_array_pointers(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
1999 int_default_t x = da(d,array_pointers)->n_used_entries;
2000 while (x--) {
2001 pointer_t *ptr = &da(d,array_pointers)->pointer[x];
2002 if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))
2003 return false;
2005 return true;
2008 static bool attr_fastcall deep_eval_array_same(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
2010 return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_same)->pointer, NULL, err);
2013 static bool attr_fastcall deep_eval_array_btree(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
2015 btree_entries_t x = da(d,array_btree)->n_used_btree_entries;
2016 while (x--) {
2017 pointer_t *ptr = &da(d,array_btree)->btree[x].node;
2018 if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, ptr, NULL, err)))
2019 return false;
2021 return true;
2024 static bool attr_fastcall deep_eval_array_incomplete(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
2026 return likely(array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_incomplete)->first, NULL, err)) &&
2027 likely(array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,array_incomplete)->next, NULL, err));
2030 static bool attr_fastcall deep_eval_function_reference(struct data *d, pointer_t ***data_stack, size_t *data_stack_size, ajla_error_t *err)
2032 arg_t ia;
2034 ia = da(d,function_reference)->n_curried_arguments;
2035 while (ia--) {
2036 if (da(d,function_reference)->arguments[ia].tag == TYPE_TAG_unknown)
2037 if (unlikely(!array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,function_reference)->arguments[ia].u.ptr, NULL, err)))
2038 return false;
2040 if (da(d,function_reference)->is_indirect) {
2041 return array_add_mayfail(pointer_t *, data_stack, data_stack_size, &da(d,function_reference)->u.indirect, NULL, err);
2042 } else {
2043 return array_add_mayfail(pointer_t *, data_stack, data_stack_size, da(d,function_reference)->u.direct, NULL, err);
2047 struct processed_pointer {
2048 struct tree_entry entry;
2049 pointer_t *ptr;
2052 static int processed_compare(const struct tree_entry *e, uintptr_t v)
2054 struct processed_pointer *p = get_struct(e, struct processed_pointer, entry);
2055 if (ptr_to_num(p->ptr) < v)
2056 return -1;
2057 if (likely(ptr_to_num(p->ptr) > v))
2058 return 1;
2059 return 0;
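/*
 * pointer_deep_eval() forces a value and everything reachable from it.
 * It keeps an explicit stack of pointers still to visit (data_stack) and a
 * tree of already-processed pointers keyed by their address, so shared
 * sub-structures are visited only once.  Thunks are followed with
 * pointer_follow(..., PF_WAIT, ...): if evaluation has to wait, the wait
 * result is propagated to the caller; exceptions are returned through
 * *thunk together with POINTER_FOLLOW_THUNK_EXCEPTION, and full success
 * yields POINTER_FOLLOW_THUNK_GO.  See data_compare_array() below for a
 * caller that passes NULL frame/ip and only distinguishes the exception
 * case.
 */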
2062 void * attr_fastcall pointer_deep_eval(pointer_t *ptr, frame_s *fp, const code_t *ip, struct thunk **thunk)
2064 ajla_error_t err;
2065 struct data *d;
2066 tag_t tag;
2068 pointer_t **data_stack;
2069 size_t data_stack_size;
2071 struct tree processed;
2073 void *ret;
2075 tree_init(&processed);
2077 if (unlikely(!array_init_mayfail(pointer_t *, &data_stack, &data_stack_size, &err))) {
2078 return_err:
2079 *thunk = pointer_get_thunk(pointer_error(err, NULL, NULL pass_file_line));
2080 ret = POINTER_FOLLOW_THUNK_EXCEPTION;
2081 goto free_tree_ret;
2084 go_sub:
2085 pointer_follow(ptr, false, d, PF_WAIT, fp, ip,
2086 ret = ex_;
2087 goto free_tree_ret,
2088 thunk_reference(thunk_);
2089 *thunk = thunk_;
2090 ret = POINTER_FOLLOW_THUNK_EXCEPTION;
2091 goto free_tree_ret;
2094 tag = da_tag(d);
2096 if (unlikely(!data_method_table[tag].deep_eval(d, &data_stack, &data_stack_size, &err)))
2097 goto return_err;
2099 if (data_stack_size) {
2100 struct tree_insert_position ins;
2101 struct processed_pointer *pp = mem_alloc_mayfail(struct processed_pointer *, sizeof(struct processed_pointer), &err);
2102 if (unlikely(!pp))
2103 goto return_err;
2105 pp->ptr = ptr;
2106 if (unlikely(tree_find_for_insert(&processed, processed_compare, ptr_to_num(ptr), &ins) != NULL))
2107 internal(file_line, "pointer_deep_eval: pointer %p is already in the tree", ptr);
2108 tree_insert_after_find(&pp->entry, &ins);
2110 pop_another:
2111 ptr = data_stack[--data_stack_size];
2112 ajla_assert(!pointer_is_empty(*ptr), (file_line, "pointer_deep_eval: empty pointer, last tag %u", tag));
2114 if (unlikely(tree_find(&processed, processed_compare, ptr_to_num(ptr)) != NULL)) {
2115 if (data_stack_size)
2116 goto pop_another;
2117 } else {
2118 goto go_sub;
2122 ret = POINTER_FOLLOW_THUNK_GO;
2124 free_tree_ret:
2125 if (likely(data_stack != NULL))
2126 mem_free(data_stack);
2128 while (!tree_is_empty(&processed)) {
2129 struct processed_pointer *pp = get_struct(tree_any(&processed), struct processed_pointer, entry);
2130 tree_delete(&pp->entry);
2131 mem_free(pp);
2134 return ret;
2137 void * attr_fastcall frame_pointer_deep_eval(frame_s *fp, const code_t *ip, frame_t slot, struct thunk **thunk)
2139 if (frame_variable_is_flat(fp, slot)) {
2140 ajla_error_t err;
2141 if (unlikely(!deep_eval_array_test_nan(frame_get_type_of_local(fp, slot), frame_var(fp, slot), 1, &err))) {
2142 *thunk = pointer_get_thunk(pointer_error(err, NULL, NULL pass_file_line));
2143 return POINTER_FOLLOW_THUNK_EXCEPTION;
2145 return POINTER_FOLLOW_THUNK_GO;
2147 return pointer_deep_eval(frame_pointer(fp, slot), fp, ip, thunk);
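/*
 * mpint_export() / mpint_export_unsigned() write a long integer into the
 * fixed-width signed resp. unsigned type selected by `intx` (an index into
 * the for_all_fixed() type list).  They return the result of the underlying
 * mpint_export_to_* conversion, i.e. false with *err set when the value
 * cannot be exported (for example because it does not fit).
 */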
2151 bool attr_fastcall mpint_export(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err)
2153 #define f(n, s, u, sz, bits) \
2154 case n: { \
2155 bool ret; \
2156 barrier_aliasing(); \
2157 ret = cat(mpint_export_to_,s)(m, cast_ptr(s *, ptr), err);\
2158 barrier_aliasing(); \
2159 return ret; \
2161 switch (intx) {
2162 for_all_fixed(f)
2163 default:
2164 internal(file_line, "mpint_export: invalid type %d", intx);
2166 #undef f
2167 not_reached();
2168 return false;
2171 bool attr_fastcall mpint_export_unsigned(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err)
2173 #define f(n, s, u, sz, bits) \
2174 case n: { \
2175 bool ret; \
2176 barrier_aliasing(); \
2177 ret = cat(mpint_export_to_,u)(m, cast_ptr(u *, ptr), err);\
2178 barrier_aliasing(); \
2179 return ret; \
2181 switch (intx) {
2182 for_all_fixed(f)
2183 default:
2184 internal(file_line, "mpint_export_unsigned: invalid type %d", intx);
2186 #undef f
2187 not_reached();
2188 return false;
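/*
 * data_compare_numbers() compares two numeric or option values, each given
 * either flat (flat1/flat2, typed by `tt`) or boxed (ptr1/ptr2).  Boxed
 * flat data supplies its own type tag, options are converted to flat
 * options, and long integers are flattened with mpint_export() once a
 * concrete tag is known.  If both sides end up flat they are compared with
 * type_memcmp(); if only one side could be flattened, the flat side orders
 * first; two long integers are compared with mpint_less()/mpint_equal().
 * The result is -1, 0 or 1.
 */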
2191 int data_compare_numbers(type_tag_t tt, unsigned char *flat1, pointer_t ptr1, unsigned char *flat2, pointer_t ptr2)
2193 struct data *d1 = NULL, *d2 = NULL; /* avoid warning */
2194 union {
2195 intbig_t big;
2196 ajla_flat_option_t opt;
2197 unsigned char flat[1];
2198 } u1;
2199 union {
2200 intbig_t big;
2201 ajla_flat_option_t opt;
2202 unsigned char flat[1];
2203 } u2;
2204 ajla_flat_option_t r;
2205 ajla_error_t exp_err;
2206 if (!flat1) {
2207 tag_t tag1;
2208 ajla_option_t opt1;
2209 d1 = pointer_get_data(ptr1);
2210 tag1 = da_tag(d1);
2211 switch (tag1) {
2212 case DATA_TAG_flat:
2213 tt = da(d1,flat)->data_type;
2214 flat1 = da_flat(d1);
2215 break;
2216 case DATA_TAG_longint:
2217 if (tt == TYPE_TAG_unknown) {
2218 if (!flat2) {
2219 d2 = pointer_get_data(ptr2);
2220 if (da_tag(d2) == DATA_TAG_flat)
2221 tt = da(d2,flat)->data_type;
2224 if (tt != TYPE_TAG_unknown) {
2225 if (mpint_export(&da(d1,longint)->mp, u1.flat, TYPE_TAG_IDX_INT(tt), &exp_err))
2226 flat1 = u1.flat;
2228 break;
2229 case DATA_TAG_option:
2230 tt = TYPE_TAG_flat_option;
2231 opt1 = da(d1,option)->option;
2232 if (unlikely(opt1 != (ajla_flat_option_t)opt1))
2233 return 1;
2234 u1.opt = opt1;
2235 flat1 = u1.flat;
2236 break;
2237 default:
2238 internal(file_line, "data_compare_numbers: invalid tag %u", tag1);
2241 if (!flat2) {
2242 tag_t tag2;
2243 ajla_option_t opt2;
2244 d2 = pointer_get_data(ptr2);
2245 tag2 = da_tag(d2);
2246 switch (tag2) {
2247 case DATA_TAG_flat:
2248 tt = da(d2,flat)->data_type;
2249 flat2 = da_flat(d2);
2250 break;
2251 case DATA_TAG_longint:
2252 if (tt != TYPE_TAG_unknown) {
2253 if (mpint_export(&da(d2,longint)->mp, u2.flat, TYPE_TAG_IDX_INT(tt), &exp_err))
2254 flat2 = u2.flat;
2256 break;
2257 case DATA_TAG_option:
2258 tt = TYPE_TAG_flat_option;
2259 opt2 = da(d2,option)->option;
2260 if (unlikely(opt2 != (ajla_flat_option_t)opt2))
2261 return -1;
2262 u2.opt = opt2;
2263 flat2 = u2.flat;
2264 break;
2265 default:
2266 internal(file_line, "data_compare_numbers: invalid tag %u", tag2);
2269 if (flat1 && flat2) {
2270 int c = type_memcmp(flat1, flat2, type_get_from_tag(tt), 1);
2271 if (c < 0)
2272 return -1;
2273 if (c > 0)
2274 return 1;
2275 return 0;
2277 if (flat1)
2278 return -1;
2279 if (flat2)
2280 return 1;
2281 mpint_less(&da(d1,longint)->mp, &da(d2,longint)->mp, &r, NULL);
2282 if (r)
2283 return -1;
2284 mpint_equal(&da(d1,longint)->mp, &da(d2,longint)->mp, &r, NULL);
2285 if (r)
2286 return 0;
2287 return 1;
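/*
 * Structural comparison machinery: data_compare() below keeps an explicit
 * stack of struct compare_status frames instead of recursing.  Each tag's
 * compare method returns -1/0/1 for an ordering decision, 2 to request
 * descending into the child pair stored in new_cs, or DATA_COMPARE_OOM on
 * allocation failure; `init` is true the first time a frame is visited and
 * false when it is resumed after a child pair compared equal.  The
 * per-frame destruct callback releases whatever the method allocated
 * (array indices, temporary boxed values, argument arrays).
 */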
2290 struct array_compare_context {
2291 unsigned char *flat;
2292 const struct type *type;
2293 int_default_t n_elements;
2294 pointer_t *ptr;
2297 static int_default_t array_compare_callback(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t *ptr, void *context)
2299 struct array_compare_context *ac = context;
2300 ajla_assert_lo(n_elements > 0, (file_line, "array_compare_callback: unexpected thunk"));
2301 ac->flat = flat;
2302 ac->type = type;
2303 ac->n_elements = n_elements;
2304 ac->ptr = ptr;
2305 return 0;
2308 static void attr_fastcall cs_empty_destruct(struct compare_status attr_unused *cs)
2312 static int attr_fastcall data_compare_nothing(struct compare_status attr_unused *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
2314 struct data *d1 = pointer_get_data(cs->ptr1);
2315 struct data *d2 = pointer_get_data(cs->ptr2);
2316 internal(file_line, "data_compare_nothing: comparing tags %u, %u", da_tag(d1), da_tag(d2));
2317 return DATA_COMPARE_OOM;
2320 static int attr_fastcall data_compare_number(struct compare_status *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
2322 return data_compare_numbers(TYPE_TAG_unknown, NULL, cs->ptr1, NULL, cs->ptr2);
2325 static int attr_fastcall data_compare_record(struct compare_status *cs, struct compare_status *new_cs, bool init)
2327 struct data *d1 = pointer_get_data(cs->ptr1);
2328 struct data *d2 = pointer_get_data(cs->ptr2);
2329 frame_s *f1 = da_record_frame(d1);
2330 frame_s *f2 = da_record_frame(d2);
2331 const struct record_definition *def = type_def(da(d1,record)->definition,record);
2332 ajla_assert(def->n_slots == type_def(da(d2,record)->definition,record)->n_slots, (file_line, "data_compare_record: mismatched record definition"));
2333 if (init)
2334 cs->u.record.ai = 0;
2335 while (cs->u.record.ai < def->n_entries) {
2336 frame_t slot = record_definition_slot(def, cs->u.record.ai);
2337 const struct type *t = def->types[slot];
2338 if (frame_test_flag(f1, slot) && frame_test_flag(f2, slot)) {
2339 new_cs->ptr1 = *frame_pointer(f1, slot);
2340 new_cs->ptr2 = *frame_pointer(f2, slot);
2341 cs->u.record.ai++;
2342 return 2;
2343 } else {
2344 unsigned char *flat1 = !frame_test_flag(f1, slot) ? frame_var(f1, slot) : NULL;
2345 unsigned char *flat2 = !frame_test_flag(f2, slot) ? frame_var(f2, slot) : NULL;
2346 pointer_t ptr1 = !frame_test_flag(f1, slot) ? pointer_empty() : *frame_pointer(f1, slot);
2347 pointer_t ptr2 = !frame_test_flag(f2, slot) ? pointer_empty() : *frame_pointer(f2, slot);
2348 int c = data_compare_numbers(t->tag, flat1, ptr1, flat2, ptr2);
2349 if (c)
2350 return c;
2351 cs->u.record.ai++;
2354 return 0;
2357 static int attr_fastcall data_compare_option(struct compare_status *cs, struct compare_status *new_cs, bool init)
2359 struct data *d1 = pointer_get_data(cs->ptr1);
2360 struct data *d2 = pointer_get_data(cs->ptr2);
2361 pointer_t ptr1, ptr2;
2362 if (da(d1,option)->option < da(d2,option)->option)
2363 return -1;
2364 if (da(d1,option)->option > da(d2,option)->option)
2365 return 1;
2366 ptr1 = da(d1,option)->pointer;
2367 ptr2 = da(d2,option)->pointer;
2368 ajla_assert(pointer_is_empty(ptr1) == pointer_is_empty(ptr2), (file_line, "data_compare_option: mismatching pointers"));
2369 if (init && !pointer_is_empty(ptr1)) {
2370 new_cs->ptr1 = ptr1;
2371 new_cs->ptr2 = ptr2;
2372 return 2;
2374 return 0;
2377 static void attr_fastcall cs_array_destruct(struct compare_status *cs)
2379 index_free(&cs->u.array.len);
2380 index_free(&cs->u.array.idx);
2381 if (!pointer_is_empty(cs->u.array.p1))
2382 pointer_dereference(cs->u.array.p1);
2383 if (!pointer_is_empty(cs->u.array.p2))
2384 pointer_dereference(cs->u.array.p2);
2387 static int attr_fastcall data_compare_array(struct compare_status *cs, struct compare_status *new_cs, bool init)
2389 struct data *d1 = pointer_get_data(cs->ptr1);
2390 struct data *d2 = pointer_get_data(cs->ptr2);
2391 if (init) {
2392 array_index_t len1, len2;
2393 cs->u.array.p1 = pointer_empty();
2394 cs->u.array.p2 = pointer_empty();
2395 len1 = array_len(d1);
2396 len2 = array_len(d2);
2397 if (!index_ge_index(len1, len2)) {
2398 index_free(&len1);
2399 index_free(&len2);
2400 return -1;
2402 if (!index_ge_index(len2, len1)) {
2403 index_free(&len1);
2404 index_free(&len2);
2405 return 1;
2407 index_free(&len2);
2408 cs->u.array.len = len1;
2409 index_from_int(&cs->u.array.idx, 0);
2410 cs->destruct = cs_array_destruct;
2413 while (!index_ge_index(cs->u.array.idx, cs->u.array.len)) {
2414 pointer_t ptr;
2415 struct array_compare_context ctx1, ctx2;
2417 if (!pointer_is_empty(cs->u.array.p1))
2418 pointer_dereference(cs->u.array.p1), cs->u.array.p1 = pointer_empty();
2419 if (!pointer_is_empty(cs->u.array.p2))
2420 pointer_dereference(cs->u.array.p2), cs->u.array.p2 = pointer_empty();
2422 ptr = pointer_data(d1);
2423 if (unlikely(array_btree_iterate(&ptr, &cs->u.array.idx, array_compare_callback, &ctx1)))
2424 internal(file_line, "data_compare_array: iterator unexpectedly succeeded");
2425 ptr = pointer_data(d2);
2426 if (unlikely(array_btree_iterate(&ptr, &cs->u.array.idx, array_compare_callback, &ctx2)))
2427 internal(file_line, "data_compare_array: iterator unexpectedly succeeded");
2429 if (ctx1.flat && ctx2.flat) {
2430 int c;
2431 int_default_t m = minimum(ctx1.n_elements, ctx2.n_elements);
2432 ajla_assert(ctx1.type->tag == ctx2.type->tag, (file_line, "data_compare_array: array types do not match: %u,%u", ctx1.type->tag, ctx2.type->tag));
2433 c = type_memcmp(ctx1.flat, ctx2.flat, ctx1.type, m);
2434 if (c) {
2435 if (c < 0)
2436 return -1;
2437 else
2438 return 1;
2440 index_add_int(&cs->u.array.idx, m);
2441 } else {
2442 struct thunk *thunk;
2443 if (unlikely(ctx1.flat != NULL)) {
2444 new_cs->ptr1 = cs->u.array.p1 = flat_to_data(ctx1.type, ctx1.flat);
2445 if (unlikely(pointer_deep_eval(&cs->u.array.p1, NULL, NULL, &thunk) == POINTER_FOLLOW_THUNK_EXCEPTION)) {
2446 pointer_dereference(pointer_thunk(thunk));
2447 return DATA_COMPARE_OOM;
2449 } else {
2450 new_cs->ptr1 = *ctx1.ptr;
2452 if (unlikely(ctx2.flat != NULL)) {
2453 new_cs->ptr2 = cs->u.array.p2 = flat_to_data(ctx2.type, ctx2.flat);
2454 if (unlikely(pointer_deep_eval(&cs->u.array.p2, NULL, NULL, &thunk) == POINTER_FOLLOW_THUNK_EXCEPTION)) {
2455 pointer_dereference(pointer_thunk(thunk));
2456 return DATA_COMPARE_OOM;
2458 } else {
2459 new_cs->ptr2 = *ctx2.ptr;
2461 index_add_int(&cs->u.array.idx, 1);
2462 return 2;
2466 return 0;
2469 static void attr_fastcall cs_function_reference_destruct(struct compare_status *cs)
2471 mem_free(cs->u.function_reference.args1);
2472 mem_free(cs->u.function_reference.args2);
2475 static void acquire_function_reference_args(struct data *d, struct function_argument ***args, size_t *n_args, struct data **function)
2477 array_init(struct function_argument *, args, n_args);
2478 while (1) {
2479 arg_t ai;
2480 ai = da(d,function_reference)->n_curried_arguments;
2481 while (ai--) {
2482 array_add(struct function_argument *, args, n_args, &da(d,function_reference)->arguments[ai]);
2484 if (!da(d,function_reference)->is_indirect)
2485 break;
2486 d = pointer_get_data(da(d,function_reference)->u.indirect);
2488 *function = pointer_get_data(*da(d,function_reference)->u.direct);
2491 static int attr_fastcall data_compare_function_reference(struct compare_status *cs, struct compare_status *new_cs, bool init)
2493 struct data *d1 = pointer_get_data(cs->ptr1);
2494 struct data *d2 = pointer_get_data(cs->ptr2);
2495 if (init) {
2496 size_t l1, l2;
2497 struct data *fn1, *fn2;
2498 acquire_function_reference_args(d1, &cs->u.function_reference.args1, &l1, &fn1);
2499 acquire_function_reference_args(d2, &cs->u.function_reference.args2, &l2, &fn2);
2500 cs->destruct = cs_function_reference_destruct;
2501 if (ptr_to_num(fn1) != ptr_to_num(fn2)) {
2502 /* !!! FIXME: compare function unique id here */
2503 if (ptr_to_num(fn1) < ptr_to_num(fn2))
2504 return -1;
2505 else
2506 return 1;
2508 ajla_assert(l1 == l2, (file_line, "data_compare_function_reference: the number of arguments doesn't match: %"PRIuMAX" != %"PRIuMAX"", (uintmax_t)l1, (uintmax_t)l2));
2509 cs->u.function_reference.l = l1;
2511 while (cs->u.function_reference.l--) {
2512 struct function_argument *a1 = cs->u.function_reference.args1[cs->u.function_reference.l];
2513 struct function_argument *a2 = cs->u.function_reference.args2[cs->u.function_reference.l];
2514 if (a1->tag == TYPE_TAG_unknown && a2->tag == TYPE_TAG_unknown) {
2515 new_cs->ptr1 = a1->u.ptr;
2516 new_cs->ptr2 = a2->u.ptr;
2517 return 2;
2518 } else {
2519 unsigned char *flat1 = a1->tag != TYPE_TAG_unknown ? a1->u.slot : NULL;
2520 unsigned char *flat2 = a2->tag != TYPE_TAG_unknown ? a2->u.slot : NULL;
2521 pointer_t ptr1 = a1->tag != TYPE_TAG_unknown ? pointer_empty() : a1->u.ptr;
2522 pointer_t ptr2 = a2->tag != TYPE_TAG_unknown ? pointer_empty() : a2->u.ptr;
2523 type_tag_t tt = a1->tag != TYPE_TAG_unknown ? a1->tag : a2->tag;
2524 int c = data_compare_numbers(tt, flat1, ptr1, flat2, ptr2);
2525 if (c)
2526 return c;
2529 return 0;
2532 static int attr_fastcall data_compare_resource(struct compare_status *cs, struct compare_status attr_unused *new_cs, bool attr_unused init)
2534 uintptr_t p1 = ptr_to_num(pointer_get_data(cs->ptr1));
2535 uintptr_t p2 = ptr_to_num(pointer_get_data(cs->ptr2));
2536 if (p1 < p2)
2537 return -1;
2538 if (p1 > p2)
2539 return 1;
2540 return 0;
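/*
 * Usage sketch for data_compare(): the operands are assumed, based on
 * data_compare_nothing() above, to be fully evaluated data pointers of the
 * same shape; `p1` and `p2` are hypothetical values owned by the caller.
 *
 *	ajla_error_t err;
 *	int c = data_compare(p1, p2, &err);
 *	if (c == DATA_COMPARE_OOM)
 *		... comparison failed (out of memory) ...
 *	else if (c < 0)   ... p1 orders before p2 ...
 *	else if (c > 0)   ... p2 orders before p1 ...
 *	else              ... the values are equal ...
 */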
2543 int attr_fastcall data_compare(pointer_t ptr1, pointer_t ptr2, ajla_error_t *mayfail)
2545 void *err_ptr;
2546 struct compare_status *cs;
2547 size_t cs_len, i;
2548 struct compare_status ccs;
2549 int c;
2551 ccs.ptr1 = ptr1;
2552 ccs.ptr2 = ptr2;
2553 if (unlikely(!array_init_mayfail(struct compare_status, &cs, &cs_len, mayfail)))
2554 return DATA_COMPARE_OOM;
2556 new_ptr:
2557 ccs.tag = da_tag(pointer_get_data(ccs.ptr1));
2558 ccs.destruct = cs_empty_destruct;
2559 if (unlikely(!array_add_mayfail(struct compare_status, &cs, &cs_len, ccs, &err_ptr, mayfail))) {
2560 cs = err_ptr;
2561 c = DATA_COMPARE_OOM;
2562 goto ret_c;
2565 if (pointer_is_equal(ccs.ptr1, ccs.ptr2)) {
2566 c = 0;
2567 goto go_up;
2570 ajla_assert(data_method_table[cs[cs_len - 1].tag].compare == data_method_table[da_tag(pointer_get_data(ccs.ptr2))].compare, (file_line, "data_compare: mismatching tags: %u, %u", cs[cs_len - 1].tag, da_tag(pointer_get_data(ccs.ptr2))));
2571 c = data_method_table[cs[cs_len - 1].tag].compare(&cs[cs_len - 1], &ccs, true);
2572 test_c:
2573 if (c) {
2574 if (c == 2)
2575 goto new_ptr;
2576 goto ret_c;
2579 go_up:
2580 cs[cs_len - 1].destruct(&cs[cs_len - 1]);
2581 if (--cs_len) {
2582 c = data_method_table[cs[cs_len - 1].tag].compare(&cs[cs_len - 1], &ccs, false);
2583 goto test_c;
2586 ret_c:
2587 for (i = 0; i < cs_len; i++)
2588 cs[i].destruct(&cs[i]);
2589 mem_free(cs);
2590 return c;
2594 /**********************
2595  * DATA SERIALIZATION *
2596  **********************/
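/*
 * The object graph is described to the save machinery as a stack of
 * struct stack_entry items.  Each stack_entry_type supplies: get_ptr()
 * (the real address of the block), get_properties() (its alignment, size
 * and the embedded pointers to follow), fixup_after_copy() (run once on
 * the copied block) and a fixup callback that relocates one embedded
 * pointer by the given offset.  save_run covers raw memory runs, save_slice
 * is a variant used for the flat-data pointer of an array slice, save_type
 * covers type descriptors (recursing into record and array component
 * types), save_index the mpint behind a large array index, and save_pointer
 * an arbitrary data/thunk pointer - it resolves result thunks first and
 * marks the copied object's reference count read-only.
 */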
2598 static const struct stack_entry_type save_type;
2600 static void no_fixup_after_copy(void attr_unused *new_ptr)
2604 static void *save_run_get_ptr(struct stack_entry *ste)
2606 void *p;
2607 memcpy(&p, ste->ptr, sizeof(void *));
2608 return p;
2611 static bool save_run_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
2613 *align = ste->align;
2614 *size = ste->size;
2615 *subptrs = NULL;
2616 *subptrs_len = 0;
2617 return true;
2620 static void ptr_fixup_sub_ptr(void *loc, uintptr_t offset)
2622 #if defined(HAVE_REAL_GNUC) && !GNUC_ATLEAST(3,0,0) /* EGCS bug */
2623 *(char **)loc += offset;
2624 #else
2625 void *p;
2626 uintptr_t num;
2627 memcpy(&p, loc, sizeof(void *));
2628 num = ptr_to_num(p);
2629 num += offset;
2630 p = num_to_ptr(num);
2631 memcpy(loc, &p, sizeof(void *));
2632 #endif
2635 static const struct stack_entry_type save_run = {
2636 save_run_get_ptr,
2637 save_run_get_properties,
2638 no_fixup_after_copy,
2639 ptr_fixup_sub_ptr,
2640 true,
2643 static const struct stack_entry_type save_slice = {
2644 NULL,
2645 save_run_get_properties,
2646 no_fixup_after_copy,
2647 ptr_fixup_sub_ptr,
2648 true,
2652 static bool save_type_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
2654 ajla_error_t sink;
2655 const struct type *t = *cast_ptr(const struct type **, ste->ptr);
2656 switch (t->tag) {
2657 case TYPE_TAG_record: {
2658 struct record_definition *rec = type_def(t,record);
2659 struct stack_entry *subp;
2660 size_t i, ii;
2661 if (unlikely(!((size_t)rec->n_entries + 1)))
2662 return false;
2663 subp = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)rec->n_entries + 1, sizeof(struct stack_entry), &sink);
2664 if (unlikely(!subp))
2665 return false;
2666 subp[0].t = &save_run;
2667 subp[0].ptr = &rec->idx_to_frame;
2668 subp[0].align = align_of(frame_t);
2669 subp[0].size = rec->n_entries * sizeof(frame_t);
2670 ii = 1;
2671 for (i = 0; i < rec->n_entries; i++) {
2672 frame_t slot = rec->idx_to_frame[i];
2673 if (unlikely(slot == NO_FRAME_T))
2674 continue;
2675 subp[ii].t = &save_type;
2676 subp[ii].ptr = &rec->types[slot];
2677 ii++;
2679 *subptrs = subp;
2680 *subptrs_len = ii;
2681 *align = align_of(struct record_definition);
2682 *size = offsetof(struct record_definition, types[rec->n_slots]);
2683 break;
2685 case TYPE_TAG_flat_record: {
2686 struct flat_record_definition *def = type_def(t,flat_record);
2687 struct record_definition *rec = type_def(def->base,record);
2688 struct stack_entry *subp;
2689 size_t i, ii;
2690 if (unlikely(!((size_t)rec->n_entries + 1)))
2691 return false;
2692 subp = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)rec->n_entries + 1, sizeof(struct stack_entry), &sink);
2693 if (unlikely(!subp))
2694 return false;
2695 subp[0].t = &save_type;
2696 subp[0].ptr = &def->base;
2697 ii = 1;
2698 for (i = 0; i < rec->n_entries; i++) {
2699 frame_t slot = rec->idx_to_frame[i];
2700 if (unlikely(slot == NO_FRAME_T))
2701 continue;
2702 subp[ii].t = &save_type;
2703 subp[ii].ptr = &def->entries[slot].subtype;
2704 ii++;
2706 *align = align_of(struct flat_record_definition);
2707 *size = offsetof(struct flat_record_definition, entries[rec->n_slots]);
2708 *subptrs = subp;
2709 *subptrs_len = ii;
2710 break;
2712 case TYPE_TAG_flat_array: {
2713 struct flat_array_definition *def = type_def(t,flat_array);
2714 struct stack_entry *subp = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
2715 if (unlikely(!subp))
2716 return false;
2717 subp->t = &save_type;
2718 subp->ptr = &def->base;
2719 *align = align_of(struct flat_array_definition);
2720 *size = sizeof(struct flat_array_definition);
2721 *subptrs = subp;
2722 *subptrs_len = 1;
2723 break;
2725 default: {
2726 TYPE_TAG_VALIDATE(t->tag);
2727 *align = align_of(struct type);
2728 *size = sizeof(struct type);
2729 *subptrs = NULL;
2730 *subptrs_len = 0;
2731 break;
2734 return true;
2737 static const struct stack_entry_type save_type = {
2738 save_run_get_ptr,
2739 save_type_get_properties,
2740 no_fixup_after_copy,
2741 ptr_fixup_sub_ptr,
2742 true,
2745 static void *save_index_get_ptr(struct stack_entry *ste)
2747 array_index_t *idx = ste->ptr;
2748 mpint_t *mp = index_get_mp(*idx);
2749 return mp;
2752 static bool save_index_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
2754 ajla_error_t sink;
2755 mpint_t *mp = save_index_get_ptr(ste);
2756 struct stack_entry *subp;
2757 *align = align_of(mpint_t);
2758 *size = sizeof(mpint_t);
2759 if (unlikely(!mp->_mp_size)) {
2760 *subptrs = NULL;
2761 *subptrs_len = 0;
2762 return true;
2764 subp = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
2765 if (unlikely(!subp))
2766 return false;
2767 subp->t = &save_run;
2768 subp->ptr = &mp->_mp_d;
2769 subp->align = align_of(mp_limb_t);
2770 subp->size = (size_t)abs(mp->_mp_size) * sizeof(mp_limb_t);
2771 *subptrs = subp;
2772 *subptrs_len = 1;
2773 return true;
2776 static void save_index_fixup_sub_ptr(void *loc, uintptr_t offset)
2778 array_index_t *idx = loc;
2779 mpint_t *mp = index_get_mp(*idx);
2780 mp = num_to_ptr(ptr_to_num(mp) + offset);
2781 index_set_mp(idx, mp);
2784 static const struct stack_entry_type save_index = {
2785 save_index_get_ptr,
2786 save_index_get_properties,
2787 no_fixup_after_copy,
2788 save_index_fixup_sub_ptr,
2789 true,
2792 static void *save_pointer_get_ptr(struct stack_entry *ste)
2794 pointer_t *ptr = cast_ptr(pointer_t *, ste->ptr);
2795 pointer_resolve_result(ptr);
2796 if (likely(!pointer_is_thunk(*ptr)))
2797 return data_untag(pointer_get_value_strip_tag_(*ptr));
2798 else
2799 return thunk_untag(pointer_get_value_strip_tag_(*ptr));
2802 static bool save_pointer_get_properties(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len)
2804 return data_save(save_pointer_get_ptr(ste), 0, align, size, subptrs, subptrs_len);
2807 static void save_pointer_fixup_after_copy(void *new_ptr)
2809 refcount_set_read_only(da_thunk_refcount(new_ptr));
2812 static void save_pointer_fixup_sub_ptr(void *loc, uintptr_t offset)
2814 pointer_t *ptr = loc;
2815 if (!pointer_is_thunk(*ptr)) {
2816 uintptr_t num = ptr_to_num(pointer_get_data(*ptr));
2817 num += offset;
2818 *ptr = pointer_data(num_to_ptr(num));
2819 } else {
2820 uintptr_t num = ptr_to_num(pointer_get_thunk(*ptr));
2821 num += offset;
2822 *ptr = pointer_thunk(num_to_ptr(num));
2826 static const struct stack_entry_type save_pointer = {
2827 save_pointer_get_ptr,
2828 save_pointer_get_properties,
2829 save_pointer_fixup_after_copy,
2830 save_pointer_fixup_sub_ptr,
2831 false,
2834 static const struct stack_entry_type save_data_saved = {
2835 NULL,
2836 NULL,
2837 NULL,
2838 ptr_fixup_sub_ptr,
2839 false,
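/*
 * Per-tag save methods: data_method_table[tag].save() reports the number
 * of bytes to write for the object (*size), optionally a stricter
 * alignment (*align), and fills *subptrs/*subptrs_l with stack_entry
 * descriptors for the embedded pointers that have to be followed and later
 * relocated.  Returning false (no_save, the default) marks the object as
 * not saveable.  The `offset` argument is added to stored pointers before
 * they are dereferenced by methods that need to read through them
 * (save_record, save_array_flat, save_exception).
 */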
2842 static bool attr_fastcall no_save(void attr_unused *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t attr_unused *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
2844 return false;
2847 static bool attr_fastcall save_flat(void *data, uintptr_t attr_unused offset, size_t *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
2849 struct data *d = data;
2850 const struct type *t = type_get_from_tag(da(d,flat)->data_type);
2851 *align = t->align;
2852 *size = data_flat_offset + t->size;
2853 return true;
2856 static bool attr_fastcall save_longint(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t attr_unused *subptrs_l)
2858 ajla_error_t sink;
2859 struct data *d = data;
2860 *size = partial_sizeof(struct data, u_.longint);
2861 if (unlikely(!da(d,longint)->mp._mp_size)) {
2862 return true;
2864 *subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
2865 if (unlikely(!*subptrs))
2866 return false;
2867 (*subptrs)[0].t = &save_run;
2868 (*subptrs)[0].ptr = &da(d,longint)->mp._mp_d;
2869 (*subptrs)[0].align = align_of(mp_limb_t);
2870 (*subptrs)[0].size = (size_t)abs(da(d,longint)->mp._mp_size) * sizeof(mp_limb_t);
2871 *subptrs_l = 1;
2872 return true;
2875 static bool attr_fastcall save_record(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
2877 ajla_error_t sink;
2878 struct data *d = data;
2879 const struct type *t = num_to_ptr(ptr_to_num(da(d,record)->definition) + offset);
2880 const struct record_definition *def = type_def(t,record);
2881 frame_s *f;
2882 frame_t slot;
2884 *align = def->alignment;
2885 *size = data_record_offset + def->n_slots * slot_size;
2887 if (unlikely(!((size_t)def->n_slots + 1)))
2888 return false;
2889 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, (size_t)def->n_slots + 1, sizeof(struct stack_entry), &sink);
2890 if (unlikely(!*subptrs))
2891 return false;
2892 (*subptrs)[0].t = &save_type;
2893 (*subptrs)[0].ptr = &da(d,record)->definition;
2894 *subptrs_l = 1;
2896 f = da_record_frame(d);
2897 slot = def->n_slots;
2898 while (slot--) {
2899 char *ch;
2900 if (!frame_test_flag(f, slot))
2901 continue;
2902 ch = cast_ptr(char *, frame_pointer(f, slot));
2903 (*subptrs)[*subptrs_l].t = &save_pointer;
2904 (*subptrs)[*subptrs_l].ptr = ch;
2905 (*subptrs_l)++;
2907 return true;
2910 static bool attr_fastcall save_option(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
2912 ajla_error_t sink;
2913 struct data *d = data;
2914 *size = partial_sizeof(struct data, u_.option);
2915 if (!pointer_is_empty(da(d,option)->pointer)) {
2916 *subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
2917 if (unlikely(!*subptrs))
2918 return false;
2919 (*subptrs)[0].t = &save_pointer;
2920 (*subptrs)[0].ptr = &da(d,option)->pointer;
2921 *subptrs_l = 1;
2923 return true;
2926 static bool attr_fastcall save_array_flat(void *data, uintptr_t offset, size_t *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
2928 ajla_error_t sink;
2929 struct data *d = data;
2930 const struct type *t = num_to_ptr(ptr_to_num(da(d,array_flat)->type) + offset);
2931 ajla_assert_lo((da(d,array_flat)->n_allocated_entries | da(d,array_flat)->n_used_entries) >= 0, (file_line, "save_array_flat: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)da(d,array_flat)->n_allocated_entries, (intmax_t)da(d,array_flat)->n_used_entries));
2932 if (da(d,array_flat)->n_allocated_entries != da(d,array_flat)->n_used_entries)
2933 da(d,array_flat)->n_allocated_entries = da(d,array_flat)->n_used_entries;
2934 *align = t->align;
2935 *size = data_array_offset + (size_t)t->size * da(d,array_flat)->n_allocated_entries;
2936 *subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
2937 if (unlikely(!*subptrs))
2938 return false;
2939 (*subptrs)[0].t = &save_type;
2940 (*subptrs)[0].ptr = &da(d,array_flat)->type;
2941 *subptrs_l = 1;
2942 return true;
2945 static bool attr_fastcall save_array_slice(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
2947 ajla_error_t sink;
2948 struct data *d = data;
2949 ajla_assert_lo(da(d,array_slice)->n_entries >= 0, (file_line, "save_array_slice: negative size %"PRIdMAX"", (intmax_t)da(d,array_slice)->n_entries));
2950 *size = partial_sizeof(struct data, u_.array_slice);
2951 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 3, sizeof(struct stack_entry), &sink);
2952 if (unlikely(!*subptrs))
2953 return false;
2954 (*subptrs)[*subptrs_l].t = &save_pointer;
2955 (*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->reference;
2956 (*subptrs_l)++;
2957 if (da(d,array_slice)->n_entries) {
2958 (*subptrs)[*subptrs_l].t = &save_slice;
2959 (*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->flat_data_minus_data_array_offset;
2960 (*subptrs)[*subptrs_l].align = 1;
2961 (*subptrs)[*subptrs_l].size = 0;
2962 (*subptrs_l)++;
2964 (*subptrs)[*subptrs_l].t = &save_type;
2965 (*subptrs)[*subptrs_l].ptr = &da(d,array_slice)->type;
2966 (*subptrs_l)++;
2967 return true;
2970 static bool attr_fastcall save_array_pointers(void *data, uintptr_t offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
2972 ajla_error_t sink;
2973 struct data *d = data;
2974 size_t n = da(d,array_pointers)->n_used_entries;
2975 pointer_t *ptr = da(d,array_pointers)->pointer;
2976 ajla_assert_lo((da(d,array_pointers)->n_allocated_entries | da(d,array_pointers)->n_used_entries) >= 0, (file_line, "save_array_pointers: negative size %"PRIdMAX", %"PRIdMAX"", (intmax_t)da(d,array_pointers)->n_allocated_entries, (intmax_t)da(d,array_pointers)->n_used_entries));
2977 if (!offset) {
2978 if (unlikely(ptr != da(d,array_pointers)->pointer_array)) {
2979 memmove(da(d,array_pointers)->pointer_array, ptr, n * sizeof(pointer_t));
2982 if (ptr != da(d,array_pointers)->pointer_array)
2983 da(d,array_pointers)->pointer = da(d,array_pointers)->pointer_array;
2984 if ((size_t)da(d,array_pointers)->n_allocated_entries != n)
2985 da(d,array_pointers)->n_allocated_entries = n;
2986 *size = partial_sizeof_array(struct data, u_.array_pointers.pointer_array, n);
2987 /*debug("pointers: %zx - %zx", *size, partial_sizeof(struct data, u_.array_pointers.pointer_array[n]));*/
2988 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n, sizeof(struct stack_entry), &sink);
2989 if (unlikely(!*subptrs))
2990 return false;
2991 *subptrs_l = n;
2992 while (n--) {
2993 (*subptrs)[n].t = &save_pointer;
2994 (*subptrs)[n].ptr = &da(d,array_pointers)->pointer_array[n];
2996 return true;
2999 static void save_array_index(array_index_t *idx, struct stack_entry **subptrs, size_t *subptrs_l)
3001 index_detach_leak(idx);
3002 if (likely(!index_is_mp(*idx)))
3003 return;
3004 (*subptrs)[*subptrs_l].t = &save_index;
3005 (*subptrs)[*subptrs_l].ptr = idx;
3006 (*subptrs_l)++;
3009 static bool attr_fastcall save_array_same(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3011 ajla_error_t sink;
3012 struct data *d = data;
3013 *size = partial_sizeof(struct data, u_.array_same);
3014 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 2, sizeof(struct stack_entry), &sink);
3015 if (unlikely(!*subptrs))
3016 return false;
3017 (*subptrs)[0].t = &save_pointer;
3018 (*subptrs)[0].ptr = &da(d,array_same)->pointer;
3019 *subptrs_l = 1;
3020 save_array_index(&da(d,array_same)->n_entries, subptrs, subptrs_l);
3021 return true;
3024 static bool attr_fastcall save_array_btree(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3026 ajla_error_t sink;
3027 struct data *d = data;
3028 size_t n = da(d,array_btree)->n_used_btree_entries;
3029 size_t i;
3030 if (da(d,array_btree)->n_allocated_btree_entries != n)
3031 da(d,array_btree)->n_allocated_btree_entries = n;
3032 *size = partial_sizeof_array(struct data, u_.array_btree.btree, n);
3033 /*debug("btree: %zx - %zx", *size, partial_sizeof(struct data, u_.array_btree.btree[n]));*/
3034 if (unlikely(n * 2 < n))
3035 return false;
3036 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n * 2, sizeof(struct stack_entry), &sink);
3037 if (unlikely(!*subptrs))
3038 return false;
3039 for (i = 0; i < n; i++) {
3040 (*subptrs)[*subptrs_l].t = &save_pointer;
3041 (*subptrs)[*subptrs_l].ptr = &da(d,array_btree)->btree[i].node;
3042 (*subptrs_l)++;
3043 save_array_index(&da(d,array_btree)->btree[i].end_index, subptrs, subptrs_l);
3045 return true;
3048 static bool attr_fastcall save_array_incomplete(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3050 ajla_error_t sink;
3051 struct data *d = data;
3052 *size = partial_sizeof(struct data, u_.array_incomplete);
3053 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, 2, sizeof(struct stack_entry), &sink);
3054 if (unlikely(!*subptrs))
3055 return false;
3056 (*subptrs)[0].t = &save_pointer;
3057 (*subptrs)[0].ptr = &da(d,array_incomplete)->first;
3058 (*subptrs)[1].t = &save_pointer;
3059 (*subptrs)[1].ptr = &da(d,array_incomplete)->next;
3060 *subptrs_l = 2;
3061 return true;
3064 static bool attr_fastcall save_function_types(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry attr_unused **subptrs, size_t attr_unused *subptrs_l)
3066 ajla_error_t sink;
3067 struct data *d = data;
3068 size_t i;
3069 *size = data_function_types_offset + da(d,function_types)->n_types * sizeof(const struct type *);
3070 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, da(d,function_types)->n_types, sizeof(struct stack_entry), &sink);
3071 if (unlikely(!*subptrs))
3072 return false;
3073 for (i = 0; i < da(d,function_types)->n_types; i++) {
3074 (*subptrs)[i].t = &save_type;
3075 (*subptrs)[i].ptr = &da(d,function_types)->types[i];
3077 *subptrs_l = da(d,function_types)->n_types;
3078 return true;
3081 static bool attr_fastcall save_saved(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3083 ajla_error_t sink;
3084 struct data *d = data;
3085 size_t i;
3086 *size = da(d,saved)->total_size;
3087 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, da(d,saved)->n_offsets, sizeof(struct stack_entry), &sink);
3088 if (unlikely(!*subptrs))
3089 return false;
3090 *subptrs_l = da(d,saved)->n_offsets;
3091 for (i = 0; i < da(d,saved)->n_offsets; i++) {
3092 (*subptrs)[i].t = &save_data_saved;
3093 (*subptrs)[i].ptr = cast_ptr(char *, d) + da(d,saved)->offsets[i];
3095 return true;
3098 static bool attr_fastcall save_saved_cache(void *data, uintptr_t attr_unused offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3100 ajla_error_t sink;
3101 struct data *d = data;
3102 size_t i;
3103 size_t n_pointers = da(d,saved_cache)->n_entries * (da(d,saved_cache)->n_arguments + da(d,saved_cache)->n_return_values);
3104 *size = offsetof(struct data, u_.saved_cache.pointers[n_pointers]);
3105 *subptrs = mem_alloc_array_mayfail(mem_alloc_mayfail, struct stack_entry *, 0, 0, n_pointers, sizeof(struct stack_entry), &sink);
3106 if (unlikely(!*subptrs))
3107 return false;
3108 *subptrs_l = n_pointers;
3109 for (i = 0; i < n_pointers; i++) {
3110 (*subptrs)[i].t = &save_pointer;
3111 (*subptrs)[i].ptr = &da(d,saved_cache)->pointers[i];
3113 return true;
3116 static bool attr_fastcall save_exception(void *data, uintptr_t offset, size_t attr_unused *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3118 ajla_error_t sink;
3119 struct thunk *t = data;
3120 if (t->u.exception.tr.trace_n) {
3121 stack_trace_free(&t->u.exception.tr);
3122 stack_trace_init(&t->u.exception.tr);
3124 if (t->u.exception.err.error_class == EC_ASYNC)
3125 return false;
3126 *size = partial_sizeof(struct thunk, u.exception);
3127 if (t->u.exception.msg) {
3128 const char *msg = num_to_ptr(ptr_to_num(t->u.exception.msg) + offset);
3129 *subptrs = mem_alloc_mayfail(struct stack_entry *, sizeof(struct stack_entry), &sink);
3130 if (unlikely(!*subptrs))
3131 return false;
3132 (*subptrs)[0].t = &save_run;
3133 (*subptrs)[0].ptr = &t->u.exception.msg;
3134 (*subptrs)[0].align = align_of(char);
3135 (*subptrs)[0].size = strlen(msg) + 1;
3136 *subptrs_l = 1;
3138 return true;
3141 bool data_save(void *p, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l)
3143 tag_t tag = da_thunk_tag(p);
3144 if (tag >= DATA_TAG_START && tag < DATA_TAG_END) {
3145 p = data_pointer_tag(p, tag);
3146 } else {
3147 p = thunk_pointer_tag(p);
3149 *align = 1;
3150 *subptrs = NULL;
3151 *subptrs_l = 0;
3152 if (unlikely(!data_method_table[tag].save(p, offset, align, size, subptrs, subptrs_l))) {
3153 #if 0
3154 debug("failure on tag: %u", tag);
3155 if (tag == THUNK_TAG_FUNCTION_CALL) {
3156 struct thunk *t = p;
3157 pointer_t ref;
3158 ref = t->u.function_call.u.function_reference;
3159 while (1) {
3160 if (pointer_is_thunk(ref)) {
3161 debug("ref is thunk");
3162 goto ret;
3164 if (!da(pointer_get_data(ref),function_reference)->is_indirect)
3165 break;
3166 ref = da(pointer_get_data(ref),function_reference)->u.indirect;
3168 ref = *da(pointer_get_data(ref),function_reference)->u.direct;
3169 if (pointer_is_thunk(ref)) {
3170 debug("function not evaluated");
3171 goto ret;
3173 debug("function: '%s'", da(pointer_get_data(ref),function)->function_name);
3175 ret:
3176 #endif
3177 return false;
3179 *align = maximum(*align, SAVED_DATA_ALIGN);
3180 return true;
3183 bool data_save_init_stack(pointer_t *ptr, struct stack_entry **stk, size_t *stk_l)
3185 struct stack_entry ste;
3186 ajla_error_t sink;
3187 if (unlikely(!array_init_mayfail(struct stack_entry, stk, stk_l, &sink)))
3188 return false;
3189 ste.t = &save_pointer;
3190 ste.ptr = ptr;
3191 ste.align = ste.size = 0; /* avoid warning */
3192 if (unlikely(!array_add_mayfail(struct stack_entry, stk, stk_l, ste, NULL, &sink)))
3193 return false;
3194 return true;
3197 #ifdef HAVE_CODEGEN_TRAPS
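/*
 * Codegen traps: traps_tree maps ranges of generated (unoptimized) machine
 * code to their owning struct data_codegen.  data_trap_lookup() takes a
 * faulting code address, finds the codegen entry under the read lock, and
 * binary-searches its trap_records by source_ip offset to return the
 * corresponding destination address inside the same code block.
 * data_trap_insert() registers a freshly generated code range under the
 * write lock.
 */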
3198 static int data_traps_tree_compare(const struct tree_entry *e, uintptr_t ptr)
3200 const struct data_codegen *dc = get_struct(e, struct data_codegen, codegen_tree);
3201 uintptr_t base = ptr_to_num(dc->unoptimized_code_base);
3202 if (ptr < base)
3203 return 1;
3204 if (ptr >= base + dc->unoptimized_code_size)
3205 return -1;
3206 return 0;
3209 void *data_trap_lookup(void *ptr)
3211 uintptr_t offset;
3212 size_t res;
3213 struct tree_entry *e;
3214 const struct data_codegen *dc;
3215 uintptr_t ptr_num = ptr_to_num(ptr);
3217 rwmutex_lock_read(&traps_lock);
3218 e = tree_find(&traps_tree, data_traps_tree_compare, ptr_num);
3219 if (unlikely(!e))
3220 internal(file_line, "data_trap_lookup: could not find function for address %p", ptr);
3221 rwmutex_unlock_read(&traps_lock);
3222 dc = get_struct(e, struct data_codegen, codegen_tree);
3224 offset = ptr_num - ptr_to_num(dc->unoptimized_code_base);
3226 binary_search(size_t, dc->trap_records_size, res, dc->trap_records[res].source_ip == offset, dc->trap_records[res].source_ip < offset,
3227 internal(file_line, "data_trap_lookup(%s): could not find trap for address %p, offset %"PRIxMAX"", da(dc->function,function)->function_name, ptr, (uintmax_t)offset));
3229 return cast_ptr(char *, dc->unoptimized_code_base) + dc->trap_records[res].destination_ip;
3232 void data_trap_insert(struct data *codegen)
3234 struct tree_insert_position ins;
3235 struct tree_entry *e;
3236 #ifndef DEBUG_CRASH_HANDLER
3237 if (!da(codegen,codegen)->trap_records_size)
3238 return;
3239 #endif
3240 /*debug("inserting trap for %p, %lx", da(codegen,codegen)->unoptimized_code_base, da(codegen,codegen)->unoptimized_code_size);*/
3241 rwmutex_lock_write(&traps_lock);
3242 e = tree_find_for_insert(&traps_tree, data_traps_tree_compare, ptr_to_num(da(codegen,codegen)->unoptimized_code_base), &ins);
3243 if (unlikely(e != NULL))
3244 internal(file_line, "data_trap_insert: the requested range is already in the tree");
3245 tree_insert_after_find(&da(codegen,codegen)->codegen_tree, &ins);
3246 rwmutex_unlock_write(&traps_lock);
3248 #endif
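/*
 * data_init() checks that a frame slot can hold a pointer, initializes the
 * n_dereferenced refcount, fills data_method_table with safe defaults and
 * then overrides get_sub/free_object/deep_eval/compare/save per tag,
 * preallocates the shared out_of_memory_thunk, and sets up the codegen
 * traps tree when HAVE_CODEGEN_TRAPS is defined.  data_done() verifies the
 * counters and tears the structures down again.
 */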
3250 void name(data_init)(void)
3252 unsigned i;
3253 struct thunk *oom;
3255 if (slot_size < sizeof(pointer_t))
3256 internal(file_line, "data_init: invalid slot size: %lu < %lu", (unsigned long)slot_size, (unsigned long)sizeof(pointer_t));
3258 refcount_init(&n_dereferenced);
3260 for (i = DATA_TAG_START; i < DATA_TAG_END; i++) {
3261 data_method_table[i].get_sub = no_sub;
3262 data_method_table[i].free_object = free_primitive;
3263 data_method_table[i].deep_eval = deep_eval_nothing;
3264 data_method_table[i].compare = data_compare_nothing;
3265 data_method_table[i].save = no_save;
3267 for (i = THUNK_TAG_START; i < THUNK_TAG_END; i++) {
3268 data_method_table[i].free_object = free_primitive_thunk;
3269 data_method_table[i].save = no_save;
3272 data_method_table[DATA_TAG_longint].free_object = free_integer;
3273 data_method_table[DATA_TAG_array_same].free_object = free_array_same;
3274 data_method_table[DATA_TAG_resource].free_object = free_resource;
3275 data_method_table[DATA_TAG_function].free_object = free_function;
3276 #ifdef HAVE_CODEGEN
3277 data_method_table[DATA_TAG_codegen].free_object = free_codegen;
3278 #endif
3279 data_method_table[DATA_TAG_record].get_sub = get_sub_record;
3280 data_method_table[DATA_TAG_option].get_sub = get_sub_option;
3281 data_method_table[DATA_TAG_array_slice].get_sub = get_sub_array_slice;
3282 data_method_table[DATA_TAG_array_pointers].get_sub = get_sub_array_pointers;
3283 data_method_table[DATA_TAG_array_same].get_sub = get_sub_array_same;
3284 data_method_table[DATA_TAG_array_btree].get_sub = get_sub_array_btree;
3285 data_method_table[DATA_TAG_array_incomplete].get_sub = get_sub_array_incomplete;
3286 data_method_table[DATA_TAG_function_reference].get_sub = get_sub_function_reference;
3288 data_method_table[THUNK_TAG_FUNCTION_CALL].get_sub = get_sub_function_call;
3289 data_method_table[THUNK_TAG_BLACKHOLE].get_sub = get_sub_blackhole;
3290 data_method_table[THUNK_TAG_BLACKHOLE].free_object = free_none;
3291 data_method_table[THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED].get_sub = get_sub_blackhole_some_dereferenced;
3292 data_method_table[THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED].free_object = free_none;
3293 data_method_table[THUNK_TAG_BLACKHOLE_DEREFERENCED].get_sub = get_sub_blackhole_dereferenced;
3294 data_method_table[THUNK_TAG_BLACKHOLE_DEREFERENCED].free_object = free_blackhole_dereferenced;
3295 data_method_table[THUNK_TAG_RESULT].get_sub = get_sub_result;
3296 data_method_table[THUNK_TAG_MULTI_RET_REFERENCE].get_sub = get_sub_multi_ret_reference;
3297 data_method_table[THUNK_TAG_EXCEPTION].get_sub = get_sub_exception;
3298 data_method_table[THUNK_TAG_EXCEPTION].free_object = free_exception;
3300 data_method_table[DATA_TAG_flat].deep_eval = deep_eval_flat;
3301 data_method_table[DATA_TAG_record].deep_eval = deep_eval_record;
3302 data_method_table[DATA_TAG_option].deep_eval = deep_eval_option;
3303 data_method_table[DATA_TAG_array_flat].deep_eval = deep_eval_array_flat;
3304 data_method_table[DATA_TAG_array_slice].deep_eval = deep_eval_array_slice;
3305 data_method_table[DATA_TAG_array_pointers].deep_eval = deep_eval_array_pointers;
3306 data_method_table[DATA_TAG_array_same].deep_eval = deep_eval_array_same;
3307 data_method_table[DATA_TAG_array_btree].deep_eval = deep_eval_array_btree;
3308 data_method_table[DATA_TAG_array_incomplete].deep_eval = deep_eval_array_incomplete;
3309 data_method_table[DATA_TAG_function_reference].deep_eval = deep_eval_function_reference;
3311 data_method_table[DATA_TAG_flat].compare = data_compare_number;
3312 data_method_table[DATA_TAG_longint].compare = data_compare_number;
3313 data_method_table[DATA_TAG_record].compare = data_compare_record;
3314 data_method_table[DATA_TAG_option].compare = data_compare_option;
3315 data_method_table[DATA_TAG_array_flat].compare = data_compare_array;
3316 data_method_table[DATA_TAG_array_slice].compare = data_compare_array;
3317 data_method_table[DATA_TAG_array_pointers].compare = data_compare_array;
3318 data_method_table[DATA_TAG_array_same].compare = data_compare_array;
3319 data_method_table[DATA_TAG_array_btree].compare = data_compare_array;
3320 data_method_table[DATA_TAG_array_incomplete].compare = data_compare_array;
3321 data_method_table[DATA_TAG_function_reference].compare = data_compare_function_reference;
3322 data_method_table[DATA_TAG_resource].compare = data_compare_resource;
3324 data_method_table[DATA_TAG_flat].save = save_flat;
3325 data_method_table[DATA_TAG_longint].save = save_longint;
3326 data_method_table[DATA_TAG_record].save = save_record;
3327 data_method_table[DATA_TAG_option].save = save_option;
3328 data_method_table[DATA_TAG_array_flat].save = save_array_flat;
3329 data_method_table[DATA_TAG_array_slice].save = save_array_slice;
3330 data_method_table[DATA_TAG_array_pointers].save = save_array_pointers;
3331 data_method_table[DATA_TAG_array_same].save = save_array_same;
3332 data_method_table[DATA_TAG_array_btree].save = save_array_btree;
3333 data_method_table[DATA_TAG_array_incomplete].save = save_array_incomplete;
3334 data_method_table[DATA_TAG_function_types].save = save_function_types;
3335 data_method_table[DATA_TAG_saved].save = save_saved;
3336 data_method_table[DATA_TAG_saved_cache].save = save_saved_cache;
3337 data_method_table[THUNK_TAG_EXCEPTION].save = save_exception;
3339 oom = thunk_alloc_exception_mayfail(error_ajla(EC_ASYNC, AJLA_ERROR_OUT_OF_MEMORY), NULL pass_file_line);
3340 out_of_memory_thunk = pointer_thunk(oom);
3342 #ifdef HAVE_CODEGEN_TRAPS
3343 rwmutex_init(&traps_lock);
3344 tree_init(&traps_tree);
3345 #endif
3348 void name(data_done)(void)
3350 if (unlikely(!refcount_is_one(&n_dereferenced)))
3351 internal(file_line, "data_done: n_dereferenced leaked: %"PRIxMAX"", (uintmax_t)refcount_get_nonatomic(&n_dereferenced));
3353 #ifdef HAVE_CODEGEN_TRAPS
3354 rwmutex_done(&traps_lock);
3355 ajla_assert_lo(tree_is_empty(&traps_tree), (file_line, "data_done: traps_tree is not empty"));
3356 #endif
3357 pointer_dereference(out_of_memory_thunk);
3360 #endif