codegen: improve the generated code on architectures with flags
[ajla.git] / data.h
blobe6329c5b1b25891bd485e0e7db371e17abd7be0d
1 /*
2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
9 * version.
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
19 #ifndef AJLA_DATA_H
20 #define AJLA_DATA_H
22 #include "args.h"
23 #include "type.h"
24 #include "mem_al.h"
25 #include "util.h"
26 #include "refcount.h"
27 #include "addrlock.h"
28 #include "mpint.h"
29 #include "ptrcomp.h"
30 #include "tree.h"
31 #include "thread.h"
32 #include "tick.h"
33 #include "profile.h"
36 #define stack_alloc name(stack_alloc)
37 #define stack_expand name(stack_expand)
38 #define stack_split name(stack_split)
39 #define stack_trace_init name(stack_trace_init)
40 #define stack_trace_free name(stack_trace_free)
41 #define stack_trace_capture name(stack_trace_capture)
42 #define stack_trace_string name(stack_trace_string)
43 #define stack_trace_print name(stack_trace_print)
44 #define stack_trace_get_location name(stack_trace_get_location)
45 #define data_alloc_flat_mayfail name(data_alloc_flat_mayfail)
46 #define data_alloc_longint_mayfail name(data_alloc_longint_mayfail)
47 #define data_alloc_record_mayfail name(data_alloc_record_mayfail)
48 #define data_alloc_option_mayfail name(data_alloc_option_mayfail)
49 #define data_alloc_array_flat_mayfail name(data_alloc_array_flat_mayfail)
50 #define data_alloc_array_slice_mayfail name(data_alloc_array_slice_mayfail)
51 #define data_alloc_array_pointers_mayfail name(data_alloc_array_pointers_mayfail)
52 #define data_alloc_array_same_mayfail name(data_alloc_array_same_mayfail)
53 #define data_alloc_array_incomplete name(data_alloc_array_incomplete)
54 #define data_alloc_function_reference_mayfail name(data_alloc_function_reference_mayfail)
55 #define data_fill_function_reference name(data_fill_function_reference)
56 #define data_fill_function_reference_flat name(data_fill_function_reference_flat)
57 #define data_alloc_resource_mayfail name(data_alloc_resource_mayfail)
58 #define out_of_memory_ptr name(out_of_memory_ptr)
59 #define thunk_alloc_exception_error name(thunk_alloc_exception_error)
60 #define pointer_error name(pointer_error)
61 #define thunk_exception_string name(thunk_exception_string)
62 #define thunk_exception_payload name(thunk_exception_payload)
63 #define thunk_exception_print name(thunk_exception_print)
64 #define thunk_alloc_function_call name(thunk_alloc_function_call)
65 #define thunk_alloc_blackhole name(thunk_alloc_blackhole)
66 #define are_there_dereferenced name(are_there_dereferenced)
67 #define execution_control_unlink_and_submit name(execution_control_unlink_and_submit)
68 #define execution_control_acquire name(execution_control_acquire)
69 #define wake_up_wait_list name(wake_up_wait_list)
70 #define thunk_terminate name(thunk_terminate)
71 #define execution_control_alloc name(execution_control_alloc)
72 #define execution_control_free name(execution_control_free)
73 #define execution_control_terminate name(execution_control_terminate)
74 #define free_cache_entry name(free_cache_entry)
75 #define pointer_dereference_ name(pointer_dereference_)
76 #define pointer_reference_ name(pointer_reference_)
77 #define pointer_reference_maybe_ name(pointer_reference_maybe_)
78 #define copy_from_function_reference_to_frame name(copy_from_function_reference_to_frame)
79 #define pointer_follow_thunk_ name(pointer_follow_thunk_)
80 #define pointer_resolve_result name(pointer_resolve_result)
81 #define pointer_follow_wait name(pointer_follow_wait)
82 #define data_is_nan name(data_is_nan)
83 #define flat_to_data name(flat_to_data)
84 #define struct_clone name(struct_clone)
85 #define pointer_deep_eval name(pointer_deep_eval)
86 #define frame_pointer_deep_eval name(frame_pointer_deep_eval)
87 #define mpint_export name(mpint_export)
88 #define mpint_export_unsigned name(mpint_export_unsigned)
89 #define data_compare_numbers name(data_compare_numbers)
90 #define data_compare name(data_compare)
91 #define save_index_mp name(save_index_mp)
92 #define data_save name(data_save)
93 #define data_save_init_stack name(data_save_init_stack)
94 #define data_trap_lookup name(data_trap_lookup)
95 #define data_trap_insert name(data_trap_insert)
98 #ifdef DEBUG_TRACE
99 #define trace_enabled name(trace_enabled)
100 extern atomic_type uchar_efficient_t trace_enabled;
101 #endif
104 struct data;
105 struct thunk;
106 struct execution_control;
109 /***************
110 * DEFAULT INT *
111 ***************/
113 #if (INT_MASK & (1 << 2)) && defined(POINTER_COMPRESSION)
114 #define INT_DEFAULT_BITS 32
115 #elif (INT_MASK & (1 << 3)) && defined(BIT64)
116 #define INT_DEFAULT_BITS 64
117 #elif (INT_MASK & (1 << 2))
118 #define INT_DEFAULT_BITS 32
119 #elif (INT_MASK & (1 << 3))
120 #define INT_DEFAULT_BITS 64
121 #elif (INT_MASK & (1 << 4))
122 #define INT_DEFAULT_BITS 128
123 #elif (INT_MASK & (1 << 1))
124 #define INT_DEFAULT_BITS 16
125 #elif (INT_MASK & (1 << 0))
126 #define INT_DEFAULT_BITS 8
127 #else
128 unknown integer size
129 #endif
131 #define int_default_t cat4(int,INT_DEFAULT_BITS,_,t)
132 #define uint_default_t cat4(uint,INT_DEFAULT_BITS,_,t)
134 /*#define INT_DEFAULT_N log_2(INT_DEFAULT_BITS / 8)*/
135 #if INT_DEFAULT_BITS == 8
136 #define INT_DEFAULT_N 0
137 #elif INT_DEFAULT_BITS == 16
138 #define INT_DEFAULT_N 1
139 #elif INT_DEFAULT_BITS == 32
140 #define INT_DEFAULT_N 2
141 #elif INT_DEFAULT_BITS == 64
142 #define INT_DEFAULT_N 3
143 #elif INT_DEFAULT_BITS == 128
144 #define INT_DEFAULT_N 4
145 #endif
148 /*************
149 * ALIGNMENT *
150 *************/
152 #ifdef HAVE_MAX_ALIGN_T
153 #define scalar_align_max_align_t_ (align_of(max_align_t) - 1) |
154 #else
155 #define scalar_align_max_align_t_
156 #endif
157 #define scalar_align_fixed_(n, s, u, sz, bits) (align_of(s) - 1) |
158 #define scalar_align_int_(n, s, u, sz, bits) (align_of(s) - 1) |
159 #define scalar_align_real_(n, t, nt, pack, unpack) (align_of(t) - 1) |
160 #define scalar_align (( \
161 for_all_fixed(scalar_align_fixed_) \
162 for_all_int(scalar_align_int_, for_all_empty) \
163 for_all_real(scalar_align_real_, for_all_empty) \
164 scalar_align_max_align_t_ \
165 (align_of(ajla_flat_option_t) - 1) | \
166 (align_of(void *) - 1) | \
167 1) + 1)
170 /***************
171 * COMPRESSION *
172 ***************/
174 #if defined(POINTER_COMPRESSION)
/*
 * Pointer compression: native pointers are stored as 32-bit values obtained
 * by shifting right by POINTER_COMPRESSION bits; the allocator must return
 * pointers whose significant bits fit that encoding (checked below).
 * NOTE(review): this extract is missing some original lines (the closing
 * brace of the "if" inside the macro and the braces of the two inline
 * functions); the surviving code text is left byte-identical.
 */
#define pointer_compress_test(ptr, fat) \
do { \
if (unlikely((ptr_to_num(ptr) & ~((uintptr_t)0xfffffffeUL << POINTER_COMPRESSION)) != 0)) {\
if (fat) \
fatal("the allocator returned pointer %p that is not compatible with %d-bit compression", (ptr), POINTER_COMPRESSION);\
else \
ajla_assert(false, (file_line, "pointer_compress_test: pointer %p is not compatible with %d-bit compression", (ptr), POINTER_COMPRESSION));\
} while (0)

/* Compress a native pointer into its 32-bit representation. */
static inline uint32_t pointer_compress(const void *ptr)
return (uint32_t)(ptr_to_num(ptr) >> POINTER_COMPRESSION);

/* Expand a 32-bit compressed value back into a native pointer. */
static inline void *pointer_decompress(uint32_t num)
return num_to_ptr((uintptr_t)num << POINTER_COMPRESSION);

/* Minimum allocation alignment so that bit 0 of the compressed value is
 * always free for use as the thunk tag (see pointer_thunk below). */
#define pointer_compress_alignment (2 << POINTER_COMPRESSION)
198 #else
200 #ifdef POINTER_TAG
201 #define pointer_compress_test(ptr, fat) \
202 do { \
203 if (unlikely((ptr_to_num(ptr) & POINTER_TAG) != 0)) { \
204 if (fat) \
205 fatal("the allocator returned pointer %p that is not compatible with tag %"PRIuMAX"", (ptr), (uintmax_t)POINTER_TAG);\
206 else \
207 ajla_assert(false, (file_line, "pointer_compress_test: pointer %p is not compatible with %"PRIuMAX"", (ptr), (uintmax_t)POINTER_TAG));\
209 } while (0)
210 #define pointer_compress_alignment (POINTER_TAG * 2)
211 #else
212 #define pointer_compress_test(ptr, fat) do { } while (0)
213 #define pointer_compress_alignment 1
214 #endif
216 #define pointer_compress ptr_to_num
217 #define pointer_decompress num_to_ptr
219 #endif
222 /***********
223 * POINTER *
224 ***********/
226 #if defined(POINTER_COMPRESSION)
228 typedef uint32_t pointer_t;
230 #define pointer_validate(ptr)
/*
 * Accessors for the compressed pointer representation: bit 0 of the 32-bit
 * pointer_t tags thunks (1) versus data (0).
 * NOTE(review): the braces of these inline functions were dropped by the
 * extraction; the code text is left byte-identical.
 */
static attr_always_inline pointer_t pointer_thunk(struct thunk *thunk)
pointer_compress_test(thunk, false);
return pointer_compress(thunk) | 1;	/* set the thunk tag bit */

static attr_always_inline pointer_t pointer_data(const struct data *data)
pointer_compress_test(data, false);
return pointer_compress(data);	/* tag bit stays clear for data */

/* True when the pointer refers to a thunk (tag bit set). */
static attr_always_inline bool pointer_is_thunk(const pointer_t ptr)
return ptr & 1;

/* Raw decompression; meaningful only when the tag bit is clear. */
static attr_always_inline void *pointer_get_value_(const pointer_t ptr)
return pointer_decompress(ptr);

/* Decompress after masking the tag bit off (valid for data or thunk). */
static attr_always_inline void *pointer_get_value_strip_tag_(const pointer_t ptr)
return pointer_get_value_(ptr & ~(uint32_t)1);

/* Decompress after subtracting 1 — equivalent to masking when the tag bit
 * is set; callers verify the tag first (see pointer_get_thunk_). */
static attr_always_inline void *pointer_get_value_sub_tag_(const pointer_t ptr)
return pointer_get_value_(ptr - 1);
264 #elif defined(POINTER_IGNORE_START)
266 #define POINTER_TAG_AT_ALLOC
268 typedef void *pointer_t;
270 #define pointer_validate(ptr)
272 static attr_always_inline pointer_t pointer_thunk(struct thunk *thunk)
274 ajla_assert((ptr_to_num(thunk) & POINTER_IGNORE_TOP) != 0, (file_line, "pointer_thunk: pointer is not tagged: %p", thunk));
275 return (pointer_t)thunk;
278 static attr_always_inline pointer_t pointer_data(const struct data *data)
280 ajla_assert((ptr_to_num(data) & POINTER_IGNORE_TOP) == 0, (file_line, "pointer_data: pointer is tagged: %p", data));
281 return (pointer_t)data;
284 static attr_always_inline bool pointer_is_thunk(const pointer_t ptr)
286 return (ptr_to_num(ptr) & POINTER_IGNORE_TOP) != 0;
289 static attr_always_inline void *pointer_get_value_(const pointer_t ptr)
291 return ptr;
294 static attr_always_inline void *pointer_get_value_strip_tag_(const pointer_t ptr)
296 return ptr;
299 static attr_always_inline void *pointer_get_value_sub_tag_(const pointer_t ptr)
301 return ptr;
304 #elif defined(POINTER_TAG)
306 #define POINTER_TAG_USED
308 typedef void *pointer_t;
310 #define pointer_validate(ptr)
312 static attr_always_inline pointer_t pointer_thunk(struct thunk *thunk)
314 return POINTER_TAG_ADD(thunk);
317 static attr_always_inline pointer_t pointer_data(const struct data *data)
319 return (pointer_t)data;
322 static attr_always_inline bool pointer_is_thunk(const pointer_t ptr)
324 return POINTER_TAG_GET(ptr) != 0;
327 static attr_always_inline void *pointer_get_value_(const pointer_t ptr)
329 return ptr;
332 static attr_always_inline void *pointer_get_value_strip_tag_(const pointer_t ptr)
334 return POINTER_TAG_CLEAR(ptr);
337 static attr_always_inline void *pointer_get_value_sub_tag_(const pointer_t ptr)
339 return POINTER_TAG_SUB(ptr);
342 #else
344 #ifdef DEBUG
345 #define POINTER_THUNK_BIAS 0x20
346 #else
347 #define POINTER_THUNK_BIAS 0x00
348 #endif
350 typedef struct {
351 void *ptr;
352 unsigned char thunk;
353 } pointer_t;
355 #define pointer_validate(ptr_) \
356 do { \
357 ajla_assert((unsigned)((ptr_).thunk - POINTER_THUNK_BIAS) <= 1, (file_line, "pointer_validate: invalid pointer type %x, value %p", ptr_.thunk, ptr_.ptr));\
358 ajla_assert((ptr_).ptr != BAD_POINTER_1 && (ptr_).ptr != BAD_POINTER_2 && (ptr_).ptr != BAD_POINTER_3, (file_line, "pointer_validate: invalid pointer type %x, value %p", ptr_.thunk, ptr_.ptr));\
359 } while (0)
361 static attr_always_inline pointer_t pointer_thunk(struct thunk *thunk)
363 pointer_t ptr;
364 ptr.ptr = thunk;
365 ptr.thunk = POINTER_THUNK_BIAS + 1;
366 return ptr;
369 static attr_always_inline pointer_t pointer_data(const struct data *data)
371 pointer_t ptr;
372 ptr.ptr = (void *)data;
373 ptr.thunk = POINTER_THUNK_BIAS;
374 return ptr;
377 static attr_always_inline bool pointer_is_thunk(const pointer_t ptr)
379 pointer_validate(ptr);
380 ajla_assert((unsigned)(ptr.thunk - POINTER_THUNK_BIAS) <= 1, (file_line, "pointer_is_thunk: invalid pointer type %x", ptr.thunk));
381 return (bool)(ptr.thunk - POINTER_THUNK_BIAS);
384 static attr_always_inline void *pointer_get_value_(const pointer_t ptr)
386 pointer_validate(ptr);
387 return ptr.ptr;
390 static attr_always_inline void *pointer_get_value_strip_tag_(const pointer_t ptr)
392 pointer_validate(ptr);
393 return ptr.ptr;
396 static attr_always_inline void *pointer_get_value_sub_tag_(const pointer_t ptr)
398 pointer_validate(ptr);
399 return ptr.ptr;
402 #endif
/*
 * Compare two pointers for identity.  With a scalar pointer_t (compression
 * or tagging) this is a plain compare; with the two-field struct
 * representation both the address and the thunk flag must match.
 */
static attr_always_inline bool pointer_is_equal(pointer_t ptr1, pointer_t ptr2)
bool ret;
#if defined(POINTER_COMPRESSION) || defined(POINTER_TAG_USED) || defined(POINTER_TAG_AT_ALLOC)
ret = ptr1 == ptr2;
#else
ret = likely(ptr1.ptr == ptr2.ptr) && likely(ptr1.thunk == ptr2.thunk);
#endif
return ret;
/* Distinguished "empty" pointer: a NULL data pointer (compressed value 0). */
static attr_always_inline pointer_t pointer_empty(void)
#if defined(POINTER_COMPRESSION)
return 0;
#else
return pointer_data(NULL);
#endif

static attr_always_inline bool pointer_is_empty(pointer_t ptr)
return pointer_is_equal(ptr, pointer_empty());

/* Distinguished "mark" sentinel, distinct from pointer_empty(): a NULL
 * thunk, the bare tag bit, or the bare ignore bit, depending on the
 * pointer representation. */
static attr_always_inline pointer_t pointer_mark(void)
#if defined(POINTER_COMPRESSION)
return 1;
#elif defined(POINTER_TAG_AT_ALLOC)
return (pointer_t)POINTER_IGNORE_TOP;
#else
return pointer_thunk(NULL);
#endif

static attr_always_inline bool pointer_is_mark(pointer_t ptr)
return pointer_is_equal(ptr, pointer_mark());
445 static attr_always_inline void pointer_poison(pointer_t attr_unused *ptr)
447 #ifdef DEBUG
448 *ptr = pointer_data((struct data *)num_to_ptr(2048));
449 #endif
/* Assert that ptr_ is (value_ == true) / is not (value_ == false) a thunk. */
#define verify_thunk_(ptr_, value_, file_line_) ajla_assert(pointer_is_thunk(ptr_) == (value_), (file_line_, "pointer %p is %sa thunk", pointer_get_value_(ptr_), (value_) ? "not " : ""))

/* Checked downcast to struct thunk *: asserts the thunk tag, then removes
 * the tag from the stored value. */
static attr_always_inline struct thunk *pointer_get_thunk_(pointer_t ptr argument_position)
verify_thunk_(ptr, true, caller_file_line);
return (struct thunk *)pointer_get_value_sub_tag_(ptr);

/* Checked downcast to struct data *: asserts the tag is absent. */
static attr_always_inline struct data *pointer_get_data_(pointer_t ptr argument_position)
verify_thunk_(ptr, false, caller_file_line);
return (struct data *)pointer_get_value_(ptr);

#define pointer_get_thunk(ptr_) pointer_get_thunk_(ptr_ pass_file_line)
#define pointer_get_data(ptr_) pointer_get_data_(ptr_ pass_file_line)
/* slot_bits: log2 of the size of one frame slot; a slot must be able to
 * hold a pointer_t. */
#define slot_bits ( \
sizeof(pointer_t) <= 2 ? 1 : \
sizeof(pointer_t) <= 4 ? 2 : \
sizeof(pointer_t) <= 8 ? 3 : \
sizeof(pointer_t) <= 16 ? 4 : \
(5))

#define slot_size ((size_t)1 << slot_bits)
#if defined(ARCH_ALPHA) || defined(ARCH_PARISC)
/*
 * This improves generated code on parisc.
 * The ldd/std instructions require 8-byte alignment.
 * Aligning the offset avoids offset-generating instructions.
 *
 * On alpha, we need this, so that we can access flags using the ldq
 * instruction.
 */
#define slot_align maximum(slot_size, 8)
#else
#define slot_align slot_size
#endif

/* Frame alignment: the stricter of scalar alignment and slot alignment. */
#define max_frame_align maximum(scalar_align, slot_align)
#define frame_align maximum(scalar_align, slot_align)
496 /**************
497 * ALLOCATION *
498 **************/
/* Verify (fatally, via pointer_compress_test with fat=true) that a freshly
 * allocated pointer is representable under compression/tagging, then pass
 * it through unchanged. */
static inline void *ptrcomp_verify(void *ptr)
pointer_compress_test(ptr, true);
return ptr;

/* Allocation wrappers that enforce pointer_compress_alignment on top of the
 * caller-requested alignment and verify the result is compressible. */
#define mem_align_compressed_mayfail(type, size, align, mayfail) cast_ptr(type, ptrcomp_verify(mem_align_mayfail(void *, size, maximum(pointer_compress_alignment, align), mayfail)))
#define mem_calign_compressed_mayfail(type, size, align, mayfail) cast_ptr(type, ptrcomp_verify(mem_calign_mayfail(void *, size, maximum(pointer_compress_alignment, align), mayfail)))
#define mem_alloc_compressed_mayfail(type, size, mayfail) mem_align_compressed_mayfail(type, size, 1, mayfail)
#define mem_free_compressed(ptr) mem_free_aligned(ptr)
/*
 * Test whether all "size" bytes of a flat element are identical, i.e. the
 * whole element could be represented by its first byte repeated.
 *
 * flat - pointer to the raw bytes of the element
 * size - number of bytes to inspect
 *
 * Returns true for an empty element (size == 0), matching the original
 * loop, whose body never executes in that case.
 */
static inline bool data_element_is_const(const unsigned char *flat, size_t size)
{
	size_t idx;
	/* byte 0 trivially equals itself, so start comparing at byte 1 */
	for (idx = 1; idx < size; idx++) {
		if (flat[idx] != flat[0])
			return false;
	}
	return true;
}
521 /****************
522 * FRAME COMMON *
523 ****************/
525 typedef struct frame_s_ frame_s;
526 struct frame_struct;
528 #ifndef INLINE_WORKS
529 #define frame_char_(fp) (cast_ptr(unsigned char *, fp))
530 #define frame_uint32_(fp) (cast_ptr(uint32_t *, fp))
531 #define frame_uint64_(fp) (cast_ptr(uint64_t *, fp))
532 #else
533 static attr_always_inline unsigned char *frame_char_(frame_s *fp)
535 return cast_ptr(unsigned char *, fp);
538 static attr_always_inline uint32_t *frame_uint32_(frame_s *fp)
540 return cast_ptr(uint32_t *, fp);
543 static attr_always_inline uint64_t *frame_uint64_(frame_s *fp)
545 return cast_ptr(uint64_t *, fp);
547 #endif
549 #define frame_var(fp, idx) (cast_ptr(unsigned char *, __builtin_assume_aligned(frame_char_(fp) + ((size_t)(idx) << slot_bits), slot_size)))
550 #define frame_idx(fp, var) ((frame_t)((cast_ptr(char *, var) - frame_char_(fp)) / slot_size))
552 #define frame_slot_(p, type) \
553 (cast_ptr(type *, assert_alignment(p, align_of(type))))
554 #define frame_slot(fp, pos, type) \
555 frame_slot_(frame_var(fp, pos), type)
557 #define frame_pointer(p, pos) \
558 frame_slot(p, pos, pointer_t)
560 #if defined(HAVE_BITWISE_FRAME)
561 #define frame_flags_per_slot_bits (slot_bits + 3)
562 #if defined(INLINE_ASM_GCC_X86)
563 #define bitmap_64bit 0
564 static attr_always_inline void frame_set_flag(frame_s *fp, frame_t idx)
566 __asm__ volatile("bts %k0, %1"::"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory");
568 static attr_always_inline void frame_clear_flag(frame_s *fp, frame_t idx)
570 __asm__ volatile("btr %k0, %1"::"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory");
572 static attr_always_inline bool frame_test_flag(frame_s *fp, frame_t idx)
574 #ifndef INLINE_ASM_GCC_LABELS
575 bool res;
576 __asm__ volatile("bt %k1, %2; setc %0":"=q"(res):"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory");
577 return res;
578 #else
579 __asm__ goto("bt %k0, %1; jc %l[flag_set]"::"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory":flag_set);
580 return false;
581 flag_set:
582 return true;
583 #endif
585 static attr_always_inline bool frame_test_2(frame_s *fp, frame_t idx1, frame_t idx2)
587 #ifndef INLINE_ASM_GCC_LABELS
588 return frame_test_flag(fp, idx1) || frame_test_flag(fp, idx2);
589 #else
590 __asm__ goto("bt %k0, %2; jc 1f; bt %k1, %2; 1:jc %l[flag_set]"::"r"((size_t)idx1),"r"((size_t)idx2),"m"(*(unsigned char *)fp):"cc","memory":flag_set);
591 return false;
592 flag_set:
593 return true;
594 #endif
596 static attr_always_inline bool frame_test_and_set_flag(frame_s *fp, frame_t idx)
598 #ifndef INLINE_ASM_GCC_LABELS
599 bool res;
600 __asm__ volatile("bts %k1, %2; setc %0":"=q"(res):"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory");
601 return res;
602 #else
603 __asm__ goto("bts %k0, %1; jc %l[flag_set]"::"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory":flag_set);
604 return false;
605 flag_set:
606 return true;
607 #endif
609 static attr_always_inline bool frame_test_and_clear_flag(frame_s *fp, frame_t idx)
611 #ifndef INLINE_ASM_GCC_LABELS
612 bool res;
613 __asm__ volatile("btr %k1, %2; setc %0":"=q"(res):"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory");
614 return res;
615 #else
616 __asm__ goto("btr %k0, %1; jc %l[flag_set]"::"r"((size_t)idx),"m"(*(unsigned char *)fp):"cc","memory":flag_set);
617 return false;
618 flag_set:
619 return true;
620 #endif
622 #else
/* Generic (no inline asm) packed-bitmap flag implementation: flags are bits
 * in 32-bit or 64-bit words at the start of the frame; bitmap_64bit selects
 * the word size at compile time.
 * NOTE(review): the closing braces of the else-branches and of these inline
 * functions were dropped by the extraction; the code text is left
 * byte-identical. */
#if defined(ARCH_ARM64) || defined(ARCH_RISCV64)
#define bitmap_64bit (slot_size >= sizeof(uint64_t) && EFFICIENT_WORD_SIZE >= 64)
#else
#define bitmap_64bit 0
#endif
/* Set bit idx in the frame's flag bitmap. */
static attr_always_inline void frame_set_flag(frame_s *fp, frame_t idx)
if (bitmap_64bit) {
frame_uint64_(fp)[idx / 64] |= (uint64_t)1 << (idx & 63);
} else {
frame_uint32_(fp)[idx / 32] |= (uint32_t)1 << (idx & 31);

/* Clear bit idx in the frame's flag bitmap. */
static attr_always_inline void frame_clear_flag(frame_s *fp, frame_t idx)
if (bitmap_64bit) {
frame_uint64_(fp)[idx / 64] &= ~((uint64_t)1 << (idx & 63));
} else {
frame_uint32_(fp)[idx / 32] &= ~((uint32_t)1 << (idx & 31));

/* Return the state of bit idx. */
static attr_always_inline bool frame_test_flag(frame_s *fp, frame_t idx)
if (bitmap_64bit) {
return (frame_uint64_(fp)[idx / 64] & ((uint64_t)1 << (idx & 63))) != 0;
} else {
return (frame_uint32_(fp)[idx / 32] & ((uint32_t)1 << (idx & 31))) != 0;

/* Set bit idx and return its previous state. */
static attr_always_inline bool frame_test_and_set_flag(frame_s *fp, frame_t idx)
bool ret;
if (bitmap_64bit) {
uint64_t val = frame_uint64_(fp)[idx / 64];
ret = (val & ((uint64_t)1 << (idx & 63))) != 0;
val |= (uint64_t)1 << (idx & 63);
frame_uint64_(fp)[idx / 64] = val;
return ret;
} else {
uint32_t val = frame_uint32_(fp)[idx / 32];
ret = (val & ((uint32_t)1 << (idx & 31))) != 0;
val |= (uint32_t)1 << (idx & 31);
frame_uint32_(fp)[idx / 32] = val;
return ret;

/* Clear bit idx and return its previous state. */
static attr_always_inline bool frame_test_and_clear_flag(frame_s *fp, frame_t idx)
bool ret;
if (bitmap_64bit) {
uint64_t val = frame_uint64_(fp)[idx / 64];
ret = (val & ((uint64_t)1 << (idx & 63))) != 0;
val &= ~((uint64_t)1 << (idx & 63));
frame_uint64_(fp)[idx / 64] = val;
return ret;
} else {
uint32_t val = frame_uint32_(fp)[idx / 32];
ret = (val & ((uint32_t)1 << (idx & 31))) != 0;
val &= ~((uint32_t)1 << (idx & 31));
frame_uint32_(fp)[idx / 32] = val;
return ret;

/* True when either of the two flags is set. */
static attr_always_inline bool frame_test_2(frame_s *fp, frame_t idx1, frame_t idx2)
return frame_test_flag(fp, idx1) || frame_test_flag(fp, idx2);
690 #endif
691 #else
#define frame_flags_per_slot_bits (slot_bits)
/* Byte-per-slot flag implementation: each flag occupies one unsigned char
 * holding 0 or 1.  The asserts catch frames with corrupted flag bytes.
 * NOTE(review): the braces of these inline functions were dropped by the
 * extraction; the code text is left byte-identical. */
static attr_always_inline void frame_set_flag(frame_s *fp, frame_t idx)
ajla_assert(frame_char_(fp)[idx] <= 1, (file_line, "frame_set_flag: invalid value %d at index %"PRIuMAX"", (int)frame_char_(fp)[idx], (uintmax_t)idx));
frame_char_(fp)[idx] = 1;

static attr_always_inline void frame_clear_flag(frame_s *fp, frame_t idx)
ajla_assert(frame_char_(fp)[idx] <= 1, (file_line, "frame_clear_flag: invalid value %d at index %"PRIuMAX"", (int)frame_char_(fp)[idx], (uintmax_t)idx));
frame_char_(fp)[idx] = 0;

static attr_always_inline bool frame_test_flag(frame_s *fp, frame_t idx)
#if 0
/* disabled experimental asm variant */
unsigned char r1;
__asm__ ("movb (%2,%1), %0" : "=q"(r1) : "r"(fp), "r"(idx) : "memory");
return r1;
#else
unsigned char val = frame_char_(fp)[idx];
ajla_assert(val <= 1, (file_line, "frame_test_flag: invalid value %d at index %"PRIuMAX"", (int)val, (uintmax_t)idx));
return val;
#endif

/* Set the flag and return its previous state. */
static attr_always_inline bool frame_test_and_set_flag(frame_s *fp, frame_t idx)
unsigned char val = frame_char_(fp)[idx];
ajla_assert(val <= 1, (file_line, "frame_test_and_set_flag: invalid value %d at index %"PRIuMAX"", (int)val, (uintmax_t)idx));
if (val) return true;
frame_char_(fp)[idx] = 1;
return false;

/* Clear the flag and return its previous state. */
static attr_always_inline bool frame_test_and_clear_flag(frame_s *fp, frame_t idx)
unsigned char val = frame_char_(fp)[idx];
ajla_assert(val <= 1, (file_line, "frame_test_and_clear_flag: invalid value %d at index %"PRIuMAX"", (int)val, (uintmax_t)idx));
if (!val) return false;
frame_char_(fp)[idx] = 0;
return true;

/*
 * On many RISC architectures, gcc generates bogus unsigned extension
 * instruction after the "or" operator and it generates better code with "plus".
 * On CISC architectures, it generates better code with "or".
 */
#if defined(__arm__) || defined(__i386__) || defined(__m68k__) || defined(__sh__) || defined(__s390__) || defined(__x86_64__)
#define frame_test_operator |
#else /* defined(__alpha__) || defined(__aarch64__) || defined(__hppa) || defined(__mips) || defined(__powerpc__) */
#define frame_test_operator +
#endif /* doesn't care: defined(__riscv) || defined(__sparc__) */
#ifndef INLINE_WORKS
#define frame_test_2(fp, idx1, idx2) (frame_char_(fp)[idx1] frame_test_operator frame_char_(fp)[idx2])
#else
/* True when either flag is set; the operator choice only affects codegen. */
static attr_always_inline bool frame_test_2(frame_s *fp, frame_t idx1, frame_t idx2)
return frame_char_(fp)[idx1] frame_test_operator frame_char_(fp)[idx2];
#endif
749 #endif
751 #define bitmap_slots(n_vars) (round_up((frame_t)(n_vars), 1 << frame_flags_per_slot_bits) >> frame_flags_per_slot_bits)
/* Copy n_slots whole frame slots from src to dest; both pointers must be
 * slot_size-aligned (checked via assert_alignment). */
static inline void memcpy_slots(unsigned char *dest, const unsigned char *src, frame_t n_slots)
src = assert_alignment(src, slot_size);
dest = assert_alignment(dest, slot_size);
memcpy_fast(dest, src, n_slots * slot_size);
760 #define MIN_USEABLE_SLOT 1
763 /********
764 * DATA *
765 ********/
767 struct data_flat {
768 type_tag_t data_type;
769 unsigned char flexible_array[FLEXIBLE_ARRAY_GCC];
772 struct data_longint {
773 mpint_t mp;
776 struct data_record {
777 const struct type *definition;
778 char flexible_array[FLEXIBLE_ARRAY_GCC];
781 struct data_option {
782 ajla_option_t option;
783 pointer_t pointer;
786 struct data_array_flat {
787 int_default_t n_used_entries;
788 int_default_t n_allocated_entries;
789 const struct type *type;
790 unsigned char flexible_array[FLEXIBLE_ARRAY_GCC];
793 struct data_array_slice {
794 int_default_t n_entries;
795 pointer_t reference;
796 const struct type *type;
797 unsigned char *flat_data_minus_data_array_offset;
800 struct data_array_pointers {
801 int_default_t n_used_entries;
802 int_default_t n_allocated_entries;
803 pointer_t *pointer;
804 pointer_t pointer_array[FLEXIBLE_ARRAY_GCC];
807 #if !defined(DEBUG_ARRAY_INDICES) && !defined(UNUSUAL)
808 #if !defined(POINTER_COMPRESSION) && defined(SIZEOF_VOID_P) && SIZEOF_VOID_P && SIZEOF_VOID_P * 8 <= INT_DEFAULT_BITS
809 #define SCALAR_ARRAY_INDEX_T
810 #elif defined(POINTER_COMPRESSION) && 32 <= INT_DEFAULT_BITS
811 #define SCALAR_ARRAY_INDEX_T
812 #endif
813 #endif
815 #ifndef SCALAR_ARRAY_INDEX_T
816 typedef struct {
817 uint_default_t val;
818 mpint_t *mp;
819 #ifdef DEBUG_ARRAY_INDICES
820 void *test_leak;
821 #endif
822 } array_index_t;
823 #else
824 typedef uint_default_t array_index_t;
825 #endif
827 typedef uchar_efficient_t btree_entries_t;
829 struct data_array_same {
830 array_index_t n_entries;
831 pointer_t pointer;
834 struct btree_level {
835 array_index_t end_index;
836 pointer_t node;
839 struct data_array_btree {
840 btree_entries_t n_used_btree_entries;
841 btree_entries_t n_allocated_btree_entries;
842 uchar_efficient_t depth;
843 struct btree_level btree[FLEXIBLE_ARRAY_GCC];
846 struct data_array_incomplete {
847 pointer_t first; /* a pointer to non-empty array */
848 pointer_t next; /* a pointer to array or array_incomplete or thunk */
851 struct function_argument {
852 type_tag_t tag; /* TYPE_TAG_unknown or primitive type tag */
853 union {
854 pointer_t ptr;
855 unsigned char slot[slot_size];
856 } u;
859 struct data_function_reference {
860 union {
861 pointer_t indirect;
862 pointer_t *direct;
863 } u;
864 uchar_efficient_t is_indirect;
865 arg_t n_curried_arguments;
866 #ifdef DEBUG
867 /* deliberately misalign variables to catch alignment errors */
868 char misalign;
869 #endif
870 struct function_argument arguments[FLEXIBLE_ARRAY_GCC];
873 struct data_resource {
874 void (*close)(struct data *);
875 #ifdef DEBUG
876 /* deliberately misalign variables to catch alignment errors */
877 char misalign;
878 #endif
879 char flexible_array[FLEXIBLE_ARRAY_GCC];
882 /* a rough estimation to make sure that the size of data_function_reference doesn't overflow */
883 #define ARG_LIMIT (sign_bit(size_t) / sizeof(struct function_argument))
885 struct local_variable {
886 const struct type *type;
889 struct local_variable_flags {
890 bool may_be_borrowed;
891 bool must_be_flat;
892 bool must_be_data;
895 struct local_arg {
896 frame_t slot;
897 char may_be_borrowed;
898 char may_be_flat;
901 struct line_position {
902 ip_t ip;
903 unsigned line;
906 struct cache_entry;
908 struct cache_entry_return {
909 struct cache_entry *ce;
910 pointer_t ptr;
911 struct execution_control *ex;
914 struct cache_entry {
915 struct tree_entry entry;
916 arg_t n_pending;
917 bool save;
918 struct list wait_list;
919 struct cache_entry_return *returns;
920 pointer_t arguments[FLEXIBLE_ARRAY];
923 struct escape_data {
924 atomic_type profile_counter_t counter;
925 atomic_type unsigned short line;
928 struct module_designator;
930 struct data_function {
931 frame_t frame_slots; /* (frame_offset + args + ret + vars) / slot_size */
932 frame_t n_bitmap_slots;
933 arg_t n_arguments;
934 arg_t n_return_values;
935 code_t *code;
936 ip_t code_size;
937 const struct local_variable *local_variables; /* indexed by slot */
938 const struct local_variable_flags *local_variables_flags; /* indexed by slot */
939 const struct local_arg *args; /* indexed by argument */
940 pointer_t types_ptr;
941 const struct type *record_definition;
942 const struct module_designator *module_designator;
943 const struct function_designator *function_designator;
944 char *function_name;
945 struct line_position *lp;
946 size_t lp_size;
947 #ifdef HAVE_CODEGEN
948 pointer_t codegen;
949 atomic_type uchar_efficient_t codegen_failed;
950 #endif
951 struct data *loaded_cache;
952 struct tree cache;
953 atomic_type profile_counter_t profiling_counter;
954 atomic_type profile_counter_t call_counter;
955 struct escape_data *escape_data;
956 bool leaf;
957 bool is_saved;
958 frame_t local_directory_size;
959 pointer_t *local_directory[FLEXIBLE_ARRAY_GCC];
962 struct data_function_types {
963 size_t n_types;
964 const struct type *types[FLEXIBLE_ARRAY_GCC];
967 #ifdef HAVE_CODEGEN
968 #if defined(ARCH_X86_32) || defined(ARCH_ARM32) || defined(ARCH_MIPS32) || defined(ARCH_POWER32) || defined(ARCH_SPARC32)
969 typedef uint64_t code_return_t;
970 #else
971 typedef struct {
972 void *fp;
973 #if defined(ARCH_MIPS64) || defined(ARCH_PARISC64) || defined(ARCH_S390) || defined(ARCH_SPARC64)
974 unsigned long ip;
975 #else
976 ip_t ip;
977 #endif
978 } code_return_t;
979 #endif
980 struct cg_upcall_vector_s;
982 struct trap_record {
983 size_t source_ip;
984 size_t destination_ip;
987 struct data_codegen {
988 #ifdef HAVE_CODEGEN_TRAPS
989 struct tree_entry codegen_tree;
990 struct trap_record *trap_records;
991 size_t trap_records_size;
992 #endif
993 void *unoptimized_code_base;
994 size_t unoptimized_code_size;
995 struct data *function;
996 bool is_saved;
997 frame_t n_entries;
998 size_t *offsets;
999 char *unoptimized_code[FLEXIBLE_ARRAY_GCC];
1001 #endif
1003 union internal_arg {
1004 void *ptr;
1005 size_t i;
1008 struct data_internal {
1009 void *(*fn)(frame_s *fp, const code_t *ip, union internal_arg *);
1010 union internal_arg arguments[FLEXIBLE_ARRAY_GCC];
1013 struct data_saved {
1014 size_t total_size;
1015 size_t n_offsets;
1016 size_t offsets[FLEXIBLE_ARRAY_GCC];
1019 struct data_saved_cache {
1020 size_t n_entries;
1021 arg_t n_arguments;
1022 arg_t n_return_values;
1023 pointer_t pointers[FLEXIBLE_ARRAY_GCC];
/* Run-time tag identifying which variant a heap object holds. */
typedef uchar_efficient_t tag_t;

/* Tags for the struct data variants (see union u_ in struct data). */
#define DATA_TAG_START 1
#define DATA_TAG_flat 1
#define DATA_TAG_longint 2
#define DATA_TAG_record 3
#define DATA_TAG_option 4
#define DATA_TAG_array_flat 5
#define DATA_TAG_array_slice 6
#define DATA_TAG_array_pointers 7
#define DATA_TAG_array_same 8
#define DATA_TAG_array_btree 9
#define DATA_TAG_array_incomplete 10
#define DATA_TAG_function_reference 11
#define DATA_TAG_resource 12
#define DATA_TAG_function 13
#define DATA_TAG_function_types 14
#ifdef HAVE_CODEGEN
#define DATA_TAG_codegen 15
#endif
#define DATA_TAG_internal 16
#define DATA_TAG_saved 17
#define DATA_TAG_saved_cache 18
#define DATA_TAG_END 19

/* Thunk-state tags continue the same numbering space after the data tags. */
#define THUNK_TAG_START 19
#define THUNK_TAG_FUNCTION_CALL 19
#define THUNK_TAG_BLACKHOLE 20
#define THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED 21
#define THUNK_TAG_BLACKHOLE_DEREFERENCED 22
#define THUNK_TAG_RESULT 23
#define THUNK_TAG_MULTI_RET_REFERENCE 24
#define THUNK_TAG_EXCEPTION 25
#define THUNK_TAG_END 26

#define TAG_END 26

/* If every data tag fits into the ignored high pointer bits, the tag can be
 * encoded in the pointer itself at allocation time. */
#if defined(POINTER_TAG_AT_ALLOC) && DATA_TAG_END <= (1 << POINTER_IGNORE_BITS) / 2
#define DATA_TAG_AT_ALLOC
#endif
/* A heap data object: reference count, a tag (unless the tag is packed into
 * the refcount or the pointer), and a union of all possible payloads.
 * Access the payload through the da()/da_tag() macros below. */
struct data {
	refcount_t refcount_;
#if !defined(REFCOUNT_TAG)
	tag_t tag;	/* object kind; with REFCOUNT_TAG it lives in refcount_ */
#endif
	union {
		struct data_flat flat;
		struct data_longint longint;
		struct data_record record;
		struct data_option option;
		struct data_array_flat array_flat;
		struct data_array_slice array_slice;
		struct data_array_pointers array_pointers;
		struct data_array_same array_same;
		struct data_array_btree array_btree;
		struct data_array_incomplete array_incomplete;
		struct data_function_reference function_reference;
		struct data_resource resource;

		/* these do not appear on the ajla heap */
		struct data_function function;
		struct data_function_types function_types;
#ifdef HAVE_CODEGEN
		struct data_codegen codegen;
#endif
		struct data_internal internal;

		/* this only appears in saved stream */
		struct data_saved saved;
		struct data_saved_cache saved_cache;
	} u_;
/* Fetch a data object's tag.  Depending on configuration the tag is encoded
 * in the pointer's high ignored bits, packed into the refcount, or stored in
 * a dedicated field. */
#if defined(DATA_TAG_AT_ALLOC)
#define da_tag_(data) ((tag_t)(ptr_to_num(data) >> POINTER_IGNORE_START))
#elif defined(REFCOUNT_TAG)
#define da_tag_(data) (refcount_tag_get((refcount_const refcount_t *)&(data)->refcount_))
#else
#define da_tag_(data) ((data)->tag)
#endif
/* Checked tag read: asserts the tag is a valid data tag. */
#define da_tag(data) (ajla_assert(da_tag_(data) >= DATA_TAG_START && da_tag_(data) < DATA_TAG_END, (file_line, "invalid data tag %u", da_tag_(data))), da_tag_(data))
/* Assert that a data object has the expected kind. */
#define da_assert(data, kind) (ajla_assert(da_tag_(data) == DATA_TAG_##kind, (file_line, "data tag %u, expected %u", da_tag_(data), DATA_TAG_##kind)))
/* Checked accessor for the union member of the given kind. */
#define da(data, kind) (da_assert(data,kind), &(data)->u_.kind)

/* Offsets of the flexible payloads, rounded up to the required alignment. */
#define data_flat_offset_ (round_up(offsetof(struct data, u_.flat.flexible_array), scalar_align))
#define data_record_offset_ (round_up(offsetof(struct data, u_.record.flexible_array), slot_align))
#define data_array_offset_ (round_up(offsetof(struct data, u_.array_flat.flexible_array), scalar_align))
#define data_resource_offset_ (round_up(offsetof(struct data, u_.resource.flexible_array), scalar_align))
#ifndef UNUSUAL
#define data_flat_offset data_flat_offset_
#define data_record_offset data_record_offset_
#define data_array_offset data_array_offset_
#define data_resource_offset data_resource_offset_
#else
/* add some value to make sure that we don't forget it */
#define data_flat_offset (data_flat_offset_ + scalar_align)
#define data_record_offset (data_record_offset_ + slot_align)
#define data_array_offset (data_array_offset_ + scalar_align)
#define data_resource_offset (data_resource_offset_ + scalar_align)
#endif
#define data_function_types_offset offsetof(struct data, u_.function_types.types)
1129 static attr_always_inline unsigned char *da_flat(struct data *d)
1131 da_assert(d,flat);
1132 return cast_ptr(unsigned char *, d) + data_flat_offset;
1134 static attr_always_inline frame_s *da_record_frame(struct data *d)
1136 da_assert(d,record);
1137 return cast_ptr(frame_s *, cast_ptr(const char *, d) + data_record_offset);
1139 static attr_always_inline unsigned char *da_array_flat(struct data *d)
1141 da_assert(d,array_flat);
1142 return cast_ptr(unsigned char *, d) + data_array_offset;
/* True for any of the array representations (flat, slice, pointers, same,
 * btree); array_incomplete is deliberately excluded. */
#define DATA_TAG_is_array(tag) ((tag) >= DATA_TAG_array_flat && (tag) <= DATA_TAG_array_btree)
/* Size in bytes of one element of a flat array. */
#define da_array_flat_element_size(d) ((size_t)da(d,array_flat)->type->size)
/* Depth of an array node: btree nodes report their stored depth, leaves -1. */
#define da_array_depth(d) (ajla_assert(DATA_TAG_is_array(da_tag(d)), (file_line, "da_array_depth: invalid tag %u", da_tag(d))), da_tag(d) == DATA_TAG_array_btree ? (int)da(d,array_btree)->depth : -1)
/* Sanity-check a btree parent/child pair: parent fill within bounds and the
 * child exactly one level below the parent. */
#define da_array_assert_son(parent, son) ( \
	ajla_assert(da(parent,array_btree)->n_used_btree_entries >= 2 && da(parent,array_btree)->n_used_btree_entries <= BTREE_MAX_SIZE, (file_line, "da_array_assert_son: invalid parent size %"PRIuMAX"", (uintmax_t)da(parent,array_btree)->n_used_btree_entries)),\
	ajla_assert(da_array_depth(son) + 1 == da_array_depth(parent), (file_line, "da_array_assert_son: depth mismatch: %d, %d", da_array_depth(parent), da_array_depth(son)))\
1152 static attr_always_inline const struct type *da_type(struct data *fn, size_t idx)
1154 struct data *t = pointer_get_data(da(fn,function)->types_ptr);
1155 ajla_assert(idx < da(t,function_types)->n_types, (file_line, "da_type: access out of range: %"PRIuMAX" >= %"PRIuMAX"", (uintmax_t)idx, (uintmax_t)da(t,function_types)->n_types));
1156 return da(t,function_types)->types[idx];
/* Total byte size of a function's stack frame. */
#define function_frame_size(fn) ((size_t)da(fn,function)->frame_slots * slot_size)
/* Number of variable slots, excluding the frame header (frame_offset). */
#define function_n_variables(fn) ((size_t)da(fn,function)->frame_slots - frame_offset / slot_size)
1162 static inline void *da_resource(struct data *d)
1164 da_assert(d,resource);
1165 return cast_ptr(void *, cast_ptr(const char *, d) + data_resource_offset);
/* Finish initialization of a freshly allocated data object: set the tag
 * (in the pointer, the refcount, or the tag field, depending on
 * configuration) and initialize the refcount.  Returns NULL if the
 * allocation it was handed failed. */
static attr_always_inline struct data *data_init_(struct data *d, tag_t tag)
	if (unlikely(!d))
		return NULL;
#if defined(DATA_TAG_AT_ALLOC)
	/* encode the tag into the pointer's ignored high bits */
	d = cast_cpp(struct data *, num_to_ptr(ptr_to_num(d) + ((uintptr_t)tag << POINTER_IGNORE_START)));
#endif
#if defined(REFCOUNT_TAG)
	refcount_init_tag(&d->refcount_, tag);
#else
	d->tag = tag;
	refcount_init(&d->refcount_);
#endif
	return d;

/* Apply the pointer-embedded tag to an arbitrary pointer (no-op unless
 * DATA_TAG_AT_ALLOC is in effect). */
static attr_always_inline void *data_pointer_tag(void *d, tag_t attr_unused tag)
#if defined(DATA_TAG_AT_ALLOC)
	d = cast_cpp(void *, num_to_ptr(ptr_to_num(d) + ((uintptr_t)tag << POINTER_IGNORE_START)));
#endif
	return d;

/* Allocation helpers: allocate a data object sized for the given union
 * member (or an explicit size/alignment) and tag it. */
#define data_alloc(kind, mayfail) data_init_(mem_alloc_compressed_mayfail(struct data *, partial_sizeof(struct data, u_.kind), mayfail), DATA_TAG_##kind)
#define data_align(kind, size, align, mayfail) data_init_(mem_align_compressed_mayfail(struct data *, maximum_maybe0(size, partial_sizeof_lower_bound(struct data)), align, mayfail), DATA_TAG_##kind)
#define data_calign(kind, size, align, mayfail) data_init_(mem_calign_compressed_mayfail(struct data *, maximum_maybe0(size, partial_sizeof_lower_bound(struct data)), align, mayfail), DATA_TAG_##kind)
#define data_alloc_flexible(kind, array, size, mayfail) data_init_(struct_alloc_array_mayfail(mem_alloc_compressed_mayfail, struct data, u_.kind.array, size, mayfail), DATA_TAG_##kind)

/* Strip the pointer-embedded tag before freeing.  The mask is DATA_TAG_END-1
 * rounded up to all-ones (next power of two minus one), so it covers every
 * bit a tag could occupy. */
static inline void *data_untag_(void *d, const char attr_unused *fl)
#if defined(DATA_TAG_AT_ALLOC)
	unsigned mask = DATA_TAG_END - 1;
	mask = mask | (mask >> 1);
	mask = mask | (mask >> 2);
	mask = mask | (mask >> 4);
	mask = mask | (mask >> 8);
	ajla_assert((ptr_to_num(d) & ((uintptr_t)mask << POINTER_IGNORE_START)) != 0, (fl, "data_untag_: pointer not tagged: %p", d));
	return num_to_ptr(ptr_to_num(d) & ~((uintptr_t)mask << POINTER_IGNORE_START));
#else
	return d;
#endif

#define data_untag(d) data_untag_(d, file_line)
/* Free a data object; poisons the refcount tag first to catch use-after-free. */
#define data_free(d) do { refcount_poison_tag(&(d)->refcount_); mem_free_compressed(data_untag(d)); } while (0)
/* Free a data object that must have exactly one reference left. */
#define data_free_r1(d) do { ajla_assert(refcount_is_one(&(d)->refcount_), (file_line, "freeing data with invalid refcount")); data_free(d); } while (0)
/*********
 * THUNK *
 *********/

/* One entry of a captured stack trace: where (module, function, line). */
struct stack_trace_entry {
	const struct module_designator *module_designator;
	const char *function_name;
	unsigned line;

/* A captured stack trace: array of entries plus its length. */
struct stack_trace {
	struct stack_trace_entry *trace;
	size_t trace_n;

/* Payload of a THUNK_TAG_EXCEPTION thunk: the error, an optional message,
 * and the stack trace captured at raise time. */
struct thunk_exception {
	ajla_error_t err;
	char *msg;
	struct stack_trace tr;

/* One return value of a function-call thunk; `wanted` marks whether any
 * consumer still wants this result. */
struct thunk_result {
	pointer_t ptr;
	bool wanted;

/* An unevaluated (or in-flight, or finished) computation.  The active union
 * member is determined by the thunk tag (see THUNK_TAG_* above). */
struct thunk {
	refcount_t refcount_;
#ifndef REFCOUNT_TAG
	tag_t tag;	/* with REFCOUNT_TAG the tag is packed into refcount_ */
#endif
	union {
		/* THUNK_TAG_FUNCTION_CALL */
		/* THUNK_TAG_BLACKHOLE */
		/* THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED */
		/* THUNK_TAG_BLACKHOLE_DEREFERENCED */
		/* THUNK_TAG_RESULT */
		struct {
			union {
				/* THUNK_TAG_FUNCTION_CALL */
				pointer_t function_reference;
				/* THUNK_TAG_BLACKHOLE */
				/* THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED */
				/* THUNK_TAG_BLACKHOLE_DEREFERENCED */
				struct execution_control *execution_control;
			} u;
			struct thunk_result results[1];
		} function_call;
		/* THUNK_TAG_MULTI_RET_REFERENCE */
		struct {
			struct thunk *thunk;
			arg_t idx;	/* which result of the referenced thunk */
		} multi_ret_reference;
		/* THUNK_TAG_EXCEPTION */
		struct thunk_exception exception;
	} u;
/* Read a thunk's tag (checked against the valid thunk-tag range). */
static inline tag_t thunk_tag_(struct thunk *t, const char attr_unused *position)
	tag_t tag;
#ifndef REFCOUNT_TAG
	tag = t->tag;
#else
	tag = refcount_tag_get(&t->refcount_);
#endif
	ajla_assert(tag >= THUNK_TAG_START && tag < THUNK_TAG_END, (position, "invalid thunk tag %u", tag));
	return tag;

#define thunk_tag(t) thunk_tag_(t, file_line)

/* As thunk_tag_, but reads the tag field through a volatile pointer so a
 * concurrently updated tag is re-read from memory. */
static inline tag_t thunk_tag_volatile_(struct thunk *t, const char attr_unused *position)
	tag_t tag;
#ifndef REFCOUNT_TAG
	tag = *cast_ptr(thread_volatile tag_t *, &t->tag);
#else
	tag = refcount_tag_get(&t->refcount_);
#endif
	ajla_assert(tag >= THUNK_TAG_START && tag < THUNK_TAG_END, (position, "invalid thunk tag %u", tag));
	return tag;

#define thunk_tag_volatile(t) thunk_tag_volatile_(t, file_line)

/* Transition a thunk's tag from old_tag to new_tag (checked). */
static inline void thunk_tag_set_(struct thunk *t, tag_t attr_unused old_tag, tag_t new_tag, const char attr_unused *position)
#ifndef REFCOUNT_TAG
	ajla_assert(t->tag == old_tag, (position, "thunk_tag_set: tag does not match: %u != %u; new tag %u", t->tag, old_tag, new_tag));
	t->tag = new_tag;
#else
	refcount_tag_set_(&t->refcount_, old_tag, new_tag, position);
#endif

#define thunk_tag_set(t, old_tag, new_tag) thunk_tag_set_(t, old_tag, new_tag, file_line)

/* Read the tag of an object that may be either a struct data or a struct
 * thunk; relies on both structures laying out the tag/refcount identically. */
static inline tag_t da_thunk_tag_(void *dt, const char attr_unused *position)
	tag_t tag;
#ifndef REFCOUNT_TAG
	ajla_assert(offsetof(struct data, tag) == offsetof(struct thunk, tag), (position, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
	tag = *cast_ptr(tag_t *, cast_ptr(char *, dt) + offsetof(struct data, tag));
#else
	ajla_assert(offsetof(struct data, refcount_) == offsetof(struct thunk, refcount_), (position, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
	tag = refcount_tag_get(cast_ptr(refcount_t *, cast_ptr(char *, dt) + offsetof(struct data, refcount_)));
#endif
	/* any tag in the combined data+thunk range is acceptable here */
	ajla_assert(tag >= DATA_TAG_START && tag < TAG_END, (position, "invalid thunk tag %u", tag));
	return tag;

#define da_thunk_tag(dt) da_thunk_tag_(dt, file_line)

/* Thunk tags occupy the upper part of the shared tag space. */
#define tag_is_thunk(tag) ((tag) >= THUNK_TAG_START)
1329 static inline refcount_t *da_thunk_refcount_(void *dt, const char attr_unused *position)
1331 ajla_assert(offsetof(struct data, refcount_) == offsetof(struct thunk, refcount_), (position, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
1332 return cast_ptr(refcount_t *, cast_ptr(char *, dt) + offsetof(struct data, refcount_));
1334 #define da_thunk_refcount(dt) da_thunk_refcount_(dt, file_line)
/*
 * May be called if the thunk is locked or if the thunk is on current frame
 * (so that it won't be modified asynchronously)
 */
/* True if the thunk already holds a result — either directly, or through a
 * multi-return reference whose target has a result.  Uses the volatile tag
 * read because the tag may be updated by another executor. */
static inline bool thunk_is_finished(struct thunk *t)
	tag_t tag = thunk_tag_volatile(t);
	return tag == THUNK_TAG_RESULT ||
		(tag == THUNK_TAG_MULTI_RET_REFERENCE && thunk_tag_volatile(t->u.multi_ret_reference.thunk) == THUNK_TAG_RESULT);

/* Mark a pointer as pointing to a thunk (sets the top ignore bit when
 * pointer tagging at allocation is enabled; otherwise a no-op). */
static inline struct thunk *thunk_pointer_tag(struct thunk *t)
#ifdef POINTER_TAG_AT_ALLOC
	t = cast_cpp(struct thunk *, num_to_ptr(ptr_to_num(t) | POINTER_IGNORE_TOP));
#endif
	return t;

/* Strip the thunk tag bit before freeing; asserts the bit was set. */
static inline void *thunk_untag_(struct thunk *t, const char attr_unused *fl)
#if defined(POINTER_TAG_AT_ALLOC)
	ajla_assert((ptr_to_num(t) & POINTER_IGNORE_TOP) != 0, (fl, "thunk_untag_: pointer not tagged: %p", t));
	return num_to_ptr(ptr_to_num(t) & ~POINTER_IGNORE_TOP);
#else
	return t;
#endif

#define thunk_untag(t) thunk_untag_(t, file_line)
/* Free a thunk; poisons the refcount tag first to catch use-after-free. */
#define thunk_free(t) do { refcount_poison_tag(&t->refcount_); mem_free_compressed(thunk_untag(t)); } while (0)

/* Alignment that satisfies data, thunk, refcount word and (if enabled)
 * pointer-compression requirements — used for saved-stream allocations. */
#if defined(POINTER_COMPRESSION)
#define SAVED_DATA_ALIGN maximum(maximum(maximum(align_of(struct data), align_of(struct thunk)), sizeof(refcount_int_t)), pointer_compress_alignment)
#else
#define SAVED_DATA_ALIGN maximum(maximum(align_of(struct data), align_of(struct thunk)), sizeof(refcount_int_t))
#endif
/*********
 * FRAME *
 *********/

typedef unsigned timestamp_t;

/* How a frame's function was invoked; stored in frame_struct.mode. */
#define CALL_MODE_NORMAL 1
#define CALL_MODE_STRICT 2
#define CALL_MODE_SPARK 3
#define CALL_MODE_VALID(v) ((v) >= 1 && (v) <= 3)

/* Header preceding a frame's variable slots.  frame_s pointers used
 * throughout this file point just past this header (see ptr_frame /
 * get_frame below). */
struct frame_struct {
	struct data *function;		/* the function this frame belongs to; NULL marks the top frame */
	ip_t previous_ip;
	timestamp_t timestamp;
	stack_size_t available_slots;	/* slots available below this frame on the stack */
	uchar_efficient_t mode;		/* one of CALL_MODE_* */
#ifdef DEBUG
	/* deliberately misalign variables to catch alignment errors */
	char misalign;
#endif
	char variables_[FLEXIBLE_ARRAY];

/* Sits at the bottom of every execution stack. */
struct stack_bottom {
	struct execution_control *ex;
	stack_size_t useable_slots;
	pointer_t ret;

#define SIZEOF_FRAME_STRUCT partial_sizeof_array(struct frame_struct, variables_, 0)
#define SIZEOF_STACK_BOTTOM round_up(sizeof(struct stack_bottom), max_frame_align)
/* Byte distance from the frame header to its variable area. */
#define frame_offset round_up(offsetof(struct frame_struct, variables_), slot_align)
/* Convert between a frame header pointer and the frame_s that points at the
 * variable area (frame_offset bytes later).  Macro and inline versions are
 * provided; the macro form is used where the compiler does not inline well. */
#ifndef INLINE_WORKS
#define ptr_frame(fp) (cast_ptr(frame_s *, cast_ptr(const char *, fp) + frame_offset))
#define get_frame(fp) (cast_ptr(struct frame_struct *, cast_ptr(const char *, fp) - frame_offset))
#else
static attr_always_inline frame_s *ptr_frame(const struct frame_struct *fp)
	return cast_ptr(frame_s *, cast_ptr(const char *, fp) + frame_offset);

static attr_always_inline struct frame_struct *get_frame(const frame_s *fp)
	return cast_ptr(struct frame_struct *, cast_ptr(const char *, fp) - frame_offset);

#endif
1422 static inline const struct type *frame_get_type_of_local(const frame_s *fp, frame_t pos)
1424 const struct type *t;
1425 const struct data *function = get_frame(fp)->function;
1426 t = da(function,function)->local_variables[pos].type;
1427 TYPE_TAG_VALIDATE(t->tag);
1428 return t;
/* Convert a code pointer into an instruction index relative to the start of
 * the frame's function code. */
static inline ip_t frame_ip(const frame_s *fp, const code_t *ip)
	ajla_assert(ip >= da(get_frame(fp)->function,function)->code, (file_line, "frame_ip: invalid ip pointer: %p, %p", ip, da(get_frame(fp)->function,function)->code));
	return (ip_t)(ip - da(get_frame(fp)->function,function)->code);
1437 static inline frame_s * attr_fastcall frame_up(frame_s *fp)
1439 char *next = cast_ptr(char *, fp) + function_frame_size(get_frame(fp)->function);
1440 return cast_ptr(frame_s *, next);
1443 static inline bool frame_is_top(frame_s *fp)
1445 return get_frame(fp)->function == NULL;
1448 static inline struct stack_bottom *frame_stack_bottom(frame_s *fp)
1450 char *bottom = cast_ptr(char *, get_frame(fp)) - get_frame(fp)->available_slots * slot_size - SIZEOF_STACK_BOTTOM;
1451 return cast_ptr(struct stack_bottom *, bottom);
/* The execution control owning this stack is stored in the stack bottom. */
static inline struct execution_control *frame_execution_control(frame_s *fp)
	return frame_stack_bottom(fp)->ex;

/* Free a whole stack (allocated aligned — see stack_alloc). */
static inline void stack_free(struct stack_bottom *stack)
	mem_free_aligned(stack);
/* Initialize a newly pushed frame: validate alignment and mode, bump the
 * function's call counter when profiling, record timestamp/mode, and clear
 * the slot-flag bitmap at the start of the variable area. */
static inline void frame_init(frame_s *fp, struct data *function, timestamp_t timestamp, uchar_efficient_t mode)
	ajla_assert(!(da(function,function)->frame_slots & (frame_align / slot_size - 1)), (file_line, "frame_init: function size %"PRIuMAX" is not aligned to %x", (uintmax_t)da(function,function)->frame_slots, (unsigned)(frame_align / slot_size)));
	ajla_assert(CALL_MODE_VALID(mode), (file_line, "frame_init: invalid mode %u", mode));
	if (unlikely(profiling)) {
		/* relaxed load/store: the counter is a statistic, races are tolerated */
		profile_counter_t call_counter = load_relaxed(&da(function,function)->call_counter);
		call_counter++;
		store_relaxed(&da(function,function)->call_counter, call_counter);
	get_frame(fp)->timestamp = timestamp;
	get_frame(fp)->mode = mode;
#ifdef DEBUG
	/* poison the variable area with a random byte to catch reads of
	 * uninitialized slots (fp points past the header, so the header set
	 * above is not overwritten) */
	(void)memset(fp, rand(), da(function,function)->frame_slots * slot_size - frame_offset);
#endif
	/* the bitmap slots must start zeroed so no slot appears to hold a pointer */
	(void)memset(fp, 0, da(function,function)->n_bitmap_slots * slot_size);
1481 frame_s * attr_fastcall stack_alloc(struct execution_control *ex, struct data *function, ajla_error_t *mayfail);
1482 frame_s * attr_fastcall stack_expand(frame_s *fp, struct data *function, ajla_error_t *mayfail);
1483 frame_s * attr_fastcall stack_split(frame_s *from_fp, frame_s *to_fp, frame_s **high, ajla_error_t *mayfail);
1485 /*void frame_cleanup(frame_s *fp);*/
1487 /***************
1488 * STACK TRACE *
1489 ***************/
1491 void stack_trace_init(struct stack_trace *st);
1492 void stack_trace_free(struct stack_trace *st);
1493 bool stack_trace_get_location(struct data *function, ip_t ip_rel, struct stack_trace_entry *result);
1494 void stack_trace_capture(struct stack_trace *st, frame_s *fp, const code_t *ip, unsigned max_depth);
1495 char *stack_trace_string(struct stack_trace *st, ajla_error_t *err);
1496 void stack_trace_print(struct stack_trace *st);
1499 /*********************
1500 * OBJECT ALLOCATION *
1501 *********************/
1503 struct data * attr_fastcall data_alloc_flat_mayfail(type_tag_t type, const unsigned char *flat, size_t size, ajla_error_t *mayfail argument_position);
1504 struct data * attr_fastcall data_alloc_longint_mayfail(unsigned long bits, ajla_error_t *mayfail argument_position);
1505 struct data * attr_fastcall data_alloc_option_mayfail(ajla_error_t *mayfail argument_position);
1506 struct data * attr_fastcall data_alloc_record_mayfail(const struct record_definition *def, ajla_error_t *mayfail argument_position);
1507 struct data * attr_fastcall data_alloc_array_flat_mayfail(const struct type *type, int_default_t n_allocated, int_default_t n_used, bool clear, ajla_error_t *mayfail argument_position);
1508 struct data * attr_fastcall data_alloc_array_slice_mayfail(struct data *base, unsigned char *data, int_default_t start, int_default_t len, ajla_error_t *mayfail argument_position);
1509 struct data * attr_fastcall data_alloc_array_pointers_mayfail(int_default_t n_allocated, int_default_t n_used, ajla_error_t *mayfail argument_position);
1510 struct data * attr_fastcall data_alloc_array_same_mayfail(array_index_t n_entries, ajla_error_t *mayfail argument_position);
1511 struct data * attr_fastcall data_alloc_array_incomplete(struct data *first, pointer_t next, ajla_error_t *mayfail argument_position);
1512 struct data * attr_fastcall data_alloc_function_reference_mayfail(arg_t n_curried_arguments, ajla_error_t *mayfail argument_position);
1513 void attr_fastcall data_fill_function_reference(struct data *function_reference, arg_t a, pointer_t ptr);
1514 void attr_fastcall data_fill_function_reference_flat(struct data *function_reference, arg_t a, const struct type *type, const unsigned char *data);
1515 struct data * attr_fastcall data_alloc_resource_mayfail(size_t size, void (*close)(struct data *), ajla_error_t *mayfail argument_position);
1517 extern pointer_t *out_of_memory_ptr;
1518 struct thunk * attr_fastcall thunk_alloc_exception_error(ajla_error_t err, char *msg, frame_s *fp, const code_t *ip argument_position);
1519 pointer_t attr_fastcall pointer_error(ajla_error_t err, frame_s *fp, const code_t *ip argument_position);
1520 char *thunk_exception_string(struct thunk *thunk, ajla_error_t *err);
1521 char *thunk_exception_payload(struct thunk *thunk, ajla_error_t *err);
1522 void thunk_exception_print(struct thunk *thunk);
1524 bool attr_fastcall thunk_alloc_function_call(pointer_t function_reference, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail);
1525 bool attr_fastcall thunk_alloc_blackhole(struct execution_control *ex, arg_t n_return_values, struct thunk *result[], ajla_error_t *mayfail);
/*********************
 * EXECUTION CONTROL *
 *********************/

/* Number of wait slots each execution control owns (see wait[] below). */
#define N_EXECUTION_CONTROL_WAIT 2

/* States of execution_control.wait_state — NOTE(review): presumably stored
 * via the refcount mechanism (wait_state is a refcount_t); confirm the state
 * machine in data.c. */
#define EXECUTION_CONTROL_NORMAL 4
#define EXECUTION_CONTROL_ARMED 3
#define EXECUTION_CONTROL_FIRED 1
/*
 * execution_control_wait->thunk == NULL
 *	- unused entry
 * execution_control_wait->thunk != NULL, list_is_empty(&execution_control_wait->wait_entry)
 *	- unused, but we must take thunk lock to clear it
 * execution_control_wait->thunk != NULL, !list_is_empty(execution_control_wait->wait_entry.next)
 *	- wait_entry is linked to an existing execution control
 */
/* One wait registration: links an execution control onto some thunk's wait
 * list (see the state description above). */
struct execution_control_wait {
	struct list wait_entry;
	mutex_t *mutex_to_lock;
	struct execution_control *execution_control;

/* Per-computation execution state: current position, the stack, the thunk
 * being evaluated, and the machinery for waiting on other computations. */
struct execution_control {
	ip_t current_ip;
	frame_s *current_frame;
	struct stack_bottom *stack;

	struct thunk *thunk;		/* the thunk this execution fills in */
	struct list wait_list;		/* others waiting on this execution */

	void (*callback)(void *, pointer_t);	/* invoked on completion */
	void *callback_cookie;

	refcount_t wait_state;		/* see EXECUTION_CONTROL_* above */
	struct list waiting_list_entry;
	void *waiting_list_head;
	struct execution_control_wait wait[N_EXECUTION_CONTROL_WAIT];

	uint64_t atomic;		/* nonzero while in an atomic section */
	bool atomic_interrupted;
1573 bool are_there_dereferenced(void);
1574 void execution_control_unlink_and_submit(struct execution_control *ex, bool can_allocate_memory);
1575 bool execution_control_acquire(struct execution_control *ex);
1576 void wake_up_wait_list(struct list *wait_list, mutex_t *mutex_to_lock, bool can_allocate_memory);
1577 void *thunk_terminate(struct thunk *t, arg_t n_return_values);
1578 struct execution_control *execution_control_alloc(ajla_error_t *mayfail);
1579 void execution_control_free(struct execution_control *ex);
1580 void execution_control_terminate(struct execution_control *ex, pointer_t ptr);
1583 /**********************
1584 * POINTER OPERATIONS *
1585 **********************/
1587 void free_cache_entry(struct data *d, struct cache_entry *ce);
/* Return the refcount of whatever a pointer refers to — data or thunk —
 * after stripping any pointer tag bits. */
static attr_always_inline refcount_t *pointer_get_refcount_(pointer_t ptr)
	void *p = pointer_get_value_strip_tag_(ptr);
	return !pointer_is_thunk(ptr) ? &((struct data *)p)->refcount_ : &((struct thunk *)p)->refcount_;

/* Drop one reference; frees (recursively) when it was the last one. */
void attr_fastcall pointer_dereference_(pointer_t ptr argument_position);
#define pointer_dereference(ptr) pointer_dereference_(ptr pass_file_line)
1598 static inline void data_dereference(struct data *data)
1600 pointer_dereference(pointer_data(data));
/* Take one more reference to an already-owned pointer.  Read-only (static)
 * objects are never refcounted, hence the check. */
static inline void pointer_reference_owned(pointer_t ptr)
	refcount_t *r = pointer_get_refcount_(ptr);
	if (likely(!refcount_is_read_only(r)))
		refcount_inc(r);

/* Take n more references at once. */
static inline void pointer_reference_owned_multiple(pointer_t ptr, refcount_int_t n)
	refcount_t *r = pointer_get_refcount_(ptr);
	if (likely(!refcount_is_read_only(r)))
		refcount_add(r, n);

/* Take one more reference to a data object. */
static inline void data_reference(struct data *d)
	if (likely(!refcount_is_read_only(&d->refcount_)))
		refcount_inc(&d->refcount_);

/* Take one more reference to a thunk. */
static inline void thunk_reference(struct thunk *t)
	if (likely(!refcount_is_read_only(&t->refcount_)))
		refcount_inc(&t->refcount_);

/* Non-atomic refcount variants: only valid while the caller has exclusive
 * access to the thunk (e.g. it is not yet published). */
static inline void thunk_reference_nonatomic(struct thunk *t)
	refcount_inc_nonatomic(&t->refcount_);

/* Returns true when the count dropped to zero. */
static inline bool thunk_dereference_nonatomic(struct thunk *t)
	return refcount_dec_nonatomic(&t->refcount_);

static inline bool thunk_refcount_is_one_nonatomic(struct thunk *t)
	return refcount_is_one_nonatomic(&t->refcount_);

static inline refcount_int_t thunk_refcount_get_nonatomic(struct thunk *t)
	return refcount_get_nonatomic(&t->refcount_);

/* Debug aid: trip an assertion if the thunk's refcount is poisoned/invalid. */
static inline void thunk_assert_refcount(struct thunk attr_unused *t)
	ajla_assert_lo(!refcount_is_invalid(&t->refcount_), (file_line, "thunk_assert_refcount: invalid refcount"));
1654 pointer_t attr_fastcall pointer_reference_(pointer_t *ptr argument_position);
1655 #define pointer_reference(ptr) pointer_reference_(ptr pass_file_line)
1656 void pointer_reference_maybe_(frame_s *fp, frame_t result, pointer_t *ptr, unsigned char flags argument_position);
1657 #define pointer_reference_maybe(fp, result, ptr, flags) pointer_reference_maybe_(fp, result, ptr, flags pass_file_line)
/* An object may be mutated in place only when the caller holds its sole
 * reference. */
static inline bool data_is_writable(struct data *d)
	return refcount_is_one(&d->refcount_);

static inline bool thunk_is_writable(struct thunk *t)
	return refcount_is_one(&t->refcount_);

/* Primitives for reading/writing pointers that other executors may update:
 * lockless targets use volatile access plus a data-dependency barrier;
 * otherwise an address-hashed lock protects the pointer. */
#ifdef POINTER_FOLLOW_IS_LOCKLESS
#define pointer_volatile(ptr) ((thread_volatile pointer_t *)(ptr))
#define pointer_lock(ptr) do { } while (0)
#define pointer_unlock(ptr) do { } while (0)
#define pointer_dependency_barrier() barrier_data_dependency()
#else
#define pointer_volatile(ptr) (ptr)
#define pointer_lock(ptr) address_lock(ptr, DEPTH_POINTER)
#define pointer_unlock(ptr) address_unlock(ptr, DEPTH_POINTER)
#define pointer_dependency_barrier() do { } while (0)
#endif
/* Read a possibly concurrently-updated pointer.  For non-thunk results the
 * dependency barrier orders subsequent loads through the pointer. */
static inline pointer_t pointer_locked_read(pointer_t *ptr)
	pointer_t ret;
	pointer_lock(ptr);
	ret = *pointer_volatile(ptr);
	pointer_validate(ret);
	if (!pointer_is_thunk(ret))
		pointer_dependency_barrier();
	pointer_unlock(ptr);
	return ret;

/* Publish a new value into a shared pointer under the same locking scheme. */
static inline void pointer_locked_write(pointer_t *ptr, pointer_t val)
	pointer_validate(val);
	pointer_lock(ptr);
	*pointer_volatile(ptr) = val;
	pointer_unlock(ptr);

/* Return codes of pointer_follow_thunk_ (distinct sentinel pointers). */
#define POINTER_FOLLOW_THUNK_EXIT NULL
#define POINTER_FOLLOW_THUNK_RETRY SPECIAL_POINTER_1
#define POINTER_FOLLOW_THUNK_EXCEPTION SPECIAL_POINTER_2
#define POINTER_FOLLOW_THUNK_GO SPECIAL_POINTER_3

void copy_from_function_reference_to_frame(frame_s *new_fp, struct data *ref, arg_t ia, char can_move);
/* Values for pointer_follow_thunk_'s second argument when no wait slot is used. */
#define POINTER_FOLLOW_THUNK_NOEVAL NULL
#define POINTER_FOLLOW_THUNK_SPARK SPECIAL_POINTER_1
void * attr_fastcall pointer_follow_thunk_(pointer_t *ptr, void *ex_wait);
void attr_fastcall pointer_resolve_result(pointer_t *ptr);
void attr_fastcall pointer_follow_wait(frame_s *fp, const code_t *ip);

/* Follow a thunk without forcing evaluation; exactly one of the three code
 * blocks runs and each must leave the statement (the macro loops forever
 * otherwise). */
#define pointer_follow_thunk_noeval(ptr, retry_code, exception_code, uneval_code)\
do { \
	void *ex__ = pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);\
	if (ex__ == POINTER_FOLLOW_THUNK_RETRY) { \
		{ retry_code; } \
		not_reached(); \
	} else if (ex__ == POINTER_FOLLOW_THUNK_EXCEPTION) { \
		{ exception_code; } \
		not_reached(); \
	} else { \
		ajla_assert(ex__ == POINTER_FOLLOW_THUNK_EXIT, (file_line, "pointer_follow_thunk_noeval: invalid return value %p", ex__));\
		{ uneval_code; } \
		not_reached(); \
} while (1)
/* wait_idx argument of pointer_follow:
 *   PF_SPARK   - spark the thunk, don't wait
 *   PF_NOEVAL  - don't evaluate the thunk
 *   PF_WAIT    - use wait slot 0 and suspend via pointer_follow_wait
 *   PF_PREPARE0/1 - use wait slot 0/1 but skip pointer_follow_wait
 *     (bit 1 selects the slot, bit 2 suppresses the wait call) */
#define PF_SPARK (-2)
#define PF_NOEVAL (-1)
#define PF_WAIT (0)
#define PF_PREPARE0 (2)
#define PF_PREPARE1 (3)

/* Resolve `ptr` into `result` (a struct data *).  If `owned` the pointer is
 * private and read directly, otherwise it is read under the pointer lock.
 * On a thunk: retries transparently, runs exception_code (with thunk_ bound)
 * on an exception, or runs xc_code (with ex_ bound to the execution control)
 * when evaluation must be awaited; exception_code and xc_code must leave the
 * statement. */
#define pointer_follow(ptr, owned, result, wait_idx, fp, ip, xc_code, exception_code)\
do { \
	pointer_t p_; \
	if (!(owned)) \
		p_ = pointer_locked_read(ptr); \
	else \
		p_ = *(ptr); \
	if (likely(!pointer_is_thunk(p_))) { \
		(result) = pointer_get_data(p_); \
		break; \
	} else { \
		void *ex__; \
		ex__ = pointer_follow_thunk_(ptr, (wait_idx) >= 0 ? &frame_execution_control(fp)->wait[(wait_idx) & 1] : (wait_idx) == PF_NOEVAL ? POINTER_FOLLOW_THUNK_NOEVAL : POINTER_FOLLOW_THUNK_SPARK);\
		if (ex__ == POINTER_FOLLOW_THUNK_RETRY) \
			continue; \
		if (ex__ == POINTER_FOLLOW_THUNK_EXCEPTION) { \
			struct thunk attr_unused *thunk_; \
			thunk_ = pointer_get_thunk(*(ptr)); \
			{ exception_code; } \
			not_reached(); \
		struct execution_control attr_unused *ex_; \
		ex_ = cast_cpp(struct execution_control *, ex__);\
		if ((wait_idx) >= 0 && !((wait_idx) & 2)) { \
			pointer_follow_wait(fp, ip); \
		{ xc_code; } \
		not_reached(); \
} while (1)

/* Fast-path variant: runs success_code only when the pointer already holds
 * data; does nothing on a thunk. */
#define pointer_follow_fastfail(ptr, owned, result, success_code) \
do { \
	pointer_t p_; \
	if (!(owned)) \
		p_ = pointer_locked_read(ptr); \
	else \
		p_ = *(ptr); \
	if (likely(!pointer_is_thunk(p_))) { \
		(result) = pointer_get_data(p_); \
		{ success_code; } \
} while (0)
1784 bool attr_fastcall data_is_nan(type_tag_t type, const unsigned char *ptr);
1785 pointer_t flat_to_data(const struct type *type, const unsigned char *flat);
1786 void attr_fastcall struct_clone(pointer_t *ptr);
1788 void * attr_fastcall pointer_deep_eval(pointer_t *ptr, frame_s *fp, const code_t *ip, struct thunk **thunk);
1789 void * attr_fastcall frame_pointer_deep_eval(frame_s *fp, const code_t *ip, frame_t slot, struct thunk **thunk);
1791 bool attr_fastcall mpint_export(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err);
1792 bool attr_fastcall mpint_export_unsigned(const mpint_t *m, unsigned char *ptr, unsigned intx, ajla_error_t *err);
1794 int data_compare_numbers(type_tag_t tt, unsigned char *flat1, pointer_t ptr1, unsigned char *flat2, pointer_t ptr2);
1795 #define DATA_COMPARE_OOM -2
1796 int attr_fastcall data_compare(pointer_t ptr1, pointer_t ptr2, ajla_error_t *mayfail);
/* A slot is flat when its pointer flag is clear and its declared type is a
 * flat (by-value) type. */
static inline bool attr_hot_fastcall frame_variable_is_flat(frame_s *fp, frame_t slot)
	return !frame_test_flag(fp, slot) && TYPE_IS_FLAT(frame_get_type_of_local(fp, slot));

/* Release the reference held by a slot, if its flag says it holds a pointer. */
static attr_always_inline void attr_hot_fastcall frame_free(frame_s *fp, frame_t slot)
	if (frame_test_and_clear_flag(fp, slot)) {
		pointer_dereference(*frame_pointer(fp, slot));
		/* when the flag is not set, we must not clear the slot */
		pointer_poison(frame_pointer(fp, slot));

/* As frame_free, but always leaves the slot holding pointer_empty(). */
static attr_always_inline void attr_hot_fastcall frame_free_and_clear(frame_s *fp, frame_t slot)
	frame_free(fp, slot);
	*frame_pointer(fp, slot) = pointer_empty();

/* Store ptr into the slot, releasing whatever pointer it previously held
 * (the flag ends up set either way: test_and_set). */
static attr_always_inline void attr_hot_fastcall frame_free_and_set_pointer(frame_s *fp, frame_t slot, pointer_t ptr)
	if (frame_test_and_set_flag(fp, slot))
		pointer_dereference(*frame_pointer(fp, slot));
	*frame_pointer(fp, slot) = ptr;

/* Store ptr into a slot that must not currently hold a pointer. */
static attr_always_inline void frame_set_pointer(frame_s *fp, frame_t slot, pointer_t ptr)
	ajla_assert(!frame_test_flag(fp, slot), (file_line, "frame_set_pointer: flag for slot %"PRIuMAX" already set", (uintmax_t)slot));
	frame_set_flag(fp, slot);
	*frame_pointer(fp, slot) = ptr;

/* Read the slot's pointer with a reference for the caller.  With deref the
 * frame's own reference is stolen (slot emptied); a new reference is taken
 * only when the frame did not own one.  The goto deliberately jumps into the
 * else branch so both paths share the pointer_reference_owned call. */
static attr_always_inline pointer_t frame_get_pointer_reference(frame_s *fp, frame_t slot, bool deref)
	pointer_t ptr = *frame_pointer(fp, slot);
	pointer_validate(ptr);
	if (!deref) {
		goto do_ref_owned;
	} else {
		*frame_pointer(fp, slot) = pointer_empty();
		if (!frame_test_and_clear_flag(fp, slot))
			do_ref_owned:
			pointer_reference_owned(ptr);
	return ptr;
/**********************
 * DATA SERIALIZATION *
 **********************/

struct stack_entry;

/* Callbacks describing how to serialize one kind of object: obtain its
 * address, report size/alignment and contained sub-pointers, and patch the
 * copy after relocation. */
struct stack_entry_type {
	void *(*get_ptr)(struct stack_entry *ste);
	bool (*get_properties)(struct stack_entry *ste, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_len);
	void (*fixup_after_copy)(void *new_ptr);
	void (*fixup_sub_ptr)(void *loc, uintptr_t offset);
	bool wrap_on_save;	/* NOTE(review): presumably wraps the object when saving; confirm in save.c */

/* One pending object on the serialization work stack. */
struct stack_entry {
	const struct stack_entry_type *t;
	void *ptr;
	size_t align;
	size_t size;

bool data_save(void *p, uintptr_t offset, size_t *align, size_t *size, struct stack_entry **subptrs, size_t *subptrs_l);
bool data_save_init_stack(pointer_t *ptr, struct stack_entry **stk, size_t *stk_l);
1873 /*********
1874 * TRAPS *
1875 *********/
1877 void *data_trap_lookup(void *ptr);
1878 void data_trap_insert(struct data *codegen);
1880 #endif