/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
/* Prefix this unit's public symbols with the name() mangler so that several
 * specializations of this file can be linked into one binary. */
#define stack_alloc name(stack_alloc)
#define stack_expand name(stack_expand)
#define stack_split name(stack_split)
#define stack_trace_init name(stack_trace_init)
#define stack_trace_free name(stack_trace_free)
#define stack_trace_capture name(stack_trace_capture)
#define stack_trace_string name(stack_trace_string)
#define stack_trace_print name(stack_trace_print)
#define stack_trace_get_location name(stack_trace_get_location)
#define data_alloc_flat_mayfail name(data_alloc_flat_mayfail)
#define data_alloc_longint_mayfail name(data_alloc_longint_mayfail)
#define data_alloc_record_mayfail name(data_alloc_record_mayfail)
#define data_alloc_option_mayfail name(data_alloc_option_mayfail)
#define data_alloc_array_flat_mayfail name(data_alloc_array_flat_mayfail)
#define data_alloc_array_slice_mayfail name(data_alloc_array_slice_mayfail)
#define data_alloc_array_pointers_mayfail name(data_alloc_array_pointers_mayfail)
#define data_alloc_array_same_mayfail name(data_alloc_array_same_mayfail)
#define data_alloc_array_incomplete name(data_alloc_array_incomplete)
#define data_alloc_function_reference_mayfail name(data_alloc_function_reference_mayfail)
#define data_fill_function_reference name(data_fill_function_reference)
#define data_fill_function_reference_flat name(data_fill_function_reference_flat)
#define data_alloc_resource_mayfail name(data_alloc_resource_mayfail)
#define out_of_memory_ptr name(out_of_memory_ptr)
#define thunk_alloc_exception_error name(thunk_alloc_exception_error)
#define pointer_error name(pointer_error)
#define thunk_exception_string name(thunk_exception_string)
#define thunk_exception_payload name(thunk_exception_payload)
#define thunk_exception_print name(thunk_exception_print)
#define thunk_alloc_function_call name(thunk_alloc_function_call)
#define thunk_alloc_blackhole name(thunk_alloc_blackhole)
#define are_there_dereferenced name(are_there_dereferenced)
#define execution_control_unlink_and_submit name(execution_control_unlink_and_submit)
#define execution_control_acquire name(execution_control_acquire)
#define wake_up_wait_list name(wake_up_wait_list)
#define thunk_terminate name(thunk_terminate)
#define execution_control_alloc name(execution_control_alloc)
#define execution_control_free name(execution_control_free)
#define execution_control_terminate name(execution_control_terminate)
#define free_cache_entry name(free_cache_entry)
#define pointer_dereference_ name(pointer_dereference_)
#define pointer_reference_ name(pointer_reference_)
#define pointer_reference_maybe_ name(pointer_reference_maybe_)
#define copy_from_function_reference_to_frame name(copy_from_function_reference_to_frame)
#define pointer_follow_thunk_ name(pointer_follow_thunk_)
#define pointer_resolve_result name(pointer_resolve_result)
#define pointer_follow_wait name(pointer_follow_wait)
#define data_is_nan name(data_is_nan)
#define flat_to_data name(flat_to_data)
#define struct_clone name(struct_clone)
#define pointer_deep_eval name(pointer_deep_eval)
#define frame_pointer_deep_eval name(frame_pointer_deep_eval)
#define mpint_export name(mpint_export)
#define mpint_export_unsigned name(mpint_export_unsigned)
#define data_compare_numbers name(data_compare_numbers)
#define data_compare name(data_compare)
#define save_index_mp name(save_index_mp)
#define data_save name(data_save)
#define data_save_init_stack name(data_save_init_stack)
#define data_trap_lookup name(data_trap_lookup)
#define data_trap_insert name(data_trap_insert)
99 #define trace_enabled name(trace_enabled)
100 extern atomic_type uchar_efficient_t trace_enabled
;
106 struct execution_control
;
113 #if (INT_MASK & (1 << 2)) && defined(POINTER_COMPRESSION)
114 #define INT_DEFAULT_BITS 32
115 #elif (INT_MASK & (1 << 3)) && defined(BIT64)
116 #define INT_DEFAULT_BITS 64
117 #elif (INT_MASK & (1 << 2))
118 #define INT_DEFAULT_BITS 32
119 #elif (INT_MASK & (1 << 3))
120 #define INT_DEFAULT_BITS 64
121 #elif (INT_MASK & (1 << 4))
122 #define INT_DEFAULT_BITS 128
123 #elif (INT_MASK & (1 << 1))
124 #define INT_DEFAULT_BITS 16
125 #elif (INT_MASK & (1 << 0))
126 #define INT_DEFAULT_BITS 8
131 #define int_default_t cat4(int,INT_DEFAULT_BITS,_,t)
132 #define uint_default_t cat4(uint,INT_DEFAULT_BITS,_,t)
134 /*#define INT_DEFAULT_N log_2(INT_DEFAULT_BITS / 8)*/
135 #if INT_DEFAULT_BITS == 8
136 #define INT_DEFAULT_N 0
137 #elif INT_DEFAULT_BITS == 16
138 #define INT_DEFAULT_N 1
139 #elif INT_DEFAULT_BITS == 32
140 #define INT_DEFAULT_N 2
141 #elif INT_DEFAULT_BITS == 64
142 #define INT_DEFAULT_N 3
143 #elif INT_DEFAULT_BITS == 128
144 #define INT_DEFAULT_N 4
/*
 * scalar_align: the strictest alignment any scalar type needs.  Because
 * alignments are powers of two, OR-ing together (align - 1) of every
 * candidate and adding 1 yields the maximum alignment.
 */
#ifdef HAVE_MAX_ALIGN_T
#define scalar_align_max_align_t_ (align_of(max_align_t) - 1) |
#else
#define scalar_align_max_align_t_
#endif
#define scalar_align_fixed_(n, s, u, sz, bits) (align_of(s) - 1) |
#define scalar_align_int_(n, s, u, sz, bits) (align_of(s) - 1) |
#define scalar_align_real_(n, t, nt, pack, unpack) (align_of(t) - 1) |
#define scalar_align (( \
	for_all_fixed(scalar_align_fixed_) \
	for_all_int(scalar_align_int_, for_all_empty) \
	for_all_real(scalar_align_real_, for_all_empty) \
	scalar_align_max_align_t_ \
	(align_of(ajla_flat_option_t) - 1) | \
	(align_of(void *) - 1) | \
	0) + 1)
/* NOTE(review): the trailing "0) + 1)" of scalar_align was lost in
 * extraction and reconstructed -- TODO confirm against upstream. */
#if defined(POINTER_COMPRESSION)

/* Verify that an allocator-returned pointer is representable under the
 * compression scheme: fatal error when fat is set, debug assertion
 * otherwise.  (NOTE(review): the do/while and if/else glue lines were lost
 * in extraction and reconstructed -- TODO confirm.) */
#define pointer_compress_test(ptr, fat)					\
do {									\
	if (unlikely((ptr_to_num(ptr) & ~((uintptr_t)0xfffffffeUL << POINTER_COMPRESSION)) != 0)) {\
		if (fat)						\
			fatal("the allocator returned pointer %p that is not compatible with %d-bit compression", (ptr), POINTER_COMPRESSION);\
		else							\
			ajla_assert(false, (file_line, "pointer_compress_test: pointer %p is not compatible with %d-bit compression", (ptr), POINTER_COMPRESSION));\
	}								\
} while (0)

/* Compress a native pointer to 32 bits by shifting out the alignment bits. */
static inline uint32_t pointer_compress(const void *ptr)
{
	return (uint32_t)(ptr_to_num(ptr) >> POINTER_COMPRESSION);
}

/* Inverse of pointer_compress. */
static inline void *pointer_decompress(uint32_t num)
{
	return num_to_ptr((uintptr_t)num << POINTER_COMPRESSION);
}

#define pointer_compress_alignment (2 << POINTER_COMPRESSION)

#elif defined(POINTER_TAG)

#define pointer_compress_test(ptr, fat)					\
do {									\
	if (unlikely((ptr_to_num(ptr) & POINTER_TAG) != 0)) {		\
		if (fat)						\
			fatal("the allocator returned pointer %p that is not compatible with tag %"PRIuMAX"", (ptr), (uintmax_t)POINTER_TAG);\
		else							\
			ajla_assert(false, (file_line, "pointer_compress_test: pointer %p is not compatible with %"PRIuMAX"", (ptr), (uintmax_t)POINTER_TAG));\
	}								\
} while (0)

#define pointer_compress_alignment (POINTER_TAG * 2)

#else

#define pointer_compress_test(ptr, fat) do { } while (0)
#define pointer_compress_alignment 1

#endif

#if !defined(POINTER_COMPRESSION)
/* Without compression these are plain pointer<->integer conversions. */
#define pointer_compress ptr_to_num
#define pointer_decompress num_to_ptr
#endif
226 #if defined(POINTER_COMPRESSION)
228 typedef uint32_t pointer_t
;
230 #define pointer_validate(ptr)
232 static attr_always_inline pointer_t
pointer_thunk(struct thunk
*thunk
)
234 pointer_compress_test(thunk
, false);
235 return pointer_compress(thunk
) | 1;
238 static attr_always_inline pointer_t
pointer_data(const struct data
*data
)
240 pointer_compress_test(data
, false);
241 return pointer_compress(data
);
244 static attr_always_inline
bool pointer_is_thunk(const pointer_t ptr
)
249 static attr_always_inline
void *pointer_get_value_(const pointer_t ptr
)
251 return pointer_decompress(ptr
);
254 static attr_always_inline
void *pointer_get_value_strip_tag_(const pointer_t ptr
)
256 return pointer_get_value_(ptr
& ~(uint32_t)1);
259 static attr_always_inline
void *pointer_get_value_sub_tag_(const pointer_t ptr
)
261 return pointer_get_value_(ptr
- 1);
264 #elif defined(POINTER_IGNORE_START)
266 #define POINTER_TAG_AT_ALLOC
268 typedef void *pointer_t
;
270 #define pointer_validate(ptr)
272 static attr_always_inline pointer_t
pointer_thunk(struct thunk
*thunk
)
274 ajla_assert((ptr_to_num(thunk
) & POINTER_IGNORE_TOP
) != 0, (file_line
, "pointer_thunk: pointer is not tagged: %p", thunk
));
275 return (pointer_t
)thunk
;
278 static attr_always_inline pointer_t
pointer_data(const struct data
*data
)
280 ajla_assert((ptr_to_num(data
) & POINTER_IGNORE_TOP
) == 0, (file_line
, "pointer_data: pointer is tagged: %p", data
));
281 return (pointer_t
)data
;
284 static attr_always_inline
bool pointer_is_thunk(const pointer_t ptr
)
286 return (ptr_to_num(ptr
) & POINTER_IGNORE_TOP
) != 0;
289 static attr_always_inline
void *pointer_get_value_(const pointer_t ptr
)
294 static attr_always_inline
void *pointer_get_value_strip_tag_(const pointer_t ptr
)
299 static attr_always_inline
void *pointer_get_value_sub_tag_(const pointer_t ptr
)
304 #elif defined(POINTER_TAG)
306 #define POINTER_TAG_USED
308 typedef void *pointer_t
;
310 #define pointer_validate(ptr)
312 static attr_always_inline pointer_t
pointer_thunk(struct thunk
*thunk
)
314 return POINTER_TAG_ADD(thunk
);
317 static attr_always_inline pointer_t
pointer_data(const struct data
*data
)
319 return (pointer_t
)data
;
322 static attr_always_inline
bool pointer_is_thunk(const pointer_t ptr
)
324 return POINTER_TAG_GET(ptr
) != 0;
327 static attr_always_inline
void *pointer_get_value_(const pointer_t ptr
)
332 static attr_always_inline
void *pointer_get_value_strip_tag_(const pointer_t ptr
)
334 return POINTER_TAG_CLEAR(ptr
);
337 static attr_always_inline
void *pointer_get_value_sub_tag_(const pointer_t ptr
)
339 return POINTER_TAG_SUB(ptr
);
345 #define POINTER_THUNK_BIAS 0x20
347 #define POINTER_THUNK_BIAS 0x00
355 #define pointer_validate(ptr_) \
357 ajla_assert((unsigned)((ptr_).thunk - POINTER_THUNK_BIAS) <= 1, (file_line, "pointer_validate: invalid pointer type %x, value %p", ptr_.thunk, ptr_.ptr));\
358 ajla_assert((ptr_).ptr != BAD_POINTER_1 && (ptr_).ptr != BAD_POINTER_2 && (ptr_).ptr != BAD_POINTER_3, (file_line, "pointer_validate: invalid pointer type %x, value %p", ptr_.thunk, ptr_.ptr));\
361 static attr_always_inline pointer_t
pointer_thunk(struct thunk
*thunk
)
365 ptr
.thunk
= POINTER_THUNK_BIAS
+ 1;
369 static attr_always_inline pointer_t
pointer_data(const struct data
*data
)
372 ptr
.ptr
= (void *)data
;
373 ptr
.thunk
= POINTER_THUNK_BIAS
;
377 static attr_always_inline
bool pointer_is_thunk(const pointer_t ptr
)
379 pointer_validate(ptr
);
380 ajla_assert((unsigned)(ptr
.thunk
- POINTER_THUNK_BIAS
) <= 1, (file_line
, "pointer_is_thunk: invalid pointer type %x", ptr
.thunk
));
381 return (bool)(ptr
.thunk
- POINTER_THUNK_BIAS
);
384 static attr_always_inline
void *pointer_get_value_(const pointer_t ptr
)
386 pointer_validate(ptr
);
390 static attr_always_inline
void *pointer_get_value_strip_tag_(const pointer_t ptr
)
392 pointer_validate(ptr
);
396 static attr_always_inline
void *pointer_get_value_sub_tag_(const pointer_t ptr
)
398 pointer_validate(ptr
);
404 static attr_always_inline
bool pointer_is_equal(pointer_t ptr1
, pointer_t ptr2
)
407 #if defined(POINTER_COMPRESSION) || defined(POINTER_TAG_USED) || defined(POINTER_TAG_AT_ALLOC)
410 ret
= likely(ptr1
.ptr
== ptr2
.ptr
) && likely(ptr1
.thunk
== ptr2
.thunk
);
415 static attr_always_inline pointer_t
pointer_empty(void)
417 #if defined(POINTER_COMPRESSION)
420 return pointer_data(NULL
);
424 static attr_always_inline
bool pointer_is_empty(pointer_t ptr
)
426 return pointer_is_equal(ptr
, pointer_empty());
429 static attr_always_inline pointer_t
pointer_mark(void)
431 #if defined(POINTER_COMPRESSION)
433 #elif defined(POINTER_TAG_AT_ALLOC)
434 return (pointer_t
)POINTER_IGNORE_TOP
;
436 return pointer_thunk(NULL
);
440 static attr_always_inline
bool pointer_is_mark(pointer_t ptr
)
442 return pointer_is_equal(ptr
, pointer_mark());
445 static attr_always_inline
void pointer_poison(pointer_t attr_unused
*ptr
)
448 *ptr
= pointer_data((struct data
*)num_to_ptr(2048));
452 #define verify_thunk_(ptr_, value_, file_line_) ajla_assert(pointer_is_thunk(ptr_) == (value_), (file_line_, "pointer %p is %sa thunk", pointer_get_value_(ptr_), (value_) ? "not " : ""))
454 static attr_always_inline
struct thunk
*pointer_get_thunk_(pointer_t ptr argument_position
)
456 verify_thunk_(ptr
, true, caller_file_line
);
457 return (struct thunk
*)pointer_get_value_sub_tag_(ptr
);
460 static attr_always_inline
struct data
*pointer_get_data_(pointer_t ptr argument_position
)
462 verify_thunk_(ptr
, false, caller_file_line
);
463 return (struct data
*)pointer_get_value_(ptr
);
466 #define pointer_get_thunk(ptr_) pointer_get_thunk_(ptr_ pass_file_line)
467 #define pointer_get_data(ptr_) pointer_get_data_(ptr_ pass_file_line)
/* Each frame variable occupies one slot of slot_size bytes (large enough to
 * hold a pointer_t). */
#define slot_bits ( \
	sizeof(pointer_t) <= 2 ? 1 : \
	sizeof(pointer_t) <= 4 ? 2 : \
	sizeof(pointer_t) <= 8 ? 3 : \
	sizeof(pointer_t) <= 16 ? 4 : \
	5)
/* NOTE(review): the final "5)" branch of slot_bits was lost in extraction
 * and reconstructed -- TODO confirm against upstream. */
#define slot_size ((size_t)1 << slot_bits)
#if defined(ARCH_ALPHA) || defined(ARCH_PARISC)
/*
 * This improves generated code on parisc.
 * The ldd/std instructions require 8-byte alignment.
 * Aligning the offset avoids offset-generating instructions.
 *
 * On alpha, we need this, so that we can access flags using the ldq
 * instruction.
 */
#define slot_align maximum(slot_size, 8)
#else
#define slot_align slot_size
#endif

#define max_frame_align maximum(scalar_align, slot_align)
#define frame_align maximum(scalar_align, slot_align)
500 static inline void *ptrcomp_verify(void *ptr
)
502 pointer_compress_test(ptr
, true);
506 #define mem_align_compressed_mayfail(type, size, align, mayfail) cast_ptr(type, ptrcomp_verify(mem_align_mayfail(void *, size, maximum(pointer_compress_alignment, align), mayfail)))
507 #define mem_calign_compressed_mayfail(type, size, align, mayfail) cast_ptr(type, ptrcomp_verify(mem_calign_mayfail(void *, size, maximum(pointer_compress_alignment, align), mayfail)))
508 #define mem_alloc_compressed_mayfail(type, size, mayfail) mem_align_compressed_mayfail(type, size, 1, mayfail)
509 #define mem_free_compressed(ptr) mem_free_aligned(ptr)
/* Return true when all size bytes of flat are equal (also for size == 0). */
static inline bool data_element_is_const(const unsigned char *flat, size_t size)
{
	size_t i;
	for (i = 0; i < size; i++)
		if (flat[i] != flat[0])
			return false;
	return true;
}
/* Opaque frame type; frames are manipulated through the accessors below. */
typedef struct frame_s_ frame_s;

/* NOTE(review): the preprocessor condition choosing between the macro and
 * inline-function forms of frame_char_/frame_uint32_/frame_uint64_ was lost
 * in extraction; the macro form is kept active -- TODO restore. */
#if 1
#define frame_char_(fp) (cast_ptr(unsigned char *, fp))
#define frame_uint32_(fp) (cast_ptr(uint32_t *, fp))
#define frame_uint64_(fp) (cast_ptr(uint64_t *, fp))
#else
static attr_always_inline unsigned char *frame_char_(frame_s *fp)
{
	return cast_ptr(unsigned char *, fp);
}

static attr_always_inline uint32_t *frame_uint32_(frame_s *fp)
{
	return cast_ptr(uint32_t *, fp);
}

static attr_always_inline uint64_t *frame_uint64_(frame_s *fp)
{
	return cast_ptr(uint64_t *, fp);
}
#endif

/* frame_var: address of slot idx within the frame; frame_idx: inverse. */
#define frame_var(fp, idx) (cast_ptr(unsigned char *, __builtin_assume_aligned(frame_char_(fp) + ((size_t)(idx) << slot_bits), slot_size)))
#define frame_idx(fp, var) ((frame_t)((cast_ptr(char *, var) - frame_char_(fp)) / slot_size))

#define frame_slot_(p, type) \
	(cast_ptr(type *, assert_alignment(p, align_of(type))))
#define frame_slot(fp, pos, type) \
	frame_slot_(frame_var(fp, pos), type)

#define frame_pointer(p, pos) \
	frame_slot(p, pos, pointer_t)
560 #if defined(HAVE_BITWISE_FRAME)
561 #define frame_flags_per_slot_bits (slot_bits + 3)
562 #if defined(INLINE_ASM_GCC_X86)
563 #define bitmap_64bit 0
564 static attr_always_inline
void frame_set_flag(frame_s
*fp
, frame_t idx
)
566 __asm__
volatile("bts %k0, %1"::"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory");
568 static attr_always_inline
void frame_clear_flag(frame_s
*fp
, frame_t idx
)
570 __asm__
volatile("btr %k0, %1"::"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory");
572 static attr_always_inline
bool frame_test_flag(frame_s
*fp
, frame_t idx
)
574 #ifndef INLINE_ASM_GCC_LABELS
576 __asm__
volatile("bt %k1, %2; setc %0":"=q"(res
):"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory");
579 __asm__
goto("bt %k0, %1; jc %l[flag_set]"::"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory":flag_set
);
585 static attr_always_inline
bool frame_test_2(frame_s
*fp
, frame_t idx1
, frame_t idx2
)
587 #ifndef INLINE_ASM_GCC_LABELS
588 return frame_test_flag(fp
, idx1
) || frame_test_flag(fp
, idx2
);
590 __asm__
goto("bt %k0, %2; jc 1f; bt %k1, %2; 1:jc %l[flag_set]"::"r"((size_t)idx1
),"r"((size_t)idx2
),"m"(*(unsigned char *)fp
):"cc","memory":flag_set
);
596 static attr_always_inline
bool frame_test_and_set_flag(frame_s
*fp
, frame_t idx
)
598 #ifndef INLINE_ASM_GCC_LABELS
600 __asm__
volatile("bts %k1, %2; setc %0":"=q"(res
):"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory");
603 __asm__
goto("bts %k0, %1; jc %l[flag_set]"::"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory":flag_set
);
609 static attr_always_inline
bool frame_test_and_clear_flag(frame_s
*fp
, frame_t idx
)
611 #ifndef INLINE_ASM_GCC_LABELS
613 __asm__
volatile("btr %k1, %2; setc %0":"=q"(res
):"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory");
616 __asm__
goto("btr %k0, %1; jc %l[flag_set]"::"r"((size_t)idx
),"m"(*(unsigned char *)fp
):"cc","memory":flag_set
);
623 #if defined(ARCH_ARM64) || defined(ARCH_RISCV64)
624 #define bitmap_64bit (slot_size >= sizeof(uint64_t) && EFFICIENT_WORD_SIZE >= 64)
626 #define bitmap_64bit 0
628 static attr_always_inline
void frame_set_flag(frame_s
*fp
, frame_t idx
)
631 frame_uint64_(fp
)[idx
/ 64] |= (uint64_t)1 << (idx
& 63);
633 frame_uint32_(fp
)[idx
/ 32] |= (uint32_t)1 << (idx
& 31);
636 static attr_always_inline
void frame_clear_flag(frame_s
*fp
, frame_t idx
)
639 frame_uint64_(fp
)[idx
/ 64] &= ~((uint64_t)1 << (idx
& 63));
641 frame_uint32_(fp
)[idx
/ 32] &= ~((uint32_t)1 << (idx
& 31));
644 static attr_always_inline
bool frame_test_flag(frame_s
*fp
, frame_t idx
)
647 return (frame_uint64_(fp
)[idx
/ 64] & ((uint64_t)1 << (idx
& 63))) != 0;
649 return (frame_uint32_(fp
)[idx
/ 32] & ((uint32_t)1 << (idx
& 31))) != 0;
652 static attr_always_inline
bool frame_test_and_set_flag(frame_s
*fp
, frame_t idx
)
656 uint64_t val
= frame_uint64_(fp
)[idx
/ 64];
657 ret
= (val
& ((uint64_t)1 << (idx
& 63))) != 0;
658 val
|= (uint64_t)1 << (idx
& 63);
659 frame_uint64_(fp
)[idx
/ 64] = val
;
662 uint32_t val
= frame_uint32_(fp
)[idx
/ 32];
663 ret
= (val
& ((uint32_t)1 << (idx
& 31))) != 0;
664 val
|= (uint32_t)1 << (idx
& 31);
665 frame_uint32_(fp
)[idx
/ 32] = val
;
669 static attr_always_inline
bool frame_test_and_clear_flag(frame_s
*fp
, frame_t idx
)
673 uint64_t val
= frame_uint64_(fp
)[idx
/ 64];
674 ret
= (val
& ((uint64_t)1 << (idx
& 63))) != 0;
675 val
&= ~((uint64_t)1 << (idx
& 63));
676 frame_uint64_(fp
)[idx
/ 64] = val
;
679 uint32_t val
= frame_uint32_(fp
)[idx
/ 32];
680 ret
= (val
& ((uint32_t)1 << (idx
& 31))) != 0;
681 val
&= ~((uint32_t)1 << (idx
& 31));
682 frame_uint32_(fp
)[idx
/ 32] = val
;
686 static attr_always_inline
bool frame_test_2(frame_s
*fp
, frame_t idx1
, frame_t idx2
)
688 return frame_test_flag(fp
, idx1
) || frame_test_flag(fp
, idx2
);
692 #define frame_flags_per_slot_bits (slot_bits)
693 static attr_always_inline
void frame_set_flag(frame_s
*fp
, frame_t idx
)
695 ajla_assert(frame_char_(fp
)[idx
] <= 1, (file_line
, "frame_set_flag: invalid value %d at index %"PRIuMAX
"", (int)frame_char_(fp
)[idx
], (uintmax_t)idx
));
696 frame_char_(fp
)[idx
] = 1;
698 static attr_always_inline
void frame_clear_flag(frame_s
*fp
, frame_t idx
)
700 ajla_assert(frame_char_(fp
)[idx
] <= 1, (file_line
, "frame_clear_flag: invalid value %d at index %"PRIuMAX
"", (int)frame_char_(fp
)[idx
], (uintmax_t)idx
));
701 frame_char_(fp
)[idx
] = 0;
703 static attr_always_inline
bool frame_test_flag(frame_s
*fp
, frame_t idx
)
707 __asm__ ("movb (%2,%1), %0" : "=q"(r1
) : "r"(fp
), "r"(idx
) : "memory");
710 unsigned char val
= frame_char_(fp
)[idx
];
711 ajla_assert(val
<= 1, (file_line
, "frame_test_flag: invalid value %d at index %"PRIuMAX
"", (int)val
, (uintmax_t)idx
));
715 static attr_always_inline
bool frame_test_and_set_flag(frame_s
*fp
, frame_t idx
)
717 unsigned char val
= frame_char_(fp
)[idx
];
718 ajla_assert(val
<= 1, (file_line
, "frame_test_and_set_flag: invalid value %d at index %"PRIuMAX
"", (int)val
, (uintmax_t)idx
));
719 if (val
) return true;
720 frame_char_(fp
)[idx
] = 1;
723 static attr_always_inline
bool frame_test_and_clear_flag(frame_s
*fp
, frame_t idx
)
725 unsigned char val
= frame_char_(fp
)[idx
];
726 ajla_assert(val
<= 1, (file_line
, "frame_test_and_clear_flag: invalid value %d at index %"PRIuMAX
"", (int)val
, (uintmax_t)idx
));
727 if (!val
) return false;
728 frame_char_(fp
)[idx
] = 0;
732 * On many RISC architectures, gcc generates bogus unsigned extension
733 * instruction after the "or" operator and it generates better code with "plus".
734 * On CISC architectures, it generates better code with "or".
736 #if defined(__arm__) || defined(__i386__) || defined(__m68k__) || defined(__sh__) || defined(__s390__) || defined(__x86_64__)
737 #define frame_test_operator |
738 #else /* defined(__alpha__) || defined(__aarch64__) || defined(__hppa) || defined(__mips) || defined(__powerpc__) */
739 #define frame_test_operator +
740 #endif /* doesn't care: defined(__riscv) || defined(__sparc__) */
742 #define frame_test_2(fp, idx1, idx2) (frame_char_(fp)[idx1] frame_test_operator frame_char_(fp)[idx2])
744 static attr_always_inline
bool frame_test_2(frame_s
*fp
, frame_t idx1
, frame_t idx2
)
746 return frame_char_(fp
)[idx1
] frame_test_operator
frame_char_(fp
)[idx2
];
751 #define bitmap_slots(n_vars) (round_up((frame_t)(n_vars), 1 << frame_flags_per_slot_bits) >> frame_flags_per_slot_bits)
753 static inline void memcpy_slots(unsigned char *dest
, const unsigned char *src
, frame_t n_slots
)
755 src
= assert_alignment(src
, slot_size
);
756 dest
= assert_alignment(dest
, slot_size
);
757 memcpy_fast(dest
, src
, n_slots
* slot_size
);
760 #define MIN_USEABLE_SLOT 1
768 type_tag_t data_type
;
769 unsigned char flexible_array
[FLEXIBLE_ARRAY_GCC
];
772 struct data_longint
{
777 const struct type
*definition
;
778 char flexible_array
[FLEXIBLE_ARRAY_GCC
];
782 ajla_option_t option
;
786 struct data_array_flat
{
787 int_default_t n_used_entries
;
788 int_default_t n_allocated_entries
;
789 const struct type
*type
;
790 unsigned char flexible_array
[FLEXIBLE_ARRAY_GCC
];
793 struct data_array_slice
{
794 int_default_t n_entries
;
796 const struct type
*type
;
797 unsigned char *flat_data_minus_data_array_offset
;
800 struct data_array_pointers
{
801 int_default_t n_used_entries
;
802 int_default_t n_allocated_entries
;
804 pointer_t pointer_array
[FLEXIBLE_ARRAY_GCC
];
807 #if !defined(DEBUG_ARRAY_INDICES) && !defined(UNUSUAL)
808 #if !defined(POINTER_COMPRESSION) && defined(SIZEOF_VOID_P) && SIZEOF_VOID_P && SIZEOF_VOID_P * 8 <= INT_DEFAULT_BITS
809 #define SCALAR_ARRAY_INDEX_T
810 #elif defined(POINTER_COMPRESSION) && 32 <= INT_DEFAULT_BITS
811 #define SCALAR_ARRAY_INDEX_T
815 #ifndef SCALAR_ARRAY_INDEX_T
819 #ifdef DEBUG_ARRAY_INDICES
824 typedef uint_default_t array_index_t
;
827 typedef uchar_efficient_t btree_entries_t
;
829 struct data_array_same
{
830 array_index_t n_entries
;
835 array_index_t end_index
;
839 struct data_array_btree
{
840 btree_entries_t n_used_btree_entries
;
841 btree_entries_t n_allocated_btree_entries
;
842 uchar_efficient_t depth
;
843 struct btree_level btree
[FLEXIBLE_ARRAY_GCC
];
846 struct data_array_incomplete
{
847 pointer_t first
; /* a pointer to non-empty array */
848 pointer_t next
; /* a pointer to array or array_incomplete or thunk */
851 struct function_argument
{
852 type_tag_t tag
; /* TYPE_TAG_unknown or primitive type tag */
855 unsigned char slot
[slot_size
];
859 struct data_function_reference
{
864 uchar_efficient_t is_indirect
;
865 arg_t n_curried_arguments
;
867 /* deliberately misalign variables to catch alignment errors */
870 struct function_argument arguments
[FLEXIBLE_ARRAY_GCC
];
873 struct data_resource
{
874 void (*close
)(struct data
*);
876 /* deliberately misalign variables to catch alignment errors */
879 char flexible_array
[FLEXIBLE_ARRAY_GCC
];
882 /* a rough estimation to make sure that the size of data_function_reference doesn't overflow */
883 #define ARG_LIMIT (sign_bit(size_t) / sizeof(struct function_argument))
885 struct local_variable
{
886 const struct type
*type
;
889 struct local_variable_flags
{
890 bool may_be_borrowed
;
897 char may_be_borrowed
;
901 struct line_position
{
908 struct cache_entry_return
{
909 struct cache_entry
*ce
;
911 struct execution_control
*ex
;
915 struct tree_entry entry
;
918 struct list wait_list
;
919 struct cache_entry_return
*returns
;
920 pointer_t arguments
[FLEXIBLE_ARRAY
];
924 atomic_type profile_counter_t counter
;
927 struct module_designator
;
929 struct data_function
{
930 frame_t frame_slots
; /* (frame_offset + args + ret + vars) / slot_size */
931 frame_t n_bitmap_slots
;
933 arg_t n_return_values
;
936 const struct local_variable
*local_variables
; /* indexed by slot */
937 const struct local_variable_flags
*local_variables_flags
; /* indexed by slot */
938 const struct local_arg
*args
; /* indexed by argument */
940 const struct type
*record_definition
;
941 const struct module_designator
*module_designator
;
942 const struct function_designator
*function_designator
;
944 struct line_position
*lp
;
948 atomic_type uchar_efficient_t codegen_failed
;
950 struct data
*loaded_cache
;
952 atomic_type profile_counter_t profiling_counter
;
953 atomic_type profile_counter_t call_counter
;
954 struct escape_data
*escape_data
;
957 frame_t local_directory_size
;
958 pointer_t
*local_directory
[FLEXIBLE_ARRAY_GCC
];
961 struct data_function_types
{
963 const struct type
*types
[FLEXIBLE_ARRAY_GCC
];
967 #if defined(ARCH_X86_32) || defined(ARCH_ARM32) || defined(ARCH_MIPS32) || defined(ARCH_POWER32) || defined(ARCH_SPARC32)
968 typedef uint64_t code_return_t
;
972 #if defined(ARCH_MIPS64) || defined(ARCH_PARISC64) || defined(ARCH_S390) || defined(ARCH_SPARC64)
979 struct cg_upcall_vector_s
;
983 size_t destination_ip
;
986 struct data_codegen
{
987 #ifdef HAVE_CODEGEN_TRAPS
988 struct tree_entry codegen_tree
;
989 struct trap_record
*trap_records
;
990 size_t trap_records_size
;
992 void *unoptimized_code_base
;
993 size_t unoptimized_code_size
;
994 struct data
*function
;
998 char *unoptimized_code
[FLEXIBLE_ARRAY_GCC
];
1002 union internal_arg
{
1007 struct data_internal
{
1008 void *(*fn
)(frame_s
*fp
, const code_t
*ip
, union internal_arg
*);
1009 union internal_arg arguments
[FLEXIBLE_ARRAY_GCC
];
1015 size_t offsets
[FLEXIBLE_ARRAY_GCC
];
1018 struct data_saved_cache
{
1021 arg_t n_return_values
;
1022 pointer_t pointers
[FLEXIBLE_ARRAY_GCC
];
1025 typedef uchar_efficient_t tag_t
;
1027 #define DATA_TAG_START 1
1028 #define DATA_TAG_flat 1
1029 #define DATA_TAG_longint 2
1030 #define DATA_TAG_record 3
1031 #define DATA_TAG_option 4
1032 #define DATA_TAG_array_flat 5
1033 #define DATA_TAG_array_slice 6
1034 #define DATA_TAG_array_pointers 7
1035 #define DATA_TAG_array_same 8
1036 #define DATA_TAG_array_btree 9
1037 #define DATA_TAG_array_incomplete 10
1038 #define DATA_TAG_function_reference 11
1039 #define DATA_TAG_resource 12
1040 #define DATA_TAG_function 13
1041 #define DATA_TAG_function_types 14
1043 #define DATA_TAG_codegen 15
1045 #define DATA_TAG_internal 16
1046 #define DATA_TAG_saved 17
1047 #define DATA_TAG_saved_cache 18
1048 #define DATA_TAG_END 19
1050 #define THUNK_TAG_START 19
1051 #define THUNK_TAG_FUNCTION_CALL 19
1052 #define THUNK_TAG_BLACKHOLE 20
1053 #define THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED 21
1054 #define THUNK_TAG_BLACKHOLE_DEREFERENCED 22
1055 #define THUNK_TAG_RESULT 23
1056 #define THUNK_TAG_MULTI_RET_REFERENCE 24
1057 #define THUNK_TAG_EXCEPTION 25
1058 #define THUNK_TAG_END 26
1062 #if defined(POINTER_TAG_AT_ALLOC) && DATA_TAG_END <= (1 << POINTER_IGNORE_BITS) / 2
1063 #define DATA_TAG_AT_ALLOC
1067 refcount_t refcount_
;
1068 #if !defined(REFCOUNT_TAG)
1072 struct data_flat flat
;
1073 struct data_longint longint
;
1074 struct data_record record
;
1075 struct data_option option
;
1076 struct data_array_flat array_flat
;
1077 struct data_array_slice array_slice
;
1078 struct data_array_pointers array_pointers
;
1079 struct data_array_same array_same
;
1080 struct data_array_btree array_btree
;
1081 struct data_array_incomplete array_incomplete
;
1082 struct data_function_reference function_reference
;
1083 struct data_resource resource
;
1085 /* these do not appear on the ajla heap */
1086 struct data_function function
;
1087 struct data_function_types function_types
;
1089 struct data_codegen codegen
;
1091 struct data_internal internal
;
1093 /* this only appears in saved stream */
1094 struct data_saved saved
;
1095 struct data_saved_cache saved_cache
;
/* da_tag_: fetch a data object's tag from wherever it is stored (pointer
 * bits, refcount word, or an explicit field). */
#if defined(DATA_TAG_AT_ALLOC)
#define da_tag_(data) ((tag_t)(ptr_to_num(data) >> POINTER_IGNORE_START))
#elif defined(REFCOUNT_TAG)
#define da_tag_(data) (refcount_tag_get((refcount_const refcount_t *)&(data)->refcount_))
#else
#define da_tag_(data) ((data)->tag)
#endif
#define da_tag(data) (ajla_assert(da_tag_(data) >= DATA_TAG_START && da_tag_(data) < DATA_TAG_END, (file_line, "invalid data tag %u", da_tag_(data))), da_tag_(data))
#define da_assert(data, kind) (ajla_assert(da_tag_(data) == DATA_TAG_##kind, (file_line, "data tag %u, expected %u", da_tag_(data), DATA_TAG_##kind)))
#define da(data, kind) (da_assert(data,kind), &(data)->u_.kind)

/* Offsets of the flexible payloads, rounded up to the required alignment. */
#define data_flat_offset_ (round_up(offsetof(struct data, u_.flat.flexible_array), scalar_align))
#define data_record_offset_ (round_up(offsetof(struct data, u_.record.flexible_array), slot_align))
#define data_array_offset_ (round_up(offsetof(struct data, u_.array_flat.flexible_array), scalar_align))
#define data_resource_offset_ (round_up(offsetof(struct data, u_.resource.flexible_array), scalar_align))

/* NOTE(review): the condition selecting exact vs. padded offsets was lost
 * in extraction (UNUSUAL assumed) -- TODO confirm against upstream. */
#ifndef UNUSUAL
#define data_flat_offset data_flat_offset_
#define data_record_offset data_record_offset_
#define data_array_offset data_array_offset_
#define data_resource_offset data_resource_offset_
#else
/* add some value to make sure that we don't forget it */
#define data_flat_offset (data_flat_offset_ + scalar_align)
#define data_record_offset (data_record_offset_ + slot_align)
#define data_array_offset (data_array_offset_ + scalar_align)
#define data_resource_offset (data_resource_offset_ + scalar_align)
#endif

#define data_function_types_offset offsetof(struct data, u_.function_types.types)
1128 static attr_always_inline
unsigned char *da_flat(struct data
*d
)
1131 return cast_ptr(unsigned char *, d
) + data_flat_offset
;
1133 static attr_always_inline frame_s
*da_record_frame(struct data
*d
)
1135 da_assert(d
,record
);
1136 return cast_ptr(frame_s
*, cast_ptr(const char *, d
) + data_record_offset
);
1138 static attr_always_inline
unsigned char *da_array_flat(struct data
*d
)
1140 da_assert(d
,array_flat
);
1141 return cast_ptr(unsigned char *, d
) + data_array_offset
;
1143 #define DATA_TAG_is_array(tag) ((tag) >= DATA_TAG_array_flat && (tag) <= DATA_TAG_array_btree)
1144 #define da_array_flat_element_size(d) ((size_t)da(d,array_flat)->type->size)
1145 #define da_array_depth(d) (ajla_assert(DATA_TAG_is_array(da_tag(d)), (file_line, "da_array_depth: invalid tag %u", da_tag(d))), da_tag(d) == DATA_TAG_array_btree ? (int)da(d,array_btree)->depth : -1)
1146 #define da_array_assert_son(parent, son) ( \
1147 ajla_assert(da(parent,array_btree)->n_used_btree_entries >= 2 && da(parent,array_btree)->n_used_btree_entries <= BTREE_MAX_SIZE, (file_line, "da_array_assert_son: invalid parent size %"PRIuMAX"", (uintmax_t)da(parent,array_btree)->n_used_btree_entries)),\
1148 ajla_assert(da_array_depth(son) + 1 == da_array_depth(parent), (file_line, "da_array_assert_son: depth mismatch: %d, %d", da_array_depth(parent), da_array_depth(son)))\
/*
 * da_type: return the idx-th type of function fn.  The types live in a
 * separate function_types data object reached through types_ptr; the
 * index is range-checked against n_types.
 */
1151 static attr_always_inline
const struct type
*da_type(struct data
*fn
, size_t idx
)
1153 struct data
*t
= pointer_get_data(da(fn
,function
)->types_ptr
);
1154 ajla_assert(idx
< da(t
,function_types
)->n_types
, (file_line
, "da_type: access out of range: %"PRIuMAX
" >= %"PRIuMAX
"", (uintmax_t)idx
, (uintmax_t)da(t
,function_types
)->n_types
));
1155 return da(t
,function_types
)->types
[idx
];
/* size of a function's frame in bytes / number of its variable slots */
1158 #define function_frame_size(fn) ((size_t)da(fn,function)->frame_slots * slot_size)
1159 #define function_n_variables(fn) ((size_t)da(fn,function)->frame_slots - frame_offset / slot_size)
/* da_resource: pointer to the opaque payload of a resource object */
1161 static inline void *da_resource(struct data
*d
)
1163 da_assert(d
,resource
);
1164 return cast_ptr(void *, cast_ptr(const char *, d
) + data_resource_offset
);
/*
 * data_init_: stamp a freshly allocated data object with its tag.
 * With DATA_TAG_AT_ALLOC the tag is folded into the pointer value
 * (shifted into the ignored high bits); with REFCOUNT_TAG it is stored
 * inside the refcount word, otherwise a plain refcount is initialized.
 * NOTE(review): the #else/#endif lines and the final "return d;" are
 * not visible here (original lines 1174/1177-1183 dropped) - confirm
 * against upstream.
 */
1168 static attr_always_inline
struct data
*data_init_(struct data
*d
, tag_t tag
)
1172 #if defined(DATA_TAG_AT_ALLOC)
1173 d
= cast_cpp(struct data
*, num_to_ptr(ptr_to_num(d
) + ((uintptr_t)tag
<< POINTER_IGNORE_START
)));
1175 #if defined(REFCOUNT_TAG)
1176 refcount_init_tag(&d
->refcount_
, tag
);
1179 refcount_init(&d
->refcount_
);
/*
 * data_pointer_tag: fold a tag into an untyped pointer when
 * DATA_TAG_AT_ALLOC is in effect; otherwise the tag is unused.
 */
1184 static attr_always_inline
void *data_pointer_tag(void *d
, tag_t attr_unused tag
)
1186 #if defined(DATA_TAG_AT_ALLOC)
1187 d
= cast_cpp(void *, num_to_ptr(ptr_to_num(d
) + ((uintptr_t)tag
<< POINTER_IGNORE_START
)));
/*
 * Allocation helpers: allocate (optionally aligned, optionally
 * flexible-array-sized) compressed memory for the given union member
 * "kind" and stamp it with the matching DATA_TAG_ via data_init_.
 */
1192 #define data_alloc(kind, mayfail) data_init_(mem_alloc_compressed_mayfail(struct data *, partial_sizeof(struct data, u_.kind), mayfail), DATA_TAG_##kind)
1193 #define data_align(kind, size, align, mayfail) data_init_(mem_align_compressed_mayfail(struct data *, maximum_maybe0(size, partial_sizeof_lower_bound(struct data)), align, mayfail), DATA_TAG_##kind)
1194 #define data_calign(kind, size, align, mayfail) data_init_(mem_calign_compressed_mayfail(struct data *, maximum_maybe0(size, partial_sizeof_lower_bound(struct data)), align, mayfail), DATA_TAG_##kind)
1195 #define data_alloc_flexible(kind, array, size, mayfail) data_init_(struct_alloc_array_mayfail(mem_alloc_compressed_mayfail, struct data, u_.kind.array, size, mayfail), DATA_TAG_##kind)
/*
 * data_untag_: strip the allocation-time tag bits from a data pointer
 * (inverse of data_init_ tagging).  The mask is built by bit-smearing
 * DATA_TAG_END-1 down to an all-ones mask covering every tag bit; the
 * assert verifies the pointer was actually tagged before clearing.
 */
1197 static inline void *data_untag_(void *d
, const char attr_unused
*fl
)
1199 #if defined(DATA_TAG_AT_ALLOC)
1200 unsigned mask
= DATA_TAG_END
- 1;
1201 mask
= mask
| (mask
>> 1);
1202 mask
= mask
| (mask
>> 2);
1203 mask
= mask
| (mask
>> 4);
1204 mask
= mask
| (mask
>> 8);
1205 ajla_assert((ptr_to_num(d
) & ((uintptr_t)mask
<< POINTER_IGNORE_START
)) != 0, (fl
, "data_untag_: pointer not tagged: %p", d
));
1206 return num_to_ptr(ptr_to_num(d
) & ~((uintptr_t)mask
<< POINTER_IGNORE_START
));
1211 #define data_untag(d) data_untag_(d, file_line)
/* free a data object: poison the refcount tag, then free the untagged
 * (real) allocation; the _r1 variant additionally asserts refcount==1 */
1212 #define data_free(d) do { refcount_poison_tag(&(d)->refcount_); mem_free_compressed(data_untag(d)); } while (0)
1213 #define data_free_r1(d) do { ajla_assert(refcount_is_one(&(d)->refcount_), (file_line, "freeing data with invalid refcount")); data_free(d); } while (0)
/* one captured stack-trace frame: which module/function it was in */
1220 struct stack_trace_entry
{
1221 const struct module_designator
*module_designator
;
1222 const char *function_name
;
/* a captured stack trace: array of entries (length field not visible
 * in this extract) */
1226 struct stack_trace
{
1227 struct stack_trace_entry
*trace
;
/* exception payload carried by a thunk, including its stack trace */
1231 struct thunk_exception
{
1234 struct stack_trace tr
;
1237 struct thunk_result
{
/*
 * NOTE(review): the following fields belong to a struct (presumably
 * "struct thunk") whose declaration line was lost in extraction; the
 * union arms are selected by the THUNK_TAG_* states named in the
 * comments below.  Verify against upstream before editing.
 */
1243 refcount_t refcount_
;
1244 #ifndef REFCOUNT_TAG
1248 /* THUNK_TAG_FUNCTION_CALL */
1249 /* THUNK_TAG_BLACKHOLE */
1250 /* THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED */
1251 /* THUNK_TAG_BLACKHOLE_DEREFERENCED */
1252 /* THUNK_TAG_RESULT */
1255 /* THUNK_TAG_FUNCTION_CALL */
1256 pointer_t function_reference
;
1257 /* THUNK_TAG_BLACKHOLE */
1258 /* THUNK_TAG_BLACKHOLE_SOME_DEREFERENCED */
1259 /* THUNK_TAG_BLACKHOLE_DEREFERENCED */
1260 struct execution_control
*execution_control
;
1262 struct thunk_result results
[1];
1264 /* THUNK_TAG_MULTI_RET_REFERENCE */
1266 struct thunk
*thunk
;
1268 } multi_ret_reference
;
1269 /* THUNK_TAG_EXCEPTION */
1270 struct thunk_exception exception
;
/*
 * thunk_tag_: read the current tag of a thunk.  Without REFCOUNT_TAG
 * the tag is a separate field (read path not visible here); with it,
 * the tag is packed into the refcount word.  Range-asserted.
 */
1274 static inline tag_t
thunk_tag_(struct thunk
*t
, const char attr_unused
*position
)
1277 #ifndef REFCOUNT_TAG
1280 tag
= refcount_tag_get(&t
->refcount_
);
1282 ajla_assert(tag
>= THUNK_TAG_START
&& tag
< THUNK_TAG_END
, (position
, "invalid thunk tag %u", tag
));
1285 #define thunk_tag(t) thunk_tag_(t, file_line)
/*
 * thunk_tag_volatile_: like thunk_tag_, but the non-REFCOUNT_TAG path
 * reads the tag through a thread_volatile lvalue so a concurrently
 * updated tag is re-read from memory.
 */
1287 static inline tag_t
thunk_tag_volatile_(struct thunk
*t
, const char attr_unused
*position
)
1290 #ifndef REFCOUNT_TAG
1291 tag
= *cast_ptr(thread_volatile tag_t
*, &t
->tag
);
1293 tag
= refcount_tag_get(&t
->refcount_
);
1295 ajla_assert(tag
>= THUNK_TAG_START
&& tag
< THUNK_TAG_END
, (position
, "invalid thunk tag %u", tag
));
1298 #define thunk_tag_volatile(t) thunk_tag_volatile_(t, file_line)
/*
 * thunk_tag_set_: transition a thunk's tag from old_tag to new_tag.
 * Without REFCOUNT_TAG the expected old tag is asserted first; with it
 * the transition is delegated to refcount_tag_set_.
 */
1300 static inline void thunk_tag_set_(struct thunk
*t
, tag_t attr_unused old_tag
, tag_t new_tag
, const char attr_unused
*position
)
1302 #ifndef REFCOUNT_TAG
1303 ajla_assert(t
->tag
== old_tag
, (position
, "thunk_tag_set: tag does not match: %u != %u; new tag %u", t
->tag
, old_tag
, new_tag
))
;
1306 refcount_tag_set_(&t
->refcount_
, old_tag
, new_tag
, position
);
1309 #define thunk_tag_set(t, old_tag, new_tag) thunk_tag_set_(t, old_tag, new_tag, file_line)
/*
 * da_thunk_tag_: read the tag of an object that may be either a data
 * or a thunk, through a void pointer.  Relies on (and asserts) that
 * the tag/refcount fields sit at identical offsets in both structs.
 * Accepts any tag in [DATA_TAG_START, TAG_END).
 */
1311 static inline tag_t
da_thunk_tag_(void *dt
, const char attr_unused
*position
)
1314 #ifndef REFCOUNT_TAG
1315 ajla_assert(offsetof(struct data
, tag
) == offsetof(struct thunk
, tag
), (position
, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
1316 tag
= *cast_ptr(tag_t
*, cast_ptr(char *, dt
) + offsetof(struct data
, tag
));
1318 ajla_assert(offsetof(struct data
, refcount_
) == offsetof(struct thunk
, refcount_
), (position
, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
1319 tag
= refcount_tag_get(cast_ptr(refcount_t
*, cast_ptr(char *, dt
) + offsetof(struct data
, refcount_
)));
1321 ajla_assert(tag
>= DATA_TAG_START
&& tag
< TAG_END
, (position
, "invalid thunk tag %u", tag
));
1324 #define da_thunk_tag(dt) da_thunk_tag_(dt, file_line)
/* thunk tags occupy the upper part of the combined tag range */
1326 #define tag_is_thunk(tag) ((tag) >= THUNK_TAG_START)
/*
 * da_thunk_refcount_: address of the refcount of an object that may be
 * either a data or a thunk; valid because both structs place refcount_
 * at the same offset (asserted).
 */
1328 static inline refcount_t
*da_thunk_refcount_(void *dt
, const char attr_unused
*position
)
1330 ajla_assert(offsetof(struct data
, refcount_
) == offsetof(struct thunk
, refcount_
), (position
, "da_thunk_tag: the data_structure doesn't match the thunk structure"));
1331 return cast_ptr(refcount_t
*, cast_ptr(char *, dt
) + offsetof(struct data
, refcount_
));
1333 #define da_thunk_refcount(dt) da_thunk_refcount_(dt, file_line)
/*
 * thunk_is_finished: a thunk is finished when it holds a RESULT, or
 * when it is a multi-return reference whose master thunk holds one.
 */
1336 * May be called if the thunk is locked or if the thunk is on current frame
1337 * (so that it won't be modified asynchronously)
1339 static inline bool thunk_is_finished(struct thunk
*t
)
1341 tag_t tag
= thunk_tag_volatile(t
);
1342 return tag
== THUNK_TAG_RESULT
||
1343 (tag
== THUNK_TAG_MULTI_RET_REFERENCE
&& thunk_tag_volatile(t
->u
.multi_ret_reference
.thunk
) == THUNK_TAG_RESULT
);
/* thunk_pointer_tag: mark a thunk pointer by setting the ignored top
 * bit, when pointers are tagged at allocation time */
1346 static inline struct thunk
*thunk_pointer_tag(struct thunk
*t
)
1348 #ifdef POINTER_TAG_AT_ALLOC
1349 t
= cast_cpp(struct thunk
*, num_to_ptr(ptr_to_num(t
) | POINTER_IGNORE_TOP
));
/* thunk_untag_: inverse of thunk_pointer_tag; asserts the bit was set */
1354 static inline void *thunk_untag_(struct thunk
*t
, const char attr_unused
*fl
)
1356 #if defined(POINTER_TAG_AT_ALLOC)
1357 ajla_assert((ptr_to_num(t
) & POINTER_IGNORE_TOP
) != 0, (fl
, "thunk_untag_: pointer not tagged: %p", t
));
1358 return num_to_ptr(ptr_to_num(t
) & ~POINTER_IGNORE_TOP
);
1363 #define thunk_untag(t) thunk_untag_(t, file_line)
1364 #define thunk_free(t) do { refcount_poison_tag(&t->refcount_); mem_free_compressed(thunk_untag(t)); } while (0)
/* alignment required for saved data: max of data/thunk alignment, the
 * refcount word, and (with pointer compression) its alignment */
1366 #if defined(POINTER_COMPRESSION)
1367 #define SAVED_DATA_ALIGN maximum(maximum(maximum(align_of(struct data), align_of(struct thunk)), sizeof(refcount_int_t)), pointer_compress_alignment)
1369 #define SAVED_DATA_ALIGN maximum(maximum(align_of(struct data), align_of(struct thunk)), sizeof(refcount_int_t))
1377 typedef unsigned timestamp_t
;
/* call modes stored in frame_struct.mode; CALL_MODE_VALID checks range */
1379 #define CALL_MODE_NORMAL 1
1380 #define CALL_MODE_STRICT 2
1381 #define CALL_MODE_SPARK 3
1382 #define CALL_MODE_VALID(v) ((v) >= 1 && (v) <= 3)
/*
 * frame_struct: header preceding each function frame on the stack.
 * The variables_ flexible array holds the frame's slots; it is
 * deliberately misaligned (see comment below) to flush out alignment
 * bugs.
 */
1384 struct frame_struct
{
1385 struct data
*function
;
1387 timestamp_t timestamp
;
1388 stack_size_t available_slots
;
1389 uchar_efficient_t mode
;
1391 /* deliberately misalign variables to catch alignment errors */
1394 char variables_
[FLEXIBLE_ARRAY
];
/* stack_bottom: anchor at the base of a stack, linking back to its
 * execution control */
1397 struct stack_bottom
{
1398 struct execution_control
*ex
;
1399 stack_size_t useable_slots
;
1403 #define SIZEOF_FRAME_STRUCT partial_sizeof_array(struct frame_struct, variables_, 0)
1404 #define SIZEOF_STACK_BOTTOM round_up(sizeof(struct stack_bottom), max_frame_align)
/* byte distance between a frame_struct and its variables area */
1405 #define frame_offset round_up(offsetof(struct frame_struct, variables_), slot_align)
/*
 * ptr_frame/get_frame: convert between a frame header pointer and the
 * frame_s slot-area pointer, as macros or inline functions depending
 * on whether the compiler inlines reliably (INLINE_WORKS).
 */
1407 #ifndef INLINE_WORKS
1408 #define ptr_frame(fp) (cast_ptr(frame_s *, cast_ptr(const char *, fp) + frame_offset))
1409 #define get_frame(fp) (cast_ptr(struct frame_struct *, cast_ptr(const char *, fp) - frame_offset))
1411 static attr_always_inline frame_s
*ptr_frame(const struct frame_struct
*fp
)
1413 return cast_ptr(frame_s
*, cast_ptr(const char *, fp
) + frame_offset
);
1415 static attr_always_inline
struct frame_struct
*get_frame(const frame_s
*fp
)
1417 return cast_ptr(struct frame_struct
*, cast_ptr(const char *, fp
) - frame_offset
);
/*
 * frame_get_type_of_local: type of the variable in slot pos of the
 * frame's function, tag-validated.
 * NOTE(review): the "return t;" line is not visible in this extract
 * (original line 1427 dropped) - verify against upstream.
 */
1421 static inline const struct type
*frame_get_type_of_local(const frame_s
*fp
, frame_t pos
)
1423 const struct type
*t
;
1424 const struct data
*function
= get_frame(fp
)->function
;
1425 t
= da(function
,function
)->local_variables
[pos
].type
;
1426 TYPE_TAG_VALIDATE(t
->tag
);
/*
 * frame_ip: convert an absolute code pointer into an instruction index
 * relative to the frame's function code; asserts ip is not before the
 * code start.
 */
1430 static inline ip_t
frame_ip(const frame_s
*fp
, const code_t
*ip
)
1432 ajla_assert(ip
>= da(get_frame(fp
)->function
,function
)->code
, (file_line
, "frame_ip: invalid ip pointer: %p, %p", ip
, da(get_frame(fp
)->function
,function
)->code
));
1433 return (ip_t
)(ip
- da(get_frame(fp
)->function
,function
)->code
);
/* frame_up: the caller's frame lies function_frame_size bytes above */
1436 static inline frame_s
* attr_fastcall
frame_up(frame_s
*fp
)
1438 char *next
= cast_ptr(char *, fp
) + function_frame_size(get_frame(fp
)->function
);
1439 return cast_ptr(frame_s
*, next
);
/* frame_is_top: the sentinel top frame has a NULL function pointer */
1442 static inline bool frame_is_top(frame_s
*fp
)
1444 return get_frame(fp
)->function
== NULL
;
/* frame_stack_bottom: walk down past available_slots slots and the
 * stack_bottom header to reach the base of this stack */
1447 static inline struct stack_bottom
*frame_stack_bottom(frame_s
*fp
)
1449 char *bottom
= cast_ptr(char *, get_frame(fp
)) - get_frame(fp
)->available_slots
* slot_size
- SIZEOF_STACK_BOTTOM
;
1450 return cast_ptr(struct stack_bottom
*, bottom
);
/* execution control owning the stack this frame lives on */
1453 static inline struct execution_control
*frame_execution_control(frame_s
*fp
)
1455 return frame_stack_bottom(fp
)->ex
;
1458 static inline void stack_free(struct stack_bottom
*stack
)
1460 mem_free_aligned(stack
);
/*
 * frame_init: initialize a freshly pushed frame for "function".
 * Asserts the frame size is frame-aligned and the mode is valid,
 * bumps the profiling call counter when profiling, records timestamp
 * and mode, scribbles over the variable area (debug builds fill with
 * rand()), and zeroes the bitmap slots.
 * NOTE(review): call_counter is loaded and stored with no visible
 * modification between them - the increment presumably sat on dropped
 * original line 1469.  Verify against upstream before editing.
 */
1463 static inline void frame_init(frame_s
*fp
, struct data
*function
, timestamp_t timestamp
, uchar_efficient_t mode
)
1465 ajla_assert(!(da(function
,function
)->frame_slots
& (frame_align
/ slot_size
- 1)), (file_line
, "frame_init: function size %"PRIuMAX
" is not aligned to %x", (uintmax_t)da(function
,function
)->frame_slots
, (unsigned)(frame_align
/ slot_size
)));
1466 ajla_assert(CALL_MODE_VALID(mode
), (file_line
, "frame_init: invalid mode %u", mode
));
1467 if (unlikely(profiling
)) {
1468 profile_counter_t call_counter
= load_relaxed(&da(function
,function
)->call_counter
);
1470 store_relaxed(&da(function
,function
)->call_counter
, call_counter
);
1472 get_frame(fp
)->timestamp
= timestamp
;
1473 get_frame(fp
)->mode
= mode
;
1475 (void)memset(fp
, rand(), da(function
,function
)->frame_slots
* slot_size
- frame_offset
);
1477 (void)memset(fp
, 0, da(function
,function
)->n_bitmap_slots
* slot_size
);
/*
 * Stack management: allocate a fresh stack for an execution control,
 * grow an existing one, and split a stack between two frames.  All
 * report failure through *mayfail.
 */
1480 frame_s
* attr_fastcall
stack_alloc(struct execution_control
*ex
, struct data
*function
, ajla_error_t
*mayfail
);
1481 frame_s
* attr_fastcall
stack_expand(frame_s
*fp
, struct data
*function
, ajla_error_t
*mayfail
);
1482 frame_s
* attr_fastcall
stack_split(frame_s
*from_fp
, frame_s
*to_fp
, frame_s
**high
, ajla_error_t
*mayfail
);
1484 /*void frame_cleanup(frame_s *fp);*/
/* stack-trace capture/format/print helpers (see stack_trace struct) */
1490 void stack_trace_init(struct stack_trace
*st
);
1491 void stack_trace_free(struct stack_trace
*st
);
1492 bool stack_trace_get_location(struct data
*function
, ip_t ip_rel
, struct stack_trace_entry
*result
);
1493 void stack_trace_capture(struct stack_trace
*st
, frame_s
*fp
, const code_t
*ip
, unsigned max_depth
);
1494 char *stack_trace_string(struct stack_trace
*st
, ajla_error_t
*err
);
1495 void stack_trace_print(struct stack_trace
*st
);
1498 /*********************
1499 * OBJECT ALLOCATION *
1500 *********************/
/* constructors for each data representation; every one reports
 * allocation failure via *mayfail */
1502 struct data
* attr_fastcall
data_alloc_flat_mayfail(type_tag_t type
, const unsigned char *flat
, size_t size
, ajla_error_t
*mayfail argument_position
);
1503 struct data
* attr_fastcall
data_alloc_longint_mayfail(unsigned long bits
, ajla_error_t
*mayfail argument_position
);
1504 struct data
* attr_fastcall
data_alloc_option_mayfail(ajla_error_t
*mayfail argument_position
);
1505 struct data
* attr_fastcall
data_alloc_record_mayfail(const struct record_definition
*def
, ajla_error_t
*mayfail argument_position
);
1506 struct data
* attr_fastcall
data_alloc_array_flat_mayfail(const struct type
*type
, int_default_t n_allocated
, int_default_t n_used
, bool clear
, ajla_error_t
*mayfail argument_position
);
1507 struct data
* attr_fastcall
data_alloc_array_slice_mayfail(struct data
*base
, unsigned char *data
, int_default_t start
, int_default_t len
, ajla_error_t
*mayfail argument_position
);
1508 struct data
* attr_fastcall
data_alloc_array_pointers_mayfail(int_default_t n_allocated
, int_default_t n_used
, ajla_error_t
*mayfail argument_position
);
1509 struct data
* attr_fastcall
data_alloc_array_same_mayfail(array_index_t n_entries
, ajla_error_t
*mayfail argument_position
);
1510 struct data
* attr_fastcall
data_alloc_array_incomplete(struct data
*first
, pointer_t next
, ajla_error_t
*mayfail argument_position
);
1511 struct data
* attr_fastcall
data_alloc_function_reference_mayfail(arg_t n_curried_arguments
, ajla_error_t
*mayfail argument_position
);
/* fill curried-argument slot a of a function reference with a pointer
 * or with flat data of the given type */
1512 void attr_fastcall
data_fill_function_reference(struct data
*function_reference
, arg_t a
, pointer_t ptr
);
1513 void attr_fastcall
data_fill_function_reference_flat(struct data
*function_reference
, arg_t a
, const struct type
*type
, const unsigned char *data
);
1514 struct data
* attr_fastcall
data_alloc_resource_mayfail(size_t size
, void (*close
)(struct data
*), ajla_error_t
*mayfail argument_position
);
/* shared preallocated out-of-memory exception pointer */
1516 extern pointer_t
*out_of_memory_ptr
;
/* exception construction and inspection helpers */
1517 struct thunk
* attr_fastcall
thunk_alloc_exception_error(ajla_error_t err
, char *msg
, frame_s
*fp
, const code_t
*ip argument_position
);
1518 pointer_t attr_fastcall
pointer_error(ajla_error_t err
, frame_s
*fp
, const code_t
*ip argument_position
);
1519 char *thunk_exception_string(struct thunk
*thunk
, ajla_error_t
*err
);
1520 char *thunk_exception_payload(struct thunk
*thunk
, ajla_error_t
*err
);
1521 void thunk_exception_print(struct thunk
*thunk
);
/* allocate the thunks for a (possibly multi-return) function call, or
 * turn them into blackholes owned by an execution control */
1523 bool attr_fastcall
thunk_alloc_function_call(pointer_t function_reference
, arg_t n_return_values
, struct thunk
*result
[], ajla_error_t
*mayfail
);
1524 bool attr_fastcall
thunk_alloc_blackhole(struct execution_control
*ex
, arg_t n_return_values
, struct thunk
*result
[], ajla_error_t
*mayfail
);
1527 /*********************
1528 * EXECUTION CONTROL *
1529 *********************/
1531 #define N_EXECUTION_CONTROL_WAIT 2
/* wait_state values; ARMED/FIRED describe the wakeup handshake */
1533 #define EXECUTION_CONTROL_NORMAL 4
1534 #define EXECUTION_CONTROL_ARMED 3
1535 #define EXECUTION_CONTROL_FIRED 1
/* states of an execution_control_wait entry (original comment): */
1538 * execution_control_wait->thunk == NULL
1540 * execution_control_wait->thunk != NULL, list_is_empty(&execution_control_wait->wait_entry)
1541 * - unused, but we must take thunk lock to clear it
1542 * execution_control_wait->thunk != NULL, !list_is_empty(execution_control_wait->wait_entry.next)
1543 * - wait_entry is linked to an existing execution control
/* one wait registration: links an execution control onto a thunk's
 * wait list, remembering which mutex protects the list */
1546 struct execution_control_wait
{
1547 struct list wait_entry
;
1548 mutex_t
*mutex_to_lock
;
1549 struct execution_control
*execution_control
;
/*
 * execution_control: state of one running computation - its current
 * frame and stack, the thunk being evaluated, parked waiters, a
 * completion callback, and N_EXECUTION_CONTROL_WAIT wait slots.
 */
1552 struct execution_control
{
1554 frame_s
*current_frame
;
1555 struct stack_bottom
*stack
;
1557 struct thunk
*thunk
;
1558 struct list wait_list
;
1560 void (*callback
)(void *, pointer_t
);
1561 void *callback_cookie
;
1563 refcount_t wait_state
;
1564 struct list waiting_list_entry
;
1565 void *waiting_list_head
;
1566 struct execution_control_wait wait
[N_EXECUTION_CONTROL_WAIT
];
1569 bool atomic_interrupted
;
/* lifecycle and scheduling entry points for execution controls */
1572 bool are_there_dereferenced(void);
1573 void execution_control_unlink_and_submit(struct execution_control
*ex
, bool can_allocate_memory
);
1574 bool execution_control_acquire(struct execution_control
*ex
);
1575 void wake_up_wait_list(struct list
*wait_list
, mutex_t
*mutex_to_lock
, bool can_allocate_memory
);
1576 void *thunk_terminate(struct thunk
*t
, arg_t n_return_values
);
1577 struct execution_control
*execution_control_alloc(ajla_error_t
*mayfail
);
1578 void execution_control_free(struct execution_control
*ex
);
1579 void execution_control_terminate(struct execution_control
*ex
, pointer_t ptr
);
1582 /**********************
1583 * POINTER OPERATIONS *
1584 **********************/
1586 void free_cache_entry(struct data
*d
, struct cache_entry
*ce
);
/*
 * pointer_get_refcount_: address of the refcount behind a pointer_t,
 * dispatching on whether the pointer refers to a data or a thunk.
 */
1588 static attr_always_inline refcount_t
*pointer_get_refcount_(pointer_t ptr
)
1590 void *p
= pointer_get_value_strip_tag_(ptr
);
1591 return !pointer_is_thunk(ptr
) ? &((struct data
*)p
)->refcount_
: &((struct thunk
*)p
)->refcount_
;
/* drop one reference, recursively freeing when it reaches zero */
1594 void attr_fastcall
pointer_dereference_(pointer_t ptr argument_position
);
1595 #define pointer_dereference(ptr) pointer_dereference_(ptr pass_file_line)
1597 static inline void data_dereference(struct data
*data
)
1599 pointer_dereference(pointer_data(data
));
/* take a reference on an already-owned pointer; read-only (static)
 * objects are never counted.  NOTE(review): the refcount_inc call on
 * the taken branch is not visible in this extract. */
1602 static inline void pointer_reference_owned(pointer_t ptr
)
1604 refcount_t
*r
= pointer_get_refcount_(ptr
);
1605 if (likely(!refcount_is_read_only(r
)))
1609 static inline void pointer_reference_owned_multiple(pointer_t ptr
, refcount_int_t n
)
1611 refcount_t
*r
= pointer_get_refcount_(ptr
);
1612 if (likely(!refcount_is_read_only(r
)))
/* direct reference helpers for data and thunk objects */
1616 static inline void data_reference(struct data
*d
)
1618 if (likely(!refcount_is_read_only(&d
->refcount_
)))
1619 refcount_inc(&d
->refcount_
);
1622 static inline void thunk_reference(struct thunk
*t
)
1624 if (likely(!refcount_is_read_only(&t
->refcount_
)))
1625 refcount_inc(&t
->refcount_
);
/* nonatomic variants - caller must guarantee exclusive access */
1628 static inline void thunk_reference_nonatomic(struct thunk
*t
)
1630 refcount_inc_nonatomic(&t
->refcount_
);
1633 static inline bool thunk_dereference_nonatomic(struct thunk
*t
)
1635 return refcount_dec_nonatomic(&t
->refcount_
);
1638 static inline bool thunk_refcount_is_one_nonatomic(struct thunk
*t
)
1640 return refcount_is_one_nonatomic(&t
->refcount_
);
1643 static inline refcount_int_t
thunk_refcount_get_nonatomic(struct thunk
*t
)
1645 return refcount_get_nonatomic(&t
->refcount_
);
1648 static inline void thunk_assert_refcount(struct thunk attr_unused
*t
)
1650 ajla_assert_lo(!refcount_is_invalid(&t
->refcount_
), (file_line
, "thunk_assert_refcount: invalid refcount"));
1653 pointer_t attr_fastcall
pointer_reference_(pointer_t
*ptr argument_position
);
1654 #define pointer_reference(ptr) pointer_reference_(ptr pass_file_line)
1655 void pointer_reference_maybe_(frame_s
*fp
, frame_t result
, pointer_t
*ptr
, unsigned char flags argument_position
);
1656 #define pointer_reference_maybe(fp, result, ptr, flags) pointer_reference_maybe_(fp, result, ptr, flags pass_file_line)
/* an object with refcount 1 is exclusively owned and may be mutated */
1659 static inline bool data_is_writable(struct data
*d
)
1661 return refcount_is_one(&d
->refcount_
);
1664 static inline bool thunk_is_writable(struct thunk
*t
)
1666 return refcount_is_one(&t
->refcount_
);
/*
 * Pointer access protocol: with lockless follow, reads go through a
 * thread_volatile lvalue and a data-dependency barrier; otherwise each
 * pointer is guarded by an address lock at DEPTH_POINTER.
 */
1670 #ifdef POINTER_FOLLOW_IS_LOCKLESS
1671 #define pointer_volatile(ptr) ((thread_volatile pointer_t *)(ptr))
1672 #define pointer_lock(ptr) do { } while (0)
1673 #define pointer_unlock(ptr) do { } while (0)
1674 #define pointer_dependency_barrier() barrier_data_dependency()
1676 #define pointer_volatile(ptr) (ptr)
1677 #define pointer_lock(ptr) address_lock(ptr, DEPTH_POINTER)
1678 #define pointer_unlock(ptr) address_unlock(ptr, DEPTH_POINTER)
1679 #define pointer_dependency_barrier() do { } while (0)
/*
 * pointer_locked_read: read *ptr under the pointer-access protocol;
 * a data-dependency barrier is issued after reading a non-thunk value
 * so dependent loads see initialized contents.
 */
1682 static inline pointer_t
pointer_locked_read(pointer_t
*ptr
)
1686 ret
= *pointer_volatile(ptr
);
1687 pointer_validate(ret
);
1688 if (!pointer_is_thunk(ret
))
1689 pointer_dependency_barrier();
1690 pointer_unlock(ptr
);
/* pointer_locked_write: publish val into *ptr under the same protocol */
1694 static inline void pointer_locked_write(pointer_t
*ptr
, pointer_t val
)
1696 pointer_validate(val
);
1698 *pointer_volatile(ptr
) = val
;
1699 pointer_unlock(ptr
);
/* sentinel return values of pointer_follow_thunk_ */
1702 #define POINTER_FOLLOW_THUNK_EXIT NULL
1703 #define POINTER_FOLLOW_THUNK_RETRY SPECIAL_POINTER_1
1704 #define POINTER_FOLLOW_THUNK_EXCEPTION SPECIAL_POINTER_2
1705 #define POINTER_FOLLOW_THUNK_GO SPECIAL_POINTER_3
1707 void copy_from_function_reference_to_frame(frame_s
*new_fp
, struct data
*ref
, arg_t ia
, char can_move
);
/* sentinel ex_wait arguments of pointer_follow_thunk_ */
1708 #define POINTER_FOLLOW_THUNK_NOEVAL NULL
1709 #define POINTER_FOLLOW_THUNK_SPARK SPECIAL_POINTER_1
1710 void * attr_fastcall
pointer_follow_thunk_(pointer_t
*ptr
, void *ex_wait
);
1711 void attr_fastcall
pointer_resolve_result(pointer_t
*ptr
);
1712 void attr_fastcall
pointer_follow_wait(frame_s
*fp
, const code_t
*ip
);
/* follow a thunk without forcing evaluation; dispatches the caller's
 * statement arguments on RETRY / EXCEPTION / EXIT */
1714 #define pointer_follow_thunk_noeval(ptr, retry_code, exception_code, uneval_code)\
1716 void *ex__ = pointer_follow_thunk_(ptr, POINTER_FOLLOW_THUNK_NOEVAL);\
1717 if (ex__ == POINTER_FOLLOW_THUNK_RETRY) { \
1720 } else if (ex__ == POINTER_FOLLOW_THUNK_EXCEPTION) { \
1721 { exception_code; } \
1724 ajla_assert(ex__ == POINTER_FOLLOW_THUNK_EXIT, (file_line, "pointer_follow_thunk_noeval: invalid return value %p", ex__));\
/* wait_idx encodings for pointer_follow: negative = spark/noeval,
 * bit 1 selects the wait slot, bit 2 marks "prepare" (no wait yet) */
1730 #define PF_SPARK (-2)
1731 #define PF_NOEVAL (-1)
1733 #define PF_PREPARE0 (2)
1734 #define PF_PREPARE1 (3)
/* full follow: fast path yields the data in (result); thunks are
 * followed/evaluated and the caller supplies exception/wait code */
1736 #define pointer_follow(ptr, owned, result, wait_idx, fp, ip, xc_code, exception_code)\
1740 p_ = pointer_locked_read(ptr); \
1743 if (likely(!pointer_is_thunk(p_))) { \
1744 (result) = pointer_get_data(p_); \
1748 ex__ = pointer_follow_thunk_(ptr, (wait_idx) >= 0 ? &frame_execution_control(fp)->wait[(wait_idx) & 1] : (wait_idx) == PF_NOEVAL ? POINTER_FOLLOW_THUNK_NOEVAL : POINTER_FOLLOW_THUNK_SPARK);\
1749 if (ex__ == POINTER_FOLLOW_THUNK_RETRY) \
1751 if (ex__ == POINTER_FOLLOW_THUNK_EXCEPTION) { \
1752 struct thunk attr_unused *thunk_; \
1753 thunk_ = pointer_get_thunk(*(ptr)); \
1754 { exception_code; } \
1758 struct execution_control attr_unused *ex_; \
1759 ex_ = cast_cpp(struct execution_control *, ex__);\
1760 if ((wait_idx) >= 0 && !((wait_idx) & 2)) { \
1761 pointer_follow_wait(fp, ip); \
/* fast-fail follow: only the non-thunk fast path runs success_code */
1769 #define pointer_follow_fastfail(ptr, owned, result, success_code) \
1773 p_ = pointer_locked_read(ptr); \
1776 if (likely(!pointer_is_thunk(p_))) { \
1777 (result) = pointer_get_data(p_); \
/* flat/boxed conversion and cloning helpers */
1783 bool attr_fastcall
data_is_nan(type_tag_t type
, const unsigned char *ptr
);
1784 pointer_t
flat_to_data(const struct type
*type
, const unsigned char *flat
);
1785 void attr_fastcall
struct_clone(pointer_t
*ptr
);
/* force full (deep) evaluation of a pointer or a frame slot */
1787 void * attr_fastcall
pointer_deep_eval(pointer_t
*ptr
, frame_s
*fp
, const code_t
*ip
, struct thunk
**thunk
);
1788 void * attr_fastcall
frame_pointer_deep_eval(frame_s
*fp
, const code_t
*ip
, frame_t slot
, struct thunk
**thunk
);
/* export a long integer to a flat buffer of width intx */
1790 bool attr_fastcall
mpint_export(const mpint_t
*m
, unsigned char *ptr
, unsigned intx
, ajla_error_t
*err
);
1791 bool attr_fastcall
mpint_export_unsigned(const mpint_t
*m
, unsigned char *ptr
, unsigned intx
, ajla_error_t
*err
);
/* three-way comparisons; DATA_COMPARE_OOM flags allocation failure */
1793 int data_compare_numbers(type_tag_t tt
, unsigned char *flat1
, pointer_t ptr1
, unsigned char *flat2
, pointer_t ptr2
);
1794 #define DATA_COMPARE_OOM -2
1795 int attr_fastcall
data_compare(pointer_t ptr1
, pointer_t ptr2
, ajla_error_t
*mayfail
);
/*
 * frame_variable_is_flat: slot holds unboxed data - its pointer flag
 * is clear and its declared type is flat.
 */
1798 static inline bool attr_hot_fastcall
frame_variable_is_flat(frame_s
*fp
, frame_t slot
)
1800 return !frame_test_flag(fp
, slot
) && TYPE_IS_FLAT(frame_get_type_of_local(fp
, slot
));
/*
 * frame_free: if the slot's pointer flag was set, clear it, drop the
 * reference and poison the slot; flat slots are left untouched.
 */
1803 static attr_always_inline
void attr_hot_fastcall
frame_free(frame_s
*fp
, frame_t slot
)
1805 if (frame_test_and_clear_flag(fp
, slot
)) {
1806 pointer_dereference(*frame_pointer(fp
, slot
));
1807 /* when the flag is not set, we must not clear the slot */
1808 pointer_poison(frame_pointer(fp
, slot
));
/* frame_free_and_clear: free, then unconditionally empty the slot */
1812 static attr_always_inline
void attr_hot_fastcall
frame_free_and_clear(frame_s
*fp
, frame_t slot
)
1814 frame_free(fp
, slot
);
1815 *frame_pointer(fp
, slot
) = pointer_empty();
/* frame_free_and_set_pointer: set the flag, dropping the previous
 * reference if the flag was already set, then store ptr */
1818 static attr_always_inline
void attr_hot_fastcall
frame_free_and_set_pointer(frame_s
*fp
, frame_t slot
, pointer_t ptr
)
1820 if (frame_test_and_set_flag(fp
, slot
))
1821 pointer_dereference(*frame_pointer(fp
, slot
));
1822 *frame_pointer(fp
, slot
) = ptr
;
/* frame_set_pointer: store ptr into a slot whose flag must be clear */
1825 static attr_always_inline
void frame_set_pointer(frame_s
*fp
, frame_t slot
, pointer_t ptr
)
1827 ajla_assert(!frame_test_flag(fp
, slot
), (file_line
, "frame_set_pointer: flag for slot %"PRIuMAX
" already set", (uintmax_t)slot
));
1828 frame_set_flag(fp
, slot
);
1829 *frame_pointer(fp
, slot
) = ptr
;
/*
 * frame_get_pointer_reference: take the slot's pointer; when deref is
 * requested the slot is emptied and ownership moves to the caller,
 * otherwise a new reference is taken.
 * NOTE(review): several lines of this function (original 1836-1838,
 * 1841, 1843-1845) were dropped in extraction - verify the exact
 * deref/flag logic against upstream before editing.
 */
1832 static attr_always_inline pointer_t
frame_get_pointer_reference(frame_s
*fp
, frame_t slot
, bool deref
)
1834 pointer_t ptr
= *frame_pointer(fp
, slot
);
1835 pointer_validate(ptr
);
1839 *frame_pointer(fp
, slot
) = pointer_empty();
1840 if (!frame_test_and_clear_flag(fp
, slot
))
1842 pointer_reference_owned(ptr
);
1848 /**********************
1849 * DATA SERIALIZATION *
1850 **********************/
/*
 * stack_entry_type: vtable used while saving a graph of objects -
 * resolve an entry to a raw pointer, query its alignment/size and
 * sub-pointers, and fix up pointers after the copy is relocated.
 */
1854 struct stack_entry_type
{
1855 void *(*get_ptr
)(struct stack_entry
*ste
);
1856 bool (*get_properties
)(struct stack_entry
*ste
, size_t *align
, size_t *size
, struct stack_entry
**subptrs
, size_t *subptrs_len
);
1857 void (*fixup_after_copy
)(void *new_ptr
);
1858 void (*fixup_sub_ptr
)(void *loc
, uintptr_t offset
);
/* one work item on the serialization stack: its vtable (payload
 * fields not visible in this extract) */
1862 struct stack_entry
{
1863 const struct stack_entry_type
*t
;
/* serialization entry points */
1869 bool data_save(void *p
, uintptr_t offset
, size_t *align
, size_t *size
, struct stack_entry
**subptrs
, size_t *subptrs_l
);
1870 bool data_save_init_stack(pointer_t
*ptr
, struct stack_entry
**stk
, size_t *stk_l
);
/* trap-address bookkeeping for generated code */
1876 void *data_trap_lookup(void *ptr
);
1877 void data_trap_insert(struct data
*codegen
);