/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
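
/*
 * Pointers to the compiler's optimizer and parser functions, resolved at
 * startup by module_init(). The module registry itself is a tree of
 * struct module keyed by module designator and guarded by modules_mutex.
 */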
shared_var pointer_t *optimizer_fn;
shared_var pointer_t *parser_fn;

static struct tree modules;
rwlock_decl(modules_mutex);
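
/*
 * Loaded code is tracked on two levels: each struct module holds a tree
 * of struct module_function entries, one per function designator within
 * that module.
 */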
struct module_function {
	struct tree_entry entry;
	pointer_t function;
	pointer_t optimizer;
	pointer_t parser;
	struct function_designator fd;
};

struct module {
	struct tree_entry entry;
	struct tree functions;
	struct module_designator md;
};
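
/*
 * Build a thunk that applies the compiler's optimizer (or parser) to this
 * module: the function reference carries four arguments, namely the path
 * index, the module path as a byte array, the "program" flag and the
 * nesting path derived from the function designator.
 */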
static pointer_t module_create_optimizer_reference(struct module *m, struct function_designator *fd, bool optimizer)
{
	ajla_flat_option_t program;
	int_default_t path_idx;
	struct data *filename;
	int_default_t *np;
	struct data *nesting_path;
	struct data *fn_ref;
	struct thunk *result;
	ajla_error_t err;
	size_t i;

	program = m->md.program;
	path_idx = m->md.path_idx;
	if (path_idx < 0 || (uint_default_t)path_idx != m->md.path_idx) {
		return pointer_error(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), NULL, NULL pass_file_line);
	}
	filename = array_from_flat_mem(type_get_fixed(0, true), cast_ptr(const char *, m->md.path), m->md.path_len, &err);
	if (unlikely(!filename)) {
		return pointer_error(err, NULL, NULL pass_file_line);
	}
	np = mem_alloc_array_mayfail(mem_alloc_mayfail, int_default_t *, 0, 0, fd->n_entries, sizeof(int_default_t), &err);
	if (unlikely(!np)) {
		data_dereference(filename);
		return pointer_error(err, NULL, NULL pass_file_line);
	}
	for (i = 0; i < fd->n_entries; i++) {
		int_default_t e = (int_default_t)fd->entries[i];
		if (unlikely(e < 0) || unlikely(e != fd->entries[i])) {
			data_dereference(filename);
			mem_free(np);
			return pointer_error(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), NULL, NULL pass_file_line);
		}
		np[i] = e;
	}
	nesting_path = array_from_flat_mem(type_get_int(INT_DEFAULT_N), cast_ptr(const char *, np), fd->n_entries, &err);
	mem_free(np);
	if (unlikely(!nesting_path)) {
		data_dereference(filename);
		return pointer_error(err, NULL, NULL pass_file_line);
	}
	fn_ref = data_alloc_function_reference_mayfail(4, &err pass_file_line);
	if (unlikely(!fn_ref)) {
		data_dereference(filename);
		data_dereference(nesting_path);
		return pointer_error(err, NULL, NULL pass_file_line);
	}
	da(fn_ref,function_reference)->is_indirect = false;
	da(fn_ref,function_reference)->u.direct = optimizer ? optimizer_fn : parser_fn;

	data_fill_function_reference_flat(fn_ref, 0, type_get_int(INT_DEFAULT_N), cast_ptr(unsigned char *, &path_idx));
	data_fill_function_reference(fn_ref, 1, pointer_data(filename));
	data_fill_function_reference_flat(fn_ref, 2, type_get_flat_option(), cast_ptr(unsigned char *, &program));
	data_fill_function_reference(fn_ref, 3, pointer_data(nesting_path));

	if (unlikely(!thunk_alloc_function_call(pointer_data(fn_ref), 1, &result, &err))) {
		data_dereference(fn_ref);
		return pointer_error(err, NULL, NULL pass_file_line);
	}

	return pointer_thunk(result);
}
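
/*
 * Fill in the function, optimizer and parser pointers of a fresh
 * module_function. Modules read from a file (path_idx > 0) go through
 * optimizer/parser thunks; built-in modules appear to be constructed
 * directly from the built-in pcode.
 */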
static bool module_function_init(struct module *m, struct module_function *mf, ajla_error_t attr_unused *mayfail)
{
	pointer_t ptr, optr, pptr;
	union internal_arg ia[3];
	if (m->md.path_idx > 0) {
		optr = module_create_optimizer_reference(m, &mf->fd, true);
		pptr = module_create_optimizer_reference(m, &mf->fd, false);
		ia[0].ptr = &mf->optimizer;
		/* ... */
		ptr = function_build_internal_thunk(pcode_build_function_from_array, 3, ia);
	} else {
		/* ... */
		optr = function_build_internal_thunk(pcode_array_from_builtin, 2, ia);
		pointer_reference_owned(optr);
		pptr = optr;
		ptr = function_build_internal_thunk(pcode_build_function_from_builtin, 2, ia);
	}
	mf->function = ptr;
	mf->optimizer = optr;
	mf->parser = pptr;
	return true;
}

static int function_test(const struct tree_entry *e, uintptr_t id)
{
	const struct function_designator *fd = cast_cpp(const struct function_designator *, num_to_ptr(id));
	const struct module_function *mf = get_struct(e, struct module_function, entry);
	return function_designator_compare(&mf->fd, fd);
}
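
/*
 * Find a function entry in a module, creating and initializing it when
 * create is set. The caller must hold modules_mutex, for writing if
 * create is true.
 */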
static struct module_function *module_find_function(struct module *m, const struct function_designator *fd, bool create, ajla_error_t *mayfail)
{
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct module_function *mf;

	e = tree_find_for_insert(&m->functions, function_test, ptr_to_num(fd), &ins);
	if (e)
		return get_struct(e, struct module_function, entry);
	if (!create)
		return NULL;
	mf = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module_function, fd.entries, fd->n_entries, mayfail);
	if (unlikely(!mf))
		return NULL;
	mf->fd.n_entries = fd->n_entries;
	memcpy(mf->fd.entries, fd->entries, fd->n_entries * sizeof(fd->entries[0]));
	if (unlikely(!module_function_init(m, mf, mayfail))) {
		mem_free(mf);
		return NULL;
	}
	tree_insert_after_find(&mf->entry, &ins);
	return mf;
}

static int module_test(const struct tree_entry *e, uintptr_t id)
{
	const struct module_designator *md = cast_cpp(const struct module_designator *, num_to_ptr(id));
	const struct module *m = get_struct(e, struct module, entry);
	return module_designator_compare(&m->md, md);
}
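
/*
 * Find a module in the global tree by designator, optionally allocating
 * an empty one (its function tree is initialized, but no functions are
 * loaded yet).
 */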
static struct module *module_find(const struct module_designator *md, bool create, ajla_error_t *mayfail)
{
	struct tree_insert_position ins;
	struct tree_entry *e;
	struct module *m;

	e = tree_find_for_insert(&modules, module_test, ptr_to_num(md), &ins);
	if (likely(e != NULL))
		return get_struct(e, struct module, entry);
	if (!create)
		return NULL;
	m = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module, md.path, md->path_len, mayfail);
	if (unlikely(!m))
		return NULL;
	m->md.path_len = md->path_len;
	m->md.path_idx = md->path_idx;
	m->md.program = md->program;
	memcpy(m->md.path, md->path, md->path_len);

	tree_init(&m->functions);

	tree_insert_after_find(&m->entry, &ins);
	return m;
}
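
/*
 * Public entry point: return the address of the function, optimizer or
 * parser slot for the designated function. The fast path runs under the
 * read lock; on a miss the lock is re-taken for writing and the lookup
 * is repeated with create set.
 *
 * A minimal caller sketch (it mirrors module_init() below; error
 * handling elided):
 *
 *	md = module_designator_alloc(0, cast_ptr(const uint8_t *, name), strlen(name), false, NULL);
 *	fd = function_designator_alloc_single(0, NULL);
 *	fn = module_load_function(md, fd, true, false, NULL);
 *	function_designator_free(fd);
 *	module_designator_free(md);
 */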
pointer_t *module_load_function(const struct module_designator *md, const struct function_designator *fd, bool get_fn, bool optimizer, ajla_error_t *mayfail)
{
	struct module_function *mf;
	struct module *m;
	bool create = false;

	rwlock_lock_read(&modules_mutex);
again:
	m = module_find(md, create, mayfail);
	if (unlikely(!m))
		goto not_found;
	mf = module_find_function(m, fd, create, mayfail);
	if (unlikely(!mf))
		goto not_found;

	if (likely(!create))
		rwlock_unlock_read(&modules_mutex);
	else
		rwlock_unlock_write(&modules_mutex);

	if (get_fn)
		return &mf->function;
	if (optimizer)
		return &mf->optimizer;
	return &mf->parser;

not_found:
	if (unlikely(create)) {
		rwlock_unlock_write(&modules_mutex);
		return NULL;
	}
	rwlock_unlock_read(&modules_mutex);
	rwlock_lock_write(&modules_mutex);
	create = true;
	goto again;
}
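
/*
 * Called at shutdown for every loaded function: flush its profiling
 * counters, decide whether the code generator output forces a fresh
 * save cache (new_cache), and hand all cache entries to the save
 * subsystem before freeing them.
 */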
static void module_finish_function(struct module_function *mf)
{
	if (!pointer_is_thunk(mf->function)) {
		struct data *d = pointer_get_data(mf->function);
		struct tree_entry *e;
		bool new_cache = false;

		profile_collect(da(d,function)->function_name, load_relaxed(&da(d,function)->profiling_counter), load_relaxed(&da(d,function)->call_counter));

		if (profiling_escapes) {
			ip_t ip_rel;
			for (ip_rel = 0; ip_rel < da(d,function)->code_size; ip_rel++) {
				struct stack_trace_entry ste;
				profile_counter_t profiling_counter = load_relaxed(&da(d,function)->escape_data[ip_rel].counter);
				if (likely(!profiling_counter))
					continue;
				if (unlikely(!stack_trace_get_location(d, ip_rel, &ste)))
					continue;
				profile_escape_collect(ste.function_name, profiling_counter, ste.line, da(d,function)->code[ip_rel], load_relaxed(&da(d,function)->escape_data[ip_rel].line));
			}
		}

		if (likely(!pointer_is_thunk(da(d,function)->codegen))) {
			struct data *codegen = pointer_get_data(da(d,function)->codegen);
			if (unlikely(!da(codegen,codegen)->is_saved))
				new_cache = true;
		}

		for (e = tree_first(&da(d,function)->cache); e && !new_cache; e = tree_next(e)) {
			struct cache_entry *ce = get_struct(e, struct cache_entry, entry);
			if (ce->save && da(d,function)->module_designator) {
				new_cache = true;
			}
		}

		save_start_function(d, new_cache);
		while ((e = tree_first(&da(d,function)->cache))) {
			struct cache_entry *ce = get_struct(e, struct cache_entry, entry);
			tree_delete(&ce->entry);
			if (ce->save && da(d,function)->module_designator) {
				/*debug("saving: %s", da(d,function)->function_name);*/
				save_cache_entry(d, ce);
			}
			free_cache_entry(d, ce);
		}
		save_finish_function(d);
	}
}
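
/* Drop the three references held by a module function entry. */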
static void module_free_function(struct module_function *mf)
{
	pointer_dereference(mf->function);
	pointer_dereference(mf->optimizer);
	pointer_dereference(mf->parser);
}
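
/*
 * Designators are allocated with their variable-length payload (path
 * bytes, or entry indices) in a single block, so they can be compared
 * and freed as self-contained keys.
 */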
struct module_designator *module_designator_alloc(unsigned path_idx, const uint8_t *path, size_t path_len, bool program, ajla_error_t *mayfail)
{
	struct module_designator *md = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module_designator, path, path_len, mayfail);
	if (unlikely(!md))
		return NULL;
	md->path_idx = path_idx;
	md->path_len = path_len;
	md->program = program;
	memcpy(md->path, path, path_len);
	return md;
}

void module_designator_free(struct module_designator *md)
{
	mem_free(md);
}

size_t module_designator_length(const struct module_designator *md)
{
	return offsetof(struct module_designator, path[md->path_len]);
}
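
/*
 * Total order on module designators: path index first, then the program
 * flag, then the path length, then the path bytes.
 */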
int module_designator_compare(const struct module_designator *md1, const struct module_designator *md2)
{
	if (md1->path_idx < md2->path_idx)
		return -1;
	if (md1->path_idx > md2->path_idx)
		return 1;
	if (md1->program != md2->program)
		return md1->program - md2->program;
	if (md1->path_len < md2->path_len)
		return -1;
	if (md1->path_len > md2->path_len)
		return 1;
	return memcmp(md1->path, md2->path, md1->path_len);
}
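
/*
 * A function designator is decoded from pcode: p[0] is the number of
 * entries, p[1]..p[n] the entries themselves.
 */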
struct function_designator *function_designator_alloc(const pcode_t *p, ajla_error_t *mayfail)
{
	size_t n_entries = p[0];
	struct function_designator *fd;
	size_t i;
	ajla_assert_lo(p[0] > 0, (file_line, "function_designator_alloc: invalid length %ld", (long)p[0]));
	fd = struct_alloc_array_mayfail(mem_alloc_mayfail, struct function_designator, entries, n_entries, mayfail);
	if (unlikely(!fd))
		return NULL;
	fd->n_entries = n_entries;
	for (i = 0; i < n_entries; i++)
		fd->entries[i] = p[1 + i];
	return fd;
}
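
/*
 * Convenience wrapper for the common single-index case, e.g.
 * function_designator_alloc_single(0, NULL) designates the first
 * top-level function of a module (see module_init() below).
 */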
struct function_designator *function_designator_alloc_single(pcode_t idx, ajla_error_t *mayfail)
{
	pcode_t p[2];
	p[0] = 1;
	p[1] = idx;
	return function_designator_alloc(p, mayfail);
}

void function_designator_free(struct function_designator *fd)
{
	mem_free(fd);
}

size_t function_designator_length(const struct function_designator *fd)
{
	return offsetof(struct function_designator, entries[fd->n_entries]);
}

int function_designator_compare(const struct function_designator *fd1, const struct function_designator *fd2)
{
	size_t i;
	if (fd1->n_entries < fd2->n_entries)
		return -1;
	if (fd1->n_entries > fd2->n_entries)
		return 1;
	/*return memcmp(fd1->entries, fd2->entries, fd1->n_entries * sizeof(fd1->entries[0]));*/
	for (i = 0; i < fd1->n_entries; i++) {
		if (fd1->entries[i] < fd2->entries[i])
			return -1;
		if (fd1->entries[i] > fd2->entries[i])
			return 1;
	}
	return 0;
}
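
/*
 * Bootstrap: resolve the program entry point and the two
 * "compiler/compiler" entry points, function 0 (the optimizer) and
 * function 1 (the parser), which module_create_optimizer_reference()
 * relies on.
 */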
void name(module_init)(void)
{
	const char *n;
	struct module_designator *md;
	struct function_designator *fd;

	rwlock_init(&modules_mutex);

	/* ... */
	md = module_designator_alloc(0, cast_ptr(const uint8_t *, n), strlen(n), false, NULL);
	fd = function_designator_alloc_single(0, NULL);
	start_fn = module_load_function(md, fd, true, true, NULL);
	function_designator_free(fd);
	module_designator_free(md);

	n = "compiler/compiler";
	md = module_designator_alloc(0, cast_ptr(const uint8_t *, n), strlen(n), false, NULL);
	fd = function_designator_alloc_single(0, NULL);
	optimizer_fn = module_load_function(md, fd, true, true, NULL);
	function_designator_free(fd);
	fd = function_designator_alloc_single(1, NULL);
	parser_fn = module_load_function(md, fd, true, true, NULL);
	function_designator_free(fd);
	module_designator_free(md);
}

void name(module_done)(void)
{
	struct tree_entry *e1, *e2;

	for (e1 = tree_first(&modules); e1; e1 = tree_next(e1)) {
		struct module *m = get_struct(e1, struct module, entry);
		/*debug("saving: %.*s", (int)m->md.path_len, m->md.path);*/
		for (e2 = tree_first(&m->functions); e2; e2 = tree_next(e2)) {
			struct module_function *mf = get_struct(e2, struct module_function, entry);
			module_finish_function(mf);
		}
	}

	while (!tree_is_empty(&modules)) {
		struct module *m = get_struct(tree_any(&modules), struct module, entry);
		tree_delete(&m->entry);
		while (!tree_is_empty(&m->functions)) {
			struct module_function *mf = get_struct(tree_any(&m->functions), struct module_function, entry);
			module_free_function(mf);
			tree_delete(&mf->entry);
			mem_free(mf);
		}
		mem_free(m);
	}

	rwlock_done(&modules_mutex);
}