codegen: add a 'size' argument to ALU_WRITES_FLAGS
[ajla.git] / module.c
blobcdaae4053c5bbe9a4a7022f8e4429d06f43b2116
1 /*
2 * Copyright (C) 2024 Mikulas Patocka
4 * This file is part of Ajla.
6 * Ajla is free software: you can redistribute it and/or modify it under the
7 * terms of the GNU General Public License as published by the Free Software
8 * Foundation, either version 3 of the License, or (at your option) any later
9 * version.
11 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
12 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
13 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along with
16 * Ajla. If not, see <https://www.gnu.org/licenses/>.
19 #include "ajla.h"
21 #ifndef FILE_OMIT
23 #include "args.h"
24 #include "mem_al.h"
25 #include "str.h"
26 #include "tree.h"
27 #include "rwlock.h"
28 #include "builtin.h"
29 #include "funct.h"
30 #include "pcode.h"
31 #include "array.h"
32 #include "profile.h"
33 #include "save.h"
35 #include "module.h"
/* Entry points resolved in module_init(): the builtin "start" function
   and the compiler's optimizer and parser functions. */
pointer_t *start_fn;
shared_var pointer_t *optimizer_fn;
shared_var pointer_t *parser_fn;
/* Tree of loaded modules, keyed by module designator; guarded by
   modules_mutex. */
static struct tree modules;
rwlock_decl(modules_mutex);
44 struct module_function {
45 struct tree_entry entry;
46 pointer_t function;
47 pointer_t optimizer;
48 pointer_t parser;
49 struct function_designator fd;
52 struct module {
53 struct tree_entry entry;
54 struct tree functions;
55 struct module_designator md;
58 static pointer_t module_create_optimizer_reference(struct module *m, struct function_designator *fd, bool optimizer)
60 size_t i;
61 ajla_flat_option_t program;
62 int_default_t path_idx;
63 struct data *filename;
64 int_default_t *np;
65 struct data *nesting_path;
66 struct data *fn_ref;
67 struct thunk *result;
68 ajla_error_t err;
70 program = m->md.program;
72 path_idx = m->md.path_idx;
73 if (path_idx < 0 || (uint_default_t)path_idx != m->md.path_idx) {
74 return pointer_error(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), NULL, NULL pass_file_line);
77 filename = array_from_flat_mem(type_get_fixed(0, true), cast_ptr(const char *, m->md.path), m->md.path_len, &err);
78 if (unlikely(!filename)) {
79 return pointer_error(err, NULL, NULL pass_file_line);
82 np = mem_alloc_array_mayfail(mem_alloc_mayfail, int_default_t *, 0, 0, fd->n_entries, sizeof(int_default_t), &err);
83 if (unlikely(!np)) {
84 data_dereference(filename);
85 return pointer_error(err, NULL, NULL pass_file_line);
87 for (i = 0; i < fd->n_entries; i++) {
88 int_default_t e = (int_default_t)fd->entries[i];
89 if (unlikely(e < 0) || unlikely(e != fd->entries[i])) {
90 data_dereference(filename);
91 mem_free(np);
92 return pointer_error(error_ajla(EC_ASYNC, AJLA_ERROR_SIZE_OVERFLOW), NULL, NULL pass_file_line);
94 np[i] = e;
96 nesting_path = array_from_flat_mem(type_get_int(INT_DEFAULT_N), cast_ptr(const char *, np), fd->n_entries, &err);
97 mem_free(np);
98 if (unlikely(!nesting_path)) {
99 data_dereference(filename);
100 return pointer_error(err, NULL, NULL pass_file_line);
103 fn_ref = data_alloc_function_reference_mayfail(4, &err pass_file_line);
104 if (unlikely(!fn_ref)) {
105 data_dereference(filename);
106 data_dereference(nesting_path);
107 return pointer_error(err, NULL, NULL pass_file_line);
109 da(fn_ref,function_reference)->is_indirect = false;
110 da(fn_ref,function_reference)->u.direct = optimizer ? optimizer_fn : parser_fn;
112 data_fill_function_reference_flat(fn_ref, 0, type_get_int(INT_DEFAULT_N), cast_ptr(unsigned char *, &path_idx));
113 data_fill_function_reference(fn_ref, 1, pointer_data(filename));
114 data_fill_function_reference_flat(fn_ref, 2, type_get_flat_option(), cast_ptr(unsigned char *, &program));
115 data_fill_function_reference(fn_ref, 3, pointer_data(nesting_path));
117 if (unlikely(!thunk_alloc_function_call(pointer_data(fn_ref), 1, &result, &err))) {
118 data_dereference(fn_ref);
119 return pointer_error(err, NULL, NULL pass_file_line);
122 return pointer_thunk(result);
125 static bool module_function_init(struct module *m, struct module_function *mf, ajla_error_t attr_unused *mayfail)
127 pointer_t ptr, optr, pptr;
128 union internal_arg ia[3];
129 if (m->md.path_idx > 0) {
130 optr = module_create_optimizer_reference(m, &mf->fd, true);
131 pptr = module_create_optimizer_reference(m, &mf->fd, false);
132 ia[0].ptr = &mf->optimizer;
133 ia[1].ptr = &m->md;
134 ia[2].ptr = &mf->fd;
135 ptr = function_build_internal_thunk(pcode_build_function_from_array, 3, ia);
136 } else {
137 ia[0].ptr = &m->md;
138 ia[1].ptr = &mf->fd;
139 optr = function_build_internal_thunk(pcode_array_from_builtin, 2, ia);
140 pointer_reference_owned(optr);
141 pptr = optr;
142 ptr = function_build_internal_thunk(pcode_build_function_from_builtin, 2, ia);
144 mf->function = ptr;
145 mf->optimizer = optr;
146 mf->parser = pptr;
147 return true;
150 static int function_test(const struct tree_entry *e, uintptr_t id)
152 const struct function_designator *fd = cast_cpp(const struct function_designator *, num_to_ptr(id));
153 const struct module_function *mf = get_struct(e, struct module_function, entry);
154 return function_designator_compare(&mf->fd, fd);
157 static struct module_function *module_find_function(struct module *m, const struct function_designator *fd, bool create, ajla_error_t *mayfail)
159 struct tree_insert_position ins;
160 struct tree_entry *e;
161 struct module_function *mf;
163 e = tree_find_for_insert(&m->functions, function_test, ptr_to_num(fd), &ins);
164 if (e)
165 return get_struct(e, struct module_function, entry);
167 if (!create)
168 return NULL;
170 mf = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module_function, fd.entries, fd->n_entries, mayfail);
171 if (unlikely(!mf))
172 return NULL;
174 mf->fd.n_entries = fd->n_entries;
175 memcpy(mf->fd.entries, fd->entries, fd->n_entries * sizeof(fd->entries[0]));
177 if (unlikely(!module_function_init(m, mf, mayfail))) {
178 mem_free(mf);
179 return NULL;
182 tree_insert_after_find(&mf->entry, &ins);
184 return mf;
187 static int module_test(const struct tree_entry *e, uintptr_t id)
189 const struct module_designator *md = cast_cpp(const struct module_designator *, num_to_ptr(id));
190 const struct module *m = get_struct(e, struct module, entry);
191 return module_designator_compare(&m->md, md);
194 static struct module *module_find(const struct module_designator *md, bool create, ajla_error_t *mayfail)
196 struct tree_insert_position ins;
197 struct tree_entry *e;
198 struct module *m;
200 e = tree_find_for_insert(&modules, module_test, ptr_to_num(md), &ins);
201 if (likely(e != NULL))
202 return get_struct(e, struct module, entry);
204 if (!create)
205 return NULL;
207 m = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module, md.path, md->path_len, mayfail);
208 if (unlikely(!m))
209 return NULL;
211 m->md.path_len = md->path_len;
212 m->md.path_idx = md->path_idx;
213 m->md.program = md->program;
214 memcpy(m->md.path, md->path, md->path_len);
216 tree_init(&m->functions);
218 tree_insert_after_find(&m->entry, &ins);
220 return m;
223 pointer_t *module_load_function(const struct module_designator *md, const struct function_designator *fd, bool get_fn, bool optimizer, ajla_error_t *mayfail)
225 struct module *m;
226 struct module_function *mf;
227 bool create = false;
229 rwlock_lock_read(&modules_mutex);
230 retry:
231 m = module_find(md, create, mayfail);
232 if (!m)
233 goto lock_for_write;
235 mf = module_find_function(m, fd, create, mayfail);
236 if (!mf)
237 goto lock_for_write;
239 if (!create)
240 rwlock_unlock_read(&modules_mutex);
241 else
242 rwlock_unlock_write(&modules_mutex);
244 if (get_fn)
245 return &mf->function;
246 else if (optimizer)
247 return &mf->optimizer;
248 else
249 return &mf->parser;
251 lock_for_write:
252 if (unlikely(create)) {
253 rwlock_unlock_write(&modules_mutex);
254 return NULL;
256 create = true;
257 rwlock_unlock_read(&modules_mutex);
258 rwlock_lock_write(&modules_mutex);
259 goto retry;
263 static void module_finish_function(struct module_function *mf)
265 if (!pointer_is_thunk(mf->function)) {
266 struct data *d = pointer_get_data(mf->function);
267 struct tree_entry *e;
268 bool new_cache;
269 if (profiling) {
270 profile_collect(da(d,function)->function_name, load_relaxed(&da(d,function)->profiling_counter), load_relaxed(&da(d,function)->call_counter));
272 if (profiling_escapes) {
273 ip_t ip_rel;
274 for (ip_rel = 0; ip_rel < da(d,function)->code_size; ip_rel++) {
275 struct stack_trace_entry ste;
276 profile_counter_t profiling_counter = load_relaxed(&da(d,function)->escape_data[ip_rel].counter);
277 if (likely(!profiling_counter))
278 continue;
279 if (unlikely(!stack_trace_get_location(d, ip_rel, &ste)))
280 continue;
281 profile_escape_collect(ste.function_name, profiling_counter, ste.line, da(d,function)->code[ip_rel], load_relaxed(&da(d,function)->escape_data[ip_rel].line));
284 new_cache = false;
285 #ifdef HAVE_CODEGEN
286 if (likely(!pointer_is_thunk(da(d,function)->codegen))) {
287 struct data *codegen = pointer_get_data(da(d,function)->codegen);
288 if (unlikely(!da(codegen,codegen)->is_saved))
289 new_cache = true;
291 #endif
292 for (e = tree_first(&da(d,function)->cache); e && !new_cache; e = tree_next(e)) {
293 struct cache_entry *ce = get_struct(e, struct cache_entry, entry);
294 if (ce->save && da(d,function)->module_designator) {
295 new_cache = true;
296 break;
299 save_start_function(d, new_cache);
300 while ((e = tree_first(&da(d,function)->cache))) {
301 struct cache_entry *ce = get_struct(e, struct cache_entry, entry);
302 tree_delete(&ce->entry);
303 if (ce->save && da(d,function)->module_designator) {
304 /*debug("saving: %s", da(d,function)->function_name);*/
305 save_cache_entry(d, ce);
307 free_cache_entry(d, ce);
309 save_finish_function(d);
313 static void module_free_function(struct module_function *mf)
315 pointer_dereference(mf->function);
316 pointer_dereference(mf->optimizer);
317 pointer_dereference(mf->parser);
321 struct module_designator *module_designator_alloc(unsigned path_idx, const uint8_t *path, size_t path_len, bool program, ajla_error_t *mayfail)
323 struct module_designator *md = struct_alloc_array_mayfail(mem_alloc_mayfail, struct module_designator, path, path_len, mayfail);
324 if (unlikely(!md))
325 return NULL;
326 md->path_idx = path_idx;
327 md->path_len = path_len;
328 md->program = program;
329 memcpy(md->path, path, path_len);
330 return md;
/* Free a designator allocated by module_designator_alloc(). */
void module_designator_free(struct module_designator *md)
{
	mem_free(md);
}
338 size_t module_designator_length(const struct module_designator *md)
340 return offsetof(struct module_designator, path[md->path_len]);
343 int module_designator_compare(const struct module_designator *md1, const struct module_designator *md2)
345 if (md1->path_idx < md2->path_idx)
346 return -1;
347 if (md1->path_idx > md2->path_idx)
348 return 1;
349 if (md1->program != md2->program)
350 return md1->program - md2->program;
351 if (md1->path_len < md2->path_len)
352 return -1;
353 if (md1->path_len > md2->path_len)
354 return 1;
355 return memcmp(md1->path, md2->path, md1->path_len);
358 struct function_designator *function_designator_alloc(const pcode_t *p, ajla_error_t *mayfail)
360 size_t i;
361 size_t n_entries = p[0];
362 struct function_designator *fd;
363 ajla_assert_lo(p[0] > 0, (file_line, "function_designator_alloc: invalid lenfth %ld", (long)p[0]));
364 fd = struct_alloc_array_mayfail(mem_alloc_mayfail, struct function_designator, entries, n_entries, mayfail);
365 if (unlikely(!fd))
366 return NULL;
367 fd->n_entries = n_entries;
368 for (i = 0; i < n_entries; i++)
369 fd->entries[i] = p[1 + i];
370 return fd;
373 struct function_designator *function_designator_alloc_single(pcode_t idx, ajla_error_t *mayfail)
375 pcode_t p[2];
376 p[0] = 1;
377 p[1] = idx;
378 return function_designator_alloc(p, mayfail);
/* Free a designator allocated by function_designator_alloc(). */
void function_designator_free(struct function_designator *fd)
{
	mem_free(fd);
}
386 size_t function_designator_length(const struct function_designator *fd)
388 return offsetof(struct function_designator, entries[fd->n_entries]);
391 int function_designator_compare(const struct function_designator *fd1, const struct function_designator *fd2)
393 if (fd1->n_entries < fd2->n_entries)
394 return -1;
395 if (fd1->n_entries > fd2->n_entries)
396 return 1;
397 /*return memcmp(fd1->entries, fd2->entries, fd1->n_entries * sizeof(fd1->entries[0]));*/
399 size_t i;
400 for (i = 0; i < fd1->n_entries; i++) {
401 if (fd1->entries[i] < fd2->entries[i])
402 return -1;
403 if (fd1->entries[i] > fd2->entries[i])
404 return 1;
406 return 0;
411 void name(module_init)(void)
413 const char *n;
414 struct module_designator *md;
415 struct function_designator *fd;
417 tree_init(&modules);
418 rwlock_init(&modules_mutex);
421 n = "start";
422 md = module_designator_alloc(0, cast_ptr(const uint8_t *, n), strlen(n), false, NULL);
423 fd = function_designator_alloc_single(0, NULL);
424 start_fn = module_load_function(md, fd, true, true, NULL);
425 function_designator_free(fd);
426 module_designator_free(md);
428 n = "compiler/compiler";
429 md = module_designator_alloc(0, cast_ptr(const uint8_t *, n), strlen(n), false, NULL);
430 fd = function_designator_alloc_single(0, NULL);
431 optimizer_fn = module_load_function(md, fd, true, true, NULL);
432 function_designator_free(fd);
433 fd = function_designator_alloc_single(1, NULL);
434 parser_fn = module_load_function(md, fd, true, true, NULL);
435 function_designator_free(fd);
436 module_designator_free(md);
439 void name(module_done)(void)
441 struct tree_entry *e1, *e2;
442 save_prepare();
443 for (e1 = tree_first(&modules); e1; e1 = tree_next(e1)) {
444 struct module *m = get_struct(e1, struct module, entry);
445 /*debug("saving: %.*s", (int)m->md.path_len, m->md.path);*/
446 for (e2 = tree_first(&m->functions); e2; e2 = tree_next(e2)) {
447 struct module_function *mf = get_struct(e2, struct module_function, entry);
448 module_finish_function(mf);
451 while (!tree_is_empty(&modules)) {
452 struct module *m = get_struct(tree_any(&modules), struct module, entry);
453 tree_delete(&m->entry);
454 while (!tree_is_empty(&m->functions)) {
455 struct module_function *mf = get_struct(tree_any(&m->functions), struct module_function, entry);
456 module_free_function(mf);
457 tree_delete(&mf->entry);
458 mem_free(mf);
460 mem_free(m);
462 rwlock_done(&modules_mutex);
465 #endif