/*
 * Copyright (C) 2024 Mikulas Patocka
 *
 * This file is part of Ajla.
 *
 * Ajla is free software: you can redistribute it and/or modify it under the
 * terms of the GNU General Public License as published by the Free Software
 * Foundation, either version 3 of the License, or (at your option) any later
 * version.
 *
 * Ajla is distributed in the hope that it will be useful, but WITHOUT ANY
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
 * A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * Ajla. If not, see <https://www.gnu.org/licenses/>.
 */
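
/*
 * Helper for the iterators below: apply the user callback to a single
 * pointer-sized array element.  If the pointer refers to evaluated flat
 * data, the callback receives the flat bytes and their type; otherwise it
 * receives the pointer itself, so the caller can decide how to handle a
 * thunk or a non-flat element.
 */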
static int_default_t callback_pointer(pointer_t *p, int_default_t (*callback)(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t *ptr, void *context), void *context)
{
	pointer_t ptr = pointer_locked_read(p);
	if (!pointer_is_thunk(ptr) && da_tag(pointer_get_data(ptr)) == DATA_TAG_flat) {
		struct data *d = pointer_get_data(ptr);
		return callback(da_flat(d), type_get_from_tag(da(d,flat)->data_type), 1, NULL, context);
	}
	return callback(NULL, type_get_unknown(), 1, p, context);
}
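
/*
 * Iterate over the elements of an array starting at *idx.  For a run of
 * flat elements the callback receives the flat data, the element type and
 * the element count; for an element available only as a pointer it receives
 * flat == NULL, n_elements == 1 and the pointer; when an unevaluated thunk
 * is reached it receives flat == NULL, n_elements == 0 and a pointer to the
 * thunk.  The callback returns the number of elements it processed;
 * returning less than n_elements (or zero for a pointer or thunk) stops the
 * iteration and makes this function return false.  On the elements that
 * were processed, *idx is advanced accordingly.
 */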
bool attr_fastcall array_btree_iterate(pointer_t *array_ptr, array_index_t *idx, int_default_t (*callback)(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t *ptr, void *context), void *context)
{
	struct data *array;

	if (pointer_is_thunk(*array_ptr)) {
		return callback(NULL, type_get_unknown(), 0, array_ptr, context) != 0;
	}

	array = pointer_get_data(*array_ptr);

	switch (da_tag(array)) {
		case DATA_TAG_array_flat: {
			unsigned char *flat = da_array_flat(array);
			int_default_t n_elements = da(array,array_flat)->n_used_entries;

			if (unlikely(index_ge_int(*idx, n_elements)))
				break;

			n_elements -= index_to_int(*idx);
			flat += da(array,array_flat)->type->size * index_to_int(*idx);

			if (likely(n_elements != 0)) {
				int_default_t processed = callback(flat, da(array,array_flat)->type, n_elements, NULL, context);
				index_add_int(idx, processed);
				if (processed < n_elements)
					return false;
			}
			break;
		}
		case DATA_TAG_array_slice: {
			unsigned char *flat = da(array,array_slice)->flat_data_minus_data_array_offset + data_array_offset;
			int_default_t n_elements = da(array,array_slice)->n_entries;

			if (unlikely(index_ge_int(*idx, n_elements)))
				break;

			n_elements -= index_to_int(*idx);
			flat += da(array,array_slice)->type->size * index_to_int(*idx);

			if (likely(n_elements != 0)) {
				int_default_t processed = callback(flat, da(array,array_slice)->type, n_elements, NULL, context);
				index_add_int(idx, processed);
				if (processed < n_elements)
					return false;
			}
			break;
		}
		case DATA_TAG_array_pointers: {
			pointer_t *ptr = da(array,array_pointers)->pointer;
			int_default_t n_elements = da(array,array_pointers)->n_used_entries;
			int_default_t i;

			if (unlikely(index_ge_int(*idx, n_elements)))
				break;

			n_elements -= index_to_int(*idx);
			ptr += index_to_int(*idx);

			for (i = 0; i < n_elements; i++) {
				if (unlikely(!callback_pointer(&ptr[i], callback, context)))
					return false;
				index_add_int(idx, 1);
			}
			break;
		}
		case DATA_TAG_array_same: {
			pointer_t *ptr;

			ptr = &da(array,array_same)->pointer;
			while (!index_ge_index(*idx, da(array,array_same)->n_entries)) {
				if (unlikely(!callback_pointer(ptr, callback, context))) {
					return false;
				}
				index_add_int(idx, 1);
			}
			break;
		}
		case DATA_TAG_array_btree: {
			btree_entries_t bt_pos, bt_pos_end;

			binary_search(btree_entries_t, da(array,array_btree)->n_used_btree_entries, bt_pos, false, index_ge_index(*idx, da(array,array_btree)->btree[bt_pos].end_index), break);

			bt_pos_end = da(array,array_btree)->n_used_btree_entries;
			for (; bt_pos < bt_pos_end; bt_pos++) {
				struct btree_level *bt_level = &da(array,array_btree)->btree[bt_pos];
				pointer_t *node;
				bool result;

				node = &bt_level->node;

				da_array_assert_son(array, pointer_get_data(*node));

				if (bt_pos != 0)
					index_sub(idx, bt_level[-1].end_index);
				result = array_btree_iterate(node, idx, callback, context);
				if (bt_pos != 0)
					index_add(idx, bt_level[-1].end_index);

				if (!result)
					return false;
			}
			break;
		}
		case DATA_TAG_array_incomplete: {
			array_index_t first_len, total_first_len;
			bool result;
			pointer_t ptr;
			struct data *new_array;

			index_from_int(&total_first_len, 0);

again:
			first_len = array_len(pointer_get_data(da(array,array_incomplete)->first));
			if (!index_ge_index(*idx, first_len)) {
				result = array_btree_iterate(&da(array,array_incomplete)->first, idx, callback, context);
				if (!result) {
					index_free(&first_len);
					index_add(idx, total_first_len);
					index_free(&total_first_len);
					return false;
				}
			}
			index_add(&total_first_len, first_len);
			index_sub(idx, first_len);
			index_free(&first_len);

			ptr = pointer_locked_read(&da(array,array_incomplete)->next);
			if (pointer_is_thunk(ptr)) {
				result = callback(NULL, type_get_unknown(), 0, &da(array,array_incomplete)->next, context) != 0;
				index_add(idx, total_first_len);
				index_free(&total_first_len);
				return result;
			}

			new_array = pointer_get_data(ptr);

			if (da_tag(new_array) != DATA_TAG_array_incomplete) {
				result = array_btree_iterate(&da(array,array_incomplete)->next, idx, callback, context);
				index_add(idx, total_first_len);
				index_free(&total_first_len);
				return result;
			}

			array = new_array;
			goto again;
		}
		default:
			internal(file_line, "array_btree_iterate: invalid array tag %u", da_tag(array));
	}
	return true;
}
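
/*
 * Same as array_btree_iterate(), but for an array stored in a stack frame
 * slot: a flat on-stack array is handed to the callback directly, anything
 * else is forwarded to array_btree_iterate() on the slot's pointer.
 */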
bool attr_fastcall array_onstack_iterate(frame_s *fp, frame_t slot, array_index_t *idx, int_default_t (*callback)(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t *ptr, void *context), void *context)
{
	pointer_t *array_ptr;

	if (frame_variable_is_flat(fp, slot)) {
		const struct type *type = frame_get_type_of_local(fp, slot);
		const struct flat_array_definition *fa = get_struct(type, const struct flat_array_definition, type);
		int_default_t n_elements = fa->n_elements;
		unsigned char *flat = frame_slot(fp, slot, unsigned char);

		if (unlikely(index_ge_int(*idx, n_elements)))
			return true;

		n_elements -= index_to_int(*idx);
		flat += fa->base->size * index_to_int(*idx);

		if (likely(n_elements != 0)) {
			int_default_t processed = callback(flat, fa->base, n_elements, NULL, context);
			index_add_int(idx, processed);
			if (processed < n_elements)
				return false;
		}
		return true;
	}

	array_ptr = frame_pointer(fp, slot);

	return array_btree_iterate(array_ptr, idx, callback, context);
}
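
/*
 * Conversion of an array to a contiguous byte buffer: the context below
 * accumulates the output string via the str_* helpers.
 */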
struct array_to_bytes_context {
	char *str;
	size_t str_l;
};

static int_default_t array_to_bytes_callback(unsigned char *flat, const struct type *type, int_default_t n_elements, pointer_t attr_unused *ptr, void *ctx_)
{
	struct array_to_bytes_context *ctx = cast_ptr(struct array_to_bytes_context *, ctx_);
	if (likely(flat != NULL)) {
		str_add_bytes(&ctx->str, &ctx->str_l, (char *)flat, n_elements * type->size);
		return n_elements;
	}
	if (pointer_is_thunk(*ptr))
		internal(file_line, "array_to_bytes_callback: thunk encountered, tag %d", thunk_tag(pointer_get_thunk(*ptr)));
	internal(file_line, "array_to_bytes_callback: pointer encountered, tag: %d", da_tag(pointer_get_data(*ptr)));
	return 0;
}
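
/*
 * Convert a fully evaluated flat array to a newly allocated byte buffer.
 * The whole array must be flat; hitting a thunk or a boxed element is an
 * internal error (see array_to_bytes_callback above).
 */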
void attr_fastcall array_to_bytes(pointer_t *array_ptr, char **str, size_t *str_l)
{
	array_index_t idx;
	struct array_to_bytes_context ctx;

	index_from_int(&idx, 0);
	str_init(&ctx.str, &ctx.str_l);

	if (!array_btree_iterate(array_ptr, &idx, array_to_bytes_callback, &ctx))
		internal(file_line, "array_to_bytes: array_btree_iterate failed");

	index_free(&idx);
	str_finish(&ctx.str, &ctx.str_l);

	*str = ctx.str;
	*str_l = ctx.str_l;
}
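
/*
 * As array_to_bytes(), but for an array in a stack frame slot; a flat
 * on-stack array is copied directly, with a terminating zero byte appended.
 */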
void attr_fastcall array_onstack_to_bytes(frame_s *fp, frame_t slot, char **str, size_t *str_l)
{
	if (frame_variable_is_flat(fp, slot)) {
		const struct type *type = frame_get_type_of_local(fp, slot);
		const struct flat_array_definition *fa = get_struct(type, const struct flat_array_definition, type);
		int_default_t n_elements = fa->n_elements;
		unsigned char *flat = frame_slot(fp, slot, unsigned char);
		size_t size = fa->base->size * n_elements;
		unsigned char *p;

		p = mem_alloc(unsigned char *, size + 1);
		*cast_ptr(char *, mempcpy(p, flat, size)) = 0;

		*str = cast_ptr(char *, p);
		*str_l = size;
		return;
	}

	array_to_bytes(frame_pointer(fp, slot), str, str_l);
}
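
/*
 * Usage sketch (illustrative only, not part of the original file): counting
 * the elements of an array with array_btree_iterate().  The names
 * count_callback and count_elements are hypothetical; the sketch assumes
 * only the identifiers already used above.  A real caller would also have
 * to evaluate any thunk that stops the iteration.
 *
 *	static int_default_t count_callback(unsigned char attr_unused *flat, const struct type attr_unused *type, int_default_t n_elements, pointer_t attr_unused *ptr, void *context)
 *	{
 *		*cast_ptr(int_default_t *, context) += n_elements;
 *		return n_elements;	(reporting everything as processed keeps
 *					 the iteration going; the thunk case
 *					 passes n_elements == 0, so returning
 *					 it stops the iteration)
 *	}
 *
 *	static int_default_t count_elements(pointer_t *array_ptr)
 *	{
 *		array_index_t idx;
 *		int_default_t total = 0;
 *		index_from_int(&idx, 0);
 *		if (!array_btree_iterate(array_ptr, &idx, count_callback, &total))
 *			total = -1;	(an unevaluated thunk was encountered)
 *		index_free(&idx);
 *		return total;
 *	}
 */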