/* vm/data_gc.h -- Factor VM data heap and garbage collector interface.
   (Commit: "Clean up some duplication") */
1 /* Set by the -S command line argument */
2 bool secure_gc;
4 /* set up guard pages to check for under/overflow.
5 size must be a multiple of the page size */
6 F_SEGMENT *alloc_segment(CELL size);
7 void dealloc_segment(F_SEGMENT *block);
9 CELL untagged_object_size(CELL pointer);
10 CELL unaligned_object_size(CELL pointer);
11 CELL object_size(CELL pointer);
12 CELL binary_payload_start(CELL pointer);
13 void begin_scan(void);
14 CELL next_object(void);
16 void primitive_data_room(void);
17 void primitive_size(void);
18 void primitive_begin_scan(void);
19 void primitive_next_object(void);
20 void primitive_end_scan(void);
22 void gc(void);
23 DLLEXPORT void minor_gc(void);
/* generational copying GC divides memory into zones */
typedef struct {
	/* allocation pointer is 'here'; its offset is hardcoded in the
	compiler backends, see core/compiler/.../allot.factor */
	CELL start; /* lowest address of the zone; inclusive bound (see in_zone) */
	CELL here; /* bump-allocation pointer: next object is allocated at this address */
	CELL size; /* zone size in bytes -- presumably end - start; confirm against init_zone */
	CELL end; /* exclusive upper bound (see in_zone) */
} F_ZONE;
typedef struct {
	/* backing memory for the whole data heap */
	F_SEGMENT *segment;

	/* per-generation sizes, as passed to init_data_heap */
	CELL young_size;
	CELL aging_size;
	CELL tenured_size;

	/* number of generations; bounded by MIN_GEN_COUNT/MAX_GEN_COUNT */
	CELL gen_count;

	/* gen_count zones, indexed by NURSERY/AGING/TENURED */
	F_ZONE *generations;
	/* NOTE(review): presumably the copy targets for the copying collector
	-- confirm against the .c file */
	F_ZONE* semispaces;

	/* one marker byte per card recording the offset of the first object
	allocated in that card (see allot_barrier / INVALID_ALLOT_MARKER) */
	CELL *allot_markers;
	CELL *allot_markers_end;

	/* card table for the write barrier (see F_CARD) */
	CELL *cards;
	CELL *cards_end;

	/* deck table; a deck summarizes a run of cards (see F_DECK) */
	CELL *decks;
	CELL *decks_end;
} F_DATA_HEAP;
57 F_DATA_HEAP *data_heap;
59 /* card marking write barrier. a card is a byte storing a mark flag,
60 and the offset (in cells) of the first object in the card.
62 the mark flag is set by the write barrier when an object in the
63 card has a slot written to.
65 the offset of the first object is set by the allocator. */
67 /* if CARD_POINTS_TO_NURSERY is set, CARD_POINTS_TO_AGING must also be set. */
68 #define CARD_POINTS_TO_NURSERY 0x80
69 #define CARD_POINTS_TO_AGING 0x40
70 #define CARD_MARK_MASK (CARD_POINTS_TO_NURSERY | CARD_POINTS_TO_AGING)
71 typedef u8 F_CARD;
73 #define CARD_BITS 8
74 #define CARD_SIZE (1<<CARD_BITS)
75 #define ADDR_CARD_MASK (CARD_SIZE-1)
77 DLLEXPORT CELL cards_offset;
79 #define ADDR_TO_CARD(a) (F_CARD*)(((CELL)(a) >> CARD_BITS) + cards_offset)
80 #define CARD_TO_ADDR(c) (CELL*)(((CELL)(c) - cards_offset)<<CARD_BITS)
82 typedef u8 F_DECK;
84 #define DECK_BITS (CARD_BITS + 10)
85 #define DECK_SIZE (1<<DECK_BITS)
86 #define ADDR_DECK_MASK (DECK_SIZE-1)
88 DLLEXPORT CELL decks_offset;
90 #define ADDR_TO_DECK(a) (F_DECK*)(((CELL)(a) >> DECK_BITS) + decks_offset)
91 #define DECK_TO_ADDR(c) (CELL*)(((CELL)(c) - decks_offset) << DECK_BITS)
93 #define DECK_TO_CARD(d) (F_CARD*)((((CELL)(d) - decks_offset) << (DECK_BITS - CARD_BITS)) + cards_offset)
95 #define ADDR_TO_ALLOT_MARKER(a) (F_CARD*)(((CELL)(a) >> CARD_BITS) + allot_markers_offset)
96 #define CARD_OFFSET(c) (*((c) - (CELL)data_heap->cards + (CELL)data_heap->allot_markers))
98 #define INVALID_ALLOT_MARKER 0xff
100 DLLEXPORT CELL allot_markers_offset;
102 void init_card_decks(void);
104 /* the write barrier must be called any time we are potentially storing a
105 pointer from an older generation to a younger one */
106 INLINE void write_barrier(CELL address)
108 *ADDR_TO_CARD(address) = CARD_MARK_MASK;
109 *ADDR_TO_DECK(address) = CARD_MARK_MASK;
112 #define SLOT(obj,slot) (UNTAG(obj) + (slot) * CELLS)
114 INLINE void set_slot(CELL obj, CELL slot, CELL value)
116 put(SLOT(obj,slot),value);
117 write_barrier(obj);
120 /* we need to remember the first object allocated in the card */
121 INLINE void allot_barrier(CELL address)
123 F_CARD *ptr = ADDR_TO_ALLOT_MARKER(address);
124 if(*ptr == INVALID_ALLOT_MARKER)
125 *ptr = (address & ADDR_CARD_MASK);
128 void clear_cards(CELL from, CELL to);
130 /* the 0th generation is where new objects are allocated. */
131 #define NURSERY 0
132 #define HAVE_NURSERY_P (data_heap->gen_count>1)
133 /* where objects hang around */
134 #define AGING (data_heap->gen_count-2)
135 #define HAVE_AGING_P (data_heap->gen_count>2)
136 /* the oldest generation */
137 #define TENURED (data_heap->gen_count-1)
139 #define MIN_GEN_COUNT 1
140 #define MAX_GEN_COUNT 3
142 /* used during garbage collection only */
143 F_ZONE *newspace;
145 /* new objects are allocated here */
146 DLLEXPORT F_ZONE nursery;
148 INLINE bool in_zone(F_ZONE *z, CELL pointer)
150 return pointer >= z->start && pointer < z->end;
153 CELL init_zone(F_ZONE *z, CELL size, CELL base);
155 void init_data_heap(CELL gens,
156 CELL young_size,
157 CELL aging_size,
158 CELL tenured_size,
159 bool secure_gc_);
/* statistics */
typedef struct {
	CELL collections; /* number of collections of this generation */
	u64 gc_time; /* total time spent collecting -- units not visible here; confirm */
	u64 max_gc_time; /* longest single collection */
	CELL object_count; /* NOTE(review): presumably objects copied/scanned -- confirm */
	u64 bytes_copied; /* total bytes copied out of this generation */
} F_GC_STATS;
170 F_GC_STATS gc_stats[MAX_GEN_COUNT];
171 u64 cards_scanned;
172 u64 decks_scanned;
173 CELL code_heap_scans;
175 /* only meaningful during a GC */
176 bool performing_gc;
177 CELL collecting_gen;
179 /* if true, we collecting AGING space for the second time, so if it is still
180 full, we go on to collect TENURED */
181 bool collecting_aging_again;
183 INLINE bool collecting_accumulation_gen_p(void)
185 return ((HAVE_AGING_P
186 && collecting_gen == AGING
187 && !collecting_aging_again)
188 || collecting_gen == TENURED);
191 /* What generation was being collected when collect_literals() was last
192 called? Until the next call to primitive_add_compiled_block(), future
193 collections of younger generations don't have to touch the code
194 heap. */
195 CELL last_code_heap_scan;
197 /* sometimes we grow the heap */
198 bool growing_data_heap;
199 F_DATA_HEAP *old_data_heap;
201 /* Every object has a regular representation in the runtime, which makes GC
202 much simpler. Every slot of the object until binary_payload_start is a pointer
203 to some other object. */
204 INLINE void do_slots(CELL obj, void (* iter)(CELL *))
206 CELL scan = obj;
207 CELL payload_start = binary_payload_start(obj);
208 CELL end = obj + payload_start;
210 scan += CELLS;
212 while(scan < end)
214 iter((CELL *)scan);
215 scan += CELLS;
219 /* test if the pointer is in generation being collected, or a younger one. */
220 INLINE bool should_copy(CELL untagged)
222 if(in_zone(newspace,untagged))
223 return false;
224 if(collecting_gen == TENURED)
225 return true;
226 else if(HAVE_AGING_P && collecting_gen == AGING)
227 return !in_zone(&data_heap->generations[TENURED],untagged);
228 else if(HAVE_NURSERY_P && collecting_gen == NURSERY)
229 return in_zone(&nursery,untagged);
230 else
232 critical_error("Bug in should_copy",untagged);
233 return false;
237 void copy_handle(CELL *handle);
239 /* in case a generation fills up in the middle of a gc, we jump back
240 up to try collecting the next generation. */
241 jmp_buf gc_jmp;
243 /* A heap walk allows useful things to be done, like finding all
244 references to an object for debugging purposes. */
245 CELL heap_scan_ptr;
247 /* GC is off during heap walking */
248 bool gc_off;
250 void garbage_collection(volatile CELL gen,
251 bool growing_data_heap_,
252 CELL requested_bytes);
254 /* If a runtime function needs to call another function which potentially
255 allocates memory, it must store any local variable references to Factor
256 objects on the root stack */
258 /* GC locals: stores addresses of pointers to objects. The GC updates these
259 pointers, so you can do
261 REGISTER_ROOT(some_local);
263 ... allocate memory ...
265 foo(some_local);
269 UNREGISTER_ROOT(some_local); */
270 F_SEGMENT *gc_locals_region;
271 CELL gc_locals;
273 DEFPUSHPOP(gc_local_,gc_locals)
275 #define REGISTER_ROOT(obj) gc_local_push((CELL)&obj)
/* Pop the most recently registered GC root and sanity-check that it matches
obj. Wrapped in do { } while(0) so the macro behaves as a single statement
in all contexts (e.g. an unbraced if/else). */
#define UNREGISTER_ROOT(obj) \
	do \
	{ \
		if(gc_local_pop() != (CELL)&obj) \
			critical_error("Mismatched REGISTER_ROOT/UNREGISTER_ROOT",0); \
	} \
	while(0)
282 /* Extra roots: stores pointers to objects in the heap. Requires extra work
283 (you have to unregister before accessing the object) but more flexible. */
284 F_SEGMENT *extra_roots_region;
285 CELL extra_roots;
287 DEFPUSHPOP(root_,extra_roots)
289 #define REGISTER_UNTAGGED(obj) root_push(obj ? tag_object(obj) : 0)
290 #define UNREGISTER_UNTAGGED(obj) obj = untag_object(root_pop())
292 INLINE bool in_data_heap_p(CELL ptr)
294 return (ptr >= data_heap->segment->start
295 && ptr <= data_heap->segment->end);
298 /* We ignore strings which point outside the data heap, but we might be given
299 a char* which points inside the data heap, in which case it is a root, for
300 example if we call unbox_char_string() the result is placed in a byte array */
301 INLINE bool root_push_alien(const void *ptr)
303 if(in_data_heap_p((CELL)ptr))
305 F_BYTE_ARRAY *objptr = ((F_BYTE_ARRAY *)ptr) - 1;
306 if(objptr->header == tag_header(BYTE_ARRAY_TYPE))
308 root_push(tag_object(objptr));
309 return true;
313 return false;
316 #define REGISTER_C_STRING(obj) \
317 bool obj##_root = root_push_alien(obj)
318 #define UNREGISTER_C_STRING(obj) \
319 if(obj##_root) obj = alien_offset(root_pop())
321 #define REGISTER_BIGNUM(obj) if(obj) root_push(tag_bignum(obj))
322 #define UNREGISTER_BIGNUM(obj) if(obj) obj = (untag_object(root_pop()))
324 INLINE void *allot_zone(F_ZONE *z, CELL a)
326 CELL h = z->here;
327 z->here = h + align8(a);
328 return (void*)h;
331 /* We leave this many bytes free at the top of the nursery so that inline
332 allocation (which does not call GC because of possible roots in volatile
333 registers) does not run out of memory */
334 #define ALLOT_BUFFER_ZONE 1024
337 * It is up to the caller to fill in the object's fields in a meaningful
338 * fashion!
340 INLINE void* allot_object(CELL type, CELL a)
342 CELL *object;
344 if(HAVE_NURSERY_P && nursery.size - ALLOT_BUFFER_ZONE > a)
346 /* If there is insufficient room, collect the nursery */
347 if(nursery.here + ALLOT_BUFFER_ZONE + a > nursery.end)
348 garbage_collection(NURSERY,false,0);
350 CELL h = nursery.here;
351 nursery.here = h + align8(a);
352 object = (void*)h;
354 /* If the object is bigger than the nursery, allocate it in
355 tenured space */
356 else
358 F_ZONE *tenured = &data_heap->generations[TENURED];
360 /* If tenured space does not have enough room, collect */
361 if(tenured->here + a > tenured->end)
363 gc();
364 tenured = &data_heap->generations[TENURED];
367 /* If it still won't fit, grow the heap */
368 if(tenured->here + a > tenured->end)
370 garbage_collection(TENURED,true,a);
371 tenured = &data_heap->generations[TENURED];
374 object = allot_zone(tenured,a);
376 /* We have to do this */
377 allot_barrier((CELL)object);
379 /* Allows initialization code to store old->new pointers
380 without hitting the write barrier in the common case of
381 a nursery allocation */
382 write_barrier((CELL)object);
385 *object = tag_header(type);
386 return object;
389 void copy_reachable_objects(CELL scan, CELL *end);
391 void primitive_gc(void);
392 void primitive_gc_stats(void);
393 void primitive_gc_reset(void);
394 void primitive_become(void);
396 CELL find_all_words(void);