/* vm/data_gc.h — Factor VM garbage collector interface.
   Origin: [factor/jcg.git], blob 06beb7ea33e3c323629411c38a116c79e4b53007.
   (Unrelated scrape residue: "remove math.blas.syntax and merge parsing
   words into math.blas.vectors/matrices") */
1 void gc(void);
2 DLLEXPORT void minor_gc(void);
4 /* used during garbage collection only */
6 F_ZONE *newspace;
7 bool performing_gc;
8 CELL collecting_gen;
10 /* if true, we collecting AGING space for the second time, so if it is still
11 full, we go on to collect TENURED */
12 bool collecting_aging_again;
14 /* in case a generation fills up in the middle of a gc, we jump back
15 up to try collecting the next generation. */
16 jmp_buf gc_jmp;
18 /* statistics */
19 typedef struct {
20 CELL collections;
21 u64 gc_time;
22 u64 max_gc_time;
23 CELL object_count;
24 u64 bytes_copied;
25 } F_GC_STATS;
27 F_GC_STATS gc_stats[MAX_GEN_COUNT];
28 u64 cards_scanned;
29 u64 decks_scanned;
30 CELL code_heap_scans;
32 /* What generation was being collected when copy_code_heap_roots() was last
33 called? Until the next call to add_compiled_block(), future
34 collections of younger generations don't have to touch the code
35 heap. */
36 CELL last_code_heap_scan;
38 /* sometimes we grow the heap */
39 bool growing_data_heap;
40 F_DATA_HEAP *old_data_heap;
42 INLINE bool collecting_accumulation_gen_p(void)
44 return ((HAVE_AGING_P
45 && collecting_gen == AGING
46 && !collecting_aging_again)
47 || collecting_gen == TENURED);
50 /* test if the pointer is in generation being collected, or a younger one. */
51 INLINE bool should_copy(CELL untagged)
53 if(in_zone(newspace,untagged))
54 return false;
55 if(collecting_gen == TENURED)
56 return true;
57 else if(HAVE_AGING_P && collecting_gen == AGING)
58 return !in_zone(&data_heap->generations[TENURED],untagged);
59 else if(HAVE_NURSERY_P && collecting_gen == NURSERY)
60 return in_zone(&nursery,untagged);
61 else
63 critical_error("Bug in should_copy",untagged);
64 return false;
68 void copy_handle(CELL *handle);
70 void garbage_collection(volatile CELL gen,
71 bool growing_data_heap_,
72 CELL requested_bytes);
74 /* We leave this many bytes free at the top of the nursery so that inline
75 allocation (which does not call GC because of possible roots in volatile
76 registers) does not run out of memory */
77 #define ALLOT_BUFFER_ZONE 1024
80 * It is up to the caller to fill in the object's fields in a meaningful
81 * fashion!
83 INLINE void *allot_object(CELL type, CELL a)
85 CELL *object;
87 if(HAVE_NURSERY_P && nursery.size - ALLOT_BUFFER_ZONE > a)
89 /* If there is insufficient room, collect the nursery */
90 if(nursery.here + ALLOT_BUFFER_ZONE + a > nursery.end)
91 garbage_collection(NURSERY,false,0);
93 CELL h = nursery.here;
94 nursery.here = h + align8(a);
95 object = (void*)h;
97 /* If the object is bigger than the nursery, allocate it in
98 tenured space */
99 else
101 F_ZONE *tenured = &data_heap->generations[TENURED];
103 /* If tenured space does not have enough room, collect */
104 if(tenured->here + a > tenured->end)
106 gc();
107 tenured = &data_heap->generations[TENURED];
110 /* If it still won't fit, grow the heap */
111 if(tenured->here + a > tenured->end)
113 garbage_collection(TENURED,true,a);
114 tenured = &data_heap->generations[TENURED];
117 object = allot_zone(tenured,a);
119 /* We have to do this */
120 allot_barrier((CELL)object);
122 /* Allows initialization code to store old->new pointers
123 without hitting the write barrier in the common case of
124 a nursery allocation */
125 write_barrier((CELL)object);
128 *object = tag_header(type);
129 return object;
132 void copy_reachable_objects(CELL scan, CELL *end);
134 void primitive_gc(void);
135 void primitive_gc_stats(void);
136 void clear_gc_stats(void);
137 void primitive_clear_gc_stats(void);
138 void primitive_become(void);