Clean up some duplication
[factor/jcg.git] / vm / code_gc.c
blob 63eb724eb932216d77b7d0d89c6d9b4432f2aacc
#include "master.h"
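
/* As used in this file, an F_BLOCK header carries: status (B_FREE,
B_ALLOCATED or B_MARKED), size (the block's total size in bytes, header
included), next_free (the free-list link) and forwarding (the block's
destination during compaction). The actual definitions presumably live in
the headers pulled in by master.h. */
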
/* This malloc-style heap code is reasonably generic. Maybe in the future, it
will be used for the data heap too, if we ever get incremental
mark/sweep/compact GC. */
void new_heap(F_HEAP *heap, CELL size)
{
    heap->segment = alloc_segment(align_page(size));
    if(!heap->segment)
        fatal_error("Out of memory in new_heap",size);
    heap->free_list = NULL;
}

/* Allocate a code heap during startup */
void init_code_heap(CELL size)
{
    new_heap(&code_heap,size);
}

bool in_code_heap_p(CELL ptr)
{
    return (ptr >= code_heap.segment->start
        && ptr <= code_heap.segment->end);
}

/* If there is no previous block, next_free becomes the head of the free list,
else it is linked in after prev */
INLINE void update_free_list(F_HEAP *heap, F_BLOCK *prev, F_BLOCK *next_free)
{
    if(prev)
        prev->next_free = next_free;
    else
        heap->free_list = next_free;
}

/* Called after reading the code heap from the image file, and after code GC.

In the former case, we must add a large free block covering the span from
heap->segment->start + size to heap->segment->end. */
void build_free_list(F_HEAP *heap, CELL size)
{
    F_BLOCK *prev = NULL;
    F_BLOCK *prev_free = NULL;
    F_BLOCK *scan = first_block(heap);
    F_BLOCK *end = (F_BLOCK *)(heap->segment->start + size);

    /* Add all free blocks to the free list */
    while(scan && scan < end)
    {
        switch(scan->status)
        {
        case B_FREE:
            update_free_list(heap,prev_free,scan);
            prev_free = scan;
            break;
        case B_ALLOCATED:
            break;
        default:
            critical_error("Invalid scan->status",(CELL)scan);
            break;
        }

        prev = scan;
        scan = next_block(heap,scan);
    }

    /* If there is room at the end of the heap, add a free block. This
    branch is only taken after loading a new image, not after code GC */
    if((CELL)(end + 1) <= heap->segment->end)
    {
        end->status = B_FREE;
        end->next_free = NULL;
        end->size = heap->segment->end - (CELL)end;

        /* add the final free block */
        update_free_list(heap,prev_free,end);
    }
    /* This branch is taken if the newly loaded image fits exactly, or
    after code GC */
    else
    {
        /* even if there's no room at the end of the heap for a new
        free block, we might have to stretch the last block by a few
        bytes so that prev + prev->size reaches the end of the segment */
        if(prev)
            prev->size = heap->segment->end - (CELL)prev;

        /* this is the last free block */
        update_free_list(heap,prev_free,NULL);
    }
}

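/* To make the two branches above concrete: after loading an image that
occupies the first `size` bytes of a larger segment, the loop links up any
free blocks inside the image and the tail from start + size to the end of
the segment becomes one big free block; after a code GC the blocks already
cover the whole segment, so at most the last block's size needs adjusting. */
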
/* Allocate a block of memory from the mark and sweep GC heap */
void *heap_allot(F_HEAP *heap, CELL size)
{
    F_BLOCK *prev = NULL;
    F_BLOCK *scan = heap->free_list;
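
    /* round the requested size up to a multiple of 32 bytes: for example a
    1 byte request becomes 32, a 33 byte request becomes 64, and a size
    that is already a multiple of 32 is unchanged */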
    size = (size + 31) & ~31;

    while(scan)
    {
        CELL this_size = scan->size - sizeof(F_BLOCK);

        if(scan->status != B_FREE)
            critical_error("Invalid block in free list",(CELL)scan);

        if(this_size < size)
        {
            prev = scan;
            scan = scan->next_free;
            continue;
        }

        /* we found a candidate block */
        F_BLOCK *next_free;

        if(this_size - size <= sizeof(F_BLOCK))
        {
            /* too small to be split */
            next_free = scan->next_free;
        }
        else
        {
            /* split the block in two */
            CELL new_size = size + sizeof(F_BLOCK);
            F_BLOCK *split = (F_BLOCK *)((CELL)scan + new_size);
            split->status = B_FREE;
            split->size = scan->size - new_size;
            split->next_free = scan->next_free;
            scan->size = new_size;
            next_free = split;
        }
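
        /* For instance, if sizeof(F_BLOCK) were 16 bytes (the exact value
        depends on the build), a 96 byte free block serving a 32 byte
        request would be split into a 48 byte allocated block and a 48
        byte free remainder. */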

        /* update the free list */
        update_free_list(heap,prev,next_free);

        /* this is our new block */
        scan->status = B_ALLOCATED;

        return scan + 1;
    }

    return NULL;
}

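/* A sketch of how a caller might use heap_allot (hypothetical caller; the
real ones live elsewhere in the VM):

    void *code = heap_allot(&code_heap,length);
    if(code == NULL)
        ... make room, e.g. by collecting the code heap, then retry ...

The returned pointer points just past the F_BLOCK header (scan + 1 above);
a NULL return means no free block was large enough. */
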
/* If we have to grow the heap in the middle of a code GC, the data GC restarts
from scratch, so we have to unmark any marked blocks. */
void unmark_marked(F_HEAP *heap)
{
    F_BLOCK *scan = first_block(heap);

    while(scan)
    {
        if(scan->status == B_MARKED)
            scan->status = B_ALLOCATED;

        scan = next_block(heap,scan);
    }
}

/* After code GC, all referenced code blocks have their status set to B_MARKED,
so any which are allocated and not marked can be reclaimed. */
void free_unmarked(F_HEAP *heap)
{
    F_BLOCK *prev = NULL;
    F_BLOCK *scan = first_block(heap);

    while(scan)
    {
        switch(scan->status)
        {
        case B_ALLOCATED:
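            /* an allocated block that was not marked is unreachable;
            reclaim it, merging it into the preceding free block when
            there is one */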
            if(prev && prev->status == B_FREE)
                prev->size += scan->size;
            else
            {
                scan->status = B_FREE;
                prev = scan;
            }
            break;
        case B_FREE:
            if(prev && prev->status == B_FREE)
                prev->size += scan->size;
            break;
        case B_MARKED:
            scan->status = B_ALLOCATED;
            prev = scan;
            break;
        default:
            critical_error("Invalid scan->status",(CELL)scan);
        }

        scan = next_block(heap,scan);
    }

    build_free_list(heap,heap->segment->size);
}

/* Compute the used space, the total size of all free blocks, and the size of
the largest free block */
void heap_usage(F_HEAP *heap, CELL *used, CELL *total_free, CELL *max_free)
{
    *used = 0;
    *total_free = 0;
    *max_free = 0;
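
    /* block sizes include the F_BLOCK header itself, so these figures count
    header overhead as well as payload */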

    F_BLOCK *scan = first_block(heap);

    while(scan)
    {
        switch(scan->status)
        {
        case B_ALLOCATED:
            *used += scan->size;
            break;
        case B_FREE:
            *total_free += scan->size;
            if(scan->size > *max_free)
                *max_free = scan->size;
            break;
        default:
            critical_error("Invalid scan->status",(CELL)scan);
        }

        scan = next_block(heap,scan);
    }
}

/* The size of the heap, not including the last block if it's free */
CELL heap_size(F_HEAP *heap)
{
    F_BLOCK *scan = first_block(heap);

    while(next_block(heap,scan) != NULL)
        scan = next_block(heap,scan);

    /* this is the last block in the heap, and it is free */
    if(scan->status == B_FREE)
        return (CELL)scan - heap->segment->start;
    /* otherwise the last block is allocated */
    else
        return heap->segment->size;
}

/* Apply a function to every code block */
void iterate_code_heap(CODE_HEAP_ITERATOR iter)
{
    F_BLOCK *scan = first_block(&code_heap);

    while(scan)
    {
        if(scan->status != B_FREE)
            iter(block_to_compiled(scan));
        scan = next_block(&code_heap,scan);
    }
}

void update_literal_references_step(F_REL *rel, F_COMPILED *compiled)
{
    if(REL_TYPE(rel) == RT_IMMEDIATE)
    {
        CELL offset = rel->offset + (CELL)(compiled + 1);
        F_ARRAY *literals = untag_object(compiled->literals);
        F_FIXNUM absolute_value = array_nth(literals,REL_ARGUMENT(rel));
        apply_relocation(REL_CLASS(rel),offset,absolute_value);
    }
}

/* Update pointers to literals from compiled code. */
void update_literal_references(F_COMPILED *compiled)
{
    iterate_relocations(compiled,update_literal_references_step);
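
    /* the relocations patch literal values directly into the instruction
    stream, so the instruction cache has to be flushed before this code
    runs again */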
    flush_icache_for(compiled);
}

/* Copy all literals referenced from a code block to newspace. Only for
aging and nursery collections */
void copy_literal_references(F_COMPILED *compiled)
{
    if(collecting_gen >= compiled->last_scan)
    {
        if(collecting_accumulation_gen_p())
            compiled->last_scan = collecting_gen;
        else
            compiled->last_scan = collecting_gen + 1;
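
        /* last_scan appears to record the next generation whose
        collection needs to rescan this block's literals, so the test
        above skips blocks that are already up to date */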

        /* initialize chase pointer */
        CELL scan = newspace->here;

        copy_handle(&compiled->literals);
        copy_handle(&compiled->relocation);

        /* do some tracing so that all reachable literals are now
        at their final address */
        copy_reachable_objects(scan,&newspace->here);

        update_literal_references(compiled);
    }
}

/* Copy literals referenced from all code blocks to newspace. Only for
aging and nursery collections */
void copy_code_heap_roots(void)
{
    iterate_code_heap(copy_literal_references);
}

/* Mark all XTs and literals referenced from a word XT. Only for tenured
collections */
void mark_block(F_BLOCK *block)
{
    /* If already marked, do nothing */
    switch(block->status)
    {
    case B_MARKED:
        return;
    case B_ALLOCATED:
        block->status = B_MARKED;
        break;
    default:
        critical_error("Marking the wrong block",(CELL)block);
        break;
    }

    F_COMPILED *compiled = block_to_compiled(block);

    copy_handle(&compiled->literals);
    copy_handle(&compiled->relocation);

    flush_icache_for(compiled);
}

/* Update literals referenced from all code blocks. Only for tenured
collections, done at the end. */
void update_code_heap_roots(void)
{
    iterate_code_heap(update_literal_references);
}

/* Push the total size, used space, total free space and largest free block
of the code heap, all in kilobytes */
void primitive_code_room(void)
{
    CELL used, total_free, max_free;
    heap_usage(&code_heap,&used,&total_free,&max_free);
    dpush(tag_fixnum((code_heap.segment->size) / 1024));
    dpush(tag_fixnum(used / 1024));
    dpush(tag_fixnum(total_free / 1024));
    dpush(tag_fixnum(max_free / 1024));
}

/* Dump all code blocks for debugging */
void dump_heap(F_HEAP *heap)
{
    CELL size = 0;

    F_BLOCK *scan = first_block(heap);

    while(scan)
    {
        char *status;
        switch(scan->status)
        {
        case B_FREE:
            status = "free";
            break;
        case B_ALLOCATED:
            size += object_size(block_to_compiled(scan)->relocation);
            status = "allocated";
            break;
        case B_MARKED:
            size += object_size(block_to_compiled(scan)->relocation);
            status = "marked";
            break;
        default:
            status = "invalid";
            break;
        }

        print_cell_hex((CELL)scan); print_string(" ");
        print_cell_hex(scan->size); print_string(" ");
        print_string(status); print_string("\n");

        scan = next_block(heap,scan);
    }

    print_cell(size); print_string(" bytes of relocation data\n");
}

/* Compute where each block is going to go, after compaction */
CELL compute_heap_forwarding(F_HEAP *heap)
{
    F_BLOCK *scan = first_block(heap);
    CELL address = (CELL)first_block(heap);

    while(scan)
    {
        if(scan->status == B_ALLOCATED)
        {
            scan->forwarding = (F_BLOCK *)address;
            address += scan->size;
        }
        else if(scan->status == B_MARKED)
            critical_error("Why is the block marked?",0);

        scan = next_block(heap,scan);
    }
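
    /* the result is the offset of the end of the last forwarded block from
    the start of the segment, i.e. the size the heap will occupy after
    compaction */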
    return address - heap->segment->start;
}

F_COMPILED *forward_xt(F_COMPILED *compiled)
{
    return block_to_compiled(compiled_to_block(compiled)->forwarding);
}

void forward_frame_xt(F_STACK_FRAME *frame)
{
    CELL offset = (CELL)FRAME_RETURN_ADDRESS(frame) - (CELL)frame_code(frame);
    F_COMPILED *forwarded = forward_xt(frame_code(frame));
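    /* the machine code is assumed to begin immediately after the F_COMPILED
    header, hence the + 1, mirroring (CELL)(compiled + 1) in
    update_literal_references_step */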
    frame->xt = (XT)(forwarded + 1);
    FRAME_RETURN_ADDRESS(frame) = (XT)((CELL)forwarded + offset);
}

void forward_object_xts(void)
{
    begin_scan();

    CELL obj;

    while((obj = next_object()) != F)
    {
        if(type_of(obj) == WORD_TYPE)
        {
            F_WORD *word = untag_object(obj);

            word->code = forward_xt(word->code);
            if(word->profiling)
                word->profiling = forward_xt(word->profiling);
        }
        else if(type_of(obj) == QUOTATION_TYPE)
        {
            F_QUOTATION *quot = untag_object(obj);

            if(quot->compiledp != F)
                quot->code = forward_xt(quot->code);
        }
        else if(type_of(obj) == CALLSTACK_TYPE)
        {
            F_CALLSTACK *stack = untag_object(obj);
            iterate_callstack_object(stack,forward_frame_xt);
        }
    }

    /* End the heap scan */
    gc_off = false;
}

/* Set the XT fields now that the heap has been compacted */
void fixup_object_xts(void)
{
    begin_scan();

    CELL obj;

    while((obj = next_object()) != F)
    {
        if(type_of(obj) == WORD_TYPE)
        {
            F_WORD *word = untag_object(obj);
            update_word_xt(word);
        }
        else if(type_of(obj) == QUOTATION_TYPE)
        {
            F_QUOTATION *quot = untag_object(obj);

            if(quot->compiledp != F)
                set_quot_xt(quot,quot->code);
        }
    }

    /* End the heap scan */
    gc_off = false;
}

void compact_heap(F_HEAP *heap)
{
    F_BLOCK *scan = first_block(heap);

    while(scan)
    {
        F_BLOCK *next = next_block(heap,scan);

        if(scan->status == B_ALLOCATED && scan != scan->forwarding)
            memcpy(scan->forwarding,scan,scan->size);
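        /* forwarding addresses never exceed the block's current address,
        so blocks only slide toward the start of the heap; memmove would
        be the defensive choice if the old and new extents can overlap */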
        scan = next;
    }
}

/* Move all free space to the end of the code heap. This is not very efficient,
since it makes several passes over the code and data heaps, but we only ever
do this before saving a deployed image and exiting, so performance is not
critical here */
void compact_code_heap(void)
{
    /* Free all unreachable code blocks */
    gc();

    /* Figure out where the code heap blocks are going to end up */
    CELL size = compute_heap_forwarding(&code_heap);

    /* Update word and quotation code pointers */
    forward_object_xts();

    /* Actually perform the compaction */
    compact_heap(&code_heap);

    /* Update word and quotation XTs */
    fixup_object_xts();

    /* Now update the free list; there will be a single free block at
    the end */
    build_free_list(&code_heap,size);
}