#include "shotgun/lib/shotgun.h"
#include "shotgun/lib/heap.h"
#include "shotgun/lib/cpu.h"
#include "shotgun/lib/methctx.h"
#include "shotgun/lib/bytearray.h"
#include "shotgun/lib/baker.h"
#include "shotgun/lib/tuple.h"
/* how many times an object must be traced before it is tenured */
#define DEFAULT_TENURE_AGE 6
/*
  Creates and initializes a new copying GC instance.
  It starts with space A as the "from" space and B as the "to" space.
*/
baker_gc baker_gc_new(int size) {
  baker_gc g;
  g = (baker_gc)calloc(1, sizeof(struct baker_gc_struct));
  g->remember_set = ptr_array_new(8);
  g->seen_weak_refs = ptr_array_new(8);
  g->tenured_objects = ptr_array_new(8);

  g->space_a = heap_new(size);
  g->space_b = heap_new(size);
  g->current = g->space_a;
  g->next = g->space_b;

  g->tenure_age = DEFAULT_TENURE_AGE;
  g->become_from = Qnil;
  g->become_to = Qnil;

  return g;
}
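/* A minimal usage sketch (illustrative only; in the VM proper the
   object_memory layer drives these calls, and the size below is an
   arbitrary example value): */
#if 0
static void example_baker_lifecycle(void) {
  baker_gc g = baker_gc_new(1024 * 1024); /* two 1 MB semispaces */
  baker_gc_describe(g);                   /* dump sizes and addresses */
  /* ... allocate, collect, repeat ... */
  baker_gc_destroy(g);                    /* frees both spaces */
}
#endif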
/* prints various garbage collector information */
void baker_gc_describe(baker_gc g) {
  printf("Size : %x (%d)\n", (unsigned int)g->current->size, (int)g->current->size);
  printf("Current (\"from\") heap space address: %p => %p\n", (void*)g->current->address, (void*)g->current->last);
  printf("Next (\"to\") heap space address: %p => %p\n", (void*)g->next->address, (void*)g->next->last);
  printf("Remember set size: %zd\n", ptr_array_length(g->remember_set));
}
/* baker GC uses two heap spaces of the same size */
int baker_gc_memory_allocated(baker_gc g) {
  return g->current->size * 2;
}
/* returns how much memory is used by the "from" heap space */
int baker_gc_memory_in_use(baker_gc g) {
  return g->current->current - g->current->address;
}
/* replaces the "to" space (the space objects are copied into during
   collection) with a freshly allocated heap of sz bytes */
int baker_gc_enlarge_next(baker_gc g, size_t sz) {
  rheap h;
  h = heap_new(sz);
  return baker_gc_set_next(g, h);
}
/* sets the "to" heap space; always returns TRUE */
int baker_gc_set_next(baker_gc g, rheap h) {
  if(g->next == g->space_a) {
    g->space_a = h;
  } else {
    g->space_b = h;
  }
  g->next = h;
  return TRUE;
}
/* gets the start address of the current "from" heap space */
address baker_gc_start_address(baker_gc g) {
  return g->current->address;
}
/* total baker GC memory usage */
size_t baker_gc_used(baker_gc g) {
  return g->used;
}
/* reset memory usage info */
void baker_gc_reset_used(baker_gc g) {
  g->used = 0;
}
/* performs the flip operation on the heap spaces: "from" and "to"
   exchange roles, and last_start/last_end record the extent of the old
   "from" space so the dead objects left behind can be swept later */
int baker_gc_swap(baker_gc g) {
  rheap tmp;

  tmp = g->current;
  g->current = g->next;
  g->next = tmp;

  g->last_start = (char*)tmp->address;
  g->last_end = (char*)tmp->current;

  return TRUE;
}
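/* Sketch of the flip invariant (illustrative, matching the code above):
   after a swap the spaces have exchanged roles, and last_start/last_end
   bracket the survivors' old copies for baker_gc_find_lost_souls. */
#if 0
  rheap from = g->current, to = g->next;
  baker_gc_swap(g);
  assert(g->current == to);
  assert(g->next == from);
  assert(g->last_start == (char*)from->address);
#endif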
int baker_gc_destroy(baker_gc g) {
  heap_deallocate(g->space_a);
  heap_deallocate(g->space_b);
  return TRUE;
}
/* sets the forwarding pointer on an object; the pointer is stored in
   the object's klass field */
void baker_gc_set_forwarding_address(OBJECT obj, OBJECT dest) {
  obj->Forwarded = TRUE; /* flag name assumed; it backs FORWARDED_P() */
  obj->klass = dest;
}

OBJECT baker_gc_forwarded_object(OBJECT obj) {
  OBJECT out = obj->klass;
  return out;
}
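/* How the forwarding protocol fits together (illustrative sketch): once
   an object has been copied, its old header answers for the new copy.
   baker_gc_forwarded_p() is assumed to test the flag set above. */
#if 0
  OBJECT copy = heap_copy_object(g->next, obj);
  baker_gc_set_forwarding_address(obj, copy);
  assert(baker_gc_forwarded_p(obj));
  assert(baker_gc_forwarded_object(obj) == copy);
#endif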
/* returns the current location of iobj: its forwarding target if it has
   already been moved, a fresh copy if it lives in a space being
   collected, or iobj itself otherwise */
#define baker_gc_maybe_mutate(st, g, iobj) ({ \
  OBJECT ret; \
  if(baker_gc_forwarded_p(iobj)) { \
    ret = baker_gc_forwarded_object(iobj); \
  } else if(baker_gc_contains_p(g, iobj) || heap_contains_p(state->om->contexts, iobj)) { \
    assert(((OBJECT)iobj)->klass); \
    ret = baker_gc_mutate_object(st, g, iobj); \
  } else { \
    ret = iobj; \
  } \
  ret; })
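/* Usage note: as a GCC statement expression the macro evaluates to the
   object's (possibly new) location, so callers assign the result back,
   e.g. cls = baker_gc_maybe_mutate(state, g, cls). Also note the body
   reads the local `state` (not the `st` parameter) for the context-heap
   check, so it can only be expanded where `state` is in scope. */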
int _object_stores_bytes(OBJECT self);

static int depth = 0;
static ptr_array _track_refs = NULL;
/* registers an object address so the collector prints a message each
   time it encounters it; a debugging aid */
void track_ref(OBJECT obj) {
  if(!_track_refs) {
    _track_refs = ptr_array_new(8);
  }
  ptr_array_append(_track_refs, (xpointer)obj);
}
void untrack_ref(OBJECT obj) {
  if(!_track_refs) return;
  ptr_array_remove_fast(_track_refs, (xpointer)obj);
}
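/* Debugging sketch (illustrative; `suspect` and `roots` are placeholder
   names): register an object before a collection and the GC prints a
   "Found..." line each time it touches that address. */
#if 0
  track_ref(suspect);
  baker_gc_collect(state, g, roots);
  untrack_ref(suspect);
#endif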
static void _mutate_references(STATE, baker_gc g, OBJECT iobj) {
  OBJECT cls, tmp, mut, ba;
  int i, fields;

  if(_track_refs) {
    if(ptr_array_contains(_track_refs, (xpointer)iobj))
      printf("Found2 %p!\n", (void*)iobj);
  }

  //printf("%d: Mutating class of %p\n", ++depth, iobj);
  /* CLASS_OBJECT() gets the object directly in the klass field
   * of iobj, which is very likely its metaclass, not its
   * formal class. FYI. */
  cls = CLASS_OBJECT(iobj);
  if(REFERENCE_P(cls)) {
    cls = baker_gc_maybe_mutate(state, g, cls);
  }
  SET_CLASS(iobj, cls);

  xassert(!FORWARDED_P(iobj));
  if(iobj->RefsAreWeak) {
    // printf("%p has weak refs.\n", (void*)iobj);
    /* weak references are not traced here; they are fixed up from
       seen_weak_refs at the end of the collection */
    ptr_array_append(g->seen_weak_refs, (xpointer)iobj);
    return;
  }

  //printf("%d: Mutating references of %p\n", depth, iobj);
  if(!_object_stores_bytes(iobj)) {
    /* follow object field references and mutate them */
    fields = NUM_FIELDS(iobj);
    for(i = 0; i < fields; i++) {
      tmp = NTH_FIELD(iobj, i);
      if(!REFERENCE_P(tmp)) continue;

      /* TODO: duplicated in other places: extract to a separate function? */
      if(FORWARDED_P(tmp)) {
        mut = baker_gc_forwarded_object(tmp);
      } else if(heap_contains_p(g->current, tmp) || heap_contains_p(state->om->contexts, tmp)) {
        mut = baker_gc_mutate_object(state, g, tmp);
      } else {
        mut = tmp;
      }

      /* update field reference */
      SET_FIELD_DIRECT(iobj, i, mut);
      /* update remember set, set "remembered" flag on iobj */
      RUN_WB2(om, iobj, mut);
    }
  } else {
#define fc_mutate(field) if(fc->field && REFERENCE_P(fc->field)) SET_STRUCT_FIELD(iobj, fc->field, baker_gc_maybe_mutate(state, g, fc->field))
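  /* For reference, fc_mutate(sender) expands to (illustrative):
       if(fc->sender && REFERENCE_P(fc->sender))
         SET_STRUCT_FIELD(iobj, fc->sender,
                          baker_gc_maybe_mutate(state, g, fc->sender));
     i.e. the struct field is rewritten to the object's new location
     through the SET_STRUCT_FIELD write barrier. */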
    if(context_p(state, iobj)) {
      struct fast_context *fc = FASTCTX(iobj);

      fc_mutate(custom_iseq);
      fc_mutate(method_module);

      if(!NIL_P(fc->method) && fc->method->obj_type == CMethodType) {
        /* We cache the bytecode in a char*, so adjust it.
           We mutate the data first so we cache the newest address. */
        ba = cmethod_get_compiled(fc->method);
        ba = baker_gc_maybe_mutate(state, g, ba);
        if(!NIL_P(fc->custom_iseq)) {
          /* A context can be set to use a custom instruction sequence
             by the debugger; if this is the case, we need to preserve
             the context's use of that custom iseq. */
          ba = fc->custom_iseq;
        }
        fc->data = BYTEARRAY_ADDRESS(ba);
      }
    } else if(iobj->obj_type == TaskType) {
      struct cpu_task *fc = (struct cpu_task*)BYTES_OF(iobj);

      /* Only mark up non-active tasks, because the active
         one is handled elsewhere. */
      if(REFERENCE_P(fc->active_context)) {
        assert(fc->active_context->obj_type == MContextType ||
               fc->active_context->obj_type == BContextType);
      }

      if(REFERENCE_P(fc->home_context)) {
        assert(fc->home_context->obj_type == MContextType ||
               fc->home_context->obj_type == BContextType);
      }

      fc_mutate(exception);
      fc_mutate(enclosing_class);
      fc_mutate(active_context);
      fc_mutate(home_context);
      fc_mutate(debug_channel);
      fc_mutate(control_channel);
      fc_mutate(current_scope);
      // printf("marking %p, %p (%p, %d)\n", iobj, fc->stack_top, fc->sp_ptr, fc->sp_ptr - fc->stack_top);
      OBJECT *sp = fc->stack_top;
      assert(fc->sp_ptr >= sp);

      /* walk the task's value stack and mutate every reference on it */
      while(sp <= fc->sp_ptr) {
        if(*sp && REFERENCE_P(*sp)) {
          *sp = baker_gc_maybe_mutate(state, g, *sp);
        }
        sp++;
      }
      for(i = 0; i < ptr_array_length(fc->paths); i++) {
        ptr_array_set_index(fc->paths, i,
          (xpointer)baker_gc_maybe_mutate(state, g, (OBJECT)ptr_array_get_index(fc->paths, i)));
      }
    }
    /* Handle the generic type_info prefix fields */
    fields = state->type_info[iobj->obj_type].object_fields;

    for(i = 0; i < fields; i++) {
      tmp = NTH_FIELD(iobj, i);
      if(!REFERENCE_P(tmp)) continue;

      if(FORWARDED_P(tmp)) {
        mut = baker_gc_forwarded_object(tmp);
      } else if(heap_contains_p(g->current, tmp) || heap_contains_p(state->om->contexts, tmp)) {
        mut = baker_gc_mutate_object(state, g, tmp);
      } else {
        mut = tmp;
      }

      SET_FIELD_DIRECT(iobj, i, mut);
      RUN_WB2(om, iobj, mut);
    }
  }
}
#undef fc_mutate
void baker_gc_mutate_context(STATE, baker_gc g, OBJECT iobj, int shifted, int top) {
#define fc_mutate(field) if(fc->field && REFERENCE_P(fc->field)) fc->field = baker_gc_mutate_from(state, g, fc->field)

  struct fast_context *fc = FASTCTX(iobj);
  OBJECT old_sender, tmp, mut, ba;
  int i, fields;

  old_sender = fc->sender;
  if(REFERENCE_P(fc->sender)) {
    if(top) {
      /* This is the topmost stack context, handle it differently */
      xassert(om_in_heap(state->om, fc->sender) || om_context_referenced_p(state->om, fc->sender));
      xassert(fc->sender != Qnil);
      fc_mutate(sender);
      xassert(om_in_heap(state->om, fc->sender));
    } else if(shifted) {
      /* the sender was copied off the context stack; follow the
         forwarding pointer left in its klass field */
      old_sender = fc->sender;
      xassert(om_on_stack(state->om, old_sender));
      fc->sender = old_sender->klass;
      xassert(NIL_P(fc->sender) || fc->sender == om_stack_sender(iobj));
    } else {
      xassert(om_on_stack(state->om, fc->sender));
    }
  }
  assert(NIL_P(fc->sender) || fc->sender->obj_type == MContextType || fc->sender->obj_type == BContextType);
  fc_mutate(custom_iseq);

  if(!NIL_P(fc->locals) && fc->locals->gc_zone == 0) {
    /* the locals tuple still lives on the context stack; locate it and
       mutate each entry in place */
    fc->locals = object_memory_context_locals(iobj);
    fields = NUM_FIELDS(fc->locals);

    for(i = 0; i < fields; i++) {
      tmp = NTH_FIELD(fc->locals, i);
      if(!REFERENCE_P(tmp)) continue;

      mut = baker_gc_mutate_from(state, g, tmp);
      fast_unsafe_set(fc->locals, i, mut);
    }
  }
  fc_mutate(method_module);
  if(!NIL_P(fc->method) && fc->method->obj_type == CMethodType) {
    /* We cache the bytecode in a char*, so adjust it.
       We mutate the data first so we cache the newest address. */
    ba = cmethod_get_compiled(fc->method);
    ba = baker_gc_mutate_from(state, g, ba);
    if(!NIL_P(fc->custom_iseq)) {
      /* A context can be set to use a custom instruction sequence
         by the debugger; if this is the case, we need to preserve
         the context's use of that custom iseq. */
      ba = fc->custom_iseq;
    }
    fc->data = BYTEARRAY_ADDRESS(ba);
  }
}
#undef fc_mutate
static int saved_contexts;
OBJECT baker_gc_mutate_object(STATE, baker_gc g, OBJECT obj) {
  OBJECT dest;

  if(obj == g->become_from) {
    return baker_gc_maybe_mutate(state, g, g->become_to);
  }

  if(heap_contains_p(state->om->contexts, obj)) {
    saved_contexts++; /* count copied contexts (reconstructed) */
  }

  if(_track_refs) {
    if(ptr_array_contains(_track_refs, (xpointer)obj))
      printf("Found %p!\n", (void*)obj);
  }

  if(AGE(obj) == g->tenure_age) {
    /* the object reached the tenure age: promote it to the old generation */
    xassert(obj->klass != state->global->fastctx);
    dest = (*g->tenure)(g->tenure_data, obj);
    baker_gc_set_forwarding_address(obj, dest);
    // printf("Tenuring object %p to %p, age %d (%d).\n", obj, dest, age, g->tenure_age);
    ptr_array_append(g->tenured_objects, (xpointer)dest);
    // _mutate_references(state, g, dest);
  } else {
    if(heap_enough_fields_p(g->next, NUM_FIELDS(obj))) {
      xassert(obj->klass != Qnil);
      dest = heap_copy_object(g->next, obj);
      /* when the object is moved to the new heap, a forwarding pointer
         is left behind in the young generation ("from") heap */
      baker_gc_set_forwarding_address(obj, dest);
      if(!obj->ForeverYoung) INCREMENT_AGE(dest);
      if(obj->obj_type == WrapsStructType) MARK_WRAPPED_STRUCT(obj);
    } else {
      /* the "to" space is full; tenure the object instead */
      dest = (*g->tenure)(g->tenure_data, obj);
      baker_gc_set_forwarding_address(obj, dest);
      _mutate_references(state, g, dest);
    }
  }

  if(_track_refs) {
    if(ptr_array_contains(_track_refs, (xpointer)dest))
      printf("Found3 %p!\n", (void*)dest);
  }

  return dest;
}
int baker_gc_contains_p(baker_gc g, OBJECT obj) {
  return heap_contains_p(g->current, obj);
}
int baker_gc_contains_spill_p(baker_gc g, OBJECT obj) {
  return heap_contains_p(g->next, obj) || heap_contains_p(g->current, obj);
}
OBJECT baker_gc_mutate_from(STATE, baker_gc g, OBJECT orig) {
  OBJECT ret, iobj;

  ret = baker_gc_maybe_mutate(state, g, orig);

  /* scan everything the mutation just copied into the "to" space */
  iobj = heap_next_unscanned(g->next);
  while(iobj) {
    _mutate_references(state, g, iobj);
    iobj = heap_next_unscanned(g->next);
  }

  return ret;
}
void baker_gc_mutate_rest(STATE, baker_gc g) {
  OBJECT iobj;

  iobj = heap_next_unscanned(g->next);

  while(iobj) {
    _mutate_references(state, g, iobj);
    iobj = heap_next_unscanned(g->next);
  }
}
unsigned int baker_gc_collect(STATE, baker_gc g, ptr_array roots) {
  int i, j, sz;
  OBJECT root, tmp, t2, ref;
  OBJECT *sp;
  struct method_cache *end, *ent;
  /* rs for remember set */
  ptr_array rs;

  /* increase the number of GC cycles passed */
  g->num_collection++; /* counter field name assumed */

  ptr_array_clear(g->seen_weak_refs);
  ptr_array_clear(g->tenured_objects);

  // printf("Running garbage collector...\n");

  /* start tracing from the root set */
  sz = ptr_array_length(roots);
  for(i = 0; i < sz; i++) {
    root = (OBJECT)(ptr_array_get_index(roots, i));
    // printf("Collecting from root %d\n", i);
    // printf("%p => %p\n", g->current->address, g->next->address);
    // printf("Start at RS (%d): %p\n", i, root);
    if(!REFERENCE2_P(root)) { continue; }
    tmp = baker_gc_mutate_from(state, g, root);
    ptr_array_set_index(roots, i, (xpointer)tmp);
  }
  /* To maintain the remember set, we set up a totally new
     set before doing any walking of objects, so that only objects
     which truly still contain pointers to this generation
     are added back to the new rs. */
  rs = g->remember_set;
  g->remember_set = ptr_array_new(8);

  sz = ptr_array_length(rs);
  for(i = 0; i < sz; i++) {
    root = (OBJECT)(ptr_array_get_index(rs, i));
    if(!REFERENCE2_P(root)) { continue; }
    root->Remember = FALSE;
    tmp = baker_gc_mutate_from(state, g, root);
    // ptr_array_set_index(g->remember_set, i, tmp);
  }
  /* Now the stack; sp is the stack pointer. */
  sp = state->current_stack;
  while(sp <= state->current_sp) {
    tmp = *sp;
    if(REFERENCE2_P(tmp)) {
      *sp = baker_gc_mutate_from(state, g, tmp);
    }
    sp++;
  }
  /* Now the handle table. */
  for(i = 0; i < state->handle_tbl->total; i++) {
    if(state->handle_tbl->entries[i]) {
      state->handle_tbl->entries[i]->object =
        baker_gc_mutate_from(state, g, state->handle_tbl->entries[i]->object);
    }
  }
  cpu_event_each_channel(state,
      (cpu_event_each_channel_cb)baker_gc_mutate_from, g);
  cpu_sampler_collect(state,
      (cpu_sampler_collect_cb)baker_gc_mutate_from, g);
  /* This is a little odd, so I should explain. As we encounter
     objects which should be tenured while scanning, we put them
     into the tenured_objects array. We finish the normal scan, and
     get down to here. Now, we're going to scan the tenured objects
     using _mutate_references(). That is going to cause tenured_objects
     to grow. So as this loop processes objects, more are added
     to the array. Eventually, it will exhaust itself as there will
     be no more tenured objects, and the loop will end. */
  i = 0;
  while(!heap_fully_scanned_p(g->next) || i < ptr_array_length(g->tenured_objects)) {
    baker_gc_mutate_rest(state, g);

    for(; i < ptr_array_length(g->tenured_objects); i++) {
      tmp = (OBJECT)ptr_array_get_index(g->tenured_objects, i);
      _mutate_references(state, g, tmp);
    }
  }
  /* We handle the method cache a little differently. We treat every
   * ref as weak so that the cache doesn't cause objects to live longer
   * than they should. */
  ent = state->method_cache;
  end = ent + CPU_CACHE_SIZE;

  while(ent < end) {
    if(ent->klass) {
      if(ent->klass->gc_zone == YoungObjectZone) {
        if(baker_gc_forwarded_p(ent->klass)) {
          ent->klass = baker_gc_forwarded_object(ent->klass);
        } else {
          ent->klass = NULL;
        }
      }
    }

    if(ent->module) {
      if(ent->module->gc_zone == YoungObjectZone) {
        if(baker_gc_forwarded_p(ent->module)) {
          ent->module = baker_gc_forwarded_object(ent->module);
        } else {
          ent->module = NULL;
        }
      }
    }

    if(ent->method) {
      if(ent->method->gc_zone == YoungObjectZone) {
        if(baker_gc_forwarded_p(ent->method)) {
          ent->method = baker_gc_forwarded_object(ent->method);
        } else {
          ent->method = NULL;
        }
      }
    }

    if(!ent->klass || !ent->module || !ent->method) {
      /* one component died, so evict the whole entry (policy assumed) */
      ent->klass = NULL;
      ent->module = NULL;
      ent->method = NULL;
    }

    ent++;
  }
  /* fix up weak references: live young targets are updated to their new
     location, dead ones are replaced with Qnil */
  for(i = 0; i < ptr_array_length(g->seen_weak_refs); i++) {
    tmp = (OBJECT)ptr_array_get_index(g->seen_weak_refs, i);
    for(j = 0; j < NUM_FIELDS(tmp); j++) {
      t2 = tuple_at(state, tmp, j);
      if(REFERENCE_P(t2) && t2->gc_zone == YoungObjectZone) {
        if(baker_gc_forwarded_p(t2)) {
          ref = baker_gc_forwarded_object(t2);
          tuple_put(state, tmp, j, ref);
        } else {
          tuple_put(state, tmp, j, Qnil);
        }
      }
    }
  }

  assert(heap_fully_scanned_p(g->next));
  /* keep the two spaces the same size */
  if(g->current->size != g->next->size) {
    baker_gc_enlarge_next(g, g->current->size);
  }

  baker_gc_swap(g); /* flip: the survivors' space becomes the new "from" */

  /* if survivors fill more than 90% of the new "from" space, grow the
     next space by half */
  if(g->used > g->current->size * 0.90) {
    DEBUG("Enlarging next!\n");
    baker_gc_enlarge_next(g, g->current->size * 1.5);
  }

  // printf("Saved %d contexts.\n", saved_contexts);

  return TRUE;
}
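/* How a full young-generation cycle fits together (illustrative sketch;
   in the VM the object_memory layer supplies the roots array and the
   tenure callback and makes these calls): */
#if 0
static void example_young_collection(STATE, baker_gc g, ptr_array roots) {
  baker_gc_collect(state, g, roots);  /* trace roots, copy, then flip   */
  baker_gc_find_lost_souls(state, g); /* finalize the dead, free buffer */
}
#endif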
/* clears the mark flag on every object in the "from" space */
void baker_gc_clear_marked(baker_gc g) {
  char *cur, *end;
  OBJECT obj;
  unsigned int osz;

  cur = (char*)g->current->address;
  end = (char*)g->current->current;

  while(cur < end) {
    obj = (OBJECT)cur;
    osz = SIZE_IN_BYTES(obj);
    obj->Marked = FALSE; /* flag name assumed */
    cur += osz;
  }
}
/* runs cleanup for objects that did not survive the collection ("lost
   souls"), then releases the old buffer if it was replaced */
void baker_gc_find_lost_souls(STATE, baker_gc g) {
  char *cur, *end;
  OBJECT obj, cls;
  unsigned int osz, bs;

  cur = (char*)g->last_start;
  end = (char*)g->last_end;
  // printf("Looking for lost souls between %p and %p\n", cur, end);

  while(cur < end) {
    obj = (OBJECT)cur;
    osz = NUM_FIELDS(obj);

    if(!baker_gc_forwarded_p(obj)) {
      cls = CLASS_OBJECT(obj);
      //printf("%p is dead: %d, %p, %s.\n", obj, obj->RequiresCleanup,
      //  cls, cls ? _inspect(cls) : "(NULL)");

      if(obj->RequiresCleanup) {
        if(obj->obj_type == MemPtrType) {
          void *addr = *DATA_STRUCT(obj, void**);
          if(addr) free(addr); /* release the wrapped pointer (reconstructed) */
          obj->RequiresCleanup = 0;
        } else {
          state_run_cleanup(state, obj);
          obj->RequiresCleanup = 0;
        }
      }

      if(obj->obj_type == WrapsStructType) FREE_WRAPPED_STRUCT(obj);
    }

    bs = SIZE_IN_BYTES_FIELDS(osz);
    // fast_memfill(cur, 0, SIZE_IN_WORDS_FIELDS(osz));
    // memset(cur, 0, bs);
    cur += bs;
  }

  // the 'next' heap has been enlarged (replaced by a new, larger one):
  // free the old buffer. the heap struct that holds a copy of the
  // g->last_start pointer in its 'address' slot is unreachable, so it
  // can't be freed here.
  if((OBJECT)g->last_start != g->next->address) {
    free(g->last_start);
  }
}
/* appends every object in the "from" space that has a field pointing at
   mark to the refs array */
void baker_gc_collect_references(STATE, baker_gc g, OBJECT mark, ptr_array refs) {
  char *cur;
  OBJECT obj;
  unsigned int osz;
  size_t sz;
  int i;

  sz = baker_gc_used(g);
  cur = (char*)g->current->address;

  while(cur < ((char*)g->current->address) + sz) {
    obj = (OBJECT)cur;
    osz = SIZE_IN_BYTES(obj);

    if(!_object_stores_bytes(obj)) {
      for(i = 0; i < NUM_FIELDS(obj); i++) {
        if(NTH_FIELD(obj, i) == mark) {
          ptr_array_append(refs, (xpointer)obj);
        }
      }
    }

    cur += osz;
  }
}