#ifndef RBS_OBJECT_MEMORY_H
#define RBS_OBJECT_MEMORY_H

#include <stdlib.h>

#include "shotgun/lib/shotgun.h"
#include "shotgun/lib/baker.h"
#include "shotgun/lib/marksweep.h"

#define OMDefaultSize 4194304
/* A little over 1% of the total heap size. */
#define LargeObjectThreshold 2700
//#define OMDefaultSize 100000

#define OMCollectYoung  0x1
#define OMCollectMature 0x2
struct object_memory_struct {
  /* Set of flags (OMCollectYoung, OMCollectMature) requesting a collection. */
  int collect_now;
  /* Nonzero when the young generation should be grown to new_size. */
  int enlarge_now;
  /* Nonzero to force surviving young objects to be tenured. */
  int tenure_now;
  /* Target size for the young generation when enlarge_now is set. */
  int new_size;
  /* The most recently assigned object id. */
  int last_object_id;
  /* Rubinius uses Baker's generational GC as well as a mark-and-sweep
     collector for the mature generation. */
  baker_gc gc;
  mark_sweep_gc ms;
  /* Number of objects tenured by the last collection. */
  int last_tenured;
  int bootstrap_loaded;
  /* Stack-like heap from which method contexts are allocated. */
  rheap contexts;
  /* The first stack context that is not referenced from elsewhere;
     everything below it has been referenced and must be preserved. */
  OBJECT context_bottom;
  OBJECT context_last;

  int context_offset;
};

typedef struct object_memory_struct *object_memory;
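/* Illustrative sketch (not from the original source): a caller requesting a
   full collection would set both generation bits and let the collector
   entry points declared below act on them, e.g.

     om->collect_now = OMCollectYoung | OMCollectMature;
*/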
object_memory object_memory_new();
int object_memory_destroy(object_memory om);
size_t object_memory_used(object_memory om);
int object_memory_collect(STATE, object_memory om, ptr_array roots);
void object_memory_check_memory(object_memory om);
OBJECT object_memory_new_object_normal(object_memory om, OBJECT cls, unsigned int fields);
static inline OBJECT _om_inline_new_object(object_memory om, OBJECT cls, unsigned int fields);

OBJECT object_memory_new_object_mature(object_memory om, OBJECT cls, unsigned int fields);
void object_memory_print_stats(object_memory om);
OBJECT object_memory_new_opaque(STATE, OBJECT cls, unsigned int sz);
OBJECT object_memory_tenure_object(void* data, OBJECT obj);
void object_memory_major_collect(STATE, object_memory om, ptr_array roots);
OBJECT object_memory_collect_references(STATE, object_memory om, OBJECT mark);
void object_memory_setup_become(STATE, object_memory om, OBJECT from, OBJECT to);
void object_memory_clear_become(STATE, object_memory om);
void object_memory_update_rs(object_memory om, OBJECT target, OBJECT val);

void object_memory_shift_contexts(STATE, object_memory om);
void object_memory_mark_contexts(STATE, object_memory om);
void object_memory_formalize_contexts(STATE, object_memory om);
void object_memory_reset_contexts(STATE, object_memory om);
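/* Illustrative lifecycle sketch, assuming a valid STATE, class, and root
   set (state, cls, and roots are placeholders, not part of this header):

     object_memory om = object_memory_new();
     OBJECT obj = object_memory_new_object_normal(om, cls, 4);
     if(om->collect_now) object_memory_collect(state, om, roots);
     object_memory_destroy(om);
*/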
#define FAST_NEW 1

#ifdef FAST_NEW
#define object_memory_new_object _om_inline_new_object_init
#else
#define object_memory_new_object object_memory_new_object_normal
#endif

#define object_memory_new_dirty_object _om_inline_new_object
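/* Note (an assumption from the naming, not stated in the original): the
   "dirty" variant presumably skips zeroing the new object's fields, so a
   caller must store into every field before anything can trigger a GC. */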
#define CTX_SIZE SIZE_IN_BYTES_FIELDS(FASTCTX_FIELDS)

#define BYTES_PAST(ctx, num) ((char*)ctx + num)
#define AFTER_CTX(ctx) BYTES_PAST(ctx, FASTCTX(ctx)->size)
static inline OBJECT object_memory_new_context(object_memory om, unsigned int locals) {
  unsigned int size;
  OBJECT ctx;

  if(locals > 0) {
    size = (unsigned int)(CTX_SIZE + SIZE_IN_BYTES_FIELDS(locals) + 4);
  } else {
    size = (unsigned int)CTX_SIZE;
  }

  ctx = ((OBJECT)heap_allocate_dirty(om->contexts, size));
  // memset(ctx, 0, size);

  /* Not really the number of fields, but rather the number of bytes
     this context occupies. */
  FASTCTX(ctx)->size = size;

  return ctx;
}
#define object_memory_context_locals(ctx) ((OBJECT)BYTES_PAST(ctx, CTX_SIZE))
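/* Illustrative sketch: allocating a context with two locals and locating
   its locals area (om is a placeholder for a live object_memory):

     OBJECT ctx = object_memory_new_context(om, 2);
     OBJECT locals = object_memory_context_locals(ctx);
*/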
#define om_on_stack(om, ctx) heap_contains_p(om->contexts, ctx)
#define om_in_heap(om, ctx) heap_contains_p(om->gc->current, ctx)
#define object_memory_retire_context(om, ctx) \
  if(om_on_stack(om, ctx) && (ctx >= om->context_bottom)) { \
    fast_memfill_s20((void*)ctx, 0); heap_putback(om->contexts, FASTCTX(ctx)->size); \
  }
#define object_memory_context_referenced(om, ctx) (void)({ \
  OBJECT _nb = (OBJECT)AFTER_CTX(ctx); \
  if(om_on_stack(om, ctx) && (om->context_bottom < _nb)) { \
    om->context_bottom = _nb; } })
#define om_context_referenced_p(om, ctx) ((ctx < om->context_bottom) && (ctx >= (OBJECT)om->contexts->address))

#define om_stack_context_p(om, ctx) (om_on_stack(om, ctx) && (ctx >= om->context_bottom))

#define om_stack_next_ctx(ctx) ((OBJECT)AFTER_CTX(ctx))
#define om_stack_prev_ctx(ctx) ((OBJECT)BYTES_PAST(ctx, -FASTCTX(ctx)->size))
#define om_stack_sender(ctx) om_stack_prev_ctx(ctx)
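/* Illustrative sketch: walking from a live stack context back toward its
   callers until the walk leaves the unreferenced region (ctx is a
   placeholder for a context known to be on the stack):

     OBJECT c = ctx;
     while(om_stack_context_p(om, c)) {
       c = om_stack_sender(c);
     }
*/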
#define om_valid_context_p(state, ctx) ( \
  (om_stack_context_p(state->om, ctx) && stack_context_p(ctx)) || \
  (om_context_referenced_p(state->om, ctx)) || \
  (om_in_heap(state->om, ctx) && (methctx_is_fast_p(state, ctx) || block_context_p(state, ctx))) \
)
#define EACH_CTX(om, addr) \
  addr = (OBJECT)om->contexts->address; \
  while(addr < (OBJECT) om->contexts->current) {

#define DONE_EACH_CTX(addr) addr = (address)AFTER_CTX(addr); }

#define EACH_REFD_CTX(om, addr) \
  addr = (OBJECT)om->contexts->address; \
  while(addr < (OBJECT) om->context_bottom) {

#define DONE_EACH_REFD_CTX(addr) addr = (address)AFTER_CTX(addr); }

#define EACH_STACK_CTX(om, addr) \
  addr = (OBJECT)om->context_bottom; \
  while(addr < (OBJECT) om->contexts->current) {

#define DONE_EACH_STACK_CTX(addr) addr = (address)AFTER_CTX(addr); }
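/* Illustrative sketch: visiting every context in the context stack (the
   caller supplies the cursor variable; visit is a hypothetical helper):

     OBJECT addr;
     EACH_CTX(om, addr)
       visit(addr);
     DONE_EACH_CTX(addr)
*/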
#define om_no_referenced_ctxs_p(om) (om->context_bottom == (OBJECT)om->contexts->address)

/* These are the 4 scenarios detailed in doc/life_of_a_context.txt */
#define om_valid_sender_p(om, ctx, sender) ( \
  (NIL_P(sender) && om_on_stack(om, ctx)) || \
  (om_on_stack(om, ctx) && om_on_stack(om, sender) && (om_context_referenced_p(om, sender) || (sender == om_stack_sender(ctx)))) || \
  (om_in_heap(om, sender) && om_on_stack(om, ctx) && (om->context_bottom == ctx)) || \
  (om_in_heap(om, ctx) && (om_context_referenced_p(om, sender) || om_in_heap(om, sender))))
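/* Reading the four cases above in order:
   1. ctx is on the stack and has no sender at all;
   2. ctx and sender are both on the stack, and sender has either been
      referenced or sits directly below ctx;
   3. sender lives in the heap and ctx is the bottom-most stack context;
   4. ctx itself lives in the heap, and sender is referenced or also in
      the heap. */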
#endif