#ifndef RBX_CALL_FRAME_HPP
#define RBX_CALL_FRAME_HPP

#include "stack_variables.hpp"
#include "dispatch.hpp"
#include "arguments.hpp"
#include "object_utils.hpp"

#include "class/compiled_code.hpp"
#include "class/symbol.hpp"

#include <ostream>
class Object;
class Module;
class LexicalScope;
class VariableScope;
class MachineCode;
class UnwindSite;
class NativeMethodFrame;
class BlockEnvironment;

class RuntimeDataHolder;
struct InterpreterState {
  // ...
};

// A CallFrame records one activation of Ruby code: the CompiledCode being
// run, its lexical and variable scopes, the arguments it was called with,
// and a set of flag bits describing what kind of frame it is.
struct CallFrame {
  enum Flags {
    cMultipleScopes     = 1 << 1,
    cInlineFrame        = 1 << 2,
    cClosedScope        = 1 << 3,
    cBlockAsMethod      = 1 << 4,
    cJITed              = 1 << 5,
    cBlock              = 1 << 6,
    cInlineBlock        = 1 << 7,
    cNativeMethod       = 1 << 8,
    cTopLevelVisibility = 1 << 9,
    cScopeLocked        = 1 << 11
  };
  LexicalScope* lexical_scope_;

  // Payload slot whose meaning depends on the flag bits; see block_env(),
  // native_method_frame(), and the inline-frame check in name().
  void* dispatch_data;
  CompiledCode* compiled_code;

  int flags;

  VariableScope* top_scope_;
  StackVariables* scope;

  Arguments* arguments;
  MachineCode* machine_code;

  Object* return_value;

  // Operand stack storage; layout inferred from prepare() and the
  // ALLOCA_CALL_FRAME macro below, which reserves the slots directly after
  // the frame itself.
  Object** registers;
  Object* stk[0];
  CallFrame()
    : lexical_scope_(nullptr)
    , dispatch_data(nullptr)
    , compiled_code(nullptr)
    , flags(0)
    , top_scope_(nullptr)
    , scope(nullptr)
    , arguments(nullptr)
    , machine_code(nullptr)
    , return_value(nullptr)
    , registers(nullptr)
  {
  }
  bool block_as_method_p() const {
    return flags & cBlockAsMethod;
  }

  BlockEnvironment* block_env() {
    return reinterpret_cast<BlockEnvironment*>(dispatch_data);
  }

  void set_block_env(BlockEnvironment* env) {
    dispatch_data = reinterpret_cast<void*>(env);
  }
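  // dispatch_data is one slot reused for several payloads: a block frame
  // stores its BlockEnvironment here (block_env() above), a native method
  // frame stores its NativeMethodFrame (native_method_frame() below), and
  // inline frames may also carry dispatch data (see the check in name()).
  // Callers are expected to test the flag bits before casting, for example
  // (illustrative sketch only):
  //
  //   if(frame->block_p()) {
  //     BlockEnvironment* env = frame->block_env();
  //   } else if(frame->native_method_p()) {
  //     NativeMethodFrame* nmf = frame->native_method_frame();
  //   }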
  Symbol* name() const {
    if(inline_method_p() && dispatch_data) {
      return nil<Symbol>();
    } else if(block_p()) {
      return nil<Symbol>();
    } else if(arguments) {
      return arguments->name();
    }

    return nil<Symbol>();
  }
  Symbol* original_name() const {
    return compiled_code->name();
  }

  bool inline_method_p() const {
    return flags & cInlineFrame;
  }

  bool jitted_p() const {
    return flags & cJITed;
  }

  bool block_p() const {
    return flags & cBlock;
  }

  LexicalScope* lexical_scope() const {
    return lexical_scope_;
  }
  bool is_block_p(STATE) const {
    return block_p();
  }
  Object* self() const {
    return scope->self();
  }
  bool multiple_scopes_p() const {
    return flags & cMultipleScopes;
  }
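  // top_scope_ is only consulted when the frame spans multiple scopes
  // (cMultipleScopes); otherwise top_scope() falls back to promoting the
  // current scope on demand.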
  VariableScope* top_scope(STATE) {
    if(multiple_scopes_p()) return top_scope_;
    return promote_scope(state);
  }
  bool is_inline_frame() const {
    return flags & cInlineFrame;
  }

  bool is_inline_block() const {
    return flags & cInlineBlock;
  }

  bool has_closed_scope_p() const {
    return flags & cClosedScope;
  }

  bool native_method_p() const {
    return flags & cNativeMethod;
  }
  NativeMethodFrame* native_method_frame() const {
    if(native_method_p()) return reinterpret_cast<NativeMethodFrame*>(dispatch_data);
    return nullptr;
  }
  void jit_fixup(STATE, CallFrame* creator);

  Module* module() const {
    return scope->module();
  }
  void set_ip(int new_ip) {
    // ...
  }

  void next_ip(int width) {
    // ...
  }
  void push_unwind(UnwindSite* unwind);
  UnwindSite* pop_unwind();
  /**
   * Initialize frame for the given stack size.
   */
  void prepare(int stack) {
    return_value = nullptr;

    for(int i = 0; i < stack; i++) {
      stk[i] = nullptr;   // clear each slot (assumption: slots are nulled here)
    }

    registers = stk + stack;
  }
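  // A frame's variables normally live in the stack-allocated StackVariables
  // pointed to by `scope`; promote_scope() returns the heap VariableScope if
  // one already exists (scope->on_heap()) and otherwise has
  // promote_scope_full() materialize it.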
  VariableScope* promote_scope_full(STATE);

  VariableScope* promote_scope(STATE) {
    if(VariableScope* vs = scope->on_heap()) return vs;
    return promote_scope_full(state);
  }
  VariableScope* method_scope(STATE);

  void print_backtrace(STATE, int count = 0, bool filter = false);
  void print_backtrace(STATE, std::ostream& stream, int count = 0, bool filter = false);

  Object* find_breakpoint(STATE);
};
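// Illustrative usage sketch (an assumption, not part of this header): a
// caller stack-allocates a frame with room for `stack_size` operand slots
// and prepares it before pushing it onto the call chain:
//
//   CallFrame* frame =
//     reinterpret_cast<CallFrame*>(ALLOCA_CALL_FRAME(stack_size));
//   frame->prepare(stack_size);
//
// ALLOCA_CALL_FRAME below only reserves memory; prepare() clears the slots
// and positions `registers`.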
#define ALLOCA_CALL_FRAME(stack_size) \
  static_cast<uintptr_t*>(alloca(sizeof(CallFrame) + (sizeof(Object*) * (stack_size))))

#endif