3 # line counts (this + clean.rb):
7 # (wc src/dispatcher.rb clean.rb)
13 attr_accessor :object, :self_type, :old_functions, :scope_hash, :time_vs_instructions_log, :ast_order, :source
16 attr_accessor :cache_id, :scope_hash, :node2type_cache, :scope_linkage
22 STACK_BYTEARRAY_PARAM_IDX = 0
23 RETURN_STACK_BYTEARRAY_PARAM_IDX = 1
24 ALL_LOCALS_BYTEARRAY_PARAM_IDX = 2
32 ID_CONSTANTS = [:INITIAL_PREV, :NULL_BLOCK, :FORCED_POP, :FINISHED] # TODO - what about outer_scope ?
35 def initialize ctx, string
36 @cache_id = Digest::MD5.digest string
40 @func_cache_hits, @func_cache_misses = 0, 0
41 @sexp = ParseTree.translate string
43 @crawler = CodeTree.new @sexp
45 @context.make_comment = proc {
48 @context.my_callback = proc {
50 my_callback value.to_s
52 @context.builder_function = proc {
53 |curr_id, prev_id, got_num_params, wib|
54 build_function curr_id, prev_id, got_num_params, wib, false
56 @context.data_inspector = proc {
57 |ptr1, ptr2, ptr3, ptr4|
58 my_data_inspect ptr1, ptr2, ptr3, ptr4
61 @node2type_cache = $debug_logged_cache ? LoggedHash.new("node2type_cache") : {}
62 @time_vs_instructions_log = []
63 @execution_started_at = Time.now
64 @scope_linkage = { OUTER_SCOPE => [] }
65 if $enable_cache && StateCache::load_cache(self)
66 puts "loaded from cache!"
71 method_trace = caller[1..-1].reject {
73 l =~ %r(test/unit|testing.rb) }.map { |l| l.gsub(/.*?:(\d+)(:in \`(.*)')?/, "\\3:\\1") }
74 method_trace[1..-1].join ", "
77 """ describe a given path id, for interactive debugging """
# Interactive-debugging helper: pretty-print the sexp addressed by an AST id,
# together with its parent sexp ("context", one path segment up: path[0..-2]).
# NOTE(review): this is a numbered listing with elided lines — the closing
# `end` (original lines 81-82) is not visible here.
78 def desc id # DEBUG HELPER
79 path = @crawler.id2path id
80 @crawler.find_path_sexp(path).inspect + " :: context[ " + crawler.find_path_sexp(path[0..-2]).inspect + " ]"
# Total number of instructions generated so far: sums `size` over every
# function accumulated in `old_functions`.
83 def number_of_generated_instructions
84 old_functions.inject(0) { |a,func| a + func.size }
# Total runtime instruction count: sums the per-function
# `instructions_executed` counters over `old_functions`.
87 def number_of_instructions_executed
88 old_functions.inject(0) { |a,func| a + func.instructions_executed }
# Return the first path (an array of indices) of minimal length among `paths`.
# NOTE(review): `path_lengths.min` is re-evaluated inside the detect block for
# every element, and `idx` is unused — `paths.min_by(&:length)` would be the
# simpler equivalent; left untouched because surrounding lines are elided.
91 def smallest_path paths
92 path_lengths = paths.map { |path| path.length }
93 return (paths.enum_for(:each_with_index).detect { |path, idx| path.length == path_lengths.min })[0]
96 AnonBlock = Struct.new :yielder_subpaths, :subpaths, :dyn_assigns
98 """ generate the execution paths, splitting blocks out into AnonBlock's"""
101 @ast_order = @crawler.id2path_hash.keys.sort.collect {
103 path = @crawler.id2path_hash[id]
104 sexp_subtree = @crawler.find_path_sexp(path)
105 type = sexptype(sexp_subtree)
106 next unless (!([nil, :array].include? type) || (sexp_subtree == :push_block)) \
110 idbg(:gen_orders) { "ALL : #{@crawler.paths2orderdesc @ast_order}" }
111 # handle anon block definitions
113 @path2anon_block = {}
115 anon_block_path = @ast_order.detect { |path| sexp = @crawler.find_path_sexp(path);
116 (sexptype(sexp) == :iter) \
117 and !done_paths.include? path }
118 break if anon_block_path.nil?
119 anon_block = AnonBlock.new
120 yielder_rootpath = anon_block_path + [0]
121 anon_block.yielder_subpaths = CodeTree.find_subpaths(@ast_order, yielder_rootpath) + [yielder_rootpath]
122 anon_block.yielder_subpaths.each { |path| @ast_order.delete path }
123 idbg(:gen_orders) { "YIELDER CALL : #{@crawler.paths2orderdesc anon_block.yielder_subpaths}" }
124 anon_block.subpaths = CodeTree.find_subpaths(@ast_order, anon_block_path)
125 anon_block.subpaths.each { |path| @ast_order.delete path }
126 # remove the elements performing iterator param bindings
127 anon_block.dyn_assigns = []
129 sexp_element = @crawler.find_path_sexp(anon_block.subpaths.first)
130 break unless ([:dasgn_curr_hacked, :dasgn_curr, :lasgn].include? sexptype(sexp_element))
131 has_value = (sexp_element.length < 2 or sexp_element[2].nil?)
132 break unless has_value
133 path = anon_block.subpaths.slice!(0)
134 anon_block.dyn_assigns << path
136 idbg(:gen_orders) { "ANON BLOCK : #{@crawler.paths2orderdesc anon_block.subpaths}" }
137 done_paths << anon_block_path
138 (anon_block.yielder_subpaths + anon_block.subpaths + [anon_block_path]).each {
140 @path2anon_block[path] = anon_block
143 idbg(:gen_orders) { "MAIN : #{@crawler.paths2orderdesc @ast_order}" }
145 @crawler.id2path_hash.keys.sort.collect {
147 "#{key} => #{@crawler.id2path_hash[key].inspect}"
152 $builtins = [:alloc_self, :set_self, :dget, :dset, :set, :get, :typeof, :putch, :pi, :+, :-, :*, :/, :%, :==, :<, :>, :"!="]
# True when the sexp node is any flavour of method call
# (:call receiver call, :vcall bare name, :fcall function-style call).
155 def is_function_call sexp
156 [:call, :vcall, :fcall].include? sexptype(sexp)
159 """ is the node a call to a instrinsic function? """
161 (is_function_call(sexp) and $builtins.include? get_method_name(sexp))
164 """ decide if we wish to follow this trace or cut it short at this point with various heuristics """
165 def will_predict? curr_id, next_id, skip_data_inspect, func
166 idbg(:node_predict) { green("will_predict?") + " #{curr_id.inspect}, #{next_id.inspect}" }
167 curr_sexp = @crawler.find_path_sexp @crawler.id2path(curr_id)
168 next_sexp = @crawler.find_path_sexp @crawler.id2path(next_id)
169 idbg(:node_predict) { "current :: #{curr_sexp.inspect}, next :: #{next_sexp.inspect}" }
170 return false if @calling_function or !$opt_use_predict
171 parent_sexp = @crawler.find_path_sexp((@crawler.id2path curr_id).slice(0..-2))
172 if (sexptype(parent_sexp) == :while) and parent_sexp[1] === curr_sexp
175 # special case int.call()'s
176 if (sexptype(curr_sexp) == :const) and (sexptype(next_sexp) == :call)
177 @literal_receiver = :Const
178 idbg(:node_predict) { "next is a new call on a const, lets predict! - #{next_id.inspect}" }
181 if (sexptype(curr_sexp) == :call) and @literal_receiver == :Const
182 @literal_receiver = nil
183 idbg(:node_predict) { "preforming a new call on a const, lets predict! - #{next_id.inspect}" }
186 if is_builtin? next_sexp
187 @called_builtin = true
188 idbg(:node_predict) {
189 "next is a #{sexptype(next_sexp)}::#{get_method_name next_sexp} call, lets predict! - #{next_id.inspect}"
193 if !@called_builtin.nil? and is_builtin? curr_sexp
194 @called_builtin = nil
195 idbg(:node_predict) {
196 "performed a #{sexptype(curr_sexp)}::#{get_method_name curr_sexp}!, lets predict! - #{next_id.inspect}"
200 if is_function_call(next_sexp) and @node2type_cache.has_key? next_id
201 idbg(:node_predict) {
202 "predicting the next one - #{next_id} - #{next_sexp.inspect} - #{@node2type_cache[next_id].inspect}"
206 # FIXME - removed an unused special path for call and integer from here!
207 @literal_receiver = nil
208 @called_builtin = nil
209 return false unless skip_data_inspect
210 if ProfFuncWithMd::md_is_not_real? func
211 idbg(:node_predict) { "node isn't real!, lets predict! - #{next_id.inspect}" }
214 if [:while, :true, :false, :str, :lit, :lvar, :lasgn, :const, :dvar, :push_block].include? sexptype(curr_sexp)
215 idbg(:node_predict) { "got #{curr_sexp.inspect}, lets predict! - #{next_id.inspect}" }
218 idbg(:node_predict) { "like, screw this you guys, i'm not predicting" }
222 """ generate a data inspect call for the given memory context """
# Emit a call to the data-inspect runtime hook, passing the three memory
# regions (stack, return stack, all-locals) plus a constant 0 int argument —
# presumably a flags/mode slot; TODO confirm against insn_call_data_inspect.
223 def gen_data_inspect func, mem_ctx
224 func.insn_call_data_inspect mem_ctx.stack_mem, mem_ctx.return_stack_mem,
225 NN::mk_constant(func, :int, 0), mem_ctx.all_locals_mem
# Build a call prototype: `num_params` user parameters plus the three
# implicit void-pointer parameters (stack mem, return stack, all-locals mem),
# all typed :void_ptr.
228 def construct_prototype_of_length num_params
229 # stack mem, return stack, all locals mem
230 [:void_ptr] * (num_params + 3)
233 """ data structure used to abstract various information about a bounced jump in the flow """
# Fields: bouncer — target function/pointer value to jump through;
# scope_val — optional scope id to switch to before dispatching;
# already_flushed — set when the mem context was flushed earlier (skip re-flush);
# params — optional extra call parameters (flattened onto the arg list).
234 Dispatch = Struct.new :bouncer, :scope_val, :already_flushed, :params
236 """ generate a dispatch based on a Dispatch descriptor """
# Perform an indirect dispatch described by a Dispatch struct:
# optionally switch locals to the requested scope, flush the memory context
# (unless already flushed), then emit an indirect vtable call carrying the
# three implicit memory pointers plus any extra params, and return its result.
# NOTE(review): lines 247-249 are elided in this listing — callers
# (e.g. generate_static_jump) use the returned position `pos`, so the elided
# tail presumably returns it; TODO confirm.
237 def do_dispatch func, mem_ctx, dispatch
238 mem_ctx.locals_dict.switch_to_scope_with_id func, dispatch.scope_val, mem_ctx unless dispatch.scope_val.nil?
239 mem_ctx.flush if mem_ctx.can_flush? and !dispatch.already_flushed
240 return_value = Value.new
241 params = dispatch.params || []
# Prototype length is params.length*2 — each param seemingly occupies two
# slots (value + type?); TODO confirm against the value/type pair convention.
242 pos = func.insn_call_indirect_vtable_blah \
243 return_value, dispatch.bouncer,
244 ATOM_RET_VAL, construct_prototype_of_length(params.length*2),
245 [mem_ctx.stack_mem, mem_ctx.return_stack_mem, mem_ctx.all_locals_mem, *(params.flatten)]
246 func.insn_return return_value
250 """ generate a static jump to the given position in the flow """
251 def generate_static_jump next_id, func, mem_ctx, is_bouncer
252 return_value = Value.new
253 currently_generating = (next_id == @func_ids.first)
254 func_ptr = currently_generating ? func : @func_cache[next_id].func
255 puts "CACHED FUNC_PTR - #{func_ptr.inspect}" if check_dbg(:rt_back_insertion)
256 func_ptr_value = NN::mk_constant(func, :ptr, func_ptr)
257 DebugLogger::runtime_print_string func, :rt_back_insertion, "jumping to back inserted function pointer! #{next_id}\n"
258 if currently_generating
259 DebugLogger::runtime_print_string func, :rt_back_insertion, "WOOOOOHOOO!! WE HIT A BRANCH OPT!\n"
260 mem_ctx.flush if mem_ctx.can_flush?
261 func.insn_branch ProfFuncWithMd::md_atom_main_label(func)
263 DebugLogger::runtime_print_string func, :rt_back_insertion, "performing a static dispatch\n"
264 d = Dispatch.new func_ptr_value
265 static_dispatch_pos = do_dispatch func, mem_ctx, d
267 ProfFuncWithMd::md_set_with_initial(func, :back_insertions, 0) { |c| c + 1 } unless is_bouncer
268 ProfFuncWithMd::md_set_with_initial(func, :statically_dispatches_to, []) {
269 |c| c << [next_id, static_dispatch_pos]
271 # fail "this really shouldn't be null!!!!" if static_dispatch_pos.nil?
274 """ generate a data inspect instruction if needed (has dynamic scope lookups or data inspect has been forced) """
275 def handle_runtime_compiletime_data_transfer d, func, is_bouncer, skip_data_inspect,
276 next_point_val, mem_ctx, next_id, curr_id, num_params
277 # FIXME - this next line has little to do with this method???
278 idbg(:node_predict) { "writing a dispatch to #{next_id}" } if !next_id.nil?
279 idbg(:dispatch_to_id_value) { "skip_data_inspect == #{skip_data_inspect}" }
280 if $opt_scope_templates && !is_bouncer && (ProfFuncWithMd::md_not_all_static_lookups? func)
281 DebugLogger::runtime_print_string func, :rt_data_inspect_force, "forcing a data inspect\n"
282 ProfFuncWithMd::md_force_data_inspect func
285 if !skip_data_inspect or $force_data_inspect or (ProfFuncWithMd::md_forced_data_inspect func)
286 DebugLogger::runtime_print_string func, :rt_runtime_data_inspect_trace, "data inspect!!!!\n"
287 # DebugLogger::runtime_print_string func, "data inspect!!!! - begun by #{caller.inspect}\n"
288 mem_ctx.flush if mem_ctx.can_flush?
290 gen_data_inspect func, mem_ctx
291 ProfFuncWithMd::md_set_with_initial(func, :call_data_inspect, 0) { |c| c + 1 }
293 d.already_flushed = flushed
296 """ generate a non static generation dispatch """
297 def generate_dispatch func, is_bouncer, skip_data_inspect, next_point_val, mem_ctx, next_id, curr_id, num_params, perform_logging
299 d = Dispatch.new func_ptr
300 if perform_logging # FIXME - what is this for?
301 handle_runtime_compiletime_data_transfer d, func, is_bouncer, skip_data_inspect,
302 next_point_val, mem_ctx, next_id, curr_id, num_params
304 mem_ctx.flush if mem_ctx.can_flush?
305 gen_data_inspect func, mem_ctx
307 fail "well, thats not good!" if next_point_val.nil?
308 DebugLogger::runtime_print_string func, :rt_back_insertion, "performing a build function...\n"
309 func.insn_call_build_function func_ptr, next_point_val,
310 NN::mk_constant(func, :int, curr_id),
311 NN::mk_constant(func, :int, num_params),
312 NN::mk_constant(func, :ptr, func)
313 DebugLogger::runtime_print_string func, :rt_back_insertion, "done build function\n"
314 return_value = Value.new
316 if !next_id.nil? # integer rather than dynamic
317 ProfFuncWithMd::md_set_with_initial(func, :slow_dispatch_to, []) { |c| c << next_id }
319 ProfFuncWithMd::md_set_with_initial(func, :dynamic_dispatches, 0) { |c| c+1 }
322 do_dispatch func, mem_ctx, d
325 """ jump to the given (possibly runtime) value based astid, performing an appropriate jump type """
326 def dispatch_to_id_value func, mem_ctx, next_point_val, curr_id, num_params,
327 skip_data_inspect, no_predict = false, is_bouncer = false
329 if next_point_val.is_a? Integer
330 next_id = next_point_val
331 next_point_val = NN::mk_constant(func, :int, next_point_val)
333 if !next_id.nil? && !no_predict && will_predict?(curr_id, next_id, skip_data_inspect, func)
334 @predicting_next_id << next_id
337 puts "LALA we gonna check if #{next_id} is like, already done yay! - #{@func_cache.keys.inspect}" \
338 if check_dbg(:rt_back_insertion)
339 link_statically = false
340 if $opt_static_dispatches
341 if !is_bouncer and ((@func_cache.has_key? next_id) \
342 or (next_id == @func_ids.first and (ProfFuncWithMd::md_no_assumptions? func)))
343 link_statically = true
345 if $new_optimized_returns and not (ProfFuncWithMd::md_optimal_return? func)
346 idbg(:scope_templates) { "NO STATIC LINK AS ITS NON OPTIMAL WITH RESPECT TO RETURNS" }
347 link_statically = false
349 if $opt_scope_templates and !is_bouncer and (ProfFuncWithMd::md_not_all_static_lookups? func)
350 idbg(:scope_templates) {
351 "NOT GONNA STATIC LINK TO #{next_id} AS ITS GOT A #{(ProfFuncWithMd::md_lookups func).inspect} LOOKUP!"
353 link_statically = false
357 skip_reoptimize = Label.new
358 # we use the first of the reserved spots in the hit range
359 func.insn_hit NN::mk_constant(func, :int, 0)
361 func.insn_hit_test cond, NN::mk_constant(func, :int, 0), NN::mk_constant(func, :int, 1000)
362 func.insn_branch_if_not cond, skip_reoptimize
363 DebugLogger::runtime_print_string func, :rt_call_param_opt, red("REOPTIMISATION PATH") + "!!!!:\n\n\n"
364 generate_dispatch func, is_bouncer, skip_data_inspect, next_point_val, mem_ctx, next_id, curr_id, num_params, false
365 func.insn_label skip_reoptimize
366 DebugLogger::runtime_print_string func, :rt_call_param_opt, green("OPTIMAL") + "!!!\n\n\n"
367 generate_static_jump next_id, func, mem_ctx, is_bouncer
369 generate_dispatch func, is_bouncer, skip_data_inspect, next_point_val, mem_ctx, next_id, curr_id, num_params, true
373 """ does the given instruction that the ast path points to require a data inspect? """
# Decide whether a data inspect can be skipped for the node at `path`:
# skip when the node's type is already cached, otherwise skip unless the node
# is one of the sexp kinds that need runtime data (calls, ivar access, const).
# NOTE(review): lines 377-378 are elided — presumably `return true` and `end`
# for the cache-hit branch; TODO confirm.
374 def should_skip_data_inspect? path
375 if @node2type_cache.has_key? @crawler.path2id(path)
376 idbg(:data_inspect) { "skipping a data inspect! ooo! - #{@crawler.find_path_sexp(path).class}" }
379 sexp = @crawler.find_path_sexp(path)
380 !([:call, :fcall, :vcall, :iasgn, :ivar, :const].include? sexptype(sexp))
383 """ fill the data structure which represents variable scope with prefilled items if the id's are known,
384 possibly optimizing by memcpy'ing from a set of premade template scopes """
385 def push_scope func, mem_ctx, scope, locals_mem, scope_ast_id
387 idbg(:create_new_scope) { "using a premade template!" }
388 cached_idx, cache_len = cache_scope @scopescache_addr, scope
389 cachescopes_ptr = get_cachescopes_pointer func, mem_ctx
390 cache_addr = Value.new
391 func.insn_load_elem cache_addr, cachescopes_ptr, NN::mk_constant(func, :int, cached_idx), :void_ptr
392 func.insn_call_copy_bytearray locals_mem, cache_addr, NN::mk_constant(func, :int, cache_len)
395 state_cache = DictAppendCache.new
396 DebugLogger::runtime_print_string func, :rt_prefilling,
397 "scope prefilling for scope ", NN::mk_constant(func, :int, scope_ast_id), "\n"
400 val = RuntimePrintCallback.new(PostProcs::ID, NN::mk_constant(func, :int, sym.to_i))
401 DebugLogger::runtime_print_string func, :rt_prefilling, "adding item ", val, "\n"
402 popped_int, type = NN::mk_constant(func, :int, 888), NN::mk_type(func, :undef)
403 append_temp = DictHelpers::append_to_dict func, locals_mem, NN::mk_constant(func, :int, sym), state_cache
405 func.insn_store_elem locals_mem, append_temp, popped_int
408 func.insn_add temp, append_temp, NN::mk_constant(func, :int, 1)
409 func.insn_store_elem locals_mem, temp, type
412 # FIXME this is actually part of the append_to_dict logic
413 func.insn_store_elem locals_mem, NN::mk_constant(func, :int, 0), state_cache.count_local
415 DebugLogger::runtime_print_string func, :rt_prefilling, "scope prefilling -> finished\n"
418 # we store the pointer in the type field
419 """ create a new data structure representing variable scope """
420 def create_new_scope func, mem_ctx, scope_ast_id
421 idbg(:create_new_scope) { "creating new scope" }
422 locals_mem = NN::mk_bytearray(func, 3 * 128 + 1) # struct: length, (id, type, value)*
423 func.insn_store_elem locals_mem, NN::mk_constant(func, :int, 0), NN::mk_constant(func, :int, 0) # set length to 0
424 # spare storage allocation: id (must be 0), spare, spare
425 func.insn_store_elem locals_mem, NN::mk_constant(func, :int, 1), NN::mk_constant(func, :int, 0)
426 all_locals = RbStack.new func, mem_ctx.all_locals_mem, :ret
427 position_on_stack = all_locals.push_raw locals_mem, NN::mk_constant(func, :int, scope_ast_id)
428 if $opt_scope_templates && @scope_hash && (scope = @scope_hash[scope_ast_id]) && !scope.empty?
429 push_scope func, mem_ctx, scope, locals_mem, scope_ast_id
430 ProfFuncWithMd::md_set_create_scope_template func, scope
432 DebugLogger::runtime_print_string func, :rt_data_inspect_force, "forcing a data inspect, cus of lack of prefilling\n"
433 ProfFuncWithMd::md_force_data_inspect func
435 DebugLogger::runtime_print_string func, :create_new_scope, "NEW SCOPE AT POSITION : ", position_on_stack, "\n"
436 return locals_mem, position_on_stack
# Field descriptor for the indirections table pointer stored in stack_mem
# (slot 4, void_ptr), and its accessor.
439 FIELD__INDIRECTIONS_PTR = FieldDesc.new 4, :void_ptr # stack_mem
441 def get_indirections_pointer func, mem_ctx
442 return FIELD__INDIRECTIONS_PTR.load(func, mem_ctx.stack_mem)
# Field descriptor for the cached-scopes table pointer stored in stack_mem
# (slot 6, void_ptr), and its accessor — used by push_scope's template copy.
445 FIELD__CACHESCOPES_PTR = FieldDesc.new 6, :void_ptr # stack_mem
446 def get_cachescopes_pointer func, mem_ctx
447 return FIELD__CACHESCOPES_PTR.load(func, mem_ctx.stack_mem)
450 def mk_bouncer curr_id, prev_id, num_params, func
452 ProfFuncWithMd::md_init_or_increase_bouncer_count func
453 bouncer = Function.new @context
454 ProfFuncWithMd::md_metadata bouncer
455 ProfFuncWithMd::md_set_generated_by bouncer, func
456 @old_functions << bouncer
458 mem_ctx = MemContext.new Value.new, Value.new, Value.new, nil
459 build_main_func_init bouncer, mem_ctx, curr_id
460 mem_ctx.locals_dict = DictLookup.new(self, func, @scope_linkage, mem_ctx)
461 puts "BUILDING A BOUNCER! - to #{curr_id} <- from #{prev_id}" if check_dbg(:rt_bouncer_runtime)
462 if curr_id == NULL_BLOCK
463 DebugLogger::runtime_print_string bouncer, "NULL BLOCK WAS CALLED!!\n"
464 bouncer.insn_return NN::mk_constant(bouncer, :int, -666)
465 elsif curr_id == FINISHED
466 DebugLogger::runtime_print_string bouncer, :rt_bouncer_runtime, "AN EXIT BOUNCER!!\n"
467 bouncer.insn_return NN::mk_constant(bouncer, :int, -666)
468 elsif curr_id == FORCED_POP
469 stack = RbStack.new bouncer, mem_ctx.stack_mem, :alt
470 DebugLogger::runtime_print_string bouncer, :rt_bouncer_runtime,
471 "warning: popping bouncer!! - #{curr_id} <- #{prev_id} -> #{ProfFuncWithMd::md_inspect func.metadata}\n"
472 pop_again = Label.new
473 bouncer.insn_label pop_again
475 # gen_data_inspect bouncer, mem_ctx
476 bouncer_ptr, scope_val = stack.pop
477 DebugLogger::runtime_print_string bouncer, :rt_bouncer_runtime, "popping! - ", bouncer_ptr, "\n"
479 bouncer.insn_eq cond, NN::mk_constant(bouncer, :int, FORCED_POP), bouncer_ptr
480 bouncer.insn_branch_if cond, pop_again
482 d = Dispatch.new bouncer_ptr
483 d.scope_val = scope_val
484 do_dispatch bouncer, mem_ctx, d
486 DebugLogger::runtime_print_string bouncer, :rt_bouncer_runtime,
487 "a bouncer going to #{curr_id} via dispatch_to_id_value - could be slow!\n"
488 dispatch_to_id_value bouncer, mem_ctx, curr_id, prev_id, num_params, true, true, true
492 ProfFuncWithMd::md_set_next_id bouncer, curr_id
493 ProfFuncWithMd::md_set_is_bouncer bouncer
494 @bouncers << bouncer # garbage collection fix
498 # can setup and completion be made into two classes by making this an Initer and selecting the appropriate one?
# Unpack a packed block: load its statement id (slot 0) and scope id (slot 1)
# from `proc_addr`, optionally emitting a runtime trace labelled by
# `style_string`, and return both values.
# NOTE(review): line 507 is elided — presumably the `end` of the debug `if`.
500 def jump_to_proc func, proc_addr, style_string
501 stored_id = Value.new
502 func.insn_load_elem stored_id, proc_addr, NN::mk_constant(func, :int, 0), :int
503 stored_scope_id = Value.new
504 func.insn_load_elem stored_scope_id, proc_addr, NN::mk_constant(func, :int, 1), :int
505 if check_dbg(:rt_scope)
506 DebugLogger::runtime_print_string func, "#{style_string} TO :", stored_id, ", with scope id :", stored_scope_id, "\n"
508 return stored_id, stored_scope_id
# Per-call-site cache entry: `type` is the cached receiver/class type
# (or :Super), `should_push_self` records whether __self__ must be pushed
# before the dispatch (see do_method_dispatch).
511 CachedCallData = Struct.new(:type, :should_push_self)
# Pack a block reference into a fresh 2-slot bytearray:
# slot 0 = statement value, slot 1 = scope value.
# NOTE(review): the return of `block_addr` (original lines 517-518) is elided
# from this listing; create_null_packed_block relies on it — TODO confirm.
513 def pack_block func, statement_val, scope_val
514 block_addr = NN::mk_bytearray(func, 2)
515 func.insn_store_elem block_addr, NN::mk_constant(func, :int, 0), statement_val
516 func.insn_store_elem block_addr, NN::mk_constant(func, :int, 1), scope_val
# Build a packed block with sentinel values (-555/-555) representing
# "no block" — pushed where call sites expect a block slot.
520 def create_null_packed_block func
521 statement_val, scope_val = NN::mk_constant(func, :int, -555), NN::mk_constant(func, :int, -555)
522 return pack_block(func, statement_val, scope_val)
# Register a bouncer in the indirections table: read the current stack
# position (slot 0 of the indirections region), then push the bouncer pointer
# paired with the placeholder tag 777 onto the indirections stack.
# NOTE(review): the elided tail (lines 532-533) presumably returns
# `position_on_stack` for use as a block indirection index — TODO confirm.
525 def new_indirection_block func, mem_ctx, bouncer
526 indirections = get_indirections_pointer func, mem_ctx
527 position_on_stack = Value.new
528 func.insn_load_elem position_on_stack, indirections, NN::mk_constant(func, :int, 0), :int
529 indirections_stack = RbStack.new func, indirections, :ret
530 # TODO - use this free 777 for storing a compile time ref id!
531 indirections_stack.push NN::mk_constant(func, :ptr, bouncer), NN::mk_constant(func, :int, 777)
# Fetch the dispatcher pointer for a block indirection index: the table slot
# is idx+1 because slot 0 of the indirections region holds the stack position
# (see new_indirection_block). Elided tail presumably returns `dispatcher`.
535 def load_indirect_block func, block_indirection_idx, mem_ctx
536 idx_into_indirections = Value.new
537 func.insn_add idx_into_indirections, block_indirection_idx, NN::mk_constant(func, :int, 1)
538 indirections = get_indirections_pointer func, mem_ctx
539 dispatcher = Value.new
540 func.insn_load_elem dispatcher, indirections, idx_into_indirections, :void_ptr
# Find the AST path of the method definition named `method` within the given
# func-defs hash (keys are sexp paths; element [1] of a defn sexp is its name).
544 def find_matching_method_path selected_func_defs, method
545 selected_func_defs.keys.detect { |cpath| @crawler.find_path_sexp(cpath)[1] == method }
548 def do_method_dispatch func, mem_ctx, curr_id, has_receiver, method
549 call_function, selected_func_defs = nil, nil
550 if @node2type_cache.has_key? curr_id
551 hit = @node2type_cache[curr_id]
552 if hit.type == :Super
553 selected_func_defs = @func_defs
555 selected_func_defs = @class_func_defs[hit.type]
557 if hit.should_push_self
558 idbg(:dbg_handle_call_element) { "HIT - PUSHING SELF FROM __SELF__ WITH TYPE FROM cache! (#{hit.type})" }
559 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
560 mem_ctx.return_rbstack.push self_mem_ptr_ptr, NN::mk_type(func, hit.type.to_sym)
562 method_path = find_matching_method_path(selected_func_defs, method)
563 call_function = selected_func_defs[method_path]
564 # remove the type from the cache after the node has been specialized
565 # and indicate the atom as specialized (see ANN1 for more information)
566 # @node2type_cache.delete curr_id TODO FIXME
567 ProfFuncWithMd::md_set_specifalized func, [:on_type, hit.type]
569 idbg(:self_cache) { "RECEIVER CACHE MISS!!! - #{method}" }
570 if !@object.nil? and has_receiver
571 @node2type_cache[curr_id] = CachedCallData.new(@object[0], false)
572 selected_func_defs = @class_func_defs[@object[0]]
573 elsif @self_type.nil?
575 @node2type_cache[curr_id] = CachedCallData.new(:Super, false)
576 selected_func_defs = nil
578 if @object[0] == :const
579 @node2type_cache[curr_id] = CachedCallData.new(Typing::ID_MAP.index(@object[1]), false)
580 selected_func_defs = @class_func_defs[Typing::ID_MAP.index(@object[1])]
582 @node2type_cache[curr_id] = CachedCallData.new(@object[0], false)
583 selected_func_defs = @class_func_defs[@object[0]]
587 @node2type_cache[curr_id] = CachedCallData.new(@self_type, false)
588 selected_func_defs = @class_func_defs[@self_type]
590 idbg(:dbg_handle_call_element) { "has_receiver == #{has_receiver}" }
591 is_kernel = (selected_func_defs.nil?)
592 selected_func_defs = @func_defs if selected_func_defs.nil?
593 path = find_matching_method_path(selected_func_defs, method)
596 selected_func_defs = @func_defs
597 path = find_matching_method_path(selected_func_defs, method)
600 @node2type_cache[curr_id].type = :Super
602 if !@self_type.nil? and !has_receiver and !is_kernel
603 @node2type_cache[curr_id].should_push_self = true
604 idbg(:dbg_handle_call_element) { "PUSHING SELF FROM __SELF__ WITH TYPE FROM @self_type" }
605 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
606 mem_ctx.return_rbstack.push self_mem_ptr_ptr, NN::mk_type(func, @self_type.to_sym)
607 ProfFuncWithMd::md_add_assumption func, [:assumption, :__self__, [:type, @self_type.to_sym]]
608 elsif !@object.nil? and !has_receiver and !is_kernel
610 @node2type_cache[curr_id].should_push_self = true
611 idbg(:dbg_handle_call_element) { "PUSHING SELF FROM __SELF__ WITH TYPE FROM @object[0]" }
612 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
613 mem_ctx.return_rbstack.push self_mem_ptr_ptr, NN::mk_type(func, @object[0].to_sym)
614 ProfFuncWithMd::md_add_assumption func, [:assumption, :object, [:type, @object, :todo]]
617 ProfFuncWithMd::md_add_assumption func, [:assumption, :object, [:type, @object, :unknown_case]]
619 idbg(:dbg_handle_call_element) { killi <<-DBG
620 OBJECT == #{@object.inspect}
621 SELF == #{@self_type.inspect}
622 CALLING A METHOD ON A OBJECT OF ABOVE TYPE
626 call_function = selected_func_defs[path]
628 raise "no such method symbol: '#{method.inspect}'"
# Extract the method-name symbol from a call-style sexp. The sexp slot holding
# the name differs by node kind (index 1 vs index 2).
# NOTE(review): the `when` clauses (original lines 636, 638, 640, 642) are
# elided from this listing — from surrounding usage the branches likely
# correspond to :vcall/:fcall (slot 1) vs :call (slot 2), but this cannot be
# confirmed from the visible lines; TODO verify against the full file.
634 def get_method_name sexp_element
635 case sexptype(sexp_element)
637 method_name = sexp_element[1]
639 method_name = sexp_element[2]
641 method_name = sexp_element[1]
643 raise "foo - #{sexp_element}"
648 def handle_call_element sexp_element, func, mem_ctx, anon_block, curr_id, next_ast_path
651 # handle prototype specifics
652 case sexptype(sexp_element)
654 num_params = (!sexp_element[2].nil?) ? (sexp_element[2][1..-1].length) : 0
656 num_params = (!sexp_element[3].nil?) ? (sexp_element[3][1..-1].length) : 0
659 num_params = 0 # is this always correct???
661 raise "foo - #{sexp_element}"
663 method = get_method_name(sexp_element)
666 mem_ctx.return_rbstack.pop
667 dummy, type = mem_ctx.return_rbstack.pop
668 mem_ctx.return_rbstack.push type, NN::mk_type(func, :type)
670 idbg(:dbg_handle_call_element) { "popping a char, printing it!!!!" }
671 mem_ctx.return_rbstack.pop
672 popped_int, type = mem_ctx.return_rbstack.pop
674 DebugLogger::runtime_print_string func, "PrintChar: ", popped_int, " -> '",
675 RuntimePrintCallback.new(PostProcs::CHAR, popped_int), "'\n"
677 DebugLogger::runtime_print_string func, RuntimePrintCallback.new(PostProcs::CHAR, popped_int)
679 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
681 idbg(:dbg_handle_call_element) { "popping a value, printing it!!!!" }
682 mem_ctx.return_rbstack.pop
683 popped_int, type = mem_ctx.return_rbstack.pop
685 DebugLogger::runtime_print_string func, "PrintInt: '", popped_int, "'\n"
687 DebugLogger::runtime_print_string func, popped_int, "\n"
689 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
691 DebugLogger::runtime_print_string func, "BREAKPOINT CALLBACK\n"
692 when :+, :-, :*, :/, :%
693 # TODO - move into stdlib
694 idbg(:dbg_handle_call_element) { "like erm, popping and doing a #{method}" }
695 popped_int_1, type1 = mem_ctx.return_rbstack.pop
696 mem_ctx.return_rbstack.pop # block
697 popped_int_2, type2 = mem_ctx.return_rbstack.pop
698 new_value = Value.new
699 cmps_sym2method = { :+ => :insn_add, :- => :insn_sub, :* => :insn_mul, :/ => :insn_div, :% => :insn_rem }
700 func.send cmps_sym2method[method], new_value, popped_int_1, popped_int_2
701 mem_ctx.return_rbstack.push new_value, type1 # fixme
702 when :==, :<, :>, :"!="
703 # TODO - move into stdlib
704 idbg(:dbg_handle_call_element) { "like erm, popping and doing a #{method}" }
705 popped_int_1, type1 = mem_ctx.return_rbstack.pop
706 mem_ctx.return_rbstack.pop # block
707 popped_int_2, type2 = mem_ctx.return_rbstack.pop
708 math_sym2method = { :== => :insn_eq, :"!=" => :insn_ne, :< => :insn_lt, :> => :insn_gt }
709 new_value = Value.new
710 func.send math_sym2method[method], new_value, popped_int_1, popped_int_2
711 mem_ctx.return_rbstack.push new_value, NN::mk_type(func, :bool)
714 proc_addr, dummy = mem_ctx.return_rbstack.pop
715 mem_ctx.return_rbstack.pop
716 mem_ctx.return_rbstack.push create_null_packed_block(func), NN::mk_type(func, :block)
717 block_indirection_idx, block_scope_id = jump_to_proc func, proc_addr, "CALLING"
718 dispatcher = load_indirect_block func, block_indirection_idx, mem_ctx
719 push_return_point_bouncer func, mem_ctx, curr_id, next_ast_path
720 call_bouncer func, mem_ctx, dispatcher, block_scope_id
722 idbg(:dbg_handle_call_element) { "allocating some memory for self" }
723 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
724 mem_ctx.return_rbstack.pop
725 self_mem = NN::mk_bytearray(func, 4096) # a large playground
726 func.insn_store_elem self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), self_mem
727 # for dict's we clear the size and the first id
728 func.insn_store_elem self_mem, NN::mk_constant(func, :int, 0), NN::mk_constant(func, :int, 0)
729 func.insn_store_elem self_mem, NN::mk_constant(func, :int, 1), NN::mk_constant(func, :int, -5)
730 func.insn_store_elem self_mem, NN::mk_constant(func, :int, 2), NN::mk_constant(func, :int, -5)
731 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
733 idbg(:dbg_handle_call_element) { "popping and setting self" }
734 mem_ctx.return_rbstack.pop
735 new_self_mem, dummy = mem_ctx.return_rbstack.pop
736 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
737 func.insn_store_elem self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), new_self_mem
738 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
740 # items such as this and the following several have a lot of duplication,
741 # some of this can be solved in the cheap way by using an asm, however the
742 # real fix is to place the duplicate functionality in a helper
743 idbg(:dbg_handle_call_element) { "setting the given int in self_mem" }
744 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
745 mem_ctx.return_rbstack.pop
746 self_mem, idx_mult, idx_mult_add = Value.new, Value.new, Value.new
747 value, type = mem_ctx.return_rbstack.pop
748 idx, dummy = mem_ctx.return_rbstack.pop
749 # calculate actual index
750 func.insn_load_elem self_mem, self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), :int
751 func.insn_mul idx_mult, idx, NN::mk_constant(func, :int, 2)
752 func.insn_store_elem self_mem, idx_mult, value
753 func.insn_add idx_mult_add, idx_mult, NN::mk_constant(func, :int, 1)
754 func.insn_store_elem self_mem, idx_mult_add, type
755 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
757 idbg(:dbg_handle_call_element) { "setting the given int in self_mem (dict)" }
758 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
759 mem_ctx.return_rbstack.pop
760 self_mem, idx_plus_one = Value.new, Value.new
761 value, type = mem_ctx.return_rbstack.pop
762 id, dummy = mem_ctx.return_rbstack.pop
763 # calculate actual index
765 func.insn_load_elem self_mem, self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), :int
766 current_idx = DictHelpers::lookup_id_in_dict(self, func, id, self_mem, @scope_linkage)
767 func.insn_store_elem self_mem, current_idx, value
768 func.insn_add idx_plus_one, current_idx, NN::mk_constant(func, :int, 1)
769 func.insn_store_elem self_mem, idx_plus_one, type
770 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
772 idbg(:dbg_handle_call_element) { "pushing the first int in self_mem" }
773 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
774 mem_ctx.return_rbstack.pop
775 self_mem, idx_mult, idx_mult_add = Value.new, Value.new, Value.new
776 idx, dummy = mem_ctx.return_rbstack.pop
777 value, type = Value.new, Value.new
778 # calculate actual index
779 func.insn_load_elem self_mem, self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), :int
780 func.insn_mul idx_mult, idx, NN::mk_constant(func, :int, 2)
781 func.insn_load_elem value, self_mem, idx_mult, :int
782 func.insn_add idx_mult_add, idx_mult, NN::mk_constant(func, :int, 1)
783 func.insn_load_elem type, self_mem, idx_mult_add, :int
784 mem_ctx.return_rbstack.push value, type
786 type_sym = RuntimePrintCallback.new(PostProcs::TYPE, type)
787 DebugLogger::runtime_print_string func, :rt_get, "self == ", self_mem, " value is (", value, ":", type_sym, ")"
789 idbg(:dbg_handle_call_element) { "setting the given int in self_mem (dict)" }
790 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
791 mem_ctx.return_rbstack.pop
792 self_mem, idx_plus_one = Value.new, Value.new
793 id, dummy = mem_ctx.return_rbstack.pop
794 value, type = Value.new, Value.new
795 # calculate actual index
797 func.insn_load_elem self_mem, self_mem_ptr_ptr, NN::mk_constant(func, :int, 0), :int
798 current_idx = DictHelpers::lookup_id_in_dict(self, func, id, self_mem, @scope_linkage)
799 func.insn_load_elem value, self_mem, current_idx, :int
800 func.insn_add idx_plus_one, current_idx, NN::mk_constant(func, :int, 1)
801 func.insn_load_elem type, self_mem, idx_plus_one, :int
802 mem_ctx.return_rbstack.push value, type
804 # block, with its scope id, is pushed here due to handling of blocks in the 'when Call, FunCall, VCall'
805 mem_ctx.return_rbstack.pop
806 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -1), NN::mk_type(func, :nil)
808 idbg(:dbg_handle_call_element) { "USING OBJECT - #{@object}" }
810 if !@literal_receiver.nil?
811 type_id = @literal_receiver_attrib
813 raise ":new - non const" if @object[0] != :const
815 ProfFuncWithMd::md_add_assumption func, [:assumption, :object, [:type, type_id]]
817 idbg(:dbg_handle_call_element) { "pushing new object with type #{Typing::ID_MAP.index(type_id).inspect}" }
818 mem_ctx.return_rbstack.pop # we don't need the block
819 mem_ctx.return_rbstack.pop # or self
820 self_mem = NN::mk_bytearray(func, 1)
821 mem_ctx.return_rbstack.push self_mem, NN::mk_constant(func, :int, type_id)
823 call_function = do_method_dispatch func, mem_ctx, curr_id, has_receiver, method
824 if (cached_type = @node2type_cache[curr_id]) && @build_hints && @build_hints.opt_call_src
825 extra_params = cached_type.should_push_self ? 1 : 0
826 extra_params += 1 # __block__
827 if dbg_on :rt_call_param_opt
828 puts "BUILDING OPTIMAL [PUSH] ROUTE #{curr_id} - #{caller.inspect}" if dbg_on :rt_call_param_opt
829 DebugLogger::runtime_print_string func, "[OPTIMAL] #{curr_id} creating paramlist #{num_params+extra_params} vars\n"
832 if dbg_on :rt_call_param_opt
833 puts "BUILDING NON OPTIMAL ROUTE #{curr_id} - #{caller.inspect}"
834 DebugLogger::runtime_print_string func, "[normal] #{curr_id} creating paramlist #{num_params}+? vars\n"
838 return call_function, num_params
841 # TODO: replace bench with a start/stop mechanism; the current block-based version adds too much readability overhead
# Walk up the AST from current_ast_path looking for the nearest enclosing
# node whose sexp type matches +type+ (a single symbol or an array of
# symbols).  Returns the matching path, or nil if the root is reached
# without a match.  When +inclusive+ is true the starting node itself is
# allowed to match.
# NOTE(review): the loop construct, the `first` flag bookkeeping and the
# path-truncation step are elided from this numbered dump — confirm against
# the full file before editing.
843 def find_outer_element_of_type type, current_ast_path, inclusive = false
844 # puts "find_outer_element_of_type -- #{caller[0..2].join " : "} -- #{type.inspect}"
845 curr_path = current_ast_path.dup
# normalise to an array so a bare symbol and a list are handled uniformly
847 type = [type] unless type.is_a? Array
849 sexp = @crawler.find_path_sexp curr_path
850 break true if (type.include? sexptype(sexp)) and ((inclusive and first) or (!first))
# reached the AST root without finding a match
852 break false if curr_path.empty?
855 (found ? curr_path : nil)
# Number of parameters the yield at +current_sexp_element+ passes to its
# block: 0 when there is no argument sexp, otherwise determined by the
# argument node type (branch bodies are elided from this dump); raises for
# argument node types that are not handled.
858 def calc_num_yield_params current_sexp_element
859 return 0 if current_sexp_element[1].nil?
860 case current_sexp_element[1].first
861 when :dvar, :lvar, :lit
866 raise "unhandled - #{current_sexp_element[1].first}"
# Emit runtime code for a single AST element at curr_id.  Dispatches on the
# sexp type of +current_sexp_element+ and pushes/pops (value, type) pairs on
# mem_ctx.return_rbstack accordingly.  Returns
# [call_function, num_params, next_ast_path]; call_function is non-nil when
# the element requires dispatch elsewhere (method call, iterator producer).
# NOTE(review): most of the `when :<type>` labels between stanzas are elided
# from this numbered dump; the idbg strings identify each stanza's intent.
871 def handle_element func, mem_ctx, current_sexp_element, current_ast_path, anon_block, next_ast_path, ast_order, curr_id
872 call_function, num_params = nil, 0
873 case sexptype(current_sexp_element)
# scope / klass nodes generate no real code — only marked not-real for profiling
875 idbg(:handle_element) { "ignoring scope ast element" }
876 ProfFuncWithMd::md_mark_not_real func
878 idbg(:handle_element) { "ignoring klass ast element" }
879 ProfFuncWithMd::md_mark_not_real func
# constant reference: pushes its type id tagged :const and records it as a
# possible literal receiver attribute (used by the :new handling elsewhere)
881 const = current_sexp_element[1]
882 type_id = Typing::ID_MAP[const]
883 idbg(:handle_element) { "pushing unvalued variable with type #{const.inspect} (#{Typing::ID_MAP[const]})!!!!" }
884 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, type_id), NN::mk_type(func, :const)
885 @literal_receiver_attrib = (@literal_receiver == :Const) ? nil : type_id
# string literal: a pointer to the bytes is pushed, tagged :bytearray
887 ptr = Value.string2ptr current_sexp_element[1]
888 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, ptr), NN::mk_type(func, :bytearray)
890 int = current_sexp_element[1]
891 idbg(:handle_element) { "pushing #{int.inspect}!!!!" }
892 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, int), NN::mk_type(func, :int)
894 idbg(:handle_element) { "pushing false!!!!" }
895 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, 0), NN::mk_type(func, :bool)
897 idbg(:handle_element) { "pushing true!!!!" }
898 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, 1), NN::mk_type(func, :bool)
899 when :dvar, :lvar # Self
900 # if current_ast_element.is_a? Self
901 # local_sym = :__self__
903 local_sym = current_sexp_element[1]
905 idbg(:handle_element) { "pushing a locally assigned value - #{local_sym}" }
906 stored_int, type = mem_ctx.locals_dict.load_local_var local_sym
907 mem_ctx.return_rbstack.push stored_int, type
910 idbg(:handle_element) { "got a method definition! - #{next_ast_path.inspect}" }
911 ProfFuncWithMd::md_mark_not_real func
# block-arg element: alias the incoming __block__ local under the declared name
914 idbg(:handle_element) { "skipping block arg element..." }
915 block_sym = current_sexp_element[1]
916 stored_int, type = mem_ctx.locals_dict.load_local_var :__block__
917 idbg(:handle_element) { "associating __block__ with #{block_sym}" }
918 mem_ctx.return_rbstack.push stored_int, type
919 # store back to local
920 mem_ctx.locals_dict.assign_value block_sym, stored_int, type
921 # we didn't pop from the stack, so i don't *think* we need to push, right?
924 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, current_sexp_element[1].to_i), NN::mk_type(func, :int)
# instance-variable access: routed through the class's :callhook /
# :callhookload method (assignment vs load), with self + null block pushed
925 method, num_params, clear_scope = ((sexptype(current_sexp_element) == :iasgn) ? :callhook : :callhookload), 1, true
926 num_params += 1 if method == :callhook
927 selected_func_defs = @class_func_defs[@self_type]
928 self_mem_ptr_ptr, dummy = mem_ctx.locals_dict.load_local_var :__self__
929 mem_ctx.return_rbstack.push create_null_packed_block(func), NN::mk_type(func, :block)
930 mem_ctx.return_rbstack.push self_mem_ptr_ptr, NN::mk_type(func, @self_type.to_sym)
931 # stack now == __self__ (top), __block__ (top-1)
932 path = selected_func_defs.keys.detect { |cpath| @crawler.find_path_sexp(cpath)[1] == method }
933 call_function = selected_func_defs[path]
934 DebugLogger::runtime_print_string func, :rt_block, "pushing null block\n"
935 ProfFuncWithMd::md_add_assumption func, [:assumption, :self, [:type, @self_type]]
938 when :lasgn, :dasgn_curr_hacked
939 # pop the value, store to local, push the value again - as in, return it
940 local_sym = current_sexp_element[1]
941 idbg(:handle_element) { "popping and locally assigning a value to #{local_sym}" }
942 popped_int, type = mem_ctx.return_rbstack.pop
943 mem_ctx.locals_dict.assign_value local_sym, popped_int, type
944 mem_ctx.return_rbstack.push popped_int, type
945 when :call, :fcall, :vcall
946 bench("handle calls") {
947 f, n = handle_call_element current_sexp_element, func, mem_ctx, anon_block, curr_id, next_ast_path
948 call_function, num_params = f, n if !f.nil?
# iterator: dispatch to the producer's subpaths
951 subpaths = anon_block.yielder_subpaths
952 idbg(:handle_element) { "CALLING PRODUCER OF ITERATOR : #{@crawler.paths2orderdesc subpaths}" }
953 call_function = subpaths
# while: loop back to the first subpath of the while node
955 next_ast_path, num_params = CodeTree.find_subpaths(ast_order, current_ast_path).first, 0
956 idbg(:handle_element) { "WHILE :: should jump back to #{next_ast_path.inspect}" }
957 # push something, way easier than other options...
958 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -555), NN::mk_constant(func, :int, -555)
# yield: unpack __block__, reverse the args into a :multi_arg array and
# bounce into the block's dispatcher with a return point pushed first
960 proc_addr, dummy = mem_ctx.locals_dict.load_local_var :__block__
961 block_indirection_idx, block_scope_id = jump_to_proc func, proc_addr, "YIELDING"
962 dispatcher = load_indirect_block func, block_indirection_idx, mem_ctx
963 num_params = calc_num_yield_params current_sexp_element
964 push_return_point_bouncer func, mem_ctx, curr_id, next_ast_path
965 marg_addr = NN::mk_bytearray(func, 4 * (1 + num_params)) # is this right?
966 func.insn_store_elem marg_addr, NN::mk_constant(func, :int, 0), NN::mk_constant(func, :int, 0) # set position
968 # by popping each, and pushing onto a new list we reverse the order here
969 multi_args_stack = RbStack.new func, marg_addr, :ret
970 popped_int, type = mem_ctx.return_rbstack.pop
971 multi_args_stack.push popped_int, type
973 mem_ctx.return_rbstack.push marg_addr, NN::mk_type(func, :multi_arg)
974 mem_ctx.return_rbstack.push create_null_packed_block(func), NN::mk_type(func, :block)
975 call_bouncer func, mem_ctx, dispatcher, block_scope_id
# break: jump to the node *after* the enclosing while
977 curr_path = find_outer_element_of_type(:while, current_ast_path)
978 idx = ast_order.index curr_path
979 next_ast_path = ast_order[idx + 1] # jump to the node *after* the while
980 idbg(:handle_element) { "BREAKING TO : #{next_ast_path.nil? ? "nil" : (@crawler.paths2orderdesc [next_ast_path])}" }
981 # umm break should return a val right? this is also a mini hack, related to the While push
982 mem_ctx.return_rbstack.push NN::mk_constant(func, :int, -555), NN::mk_constant(func, :int, -555)
# call-site block setup: either pack a continuation bouncer into a block
# value for an iteration, or push the cached packed null block
984 bouncer, scope = nil, nil
985 if anon_block = @path2anon_block[current_ast_path] and anon_block.yielder_subpaths.include? current_ast_path
986 idbg(:blocks) { "!! @ @@ BAH --- IN AN ITERATION WITH A BLOCK" }
987 DebugLogger::runtime_print_string func, :rt_block, "pushing __block__\n"
988 iteration_anon_block = @path2anon_block[current_ast_path]
989 anon_block_first_statement_id = @crawler.path2id iteration_anon_block.subpaths.first
990 current_scope_id = mem_ctx.locals_dict.take_scope_id func, mem_ctx
991 bouncer = generate_continuation_bouncer(anon_block_first_statement_id, curr_id, 1, func) # a single multi-arg
992 scope = current_scope_id
993 ProfFuncWithMd::md_set_with_initial(func, :creates_bouncer_to, []) { |c| c << anon_block_first_statement_id }
994 # here create the compile time ref id, it contain the curr_id and anon_block_first_statement_id
995 # if the compile time ref looks up and a preexisting indirection block is found, reuse!!!
996 position_on_stack = new_indirection_block func, mem_ctx, bouncer
997 mem_ctx.return_rbstack.push pack_block(func, position_on_stack, scope), NN::mk_type(func, :block)
999 subpaths = CodeTree.find_subpaths(ast_order, current_ast_path)
1000 # block_arg = subpaths.detect { |path| @crawler.find_path_ast(path).is_a? BlockArg }
1001 # block_already_present = !block_arg.nil?
1002 # FIXME - block args are broken, fix! and update the above code!
1003 block_already_present = false
1004 if !block_already_present
1005 idbg(:blocks) { "!! @ @@ BAH --- PUSHING A NULL BLOCK" }
1006 DebugLogger::runtime_print_string func, :rt_block, "pushing null block\n"
1007 packed_null_block = Value.new
# slot 5 of stack_mem caches the packed null block (set up in build_setup_func_init)
1008 func.insn_load_elem packed_null_block, mem_ctx.stack_mem, NN::mk_constant(func, :int, 5), :void_ptr
1009 mem_ctx.return_rbstack.push packed_null_block, NN::mk_type(func, :block)
1013 raise "wowzers, found an unhandled ast node! - #{current_sexp_element}"
1015 return call_function, num_params, next_ast_path
# Determine which variables the statement at ts.path must export into its
# scope.  Returns [export_vars_list, export_special_vars, is_anon_block];
# a nil export_vars_list means ts.path is not a function or block entry
# point.  Special vars are :__block__ and, for instance methods, :__self__.
1018 def find_export_vars ts
1019 export_vars_list, export_special_vars, is_anon_block = nil, nil, false
1020 matching_func_def = @func_defs.values.detect { |order| (ts.path == order.first) }
1021 is_function = !matching_func_def.nil?
1022 export_special_vars = [:__block__]
# entry of an anonymous block: export its dynamically assigned vars
1023 if !ts.anon_block.nil? and (ts.path == ts.anon_block.subpaths.first)
1024 export_vars_list = ts.anon_block.dyn_assigns.collect {
1026 @crawler.find_path_sexp(path)[1]
1028 is_anon_block = true
# entry of a top-level function: export its declared parameter list
1030 func_order = matching_func_def
1031 chosen_function_path = @func_defs.index func_order
1032 export_vars_list = @crawler.find_path_sexp(chosen_function_path)[2].dup
1033 # following is from when we grokked block args...
1034 # has_block_arg = !func_order.detect { |path| (@crawler.find_path_sexp(path)).is_a? Ruby::BlockArg }.nil?
# NOTE(review): has_block_arg is only defined in the commented-out line
# above, so this idbg block would raise NameError if that debug flag is on
1035 idbg(:dbg_find_export_vars) { "EXPORTING VARS, HAS BLOCK ARG == #{has_block_arg}" }
1037 # this code is just dire...
# instance-method entry: detected by being the first statement of a func
# that is nested in a class; adds :__self__ to the special exports
1038 first_func_statements = @class_func_defs.values.inject([]) { |h,a| h + a.values }.map { |l| l.first }
1039 if first_func_statements.include? ts.path
1040 outer_klass_path = find_outer_element_of_type(:class, ts.path)
1041 if !outer_klass_path.nil?
1042 idbg(:dbg_find_export_vars) { "\n\nIN CLASS!!" }
1043 outer_func_path = find_outer_element_of_type([:defn_hacked], ts.path)
1044 if !outer_func_path.nil?
1045 idbg(:dbg_find_export_vars) { "IN FUNC" }
1046 if (@class_func_defs[get_class_name(outer_klass_path)][outer_func_path].first rescue nil) == ts.path
1047 # we have an outer class, for the moment this is good enough,
1048 # but, the exception of a Def inside a Def inside a Klass must later on be made
1049 idbg(:dbg_find_export_vars) { "EXPORTING VARS FOR AN INSTANCE METHOD" }
1050 export_special_vars = [:__block__, :__self__].reverse # THIS IS RIGHT DAMNNIT DO NOT CHANGE THIS
1051 export_vars_list = @crawler.find_path_sexp(outer_func_path)[2]
1057 return export_vars_list, export_special_vars, is_anon_block
# Memoised wrapper around find_outer_element_of_type.
# NOTE(review): the result is stored as cache[klass_path] = klass_path —
# keyed on the *result* rather than on current_ast_path — so a lookup for
# current_ast_path can never hit; this looks like a bug (probably intended:
# cache[current_ast_path] = klass_path).  Also, ast_order appears unused in
# the lines visible here — TODO confirm against the full file.
1060 def cached_outer_element_find cache, current_ast_path, ast_order, inclusive, *element_types
1062 if cache.has_key? current_ast_path
1063 klass_path = cache[current_ast_path]
1065 klass_path = find_outer_element_of_type element_types, current_ast_path, inclusive
1066 cache[klass_path] = klass_path
# Render @class_func_defs as a human-readable string for debug logging
# (used by the :dbg_handle_definitions idbg hook).
# NOTE(review): the accumulator initialisation and the trailing return/end
# lines are elided from this numbered dump.
1071 def dump_class_func_defs
1073 @class_func_defs.each_pair {
1074 |klass_sym, func_defs|
1075 str << "CLASS #{klass_sym}" \
1078 " FUNCTION #{@crawler.find_path_sexp(dom).inspect}\n" \
1079 + @crawler.paths2orderdesc(rng)
# Result record for the handle_*_definition helpers: should_not_execute
# tells the builder to skip codegen for this node; new_curr_id/jump_ast_path
# redirect it to the next node to build.
1085 SkipDefinition = Struct.new :should_not_execute, :new_curr_id, :jump_ast_path
# Register a newly encountered class definition: allocate the next free
# type id in Typing::ID_MAP and redirect codegen into the first statement
# of the class body (falling back to the node after the class when the body
# is empty — see test::test_empty_class_def).  Returns a SkipDefinition.
1088 def handle_klass_definition klass_path, ast_order, curr_id, func
1089 new_class_name = get_class_name(klass_path)
# type ids are dense integers; new classes get max + 1
1090 Typing::ID_MAP[new_class_name] = Typing::ID_MAP.values.max + 1
1091 idbg(:dbg_handle_definitions) { "DEFINING #{new_class_name}" }
1092 # we want to jump to the first subpath of the jump_ast_path
1093 # not the path directly, otherwise we jump half way into code!
1094 inner_body_path = klass_path + [2, 1]
1095 next_path = CodeTree.find_subpaths(ast_order, inner_body_path).first
1096 # see test::test_empty_class_def
1098 idx = ast_order.index klass_path
1099 curr_id = @crawler.path2id ast_order[idx + 1]
1100 next_path = @crawler.id2path curr_id
1102 ProfFuncWithMd::md_mark_not_real func
1103 return SkipDefinition.new(false, curr_id, next_path)
# Jump codegen past a method definition body.  Returns a SkipDefinition
# whose new_curr_id points at the Def node itself — so the non-existent
# return value of the definition is not "eaten" by the next statement —
# and whose jump path is the node following the def.
1107 def skip_to_end_of_method_def ast_order, curr_path, func
1108 last_ast = CodeTree.find_subpaths(ast_order, curr_path).last
1109 idx = ast_order.index last_ast
1110 # we modify the curr_id in order to prevent the eating of the non-existant return result
1111 curr_id = @crawler.path2id ast_order[idx + 1] # the Def
1112 jump_ast_path = ast_order[idx + 2] # +1 -> def, +2 -> one after def
1113 ProfFuncWithMd::md_mark_not_real func
1114 return SkipDefinition.new(false, curr_id, jump_ast_path)
# Extract the class name from the :class sexp at +path+ (index [1][2] —
# presumably the name inside the Colon2/const node; TODO confirm).
1117 def get_class_name(path)
1118 @crawler.find_path_sexp(path)[1][2]
# Record a method definition the first time its node is reached — into
# @func_defs for top-level functions, or into @class_func_defs (keyed by
# class name) for instance methods — then skip codegen past the def body.
# Returns a SkipDefinition; the caller treats a nil result as "nothing to
# record" (the fall-through branches are elided from this numbered dump).
1122 def handle_method_definition curr_path, klass_path, ast_order, current_ast_path, func
# top-level function case: first statement of an as-yet-unseen def
1124 func_subpaths = CodeTree.find_subpaths(@ast_order, curr_path)
1125 if func_subpaths.first == current_ast_path and !@func_defs.has_key?(curr_path)
1126 @func_defs[curr_path] = func_subpaths
1127 return skip_to_end_of_method_def(ast_order, curr_path, func)
# instance-method case: register under the enclosing class's name
1130 # store ref to function def
1131 func_first_path = curr_path
1132 klass_name = get_class_name(klass_path)
1133 unless (@class_func_defs.has_key?(klass_name) and @class_func_defs[klass_name].has_key? func_first_path)
1134 idbg(:dbg_handle_definitions) { "DEFINING IT!" }
1135 func_subpaths = CodeTree.find_subpaths(@ast_order, func_first_path)
1136 @class_func_defs[klass_name] ||= {}
1137 @class_func_defs[klass_name][func_first_path] = func_subpaths
1138 idbg(:dbg_handle_definitions) { dump_class_func_defs }
1139 return skip_to_end_of_method_def(ast_order, curr_path, func)
# Run once per curr_id: detect whether ts.path is (part of) a class or
# method definition and, if so, redirect codegen around the definition via
# the SkipDefinition returned by the specific handler.  Already-processed
# ids are memoised in @handled_definitions and passed straight through.
1146 def handle_definitions ts, next_ast_path, ast_order, curr_id, func
1147 @handled_definitions ||= {}
1148 return SkipDefinition.new(true, curr_id, next_ast_path) if @handled_definitions.has_key? curr_id
1149 @handled_definitions[curr_id] = nil
# class definition: triggered on the class's name node (klass_path + [0])
1152 klass_path = cached_outer_element_find @klass_cache, ts.path, ast_order, false, :class
1153 if !klass_path.nil? and ts.path == klass_path + [0] # Colon2
1154 return handle_klass_definition(klass_path, ast_order, curr_id, func)
1156 curr_path = cached_outer_element_find @def_cache, ts.path, ast_order, true, :defn_hacked, :def, :defs
1157 # FIXME - do this : shortcut optimisation: if past first node in a function we don't need to check, we just assume its not a define
1159 result = handle_method_definition curr_path, klass_path, ast_order, ts.path, func
1160 return result if !result.nil?
# not a definition: tell the builder to proceed normally
1162 return SkipDefinition.new(true, curr_id, next_ast_path)
# Function/block entry point: pop got_num_params values (plus the special
# vars such as :__block__) off the return stack and bind each into the
# local scope.  A popped parameter whose runtime type tag is :multi_arg is
# unpacked element-wise (value/type pairs) into the remaining declared
# argument symbols via a generated runtime loop.
# NOTE(review): the block-parameter lines of the two iterators (e.g.
# `|local_sym, idx|` and `|arg_sym|`) are elided from this numbered dump.
1166 def export_vars func, mem_ctx, export_vars_list, export_special_vars, got_num_params, curr_id, is_anon_block
1167 idbg(:dbg_export_vars) { <<DBG
1168 found the function entry point - exporting vars #{export_vars_list.inspect}
1169 and special vars #{export_special_vars.inspect}
1172 got_num_params += export_special_vars.length # add the special variables
1173 # function entry point
1174 idbg(:dbg_export_vars) { "got #{got_num_params} params!" }
1176 temp_cond, temp_idx = Value.new, Value.new
1177 # got_num_params includes __block__
1178 finished_loading = Label.new
# specials first, then the declared args reversed (stack pop order)
1180 non_special_count = got_num_params - export_special_vars.length
1181 reversed_list = export_special_vars + export_vars_list.slice(0...non_special_count).reverse
1182 reversed_list.each_with_index {
1184 popped_int, type = mem_ctx.return_rbstack.pop
1185 cond, skip_multi_load = Value.new, Label.new
1186 func.insn_eq cond, type, NN::mk_type(func, :multi_arg)
1187 # DebugLogger::runtime_print_string func, green("we got param of type: "), type, green("\n")
1188 func.insn_branch_if_not cond, skip_multi_load
1189 # multi load (when type == :multi_arg) - multi arg array is reversed, thusly its correct param order already
# slot 0 of the multi-arg array holds its end position
1190 multi_arg_end_pos = Value.new
1191 func.insn_load_elem multi_arg_end_pos, popped_int, NN::mk_constant(func, :int, 0), :int
1192 # load_multi_arg, takes params: num args (value), args (ruby array)
1194 func.create_local pos, :int
1195 func.insn_store pos, NN::mk_constant(func, :int, 0)
# args not yet bound by earlier iterations get filled from the multi-arg
1196 left_over_args = export_vars_list - reversed_list[0...idx]
1197 left_over_args.each {
1199 end_loop_cond, add_temp = Value.new, Value.new
1200 offs_val, offs_type = Value.new, Value.new
1201 temp_val, temp_type = Value.new, Value.new
1202 func.insn_eq end_loop_cond, pos, multi_arg_end_pos
1203 func.insn_branch_if end_loop_cond, finished_loading
# value at pos+1, type at pos+2, then advance pos by 2
1204 func.insn_add offs_val, pos, NN::mk_constant(func, :int, 1)
1205 func.insn_load_elem temp_val, popped_int, offs_val, :int
1206 func.insn_add offs_type, pos, NN::mk_constant(func, :int, 2)
1207 func.insn_load_elem temp_type, popped_int, offs_type, :int
1208 # DebugLogger::runtime_print_string func, "umm.. val == ", temp_val, ", and offset == ", temp_type, "\n"
1209 func.insn_add add_temp, pos, NN::mk_constant(func, :int, 2)
1210 func.insn_store pos, add_temp
1211 mem_ctx.locals_dict.assign_value arg_sym, temp_val, temp_type
1213 func.insn_branch finished_loading
1214 func.insn_label skip_multi_load
# plain (non-multi-arg) parameter: bind directly
1215 mem_ctx.locals_dict.assign_value local_sym, popped_int, type
1217 func.insn_label finished_loading
# NOTE(review): these two lines appear to be the body of the sexptype
# helper used throughout this file — its `def` line is elided from this
# numbered dump.  :push_block is returned unchanged; otherwise the sexp's
# head symbol is returned, or nil for non-sexp values.
1221 return sexp if sexp == :push_block
1222 is_a_sexp?(sexp) ? sexp[0] : nil
1225 # TODO this logic must be rewritten!
# Pop (discard) the previous statement's return value when the surrounding
# context does not consume it.  The outer sexp types that *do* consume
# values are matched by `when` clauses elided from this numbered dump.
1226 def eat_unneeded_return_value func, mem_ctx, prev_id, ts
1227 # eat the previous statements return value if not needed
1228 previous_sexp_path = @crawler.id2path prev_id
1229 previous_sexp_element = @crawler.find_path_sexp previous_sexp_path
# iter/class/defn_hacked nodes produce no value worth eating
1230 unless [:iter, :class, :defn_hacked].include? sexptype(previous_sexp_element)
1231 # used to have or [:def].include?(sexptype(ts.sexp_elt))
1232 outer_element_path = previous_sexp_path.slice 0...-1
1233 outer_element_sexp = @crawler.find_path_sexp outer_element_path
1234 if outer_element_path != ts.path
1235 idbg(:dbg_eat_unneeded_return_value) { "OUTER PREV ELEMENT :: #{outer_element_sexp.inspect}" }
1236 case sexptype(outer_element_sexp)
1240 idbg(:dbg_eat_unneeded_return_value) { "EATING LAST STATEMENTS RETURN VALUE" }
# if/while consume their condition value themselves, so don't pop for them
1241 mem_ctx.return_rbstack.pop unless [:if, :while].include? sexptype(ts.sexp_elt)
1247 # TODO: refactor this dispatch into the per-AST-node-type class hierarchy (or an aspect) instead of one large case statement
# Post-processing after an element has been built, driven by the element's
# *outer* node.  The visible branch handles a while loop: pop the loop
# condition, and when it is false dispatch to the node following the while
# (or generate a return when there is no such node / it is a method def).
# NOTE(review): the `when` label(s) of the case are elided from this dump.
1249 def post_element func, mem_ctx, prev_id, curr_id, current_sexp, current_ast_path, ast_order
1250 outer_ast_path = @crawler.id2path curr_id
1251 outer_element_path = outer_ast_path.slice 0...-1
1252 if outer_element_path != current_ast_path and !(sexptype(current_sexp) == :break)
1253 outer_sexp = @crawler.find_path_sexp outer_element_path
1254 idbg(:dbg_post_element) { "OUTER ELEMENT :: #{outer_sexp.inspect}" }
1255 case sexptype(outer_sexp)
1257 mem_ctx.locals_dict.force_creation
1258 skip_repeat_dispatch = Label.new
1259 popped_conditional, type = mem_ctx.return_rbstack.pop
# a true conditional skips the loop-exit dispatch below
1260 func.insn_branch_if popped_conditional, skip_repeat_dispatch
1261 idx = ast_order.index outer_element_path
1262 jump_ast_path = ast_order[idx + 1]
1263 if !jump_ast_path.nil?
1264 elt = @crawler.find_path_sexp(jump_ast_path)
# never jump into a method definition; treat it as end-of-order
1265 jump_ast_path = nil if (sexptype(elt) == :defn_hacked)
1267 if jump_ast_path.nil?
1268 idbg(:dbg_post_element) { "POPPING RETURN STACK" }
1269 # is there any reason that we'd want to use FORCED_POP and delay this?
1270 gen_return func, mem_ctx
1272 idbg(:dbg_post_element) { "GOING TO JUMP TO #{jump_ast_path.inspect}" \
1273 + " - #{@crawler.find_path_sexp(jump_ast_path).inspect}" }
1274 next_curr_id = @crawler.path2id jump_ast_path
1275 dispatch_to_id_value func, mem_ctx, next_curr_id, curr_id, 0, should_skip_data_inspect?(jump_ast_path), true
1277 func.insn_label skip_repeat_dispatch
# NOTE(review): this line appears to be the body of the elt_to_s debug
# helper (called from build_function_inner_inner) whose `def` line is
# elided from this dump: structural node types print as their type name,
# anything else is inspected.
1283 ([:scope, :class, :defn_hacked].include? sexptype(elt)) ? sexptype(elt).to_s : elt.inspect
# Produce the continuation to run after next_id: when the target is already
# compiled (and $opt_static_conts is on) reuse the cached static function;
# otherwise build a fresh bouncer trampoline.  FORCED_POP gets a bouncer
# that pops instead of jumping.  Records the choice in the function's
# profiling metadata.
1286 def generate_continuation_bouncer next_id, curr_id, num_params, func
1287 if next_id == FORCED_POP
1288 puts "no bouncer push" if check_dbg(:rt_bouncer)
1289 bouncer = mk_bouncer(FORCED_POP, curr_id, num_params, func)
1290 elsif @func_cache.has_key?(next_id) and $opt_static_conts
1291 puts "calling function! caching the return continuation!" if check_dbg(:rt_bouncer)
1292 ProfFuncWithMd::md_add_to_static_continuation_points func, next_id
1293 bouncer = @func_cache[next_id].func
1295 puts "calling function with #{num_params} params! using a bouncer :(" if check_dbg(:rt_bouncer)
1296 ProfFuncWithMd::md_add_to_bouncing_cont_points func, next_id
1297 bouncer = mk_bouncer(next_id, curr_id, num_params, func)
1302 # create and push bouncer for jumping to position following the return of the yield - FIXME - this should use the cache!
# Push (bouncer, current scope id) onto the :alt stack so gen_return can
# later resume at the node after next_ast_path; FORCED_POP is used when
# there is no such node.
1303 def push_return_point_bouncer func, mem_ctx, curr_id, next_ast_path, num_params = -1
1304 next_id = (@crawler.path2id next_ast_path) || FORCED_POP
1305 bouncer = (generate_continuation_bouncer next_id, curr_id, num_params, func)
1306 current_scope_id = mem_ctx.locals_dict.take_scope_id func, mem_ctx
1307 stack = RbStack.new func, mem_ctx.stack_mem, :alt
1308 stack.push NN::mk_constant(func, :ptr, bouncer), current_scope_id
1309 DebugLogger::runtime_print_string func, :rt_bouncer_runtime, "pushing a return point bouncer and stuff yay\n"
# Build the code for a single AST node: eat the previous statement's unused
# return value, resolve which traversal order the node belongs to, process
# class/method definitions, then delegate to handle_element/post_element.
# When the element performs a call, a return-point bouncer is pushed and
# next_ast_path is redirected to the callee.  Returns
# [curr_id, next_ast_path, num_params, ast_order].
1312 def build_function_inner_inner mem_ctx, curr_id, prev_id, got_num_params, func, just_did_export, ts
1313 next_ast_path, num_params, ast_order = nil, nil, nil
1314 ProfilingFunction.record_hit(func, [:id, curr_id])
1315 DebugLogger::runtime_print_string func, :rt_runtime_curr_id_trace, "executing at id:#{curr_id} - #{elt_to_s ts.sexp_elt}\n"
1316 idbg(:dbg_build_function_inner_inner) {
1317 red("***BLOCK***") + " :: #{ts.sexp_elt.inspect}, PATH :: #{ts.path.inspect} (#{curr_id})"
1319 if !just_did_export and (not [FORCED_POP, INITIAL_PREV, OUTER_SCOPE].include? prev_id)
1320 eat_unneeded_return_value func, mem_ctx, prev_id, ts
# the node may live in the main order, a function def, or an anon block
1322 possible_orders = [@ast_order] + @func_defs.values
1323 possible_orders += [ts.anon_block.yielder_subpaths, ts.anon_block.subpaths] unless ts.anon_block.nil?
1324 ast_order = possible_orders.compact.detect { |path_list| path_list.include? ts.path }
1325 jump_type = handle_definitions ts, (ast_order[ast_order.index(ts.path) + 1]), ast_order, curr_id, func
1326 curr_id, next_ast_path = jump_type.new_curr_id, jump_type.jump_ast_path
1327 @calling_function = false
1328 if !jump_type.should_not_execute
1331 if !next_ast_path.nil?
1332 # early exit from function def
1333 next_sexp = @crawler.find_path_sexp(next_ast_path)
# NOTE(review): `ast_order.index(ast_order.index(next_ast_path) + 1)`
# searches for an Integer *element* in a list of paths — this looks like it
# was meant to be `ast_order[ast_order.index(next_ast_path) + 1].nil?`
1334 if (sexptype(next_sexp) == :defn_hacked) and ast_order.index(ast_order.index(next_ast_path) + 1).nil?
1338 # handle element types
1339 call_function, num_params, next_ast_path = \
1340 handle_element func, mem_ctx, ts.sexp_elt, ts.path, ts.anon_block, next_ast_path, ast_order, curr_id
1341 post_element func, mem_ctx, prev_id, curr_id, ts.sexp_elt, ts.path, ast_order \
1342 unless prev_id == INITIAL_PREV or prev_id == FORCED_POP
1343 if !call_function.nil?
1344 # push next ast position, and old scope id
1345 idbg(:dbg_build_function_inner_inner) {
1346 "CALLING A FUNCTION! - WITH PARAMS #{num_params}, " \
1347 + "GOING TO #{next_ast_path.inspect} (#{@crawler.path2id(next_ast_path).inspect})"
1349 push_return_point_bouncer func, mem_ctx, curr_id, next_ast_path, num_params
# a call_function given as an Array of subpaths dispatches to its first path
1350 next_ast_path = call_function.is_a?(Array) ? call_function.first : call_function
1351 idbg(:dbg_build_function_inner_inner) {
1352 "NEXT AST PATH :: #{@crawler.find_path_sexp(next_ast_path).inspect}"
1353 } if next_ast_path.is_a? Array
1354 # N.B next_ast_path != next_id
1355 @calling_function = true
1358 return curr_id, next_ast_path, num_params, ast_order
# Generate a return: pop the (bouncer, scope) pair from the :alt stack and
# dispatch through it via call_bouncer.
1361 def gen_return func, mem_ctx
1362 idbg(:gen_return) { "popping next point from stack!" }
1363 # next on stack is the bouncer we should return through, and the scope to which we return
1364 stack = RbStack.new func, mem_ctx.stack_mem, :alt
1365 bouncer, scope_val = stack.pop
1366 DebugLogger::runtime_print_string func, :rt_bouncer_runtime,
1367 "we're like, popping! -> to scope:", scope_val, " with bouncer ", bouncer, "\n"
1368 call_bouncer func, mem_ctx, bouncer, scope_val
# Dispatch through a popped continuation bouncer, restoring scope_val as
# the target scope, via the generic do_dispatch mechanism.
1371 def call_bouncer func, mem_ctx, bouncer, scope_val
1372 DebugLogger::runtime_print_string func, :rt_scope, "POPPING SCOPE ID : ", scope_val, "\n"
1373 DebugLogger::runtime_print_string func,
1374 :rt_runtime_curr_id_trace, "calling a popped return continuation #{caller.first} : {", bouncer, "}\n"
1375 d = Dispatch.new bouncer
1376 d.scope_val = scope_val
1377 do_dispatch func, mem_ctx, d
# Route control to the next node: end of a nested order (or explicit false)
# generates a return; a runtime Value is dispatched as-is; otherwise the
# path is resolved to an id — unless it is a method definition, which also
# generates a return instead of jumping into the def body.
1380 def dispatch_to_next func, mem_ctx, next_ast_path, ast_order, curr_id, num_params
1381 skip_data_inspect = nil
1382 if (next_ast_path.nil? and ast_order != @ast_order) or (next_ast_path == false)
1383 skip_data_inspect = false
1384 gen_return func, mem_ctx
# a Value means the target id is only known at runtime
1386 elsif next_ast_path.is_a? Value
1387 skip_data_inspect = false
1388 next_point_val = next_ast_path
1390 skip_data_inspect = should_skip_data_inspect? next_ast_path
1391 next_curr_id = @crawler.path2id next_ast_path
1392 next_point_val = next_curr_id
1393 idbg(:dbg_build_function_inner) {
1394 "going to er #{next_curr_id} (#{@crawler.find_path_sexp(next_ast_path).inspect[0..40]}) next"
1396 if [:defn_hacked].include? sexptype(@crawler.find_path_sexp(next_ast_path))
1397 idbg(:dbg_build_function_inner) { red("WOWZERS ARGH!!!!! :( ") + green("BLAAAAAAAAAAAAH!!!!!! :(:(:(") }
1398 skip_data_inspect = false
1399 gen_return func, mem_ctx
1402 dispatch_to_id_value func, mem_ctx, next_point_val, curr_id, num_params || -1, skip_data_inspect
# Main incremental build loop: starting at curr_id, export parameters into
# a (possibly new) scope, build each node via build_function_inner_inner,
# wire the resulting dispatch, maintain @scope_linkage (which scope chains
# to which across ids), and continue while @predicting_next_id supplies
# further predicted ids.
# NOTE(review): several structural lines (else branches, loop construct,
# end keywords) are elided from this numbered dump.
1405 def build_as_much_as_predictable mem_ctx, curr_id, prev_id, got_num_params, func
1406 next_ast_path, num_params, ast_order = nil, nil, nil
1407 idbg(:node_predict) { "starting yay with #{curr_id} and previous was #{prev_id}" }
1408 just_did_export = false
1409 ts = TreeState.new @crawler, @path2anon_block, curr_id
1410 export_vars_list, export_special_vars, is_anon_block = find_export_vars ts
1411 created_scope = false
# a non-nil export list means curr_id is a function/block entry point
1412 if !export_vars_list.nil?
# anon blocks reuse the caller's scope; functions get a fresh one
1413 unless is_anon_block
1414 mem_ctx.locals_dict.scope_ast_id = curr_id
1415 mem_ctx.locals_dict.needs_new_scope = true
1416 ProfFuncWithMd::md_made_scope func
1417 created_scope = true
1419 export_vars func, mem_ctx, export_vars_list, export_special_vars, got_num_params, curr_id, is_anon_block
1420 num_vars = (export_vars_list+export_special_vars).length
1421 if @build_hints && @build_hints.opt_call_dst
1422 if dbg_on :rt_call_param_opt
1423 puts "BUILDING OPTIMAL [POP] (#{num_vars}) ROUTE #{curr_id} - #{caller.inspect}"
1424 DebugLogger::runtime_print_string func, "[OPTIMAL] (#{curr_id}) exporting #{num_vars} vars\n"
1427 if dbg_on :rt_call_param_opt
1428 puts "BUILDING NON OPTIMAL [POP] (#{num_vars}) ROUTE #{curr_id} - #{caller.inspect}"
1429 DebugLogger::runtime_print_string func, "[normal] (#{curr_id}) exporting #{num_vars} vars\n"
1432 just_did_export = true
1434 mem_ctx.locals_dict.load_current_scope if curr_id >= 0 and !created_scope
1436 idbg(:node_predict) { "... continuing yay with #{curr_id} and previous was #{prev_id}" }
1437 @func_ids << curr_id
1438 old_curr_id = curr_id
1439 curr_id, next_ast_path, num_params, ast_order = \
1440 build_function_inner_inner mem_ctx, curr_id, prev_id, got_num_params, func, just_did_export, ts \
1441 unless curr_id == FINISHED
1442 gen_data_inspect func, mem_ctx if $data_inspect_every_node
1443 if curr_id == FINISHED
1444 idbg(:dbg_build_function_inner) { "got a curr_id of #{curr_id} yay!, we're finished!" }
1445 func.insn_return NN::mk_constant(func, :int, -1)
# end of the main order: generate a return instead of a dispatch
1446 elsif (next_ast_path.nil? and ast_order == @ast_order)
1447 gen_return func, mem_ctx
1449 if !@calling_function
1450 mem_ctx.locals_dict.force_creation
1452 dispatch_to_next func, mem_ctx, next_ast_path, ast_order, curr_id, num_params
# ---- scope linkage bookkeeping: record which scope chain curr_id joins ----
1454 made_scope = ProfFuncWithMd::md_made_scope? func
1455 ProfFuncWithMd::md_unset_made_scope func
1456 initial_state = (@scope_linkage.keys.size == 1 and @scope_linkage[@scope_linkage.keys.first].empty?)
1458 initial_id = @scope_linkage.keys.first
1459 @scope_linkage[initial_id] += [initial_id, INITIAL_PREV]
1462 if !(@scope_linkage.has_key? curr_id)
# a new scope starts its own chain unless curr_id is already chained somewhere
1463 if made_scope and !initial_state
1464 lookup_chain_on_id = curr_id
1465 pair_to_chain_to = @scope_linkage.detect { |(k,v)| v.include? lookup_chain_on_id }
1466 if pair_to_chain_to.nil?
1467 chain_to = [curr_id]
1468 @scope_linkage[curr_id] = chain_to
1469 idbg(:scope_linking) {
1470 "trying to make a new scope, ended up with #{@scope_linkage.inspect}"
1473 idbg(:scope_linking) {
1474 "skipping creation of a new scope as #{@scope_linkage.inspect} already has #{prev_id} in it!"
# no new scope: join the chain that contains prev_id (or the initial chain)
1478 lookup_chain_on_id = initial_state ? @scope_linkage.keys.first : prev_id
1479 pair_to_chain_to = @scope_linkage.detect { |(k,v)| v.include? lookup_chain_on_id }
1480 idbg(:scope_linking) { "looking up on #{lookup_chain_on_id}, @scope_linkage = #{@scope_linkage.inspect}" }
1481 if !pair_to_chain_to.nil?
1482 chain_to = pair_to_chain_to[1]
1483 elsif lookup_chain_on_id != INITIAL_PREV
1484 puts "erm, oops?, we didn't find anything for #{lookup_chain_on_id} in #{@scope_linkage.inspect}"
1489 idbg(:scope_linking) { "WANT TO CONNECT #{prev_id} WITH #{curr_id}, and erm. " +
1490 "made_scope == #{made_scope} - chaining it to #{chain_to.inspect}, but like, can we?" }
1491 chain_to << curr_id if !chain_to.nil? and !chain_to.include? curr_id
1492 idbg(:scope_linking) { "ended up with: #{@scope_linkage.inspect}" }
# continue with the next predicted id, if any
1493 if !@predicting_next_id.empty?
1494 idbg(:node_predict) { "got next id == #{curr_id.inspect}" }
1496 idbg(:node_predict) { "no prediction, lets quit on #{curr_id}" }
1497 idbg(:scope_linking) { "are we missing #{curr_id} somehow???" }
1501 curr_id = @predicting_next_id.shift
1502 ts = TreeState.new @crawler, @path2anon_block, curr_id
1504 idbg(:node_predict) { "node prediction has finished" }
# Allocate a runtime "stack" bytearray of `size` bytes for `func` and write
# the initial cursor position into word 0 (word 0 doubles as the
# size/position slot, hence initial_position == 0 is rejected).
# NOTE(review): the value handed back to callers (presumably the bytearray
# value) is on lines missing from this dump - confirm against the original.
1507 def make_stack func, size, initial_position
1508 fail "sorry, but the size is stored at position 0!" if initial_position == 0
1509 all_locals_mem = NN::mk_bytearray(func, size) # struct: length, (id, value)*
1510 func.insn_store_elem all_locals_mem, NN::mk_constant(func, :int, 0), NN::mk_constant(func, :int, initial_position) # set position
# Emit the one-off VM setup function: allocates the three big runtime
# bytearrays (value stack, return stack, locals), pushes the FINISHED
# bouncer, creates the outer scope, and seeds the stack header slots
# (1: current scope id, 2/3: loaded scope bookkeeping, 4: indirections,
# 5: packed null block, 6: scopes cache, 7: call-trace stack, 8: current
# func ptr).  my_data_inspect reads these same slot indices back at runtime.
1515 def build_setup_func_init func, mem_ctx
1516 func.create_with_prototype :int, []
1517 mem_ctx.stack_mem = make_stack(func, 8 * 1024 * 100, 100)
1518 mem_ctx.return_stack_mem = make_stack(func, 4 * 4096 * 100, 1)
1519 mem_ctx.all_locals_mem = make_stack(func, 20 * 3 * 1024 * 100 + 1, 32)
1520 finished_bouncer = mk_bouncer FINISHED, -1, -1, func
1521 stack = RbStack.new func, mem_ctx.stack_mem, :alt
1522 stack.push NN::mk_constant(func, :ptr, finished_bouncer), NN::mk_constant(func, :int, 0) # -> pre3 == 0
1523 dummy, orig_scope_id = create_new_scope func, mem_ctx, OUTER_SCOPE
1525 # # TODO - use FieldDesc's
1526 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 1), orig_scope_id
1527 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 2),
1528 NN::mk_constant(func, :int, 0) # set loaded_scope
1529 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 3),
1530 NN::mk_constant(func, :int, -1) # set loaded_scope_id
1531 # setup indirections
1532 indirections = make_stack(func, 2**16, 1)
1533 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 4), indirections
1534 ProfFuncWithMd::md_force_data_inspect func
1535 bouncer = generate_continuation_bouncer(NULL_BLOCK, NULL_BLOCK, -1, func) # a single multi-arg
1536 new_indirection_block(func, mem_ctx, bouncer) # should be at position 1
1537 # cache the packed null block
1538 position_on_stack, scope = NN::mk_constant(func, :int, 1), NN::mk_constant(func, :int, -555)
1539 packed_null_block = pack_block(func, position_on_stack, scope)
1540 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 5), packed_null_block
1541 # generated scope cache
1542 scopescache = make_stack(func, 2**10, 1)
1543 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 6), scopescache
1544 # logging a trace of calls into a stack
1545 trace_stack_ptr = make_stack(func, 2**16, 1)
1546 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 7), trace_stack_ptr
1547 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 8), NN::mk_constant(func, :ptr, func)
1548 trace_stack = RbStack.new func, trace_stack_ptr, :alt
1551 CURRENT_SCOPE_ID_IDX = 1
# Emit the prologue shared by every generated "main" function: declare the
# prototype, bind the three bytearray parameters into mem_ctx, then bind
# the remaining per-call parameters.
# NOTE(review): `params` is assigned on a line missing from this dump
# (presumably derived from curr_id) - confirm before relying on it.
1553 def build_main_func_init func, mem_ctx, curr_id
1555 func.create_with_prototype ATOM_RET_VAL, construct_prototype_of_length(params.length)
1556 func.fill_value_with_param mem_ctx.stack_mem, STACK_BYTEARRAY_PARAM_IDX
1557 func.fill_value_with_param mem_ctx.return_stack_mem, RETURN_STACK_BYTEARRAY_PARAM_IDX
1558 func.fill_value_with_param mem_ctx.all_locals_mem, ALL_LOCALS_BYTEARRAY_PARAM_IDX
1559 params.each_with_index {
1561 func.fill_value_with_param param, (ALL_LOCALS_BYTEARRAY_PARAM_IDX + 1 + idx)
1565 DebugLogger::runtime_print_string func, "got param : ", param, "\n"
1567 if check_dbg(:rt_scope)
1568 current_scope_id = Value.new
1569 func.insn_load_elem current_scope_id, mem_ctx.stack_mem,
1570 NN::mk_constant(func, :int, CURRENT_SCOPE_ID_IDX), :int
1571 DebugLogger::runtime_print_string func, "CURRENT SCOPE ID : ", current_scope_id, "\n"
# Core code-generation for one function.  Two modes (the `initialisation`
# branches live on lines missing from this dump): the setup function gets
# a bare MemContext and build_setup_func_init; regular functions get fresh
# Values, the main prologue, and then as much statically-predictable code
# as possible via build_as_much_as_predictable.
1576 def build_function_inner curr_id, prev_id, got_num_params, initialisation, func
1577 @skip_data_inspect = true
1581 mem_ctx = MemContext.new nil, nil, nil, nil
1582 func.mem_ctx = mem_ctx
1583 build_setup_func_init func, mem_ctx
1585 ProfFuncWithMd::md_init_init_func func
1586 ProfFuncWithMd::md_mark_not_real func
1588 mem_ctx = MemContext.new Value.new, Value.new, Value.new, nil
1589 func.mem_ctx = mem_ctx
1590 passed_params = build_main_func_init func, mem_ctx, curr_id
# Rotate the "current function" pointer: header slot 8 holds the new
# func, slot 9 the previous one (see the 8 == new, 9 == old comment).
1592 old_func_ptr = Value.new
1593 # 8 == new, 9 == old
1594 func.insn_load_elem old_func_ptr, mem_ctx.stack_mem, NN::mk_constant(func, :int, 8), :void_ptr
1595 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 9), old_func_ptr
1596 func.insn_store_elem mem_ctx.stack_mem, NN::mk_constant(func, :int, 8), NN::mk_constant(func, :ptr, func)
1597 mem_ctx.return_rbstack = RbStack.new func, mem_ctx.return_stack_mem, :ret, true
1598 mem_ctx.locals_dict = DictLookup.new(self, func, @scope_linkage, mem_ctx)
1599 # TODO - don't add label when there will be no static dispatch
1600 atom_main_label = Label.new
1601 func.insn_label atom_main_label
1602 ProfFuncWithMd::md_set_atom_main_label func, atom_main_label
1603 if curr_id == FORCED_POP
1604 gen_return func, mem_ctx
1606 build_as_much_as_predictable mem_ctx, curr_id, prev_id, got_num_params, func
1611 ID_CONSTANTS.detect { |id| value == self.class.const_get(id) } || value
1614 attr_reader :func_cache
# Patch a static dispatch in place: `new_ref` is an [id, instruction
# position] pair; re-emit the indirect call at that position so it targets
# the freshly cached function for `id`.
# NOTE(review): `impl` and `inst` are bound on lines missing from this
# dump (presumably the patched function's body and the instruction read at
# dispatch_pos) - confirm; `dest_func` is unused in the visible lines.
1616 def hack_static_ptr dest_func, new_ref
1617 dispatch_id = new_ref[0]
1618 dispatch_pos = new_ref[1]
1619 if not @func_cache.has_key? dispatch_id
1620 puts "failure to hack up pointer #{dispatch_id}!"
# Seek to the dispatch site, sanity-check the opcode, then overwrite it.
1624 impl.pos = dispatch_pos
1626 fail "horrribblyyyy" if inst[0] != 21 # CCB
1627 new_dispatch_func = @func_cache[dispatch_id].func
1628 new_func_ptr_value = NN::mk_constant(impl, :ptr, new_dispatch_func)
1629 ret_val, old_func_ptr, ret_val_type, prototype, params = *inst[1..-1]
1630 impl.insn_call_indirect_vtable_blah ret_val, new_func_ptr_value, ret_val_type, prototype, params
1631 impl.pos = dispatch_pos
1634 # update all static dispatches that refer to this one
# Walks every cached function that statically dispatches to curr_id and
# re-points each dispatch site via hack_static_ptr.
# NOTE(review): the log string interpolates "#{curr_id}}" - the trailing
# "}" looks like a typo in the message (cosmetic only).
1636 def update_static_dispatches curr_id
1637 idbg(:specialisation) { cyan("!!!!!!!!!!")+magenta("CHECK")+cyan("!!!!!!!!!!") + " #{curr_id}}" }
1638 each_static_dispatcher(curr_id) {
1639 |cached_func, dispatch|
1640 idbg(:specialisation) { green("updating") + " #{cached_func} -> #{dispatch.inspect}" }
1641 hack_static_ptr cached_func, dispatch
1645 # optimisation framework
# Yields [cached_func, dispatch] for every static dispatch recorded in any
# generated function; entries whose second element is nil are direct
# branches and are skipped.
# NOTE(review): the filtering on curr_id presumably happens on a line
# missing from this dump - the visible body never reads curr_id.
1646 def each_static_dispatcher curr_id
1647 @old_functions.each {
1649 static_dispatches = ProfFuncWithMd::md_get_statically_dispatches_to cached_func
1650 next if static_dispatches.nil?
1651 static_dispatches.each {
1653 next if dispatch[1].nil? # direct branch rather than static dispatch
1654 yield cached_func, dispatch
# Decide whether a cached function with slow (dynamic) dispatches is worth
# rebuilding: rebuild when a slow target is already cached, or uncached
# but actually executed (a rebuild can turn it static); skip when the slow
# target was never used.
# NOTE(review): the `rebuild` flag is assigned on lines missing from this
# dump; the visible lines only log the decision per slow_id.
1660 def slow_dispatch_needs_rebuild? cached_func
1661 return false unless (ProfFuncWithMd::md_get_slow_dispatches cached_func.func)
1662 idbg(:specialisation) { magenta("SLOW DISPATCH IN [#{cached_func.inspect}]") }
1664 (ProfFuncWithMd::md_get_slow_dispatches cached_func.func).each {
1666 cached = @func_cache.has_key? slow_id
1667 cursor = AstCursor.new slow_id
1668 used = cursor.id_hit? @old_functions
1670 # we can go ahead with a rebuild if its cached anyway
1671 idbg(:specialisation) { "#{slow_id} -> cached" }
1673 elsif !cached and used
1674 # if its uncached and its used, we should rebuild as we can change a slow into a static via the rebuild
1675 idbg(:specialisation) { "#{slow_id} -> !cached and used" }
1677 elsif !cached and !used
1678 # if its not used, then there is no reason to rebuild
1679 idbg(:specialisation) { "#{slow_id} -> !cached and !used" }
1681 idbg(:specialisation) { "#{slow_id} -> #{rebuild}" }
1683 idbg(:specialisation) { magenta("WE'RE GONNA REBUILD? - #{rebuild}") }
1687 # optimisation framework
# True when this function's static continuation point was rebuilt more
# recently than the function itself (so the function is stale).
# NOTE(review): @func_cache[cp] is dereferenced without a nil check - if
# the continuation point was evicted from the cache this raises
# NoMethodError; confirm whether that is impossible by construction.
1688 def static_continuation_point_older? cached_func
1689 return false unless (ProfFuncWithMd::md_get_static_continuation_point cached_func.func)
1690 cp = (ProfFuncWithMd::md_get_static_continuation_point cached_func.func).first
1691 if @func_cache[cp].func.metadata[:build_time] > cached_func.func.metadata[:build_time]
1692 idbg(:specialisation) { "OLDER! -> " }
# Rebuild every function named by the given RebuildTask list: evict the
# cached entry, re-run build_function with the task's hints, then stamp
# the new function with its rebuild count and last observed hit count.
# NOTE(review): the iteration header and the `already_built` initialisation
# are on lines missing from this dump - `rebuild_info` is presumably the
# per-task loop variable.
1700 def rebuild_id_set rebuild_ids
1704 id = rebuild_info.id
1705 next if already_built.has_key? id
1706 count = rebuild_info.hit_count
1708 # generate_rebuild_list fills this field in, you can feel free to leave empty however
1709 cursor = AstCursor.new id
1710 count = cursor.id_hit? @old_functions, true
# Snapshot the old metadata needed to regenerate the function before
# dropping it from the cache.
1712 cached_func = @func_cache[id].func
1713 num_params = (ProfFuncWithMd::md_get_num_params cached_func)
1714 prev_id = (ProfFuncWithMd::md_get_prev_id cached_func)
1715 was_generated_by = (ProfFuncWithMd::md_get_was_generated_by cached_func)
1716 @func_cache.delete id
1718 @build_hints = rebuild_info.hints
1719 build_function id, prev_id, num_params, was_generated_by, false
1721 new_func = @func_cache[id].func
1722 ProfFuncWithMd::md_inc_rebuild_count new_func
1723 ProfFuncWithMd::md_set_last_hit_count new_func, count
1724 already_built[id] = nil
1729 RebuildTask = Struct.new(:id, :hit_count, :hints)
# Collect RebuildTasks for every cached function accepted by
# condition_block whose hit count changed since its last build;
# executed_block is invoked with a description string and the id.
# NOTE(review): `rebuild_ids` is initialised (and returned) on lines
# missing from this dump; tasks are created without hints (third field
# left nil - see RebuildTask).
1732 def generate_rebuild_list condition_block, executed_block
1734 @func_cache.each_pair {
1736 next if !condition_block.call id, func
1737 # should we bother? did we get hit more?
1738 cursor = AstCursor.new id
1739 count = cursor.id_hit? @old_functions, true
1740 cached_func = @func_cache[id].func
1741 old_hit_count = (ProfFuncWithMd::md_get_last_hit_count cached_func)
1742 if old_hit_count != count
1743 # " + (ProfFuncWithMd::md_inspect cached_func.func.metadata) + " (#{id})
1744 executed_block.call "#{id} }(#{old_hit_count} != #{count})", id
1745 rebuild_ids << RebuildTask.new(id, count)
1759 Dissection = Struct.new :offs, :dat, :addr, :len
1762 # what if first_id != the id at which the optimisation was done???
# Walk the runtime indirections array (8-byte entries: function ptr,
# compile-time ref).  For each entry whose function has been re-cached,
# poke the new function pointer into the array, then schedule rebuilds of
# every function that created a bouncer to that id.
# NOTE(review): `rebuild_ids` initialisation and the per-entry block
# header are on lines missing from this dump.
# NOTE(review): `return false` inside the `proc` below returns from the
# *defining* method (or raises LocalJumpError once it has returned) - in
# Ruby only lambdas return locally; confirm this is the intended behavior.
1764 def update_indirections
1766 ind = dissect_indirections @indirections_addr
1767 indirections_addr_offs, indirections_dat, indirections_addr = ind.offs, ind.dat, ind.addr
1768 indirections_dat.each {
1770 # update this dummy is the compile time ref! -
1771 # store the index into the indirections array vs the compile time ref!
1772 ptr_addr, dummy = str.unpack("iI")
1773 func1 = Value.addr2func ptr_addr
1774 first_id = (ProfFuncWithMd::md_get_next_id func1)
1775 first_id ||= ProfFuncWithMd::md_get_path_range(func1).first
1776 idbg(:indirection_specialisation) {
1777 "got #{Value.addr2func ptr_addr} - #{dummy}, with id #{first_id.inspect}"
1779 replacement = @func_cache[first_id]
1780 if !replacement.nil? && func1 != replacement.func
1781 idbg(:indirection_specialisation) {
1782 "updating #{func1.metadata.inspect}: with #{ProfFuncWithMd::md_inspect replacement.func.metadata}"
1784 replc_ptr_addr = Value.func2addr(replacement.func)
1785 Value.stringpoke(indirections_addr + indirections_addr_offs, [replc_ptr_addr, first_id].pack("iI"), 8)
1786 condition_block = proc {
1788 return false if ProfFuncWithMd::md_get_creates_bouncer_to(func.func).nil?
1789 return false unless ProfFuncWithMd::md_get_creates_bouncer_to(func.func).include? first_id
1792 executed_block = proc {
1794 idbg(:indirection_specialisation) { magenta("re-gen #{description} - currently has old bouncer gen") }
1796 rebuild_ids += generate_rebuild_list condition_block, executed_block
1798 indirections_addr_offs += 8
1800 rebuild_id_set rebuild_ids
1803 attr_reader :trace_stack_history
# Snapshot the runtime call-trace stack, reset its count word back to 1
# (emptying it for the next run), and append the snapshot to the history.
# NOTE(review): the visible body ends with `<<` (which evaluates to the
# history array); the actual return value (callers expect the snapshot or
# nil) is presumably on a line missing from this dump - confirm.
1805 def import_from_trace_stack
1806 return nil if @trace_stack_addr.nil?
1807 trace_stack = dissect_trace_stack @trace_stack_addr
1808 Value.stringpoke(@trace_stack_addr, [1].pack("i"), 4)
1809 @trace_stack_history ||= []
1810 @trace_stack_history << trace_stack
# Decode each 8-byte trace entry (new func ptr, old func ptr - note the
# order) and yield the resolved [old_func, new_func] pair.
# NOTE(review): the block-parameter line (presumably `|str|`) is missing
# from this dump.
1814 def trace_stack_iterator trace_stack_dat
1815 trace_stack_dat.each {
1817 new_func_ptr, old_func_ptr = str.unpack("II")
1818 old_func = Value.addr2func old_func_ptr
1819 new_func = Value.addr2func new_func_ptr
1820 yield old_func, new_func
# Scan the imported call trace for call -> method -> continuation triplets
# (a function-def entry followed by its recorded static continuation
# point) and schedule the three functions for rebuild with the
# opt_call_src/dst/cnt hints, enabling param-stack flattening.
# NOTE(review): `call_triplets`, `rebuild_ids`, `tmp` and several closers
# are initialised on lines missing from this dump.
# NOTE(review): `Struct.new(:call, :method, :cont)` inside the loop mints a
# new anonymous class per triplet - hoisting it to a constant would avoid
# per-iteration class allocation.
# NOTE(review): `dispatch_id_list += rebuilt` rebinds the local only (the
# caller never sees it) and is then clobbered by `.replace(@func_cache.keys)`,
# which *does* mutate the caller's array - confirm this asymmetry is intended.
1825 def find_param_stack_flattening_possibilities dispatch_id_list
1826 trace_stack = import_from_trace_stack
1827 return if trace_stack.nil?
1828 call_func, method_func = nil, nil
1829 expecting_continuation = false
1831 trace_stack_dat = trace_stack.dat
1832 trace_stack_dat += @last_trace_stack.dat if @last_trace_stack
1833 trace_stack_iterator(trace_stack_dat) {
1834 |old_func, new_func|
1835 elt_id = ProfFuncWithMd::md_get_path_range(new_func).first
1836 elt_path = @crawler.id2path elt_id
1837 matching_func_def = @func_defs.values.detect { |order| (elt_path == order.first) }
1838 if expecting_continuation
1839 cont_func = new_func
1840 cont_point = ProfFuncWithMd::md_get_static_continuation_point(call_func)
1841 if cont_point && (cont_point.first == ProfFuncWithMd::md_get_path_range(cont_func).first)
1842 expecting_continuation = false
1843 # this next line is fuzzy... whats the actual cause of a nil here? is the name correct?
1844 some_already_rebuilt_funcs = [call_func, method_func, cont_func].detect {
1846 @func_cache[ProfFuncWithMd::md_get_id(func)].nil?
1848 missing_type_caching = (not @node2type_cache.has_key?(ProfFuncWithMd::md_get_id(method_func)))
1849 if !some_already_rebuilt_funcs
1850 call_triplets << Struct.new(:call, :method, :cont).new(call_func, method_func, cont_func)
1855 if !matching_func_def.nil?
1856 call_func, method_func = old_func, new_func
1857 expecting_continuation = true
1859 } if !trace_stack.nil?
1861 call_triplets.each {
1864 tmp << RebuildTask.new(ProfFuncWithMd::md_get_id(triplet.cont), nil, Hints.new(:opt_call_cnt=>1))
1865 tmp << RebuildTask.new(ProfFuncWithMd::md_get_id(triplet.method), nil, Hints.new(:opt_call_dst=>1))
1866 tmp << RebuildTask.new(ProfFuncWithMd::md_get_id(triplet.call), nil, Hints.new(:opt_call_src=>1))
1868 puts magenta("FOUND AN OPTIMAL ROUTE #{[
1869 ProfFuncWithMd::md_get_id(triplet.cont),
1870 ProfFuncWithMd::md_get_id(triplet.method),
1871 ProfFuncWithMd::md_get_id(triplet.call)
1874 rebuilt, done = [], []
1875 @triplet_rebuild_lists ||= []
1876 @triplet_rebuild_lists += rebuild_ids
1877 @triplet_rebuild_lists.each {
1879 puts "CHECKING TRIPLETS"
1880 # next line is just to make CERTAIN that we don't build out of order...
1881 next if triplet.detect { |task| task.hints.opt_call_dst && (task.id == @current_execution_id) }
1882 if missing = triplet.detect { |task| not (@func_cache.has_key? task.id) }
1883 fail "missing #{missing}! so we can't even build the triplets wtf?"
1885 puts "trying to rebuild -> #{triplet.map {|obj| obj.id}.inspect}"
1886 rebuilt = rebuild_id_set triplet
1888 dispatch_id_list += rebuilt
1889 dispatch_id_list.replace(@func_cache.keys)
1890 fail "possible duplicate static update" if dispatch_id_list.sort.uniq.length != dispatch_id_list.length
1893 @triplet_rebuild_lists.reject! { |triplet| done.include? triplet }
1894 @last_trace_stack = trace_stack
# Rebuild every cached function that performs scope lookups, so scope
# template changes observed by my_data_inspect propagate into generated code.
# NOTE(review): `return false` inside the `proc` returns from this method
# rather than from the block (proc vs lambda semantics) - confirm intended.
1899 def find_scope_templating_possiblities
1900 # puts "looking for find_scope_templating_possiblities"
1901 condition_block = proc {
1903 return false unless ProfFuncWithMd::md_lookups(func.func) \
1904 and !ProfFuncWithMd::md_lookups(func.func).empty?
1907 executed_block = proc {
1909 idbg(:rebuild) { "GENERATING #{description} AS SCOPE TEMPLATE CHANGED!" }
1911 rebuild_ids = generate_rebuild_list condition_block, executed_block
1912 rebuilt = rebuild_id_set rebuild_ids
# For a cache-hit atom, decide whether it must be re-generated (returns a
# truthy "to_be_specialized"): type feedback exists for its next ids or
# curr_id, or previously-executed slow dispatches warrant a rebuild.  Also
# runs the generic stale-function rebuild pass; if that pass already
# rebuilt curr_id we avoid specialising (and re-generating) it twice.
# NOTE(review): `rebuild` is set on lines missing from this dump; the
# `return false` statements inside the `proc` return from this method,
# not the block (proc semantics) - confirm intended.
1917 def attempt_specialization atom, curr_id
1918 idbg(:indirection_specialisation) { "****\n" + magenta("ARGH ITS THE SPECIALISATION YAY ARGH OOO") }
1919 to_be_specialized = (ProfFuncWithMd::md_get_next_ids atom.func).detect {
1920 |next_id| @node2type_cache.has_key? next_id
1922 to_be_specialized = true if (@node2type_cache.has_key? curr_id)
1923 if (ProfFuncWithMd::md_has_slow_dispatches atom.func)
1925 # we don't care about slow_id's that haven't been used before anyway,
1926 # as they won't affect the overall execution until they *have* been
1927 # used. and at this point we can backtrack and rebuild this. thusly,
1928 # we are only interested in not rebuilding, if there are any slow_id's
1929 # that *have* been executed before
1930 (ProfFuncWithMd::md_get_slow_dispatches atom.func).each {
1932 cursor = AstCursor.new slow_id
1933 if cursor.id_hit? @old_functions
1934 idbg(:indirection_specialisation) { "want to rebuild cus like #{slow_id} is used" }
1939 to_be_specialized = true if rebuild
1941 curr_id_included = false
1942 condition_block = proc {
1944 return false unless (static_continuation_point_older? func) || (slow_dispatch_needs_rebuild? func)
1947 executed_block = proc {
1949 idbg(:rebuild) { "CHECKING CACHED FUNCTION #{description}" }
1950 curr_id_included = true if id == curr_id
1952 rebuild_ids = generate_rebuild_list condition_block, executed_block
1953 rebuilt = rebuild_id_set rebuild_ids
1954 # the above also caused regen of curr_id,
1955 # so we only specialize if this wasn't the case, otherwise we re-gen twice!
1956 to_be_specialized = false if curr_id_included
1957 atom = @func_cache[curr_id]
1958 idbg(:specialisation) { cyan("!!!!!!!!!!")+magenta("REBUILDING") + "!!! SHOULD BE FINISHED NOW!" }
1959 if to_be_specialized
1960 idbg(:func_cache) { "NOT USING CACHE AS SPECIALISATION AWAITS FOR - " \
1961 + "#{@node2type_cache[ProfFuncWithMd::md_get_next_ids(atom.func).first]}" }
# Actually generate a new function for curr_id: create a
# ProfilingFunction, seed its metadata (including the one-shot
# @build_hints), run build_function_inner, then cache the result under the
# first id of the generated path range - unless it made assumptions, is an
# init func, or covers no non-negative ids.
# NOTE(review): @func_ids is populated on lines missing from this dump
# (before md_set_path_range reads it); @build_hints is presumably cleared
# on a missing line too - confirm.
1967 def generate_func curr_id, prev_id, got_num_params, caller_func, initialisation
1968 idbg(:build_function) { "GENERATING FUNCTION #{id2name curr_id}" }
1969 notes = { :next_ids => [], :assumptions => [], :was_generated_by => caller_func }
1970 func = ProfilingFunction.new @context
1971 func.metadata = notes
1972 func.metadata[:hints] = @build_hints # FIXME
1973 @old_functions << func
1974 @predicting_next_id = []
1976 ProfFuncWithMd::md_set_path_range(func, @func_ids) # NOTE - can be empty in the case of exit path
1978 idbg(:func_cache) { green("current:#{curr_id}, prev:#{prev_id}, num_params:#{got_num_params}") }
1979 build_function_inner curr_id, prev_id, got_num_params, initialisation, func
1982 # todo - following should be limited
1983 func.metadata[:prev_id] = prev_id
1984 func.metadata[:num_params] = got_num_params
1985 add = func.metadata[:assumptions].empty?
1986 add = false if ProfFuncWithMd::md_has_init_func func
1987 first_id = @func_ids.first
1988 will_add = (add and !@func_ids.empty? and first_id >= 0)
1990 assumptions = ProfFuncWithMd::md_get_assumptions(func).inspect
1991 notes = ProfFuncWithMd::md_inspect func.metadata
1992 (will_add ? cyan("ADDING") : magenta("NO ADD OF")) +
1993 " range #{@func_ids.inspect} TO CACHE - assumptions == #{assumptions} - notes == #{notes}"
1996 new_atom = Struct.new(:func).new(func)
1997 func.metadata[:build_time] = Time.now
1998 @func_cache[first_id] = new_atom
# Pending one-shot post-processor for the next runtime callback value.
2004 $next_postproc = nil
# Runtime -> Ruby callback: the VM reports an integer (as a string).  A
# pending $next_postproc consumes it; otherwise the value is looked up in
# $message_hash (optionally arming the matching post-processor for the
# *next* callback).  A resolved "BREAKPOINT CALLBACK\n" drops into the
# interactive debugger via instance_eval/breakpoint.
# NOTE(review): the branch printing/handling `str` in the general case
# sits on lines missing from this dump.
2005 def my_callback my_string
2006 line = my_string.chomp
2008 if !$next_postproc.nil?
2009 str = $next_postproc.call(line.to_i)
2010 $next_postproc = nil
2011 elsif $message_hash.has_key? line.to_i
2012 $next_postproc = $message_hash_post_proc[line.to_i]
2013 str = $message_hash[line.to_i]
2017 if !str.nil? and !str.empty?
2018 if str == "BREAKPOINT CALLBACK\n"
2019 self.instance_eval { breakpoint("breakpoint was called!") }
# Decode one 8-byte runtime cell - int32 value in bytes 0..3, uint32
# type-id in bytes 4..7 - into a [type_symbol, value] pair via the global
# type-id map.
def typeval2desc bytes
  type_id = bytes[4, 4].unpack("I").first
  value   = bytes[0, 4].unpack("i").first
  [Typing::ID_MAP.index(type_id), value]
end
# Decode the runtime indirections array: word 0 holds the element count,
# the data region starts two words in, and each entry is 8 bytes
# (function ptr, compile-time ref).
def dissect_indirections indirections_addr
  count       = Value.ptr2string(indirections_addr, 4).unpack("i").first
  header_offs = 2 * 4
  entries     = split8 Value.ptr2string(indirections_addr + header_offs, count * 4)
  Dissection.new(header_offs, entries, indirections_addr, count)
end
# Decode the runtime call-trace stack; same layout as the indirections
# array (count word, one-word pad, 8-byte entries).
def dissect_trace_stack trace_stack_addr
  count       = Value.ptr2string(trace_stack_addr, 4).unpack("i").first
  header_offs = 2 * 4
  entries     = split8 Value.ptr2string(trace_stack_addr + header_offs, count * 4)
  Dissection.new(header_offs, entries, trace_stack_addr, count)
end
# Append one element to the runtime scopes cache: pack `symbols` into a
# fresh (count, (id, value, type)*) blob, store its pointer in the next
# free slot after the count word, and bump the count.  Returns the new
# cache length and the blob size in words.
def cache_scope scopescache_addr, symbols
  current_len = Value.ptr2string(scopescache_addr, 4).unpack("i").first
  slot_width  = 1 * 4
  packed_syms = symbols.map { |sym| [sym.to_i, 0, 0].pack "III" }.join
  blob        = [symbols.length].pack("I") + packed_syms
  blob_ptr    = Value.bytearray2ptr blob.dup
  Value.stringpoke(scopescache_addr + slot_width * (1 + current_len), [blob_ptr].pack("I"), 4)
  Value.stringpoke(scopescache_addr, [current_len + 1].pack("I"), 4)
  return current_len + 1, (symbols.length * 3) + 1
end
# Decode the main value stack, skipping its 95-word header region.
# TODO - this works, however is it really correct?
def dissect_stack_mem ptr1
  header_words = 95
  raw_count    = Value.ptr2string(ptr1, 4).unpack("i").first
  pair_count   = (raw_count - 2 - header_words) / 2
  pairs        = split8 Value.ptr2string(ptr1 + (6 + header_words) * 4, 4 * (pair_count * 2))
  Dissection.new((4 + header_words) * 4, pairs, ptr1, pair_count)
end
# Decode the return stack (count word, one-word pad, 8-byte typed cells).
# NOTE(review): the else-branch of the conditional assignment (the value
# used when the stack is empty) is on lines missing from this dump -
# presumably an empty list; confirm.
2068 def dissect_return_stack_mem ptr2
2069 return_stack_mem_len = (Value.ptr2string(ptr2, 4).unpack("i").first - 1) / 2
2070 return_stack_mem = if return_stack_mem_len > 0
2071 split8(Value.ptr2string(ptr2 + 2*4, 4 * (return_stack_mem_len * 2)))
2075 Dissection.new 2*4, return_stack_mem, ptr2, return_stack_mem_len
# Decode the all-locals bytearray; the first 32 words are reserved
# (the initial position is 32 - see build_setup_func_init), entries are
# 8-byte (locals ptr, scope ast id) pairs.
def dissect_all_locals_mem ptr4
  reserved_words = 32
  raw_count      = Value.ptr2string(ptr4, 4).unpack("i").first
  entry_count    = (raw_count - reserved_words) / 2
  data_offs      = 4 + reserved_words * 4
  entries        = split8 Value.ptr2string(ptr4 + data_offs, 4 * (2 * entry_count))
  Dissection.new data_offs, entries, ptr4, entry_count
end
# Runtime data-inspection callback: given the four bytearray pointers,
# pull the well-known header slots back out of the stack (7*4 trace stack,
# 4*4 indirections, 6*4 scopes cache, 1*4 current scope id - the indices
# written by build_setup_func_init), capture @object/@self_type from the
# return stack and current scope, and grow scope templates in @scope_hash
# as new locals appear.
2087 def my_data_inspect ptr1, ptr2, ptr3, ptr4
2088 bench("data_inspect") {
2089 @trace_stack_addr = Value.ptr2string(ptr1 + 7*4, 4).unpack("I").first
2090 @indirections_addr = Value.ptr2string(ptr1 + 4*4, 4).unpack("I").first
2091 @scopescache_addr = Value.ptr2string(ptr1 + 6*4, 4).unpack("I").first
2092 current_scope_id = Value.ptr2string(ptr1 + 1*4, 4).unpack("i").first
2093 ind = dissect_indirections @indirections_addr
2094 indirections_len = ind.len
2095 dissected_stack_mem = dissect_stack_mem(ptr1) if dbg_on :data_inspect
2097 return_stack_mem_dissection = dissect_return_stack_mem ptr2
2098 return_stack_mem_translated = return_stack_mem_dissection.dat.map { |bytes| typeval2desc bytes } \
2099 if dbg_on :data_inspect
2101 receiver_object = return_stack_mem_dissection.dat.last # self
2102 @object = receiver_object.nil? ? nil : typeval2desc(receiver_object)
2104 all_locals_mem_dissection = dissect_all_locals_mem ptr4
2105 if $opt_scope_templates or dbg_on :data_inspect
2106 all_locals_mem_translated = all_locals_mem_dissection.dat.map {
2108 locals_addr, scope_ast_id = bytes.unpack "Ii"
2109 # locals_mem == size, n * (id / value / type)
2110 size = Value.ptr2string(locals_addr, 4).unpack("i").first
2111 locals_mem = split12 Value.ptr2string(locals_addr + 4, 4 * ((size * 3)))
2115 sym_id = bytes[0,4].unpack("I").first
2116 scope << sym_id.id2name.to_sym
2117 [sym_id.id2name, typeval2desc(bytes[4,8])]
# Merge newly observed locals into the existing scope template; templates
# may only grow - a mismatched prefix is a hard failure below.
2119 if $opt_scope_templates and @scope_hash[scope_ast_id] and !scope.empty?
2120 if scope != @scope_hash[scope_ast_id]
2121 cur_template = @scope_hash[scope_ast_id]
2122 # scope - the new one, cur_template - the old one,
2123 missing = scope - (scope & cur_template)
2124 min_matching_prefix_len = [cur_template.size, scope.size].min
2125 matching_prefix = (cur_template.slice(0...min_matching_prefix_len) \
2126 == scope.slice(0...min_matching_prefix_len))
2127 idbg(:scope_templates) {
# NOTE(review): the next line's string ends without a trailing "+", so the
# following "+ \"thusly...\"" line is a separate unary-plus statement and
# becomes the block's value while the first string is discarded - the log
# message is silently truncated.  Likely a lost line-continuation.
2128 "trying to add #{scope.inspect} to #{@scope_hash[scope_ast_id].inspect}, "
2129 + "thusly the diff is -> #{missing.inspect}"
2132 matching = scope.slice(0..cur_template.length).inspect
2133 puts "#{cur_template.inspect} VS #{matching} of #{scope.inspect}"
2134 fail "prefix doesn't match!!!! its all wrong, we all cry!"
2136 @scope_hash[scope_ast_id] += missing
2137 idbg(:scope_templates) {
2138 "adding (#{missing.inspect}) to a scope!!! - #{scope.inspect} :" +
2139 "making for the current scope_hash of #{@scope_hash.inspect}"
2143 idbg(:scope_templates) { "found a scope!!! - #{scope.inspect} - #{scope_ast_id}" }
2144 @scope_hash[scope_ast_id] = scope
2146 [scope_ast_id, locals_addr, locals_mem]
# Locate the currently active scope (ids run 32, 34, 36... matching the
# locals array layout) and record the runtime type of __self__.
2152 all_locals_mem_dissection.dat.each_with_index {
2154 locals_addr, scope_ast_id = bytes.unpack "Ii"
2155 scope_id = (32 + 2*idx)
2156 next unless scope_id == current_scope_id
2157 # locals_mem == size, n * (id / value / type)
2158 size = Value.ptr2string(locals_addr, 4).unpack("i").first
2159 locals_mem = split12 Value.ptr2string(locals_addr + 4, 4 * ((size * 3)))
2162 sym_id = bytes[0,4].unpack("I").first
2163 if sym_id.id2name.to_sym == :__self__
2164 @self_type = typeval2desc(bytes[4,8])[0]
2171 idbg(:data_inspect) {
2172 stack_mem_dump = dissected_stack_mem.dat.map {
2173 |bytes| t = bytes.unpack("ii"); "[MEM:#{t[0]}, #{t[1]}]" }
# NOTE(review): "= #{...}" outside a string literal is just a comment in
# Ruby, so this assignment has lost its right-hand side (probably a string
# or heredoc opener on a line missing from this dump) - reconstruct from
# the original before editing.
2174 all_locals_mem_dump = #{all_locals_mem_translated.map{|locals|locals.inspect}.join("\n").indent 6
2176 OBJECT == #{@object.inspect}
2177 current_scope_id == #{current_scope_id.inspect}
2178 stack_mem == #{dissected_stack_mem.len.to_s} #{stack_mem_dump}
2179 return_stack_mem == #{return_stack_mem_dissection.len.to_s} #{return_stack_mem_translated.inspect}
2180 all_locals_mem == #{all_locals_mem_dissection.len.to_s} \n#{all_locals_mem_dump}
2181 indirections_len == #{indirections_len}
# Top-level entry for obtaining the function for curr_id: consult the
# cache, run the optimisation passes (indirection updates, specialisation,
# scope templating, param-stack flattening), patch affected static
# dispatch sites, and either return the cached atom or fall through to
# generate_func.  @build_hints being set marks a rebuild pass and disables
# the flattening search.
2188 def build_function curr_id, prev_id, got_num_params, caller_func, initialisation
2189 is_rebuild_magic = !@build_hints.nil?
2190 @current_execution_id = curr_id unless is_rebuild_magic
2191 if !caller_func.nil? and dbg_on :generator_path_information
2192 puts "################### CALLER == #{caller_func.metadata.inspect}"
# NOTE(review): `func` is not defined at this point in the visible lines -
# the parameters are curr_id/prev_id/got_num_params/caller_func; this
# probably should read `caller_func` (or `func` is bound on a missing
# line) - confirm against the original.
2193 if !(ProfFuncWithMd::md_has_no_bouncer_generated_annotation func)
# NOTE(review): the string below is not joined to the `puts` line above -
# a leading "+" starts a new unary-plus statement, so this text is never
# printed.  A trailing "+" (or "\\") on the previous line was likely lost.
2194 puts "################### CALLER BOUNCERS GENERATOR == "
2195 + "#{(ProfFuncWithMd::md_get_generated_by caller_func).metadata.inspect}"
2198 total_instructions = @old_functions.inject(0) { |a,func| a + func.instructions_executed }
# NOTE(review): start_time - now yields a *negative* elapsed value; other
# things being equal this should be (Time.now - @execution_started_at) -
# check how the log is consumed before changing it.
2199 @time_vs_instructions_log << [(@execution_started_at - Time.now).to_f, total_instructions]
2200 sexp_element = @crawler.find_path_sexp @crawler.id2path(curr_id) unless curr_id < 0
2201 idbg(:build_function) { "BUILDING FUNCTION #{id2name curr_id} :: #{id2name prev_id}" +
2202 ":: #{got_num_params} :: #{sexp_element.inspect}" }
2203 idbg(:func_cache) { "FUNCTION CACHE - relating to element - #{sexp_element.inspect}" }
2204 atom = @func_cache[curr_id]
2205 idbg(:func_cache) { "FOUND!!! - #{curr_id} -> #{atom.inspect} checking if should!" } if !atom.nil?
2206 # special when the found atom doesn't have a next_id thats
2207 # part of the node2type_cache (see ANN1 for more information)
2208 # FIXME - this following line is in fact not really "to be specialized" but rather... don't rebuild
2209 to_be_specialized = false
2210 static_dispatch_updates = []
2211 if !atom.nil? and $opt_use_cache
2212 update_indirections if $opt_indirection_updates
2213 to_be_specialized = attempt_specialization atom, curr_id
2214 static_dispatch_updates << curr_id if to_be_specialized
2216 to_be_specialized = true if $opt_scope_templates && find_scope_templating_possiblities
2217 unless is_rebuild_magic
2218 if $opt_flatten_param_stack && find_param_stack_flattening_possibilities(static_dispatch_updates)
2219 to_be_specialized = true
2222 static_dispatch_updates.each {
2224 update_static_dispatches dispatch_id
# Re-read the cache: the passes above may have rebuilt (or evicted) curr_id.
2226 atom = @func_cache[curr_id]
2227 idbg(:func_cache) { "SHOULD REBUILD #{curr_id}? - #{to_be_specialized }" } if !atom.nil?
2228 if !atom.nil? and $opt_use_cache and !to_be_specialized
2229 @func_cache_hits += 1
2232 @func_cache_misses += 1
2233 if !atom.nil? and @build_hints.nil?
2234 @build_hints = atom.func.metadata[:hints]
2236 return generate_func(curr_id, prev_id, got_num_params, caller_func, initialisation)
2241 @func_defs, @class_func_defs = {}, {}
2243 function = build_function(@crawler.path2id(@ast_order.first), OUTER_SCOPE, -1, nil, true)
2246 rescue NanoVMException => e
2249 dump_instructions e.function
2252 StateCache::save_cache(self) if $enable_cache
2255 puts "MISSES : #{@func_cache_misses}, HITS :: #{@func_cache_hits}"