* transcode_data.h (rb_trans_elem_t): new field: from and to.
[ruby-svn.git] / vm_insnhelper.c
bloba886685be65ce7888f2ccd0d09e494668cd6d6a6
1 /**********************************************************************
3 insnhelper.c - instruction helper functions.
5 $Author$
7 Copyright (C) 2007 Koichi Sasada
9 **********************************************************************/
11 /* finish iseq array */
12 #include "insns.inc"
14 #include <math.h>
16 /* control stack frame */
/* Fallback: let the build configuration override how "inline" is spelled
 * (e.g. for compilers that need a different keyword); default to C "inline". */
19 #ifndef INLINE
20 #define INLINE inline
21 #endif
/*
 * vm_push_frame: push a new control frame onto th's frame stack and return it.
 *
 * Lays out the value stack for the new frame: local_size slots are
 * nil-initialized at sp, then specval (block pointer / previous-dfp, GC-guarded)
 * is stored in the slot that becomes the frame's dfp.  If no local frame
 * pointer is supplied (lfp == 0) this frame is its own local frame, so lfp is
 * set to that same slot.  `type` is the frame magic (METHOD/BLOCK/CFUNC/...).
 */
23 static inline rb_control_frame_t *
24 vm_push_frame(rb_thread_t * th, const rb_iseq_t * iseq,
25 VALUE type, VALUE self, VALUE specval,
26 const VALUE *pc, VALUE *sp, VALUE *lfp,
27 int local_size)
/* control frames grow downward: the new cfp sits just below the current one */
29 rb_control_frame_t * const cfp = th->cfp = th->cfp - 1;
30 int i;
32 /* setup vm value stack */
34 /* nil initialize */
35 for (i=0; i < local_size; i++) {
36 *sp = Qnil;
37 sp++;
40 /* set special val */
41 *sp = GC_GUARDED_PTR(specval);
43 if (lfp == 0) {
44 lfp = sp;
47 /* setup vm control frame stack */
49 cfp->pc = (VALUE *)pc;
/* sp/bp start one past the specval slot */
50 cfp->sp = sp + 1;
51 cfp->bp = sp + 1;
52 cfp->iseq = (rb_iseq_t *) iseq;
53 cfp->flag = type;
54 cfp->self = self;
55 cfp->lfp = lfp;
56 cfp->dfp = sp;
57 cfp->proc = 0;
/* optional per-frame profiling; compiled out by default */
59 #define COLLECT_PROFILE 0
60 #if COLLECT_PROFILE
61 cfp->prof_time_self = clock();
62 cfp->prof_time_chld = 0;
63 #endif
65 if (VMDEBUG == 2) {
66 SDR();
69 return cfp;
/*
 * vm_pop_frame: discard the current control frame, restoring th->cfp to the
 * previous (caller's) frame.  The COLLECT_PROFILE section (normally compiled
 * out) attributes elapsed time to the popped iseq before unwinding.
 */
72 static inline void
73 vm_pop_frame(rb_thread_t *th)
75 #if COLLECT_PROFILE
76 rb_control_frame_t *cfp = th->cfp;
78 if (RUBY_VM_NORMAL_ISEQ_P(cfp->iseq)) {
79 VALUE current_time = clock();
/* NOTE(review): shadows the outer cfp declared above — same value here */
80 rb_control_frame_t *cfp = th->cfp;
81 cfp->prof_time_self = current_time - cfp->prof_time_self;
/* charge this frame's self time to the caller's child-time accumulator */
82 (cfp+1)->prof_time_chld += cfp->prof_time_self;
84 cfp->iseq->profile.count++;
85 cfp->iseq->profile.time_cumu = cfp->prof_time_self;
86 cfp->iseq->profile.time_self = cfp->prof_time_self - cfp->prof_time_chld;
/* placeholder branch: C-method profiling not implemented */
88 else if (0 /* c method? */) {
91 #endif
92 th->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp);
94 if (VMDEBUG == 2) {
95 SDR();
99 /* method dispatch */
/*
 * VM_CALLEE_SETUP_ARG(ret, th, iseq, orig_argc, orig_argv, block):
 * arrange caller-supplied arguments for the callee.  Fast path: when the iseq
 * takes only mandatory args (arg_simple & 0x01) just check the count and set
 * ret (the opt-table pc offset) to 0; otherwise delegate to
 * vm_callee_setup_arg_complex for opt/rest/post/block handling.
 */
101 #define VM_CALLEE_SETUP_ARG(ret, th, iseq, orig_argc, orig_argv, block) \
102 if (LIKELY(iseq->arg_simple & 0x01)) { \
103 /* simple check */ \
104 if (orig_argc != iseq->argc) { \
105 rb_raise(rb_eArgError, "wrong number of arguments (%d for %d)", orig_argc, iseq->argc); \
107 ret = 0; \
109 else { \
110 ret = vm_callee_setup_arg_complex(th, iseq, orig_argc, orig_argv, block); \
/*
 * vm_callee_setup_arg_complex: slow-path argument setup for iseqs with
 * optional, rest, post and/or block parameters.  Rearranges orig_argv in
 * place into the callee's local-variable layout and returns the pc offset
 * into the iseq at which execution should start (skipping already-supplied
 * optional-argument initialization code).  Raises ArgumentError on arity
 * mismatch.  th->mark_stack_len is kept up to date so the GC can mark the
 * in-flight argument region.
 */
113 static inline int
114 vm_callee_setup_arg_complex(rb_thread_t *th, const rb_iseq_t * iseq,
115 int orig_argc, VALUE * orig_argv,
116 const rb_block_t **block)
118 const int m = iseq->argc;
119 int argc = orig_argc;
120 VALUE *argv = orig_argv;
121 int opt_pc = 0;
123 th->mark_stack_len = argc + iseq->arg_size;
125 /* mandatory */
126 if (argc < (m + iseq->arg_post_len)) { /* check with post arg */
127 rb_raise(rb_eArgError, "wrong number of arguments (%d for %d)",
128 argc, m + iseq->arg_post_len);
/* consume the m mandatory arguments */
131 argv += m;
132 argc -= m;
134 /* post arguments */
135 if (iseq->arg_post_len) {
/* copy remaining args aside so the MEMCPY below cannot overlap its source */
136 if (!(orig_argc < iseq->arg_post_start)) {
137 VALUE *new_argv = ALLOCA_N(VALUE, argc);
138 MEMCPY(new_argv, argv, VALUE, argc);
139 argv = new_argv;
/* move the trailing post args into their fixed slots */
142 MEMCPY(&orig_argv[iseq->arg_post_start], &argv[argc -= iseq->arg_post_len],
143 VALUE, iseq->arg_post_len);
146 /* opt arguments */
147 if (iseq->arg_opts) {
/* arg_opts counts the "no optionals supplied" entry too, hence -1 */
148 const int opts = iseq->arg_opts - 1 /* no opt */;
150 if (iseq->arg_rest == -1 && argc > opts) {
151 rb_raise(rb_eArgError, "wrong number of arguments (%d for %d)",
152 orig_argc, m + opts + iseq->arg_post_len);
155 if (argc > opts) {
156 argc -= opts;
157 argv += opts;
/* all optionals supplied: start past their init code */
158 opt_pc = iseq->arg_opt_table[opts]; /* no opt */
160 else {
161 int i;
/* nil-fill unsupplied optionals; their defaults run from opt_pc */
162 for (i = argc; i<opts; i++) {
163 orig_argv[i + m] = Qnil;
165 opt_pc = iseq->arg_opt_table[argc];
166 argc = 0;
170 /* rest arguments */
171 if (iseq->arg_rest != -1) {
172 orig_argv[iseq->arg_rest] = rb_ary_new4(argc, argv);
173 argc = 0;
176 /* block arguments */
177 if (block && iseq->arg_block != -1) {
178 VALUE blockval = Qnil;
179 const rb_block_t *blockptr = *block;
/* any argument left over at this point is an arity error */
181 if (argc != 0) {
182 rb_raise(rb_eArgError, "wrong number of arguments (%d for %d)",
183 orig_argc, m + iseq->arg_post_len);
186 if (blockptr) {
187 /* make Proc object */
188 if (blockptr->proc == 0) {
189 rb_proc_t *proc;
191 blockval = vm_make_proc(th, th->cfp, blockptr);
193 GetProcPtr(blockval, proc);
/* re-point the caller's block at the heap-allocated proc's block */
194 *block = &proc->block;
196 else {
197 blockval = blockptr->proc;
201 orig_argv[iseq->arg_block] = blockval; /* Proc or nil */
204 th->mark_stack_len = 0;
205 return opt_pc;
/*
 * caller_setup_args: caller-side argument preparation before a send/yield.
 * Handles an explicit &block argument (converting non-Proc values via
 * to_proc) or an inline block iseq, storing the resulting rb_block_t into
 * *block; then expands a trailing splat (VM_CALL_ARGS_SPLAT_BIT) onto the
 * value stack.  Returns the possibly-adjusted argument count.
 */
208 static inline int
209 caller_setup_args(const rb_thread_t *th, rb_control_frame_t *cfp, VALUE flag,
210 int argc, rb_iseq_t *blockiseq, rb_block_t **block)
212 rb_block_t *blockptr = 0;
214 if (block) {
215 if (flag & VM_CALL_ARGS_BLOCKARG_BIT) {
216 rb_proc_t *po;
217 VALUE proc;
/* the &block argument sits on top of the stack */
219 proc = *(--cfp->sp);
221 if (proc != Qnil) {
222 if (!rb_obj_is_proc(proc)) {
223 VALUE b = rb_check_convert_type(proc, T_DATA, "Proc", "to_proc");
224 if (NIL_P(b)) {
225 rb_raise(rb_eTypeError,
226 "wrong argument type %s (expected Proc)",
227 rb_obj_classname(proc));
229 proc = b;
231 GetProcPtr(proc, po);
232 blockptr = &po->block;
/* keep the proc reachable from the frame for GC */
233 RUBY_VM_GET_BLOCK_PTR_IN_CFP(cfp)->proc = proc;
234 *block = blockptr;
237 else if (blockiseq) {
/* literal block: fill the frame's embedded block struct */
238 blockptr = RUBY_VM_GET_BLOCK_PTR_IN_CFP(cfp);
239 blockptr->iseq = blockiseq;
240 blockptr->proc = 0;
241 *block = blockptr;
245 /* expand top of stack? */
246 if (flag & VM_CALL_ARGS_SPLAT_BIT) {
247 VALUE ary = *(cfp->sp - 1);
248 VALUE *ptr;
249 int i;
250 VALUE tmp = rb_check_convert_type(ary, T_ARRAY, "Array", "to_a");
252 if (NIL_P(tmp)) {
253 /* do nothing */
255 else {
256 int len = RARRAY_LEN(tmp);
257 ptr = RARRAY_PTR(tmp);
/* replace the array slot with its elements */
258 cfp->sp -= 1;
260 CHECK_STACK_OVERFLOW(cfp, len);
262 for (i = 0; i < len; i++) {
263 *cfp->sp++ = ptr[i];
/* splatted array contributed i elements but consumed one slot */
265 argc += i-1;
269 return argc;
/*
 * call_cfunc: invoke a C-implemented method with the calling convention
 * selected by its declared arity `len`:
 *   -2 : func(recv, args_as_ruby_array)
 *   -1 : func(argc, argv, recv)          (C-style varargs)
 *  0-15: func(recv, argv[0], ..., argv[len-1])  (fixed arity, spelled out)
 * Raises ArgumentError if argc does not match a fixed arity, or if len
 * exceeds the supported maximum of 15.
 */
272 static inline VALUE
273 call_cfunc(VALUE (*func)(), VALUE recv,
274 int len, int argc, const VALUE *argv)
276 /* printf("len: %d, argc: %d\n", len, argc); */
278 if (len >= 0 && argc != len) {
279 rb_raise(rb_eArgError, "wrong number of arguments(%d for %d)",
280 argc, len);
283 switch (len) {
284 case -2:
285 return (*func) (recv, rb_ary_new4(argc, argv));
286 break;
287 case -1:
288 return (*func) (argc, argv, recv);
289 break;
290 case 0:
291 return (*func) (recv);
292 break;
293 case 1:
294 return (*func) (recv, argv[0]);
295 break;
296 case 2:
297 return (*func) (recv, argv[0], argv[1]);
298 break;
299 case 3:
300 return (*func) (recv, argv[0], argv[1], argv[2]);
301 break;
302 case 4:
303 return (*func) (recv, argv[0], argv[1], argv[2], argv[3]);
304 break;
305 case 5:
306 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
307 break;
308 case 6:
309 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
310 argv[5]);
311 break;
312 case 7:
313 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
314 argv[5], argv[6]);
315 break;
316 case 8:
317 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
318 argv[5], argv[6], argv[7]);
319 break;
320 case 9:
321 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
322 argv[5], argv[6], argv[7], argv[8]);
323 break;
324 case 10:
325 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
326 argv[5], argv[6], argv[7], argv[8], argv[9]);
327 break;
328 case 11:
329 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
330 argv[5], argv[6], argv[7], argv[8], argv[9],
331 argv[10]);
332 break;
333 case 12:
334 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
335 argv[5], argv[6], argv[7], argv[8], argv[9],
336 argv[10], argv[11]);
337 break;
338 case 13:
339 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
340 argv[5], argv[6], argv[7], argv[8], argv[9], argv[10],
341 argv[11], argv[12]);
342 break;
343 case 14:
344 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
345 argv[5], argv[6], argv[7], argv[8], argv[9], argv[10],
346 argv[11], argv[12], argv[13]);
347 break;
348 case 15:
349 return (*func) (recv, argv[0], argv[1], argv[2], argv[3], argv[4],
350 argv[5], argv[6], argv[7], argv[8], argv[9], argv[10],
351 argv[11], argv[12], argv[13], argv[14]);
352 break;
353 default:
/* len out of range: a method declared with more than 15 fixed args */
354 rb_raise(rb_eArgError, "too many arguments(%d)", len);
355 break;
357 return Qnil; /* not reached */
/*
 * vm_call_cfunc: call a C method (`mn` holds the NODE_CFUNC body) with event
 * hooks.  Pushes a CFUNC marker frame (so backtraces/ensure see the call),
 * pops receiver + args off the caller's stack, invokes the function via
 * call_cfunc, then verifies frame-stack consistency before popping.
 */
360 static inline VALUE
361 vm_call_cfunc(rb_thread_t *th, rb_control_frame_t *reg_cfp,
362 int num, ID id, VALUE recv, VALUE klass,
363 VALUE flag, const NODE *mn, const rb_block_t *blockptr)
365 VALUE val;
367 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, recv, id, klass);
369 rb_control_frame_t *cfp =
370 vm_push_frame(th, 0, VM_FRAME_MAGIC_CFUNC,
371 recv, (VALUE) blockptr, 0, reg_cfp->sp, 0, 1);
/* record callee identity on the frame for caller('s) introspection */
373 cfp->method_id = id;
374 cfp->method_class = klass;
/* pop receiver and num arguments from the caller's value stack */
376 reg_cfp->sp -= num + 1;
378 val = call_cfunc(mn->nd_cfnc, recv, mn->nd_argc, num, reg_cfp->sp + 1);
/* the C function must not have left extra frames behind */
380 if (reg_cfp != th->cfp + 1) {
381 rb_bug("cfp consistency error - send");
384 vm_pop_frame(th);
386 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, recv, id, klass);
388 return val;
/*
 * vm_call_bmethod: invoke a method defined via define_method (its body is a
 * Proc, `procval`).  Tags the underlying control frame two slots up with the
 * method id/class (so it reports as a method call, not a block), then runs
 * the proc with the given receiver and arguments.
 */
391 static inline VALUE
392 vm_call_bmethod(rb_thread_t *th, ID id, VALUE procval, VALUE recv,
393 VALUE klass, int argc, VALUE *argv, rb_block_t *blockptr)
395 rb_control_frame_t *cfp = th->cfp;
396 rb_proc_t *proc;
397 VALUE val;
399 /* control block frame */
400 (cfp-2)->method_id = id;
401 (cfp-2)->method_class = klass;
403 GetProcPtr(procval, proc);
404 val = vm_invoke_proc(th, proc, recv, argc, argv, blockptr);
405 return val;
/*
 * vm_method_missing: funnel an unresolved call into recv.method_missing.
 * Overwrites the receiver slot on the stack with the method name symbol so
 * argv becomes [sym, original args...], records why dispatch failed (`opt`,
 * a NOEX_*/TAG mask) in th->method_missing_reason, and passes the caller's
 * block through th->passed_block.  Pops the consumed stack slots afterwards.
 */
408 static inline VALUE
409 vm_method_missing(rb_thread_t *th, ID id, VALUE recv,
410 int num, rb_block_t *blockptr, int opt)
412 rb_control_frame_t * const reg_cfp = th->cfp;
413 VALUE *argv = STACK_ADDR_FROM_TOP(num + 1);
414 VALUE val;
415 argv[0] = ID2SYM(id);
416 th->method_missing_reason = opt;
417 th->passed_block = blockptr;
418 val = rb_funcall2(recv, idMethodMissing, num + 1, argv);
419 POPN(num + 1);
420 return val;
/*
 * vm_setup_method: prepare and push the frame for an ISeq-defined method.
 * Arguments already sit on the stack at rsp; VM_CALLEE_SETUP_ARG reshapes
 * them into the callee's locals and yields opt_pc (entry offset).  Normal
 * calls push a METHOD frame above the args; tail calls (VM_CALL_TAILCALL_BIT)
 * first pop the current frame and slide the arguments down into its place so
 * the stack does not grow.
 */
423 static inline void
424 vm_setup_method(rb_thread_t *th, rb_control_frame_t *cfp,
425 const int argc, const rb_block_t *blockptr, const VALUE flag,
426 const VALUE iseqval, const VALUE recv, const VALUE klass)
428 rb_iseq_t *iseq;
429 int opt_pc, i;
430 VALUE *sp, *rsp = cfp->sp - argc;
432 /* TODO: eliminate it */
433 GetISeqPtr(iseqval, iseq);
434 VM_CALLEE_SETUP_ARG(opt_pc, th, iseq, argc, rsp, &blockptr);
436 /* stack overflow check */
437 CHECK_STACK_OVERFLOW(cfp, iseq->stack_max);
439 sp = rsp + iseq->arg_size;
441 if (LIKELY(!(flag & VM_CALL_TAILCALL_BIT))) {
442 if (0) printf("local_size: %d, arg_size: %d\n",
443 iseq->local_size, iseq->arg_size);
445 /* clear local variables */
446 for (i = 0; i < iseq->local_size - iseq->arg_size; i++) {
447 *sp++ = Qnil;
450 vm_push_frame(th, iseq,
451 VM_FRAME_MAGIC_METHOD, recv, (VALUE) blockptr,
452 iseq->iseq_encoded + opt_pc, sp, 0, 0);
/* caller's sp retreats past args and the receiver slot */
454 cfp->sp = rsp - 1 /* recv */;
456 else {
457 VALUE *p_rsp;
/* tail call: discard the current frame and reuse its stack region */
458 th->cfp++; /* pop cf */
459 p_rsp = th->cfp->sp;
461 /* copy arguments */
462 for (i=0; i < (sp - rsp); i++) {
463 p_rsp[i] = rsp[i];
/* rebase sp onto the reused (lower) stack position */
466 sp -= rsp - p_rsp;
468 /* clear local variables */
469 for (i = 0; i < iseq->local_size - iseq->arg_size; i++) {
470 *sp++ = Qnil;
473 vm_push_frame(th, iseq,
474 VM_FRAME_MAGIC_METHOD, recv, (VALUE) blockptr,
475 iseq->iseq_encoded + opt_pc, sp, 0, 0);
/*
 * vm_call_method: core method dispatch.  `mn` is the resolved method node
 * (or 0 if lookup failed).  Dispatches on the method body type: ISeq methods
 * return Qundef so the interpreter loop continues in the new frame; CFUNC,
 * attr reader/writer, bmethod etc. are executed here and their value
 * returned.  Visibility (private/protected), $SAFE level, and zsuper
 * re-resolution are handled in the `else` arm; unresolvable calls fall
 * through to method_missing with the appropriate reason flags.
 */
480 static inline VALUE
481 vm_call_method(rb_thread_t * const th, rb_control_frame_t * const cfp,
482 const int num, rb_block_t * const blockptr, const VALUE flag,
483 const ID id, const NODE * mn, const VALUE recv, VALUE klass)
484 VALUE val;
486 start_method_dispatch:
488 if (mn != 0) {
/* nd_noex == 0: public method with default flags — fast path */
489 if ((mn->nd_noex == 0)) {
490 /* dispatch method */
491 NODE *node;
493 normal_method_dispatch:
495 node = mn->nd_body;
497 switch (nd_type(node)) {
498 case RUBY_VM_METHOD_NODE:{
/* frame pushed; interpreter continues in callee (hence Qundef) */
499 vm_setup_method(th, cfp, num, blockptr, flag, (VALUE)node->nd_body, recv, klass);
500 return Qundef;
502 case NODE_CFUNC:{
503 val = vm_call_cfunc(th, cfp, num, id, recv, mn->nd_clss, flag, node, blockptr);
504 break;
/* attr_writer: store top-of-stack into the ivar */
506 case NODE_ATTRSET:{
507 val = rb_ivar_set(recv, node->nd_vid, *(cfp->sp - 1));
508 cfp->sp -= 2;
509 break;
/* attr_reader */
511 case NODE_IVAR:{
512 if (num != 0) {
513 rb_raise(rb_eArgError, "wrong number of arguments (%d for 0)",
514 num);
516 val = rb_attr_get(recv, node->nd_vid);
517 cfp->sp -= 1;
518 break;
520 case NODE_BMETHOD:{
521 VALUE *argv = cfp->sp - num;
522 val = vm_call_bmethod(th, id, node->nd_cval, recv, klass, num, argv, blockptr);
523 cfp->sp += - num - 1;
524 break;
/* zsuper: re-resolve the method in the superclass and retry */
526 case NODE_ZSUPER:{
527 klass = RCLASS_SUPER(mn->nd_clss);
528 mn = rb_method_node(klass, id);
530 if (mn != 0) {
531 goto normal_method_dispatch;
533 else {
534 goto start_method_dispatch;
537 default:{
538 printf("node: %s\n", ruby_node_name(nd_type(node)));
539 rb_bug("eval_invoke_method: unreachable");
540 /* unreachable */
541 break;
/* method found but has visibility/safety flags to check */
545 else {
546 int noex_safe;
/* private: only callable in function-call style (no explicit receiver) */
548 if (!(flag & VM_CALL_FCALL_BIT) &&
549 (mn->nd_noex & NOEX_MASK) & NOEX_PRIVATE) {
550 int stat = NOEX_PRIVATE;
552 if (flag & VM_CALL_VCALL_BIT) {
553 stat |= NOEX_VCALL;
555 val = vm_method_missing(th, id, recv, num, blockptr, stat);
/* protected: caller's self must be kind_of the defining class */
557 else if (((mn->nd_noex & NOEX_MASK) & NOEX_PROTECTED) &&
558 !(flag & VM_CALL_SEND_BIT)) {
559 VALUE defined_class = mn->nd_clss;
561 if (TYPE(defined_class) == T_ICLASS) {
562 defined_class = RBASIC(defined_class)->klass;
565 if (!rb_obj_is_kind_of(cfp->self, rb_class_real(defined_class))) {
566 val = vm_method_missing(th, id, recv, num, blockptr, NOEX_PROTECTED);
568 else {
569 goto normal_method_dispatch;
/* $SAFE check: refuse insecure methods above the recorded safe level */
572 else if ((noex_safe = NOEX_SAFE(mn->nd_noex)) > th->safe_level &&
573 (noex_safe > 2)) {
574 rb_raise(rb_eSecurityError, "calling insecure method: %s", rb_id2name(id));
576 else {
577 goto normal_method_dispatch;
581 else {
582 /* method missing */
583 if (id == idMethodMissing) {
584 rb_bug("method missing");
586 else {
587 int stat = 0;
588 if (flag & VM_CALL_VCALL_BIT) {
589 stat |= NOEX_VCALL;
591 if (flag & VM_CALL_SUPER_BIT) {
592 stat |= NOEX_SUPER;
594 val = vm_method_missing(th, id, recv, num, blockptr, stat);
598 RUBY_VM_CHECK_INTS();
599 return val;
/*
 * vm_send_optimize: specialize `recv.send(:sym, args...)` at the call site.
 * When the resolved method is the C implementation of Kernel#send, rewrite
 * the call in place: extract the symbol, shift the remaining arguments down
 * one stack slot, re-resolve the real target method, decrement the arg
 * count, and mark the call FCALL (send ignores visibility).
 */
602 static inline void
603 vm_send_optimize(rb_control_frame_t * const reg_cfp, NODE ** const mn,
604 rb_num_t * const flag, rb_num_t * const num,
605 ID * const id, const VALUE klass)
607 if (*mn && nd_type((*mn)->nd_body) == NODE_CFUNC) {
608 NODE *node = (*mn)->nd_body;
609 extern VALUE rb_f_send(int argc, VALUE *argv, VALUE recv);
611 if (node->nd_cfnc == rb_f_send) {
/* first argument (deepest of the num args) is the method name */
612 int i = *num - 1;
613 VALUE sym = TOPN(i);
614 *id = SYMBOL_P(sym) ? SYM2ID(sym) : rb_to_id(sym);
616 /* shift arguments */
617 if (i > 0) {
618 MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
621 *mn = rb_method_node(klass, *id);
622 *num -= 1;
623 DEC_SP(1);
624 *flag |= VM_CALL_FCALL_BIT;
629 /* yield */
631 static inline int
632 block_proc_is_lambda(const VALUE procval)
634 rb_proc_t *proc;
636 if (procval) {
637 GetProcPtr(procval, proc);
638 return proc->is_lambda;
640 else {
641 return 0;
/*
 * vm_yield_with_cfunc: yield to a C-implemented block (an IFUNC node stored
 * where an iseq would normally be, e.g. from rb_iterate).  Lambda blocks get
 * all args packed into one array; plain blocks pass argv[0] (or nil for zero
 * args).  An IFUNC marker frame is pushed around the call and discarded by
 * bumping th->cfp afterwards.
 */
646 static inline VALUE
647 vm_yield_with_cfunc(rb_thread_t *th, const rb_block_t *block,
648 VALUE self, int argc, const VALUE *argv,
649 const rb_block_t *blockptr)
650 NODE *ifunc = (NODE *) block->iseq;
651 VALUE val, arg, blockarg;
652 int lambda = block_proc_is_lambda(block->proc);
654 if (lambda) {
/* lambda semantics: hand the callback the whole argument list as an Array */
655 arg = rb_ary_new4(argc, argv);
657 else if (argc == 0) {
658 arg = Qnil;
660 else {
661 arg = argv[0];
664 if (blockptr) {
/* materialize the block passed *to* the block as a Proc */
665 blockarg = vm_make_proc(th, th->cfp, blockptr);
667 else {
668 blockarg = Qnil;
671 vm_push_frame(th, 0, VM_FRAME_MAGIC_IFUNC,
672 self, (VALUE)block->dfp,
673 0, th->cfp->sp, block->lfp, 1);
675 val = (*ifunc->nd_cfnc) (arg, ifunc->nd_tval, argc, argv, blockarg);
/* pop the IFUNC frame */
677 th->cfp++;
678 return val;
/*
 * vm_yield_setup_args: arrange arguments for entering a block.  Lambdas use
 * strict method-style setup (VM_CALLEE_SETUP_ARG).  Plain blocks use yield
 * semantics: a single Array argument is auto-splatted when the block takes
 * more than one parameter, missing params become nil, excess args are
 * truncated (no rest param) or collected into the rest Array, post params
 * are placed, and the &block param is bound.  Returns the opt_pc entry
 * offset (always 0 for non-lambda blocks).
 */
681 static inline int
682 vm_yield_setup_args(rb_thread_t * const th, const rb_iseq_t *iseq,
683 int orig_argc, VALUE *argv,
684 const rb_block_t *blockptr, int lambda)
686 if (0) { /* for debug */
687 printf(" argc: %d\n", orig_argc);
688 printf("iseq argc: %d\n", iseq->argc);
689 printf("iseq opts: %d\n", iseq->arg_opts);
690 printf("iseq rest: %d\n", iseq->arg_rest);
691 printf("iseq post: %d\n", iseq->arg_post_len);
692 printf("iseq blck: %d\n", iseq->arg_block);
693 printf("iseq smpl: %d\n", iseq->arg_simple);
694 printf(" lambda: %s\n", lambda ? "true" : "false");
697 if (lambda) {
698 /* call as method */
699 int opt_pc;
700 VM_CALLEE_SETUP_ARG(opt_pc, th, iseq, orig_argc, argv, &blockptr);
701 return opt_pc;
703 else {
704 int i;
705 int argc = orig_argc;
706 const int m = iseq->argc;
708 th->mark_stack_len = argc;
/* auto-splat a single Array argument onto the parameter list:
711 * yield [1, 2]
712 * => {|a|} => a = [1, 2]
713 * => {|a, b|} => a, b = [1, 2]
 */
715 if (!(iseq->arg_simple & 0x02) &&
716 (m + iseq->arg_post_len) > 0 &&
717 argc == 1 && TYPE(argv[0]) == T_ARRAY) {
718 VALUE ary = argv[0];
719 th->mark_stack_len = argc = RARRAY_LEN(ary);
721 CHECK_STACK_OVERFLOW(th->cfp, argc);
723 MEMCPY(argv, RARRAY_PTR(ary), VALUE, argc);
/* nil-fill unsupplied mandatory params */
726 for (i=argc; i<m; i++) {
727 argv[i] = Qnil;
730 if (iseq->arg_rest == -1) {
731 if (m < argc) {
/* excess args with no rest param are silently dropped:
733 * yield 1, 2
734 * => {|a|} # truncate
 */
736 th->mark_stack_len = argc = m;
739 else {
740 int r = iseq->arg_rest;
742 if (iseq->arg_post_len) {
743 int len = iseq->arg_post_len;
744 int start = iseq->arg_post_start;
/* rsize = args beyond the mandatory ones; psize of them feed post params */
745 int rsize = argc > m ? argc - m : 0;
746 int psize = rsize;
747 VALUE ary;
749 if (psize > len) psize = len;
/* whatever post params don't consume becomes the rest Array */
751 ary = rb_ary_new4(rsize - psize, &argv[r]);
753 if (0) {
754 printf(" argc: %d\n", argc);
755 printf(" len: %d\n", len);
756 printf("start: %d\n", start);
757 printf("rsize: %d\n", rsize);
760 /* copy post argument */
761 MEMMOVE(&argv[start], &argv[r + rsize - psize], VALUE, psize);
763 for (i=psize; i<len; i++) {
764 argv[start + i] = Qnil;
766 argv[r] = ary;
768 else {
769 if (argc < r) {
770 /* yield 1
771 * => {|a, b, *r|}
 */
773 for (i=argc; i<r; i++) {
774 argv[i] = Qnil;
776 argv[r] = rb_ary_new();
778 else {
779 argv[r] = rb_ary_new4(argc-r, &argv[r]);
783 th->mark_stack_len = iseq->arg_size;
786 /* {|&b|} */
787 if (iseq->arg_block != -1) {
788 VALUE procval = Qnil;
790 if (blockptr) {
791 procval = blockptr->proc;
794 argv[iseq->arg_block] = procval;
797 th->mark_stack_len = 0;
798 return 0;
/*
 * vm_invoke_block: implement `yield`.  Fetches the block attached to the
 * current method frame (LocalJumpError if absent or yield used outside a
 * method), expands splat args, then either pushes a BLOCK frame for an
 * ISeq block (returning Qundef so the interpreter continues there) or
 * directly calls a C-implemented (IFUNC) block and returns its value.
 */
802 static VALUE
803 vm_invoke_block(rb_thread_t *th, rb_control_frame_t *reg_cfp, rb_num_t num, rb_num_t flag)
805 rb_block_t * const block = GET_BLOCK_PTR();
806 rb_iseq_t *iseq;
807 int argc = num;
809 if (GET_ISEQ()->local_iseq->type != ISEQ_TYPE_METHOD || block == 0) {
810 vm_localjump_error("no block given (yield)", Qnil, 0);
812 iseq = block->iseq;
/* expand any splat on the stack; no &block literal/blockarg for yield */
814 argc = caller_setup_args(th, GET_CFP(), flag, argc, 0, 0);
/* a T_NODE "iseq" marks a C-implemented (IFUNC) block */
816 if (BUILTIN_TYPE(iseq) != T_NODE) {
817 int opt_pc;
818 const int arg_size = iseq->arg_size;
819 VALUE * const rsp = GET_SP() - argc;
820 SET_SP(rsp);
822 CHECK_STACK_OVERFLOW(GET_CFP(), iseq->stack_max);
823 opt_pc = vm_yield_setup_args(th, iseq, argc, rsp, 0,
824 block_proc_is_lambda(block->proc));
826 vm_push_frame(th, iseq,
827 VM_FRAME_MAGIC_BLOCK, block->self, (VALUE) block->dfp,
828 iseq->iseq_encoded + opt_pc, rsp + arg_size, block->lfp,
829 iseq->local_size - arg_size);
831 return Qundef;
833 else {
834 VALUE val = vm_yield_with_cfunc(th, block, block->self, argc, STACK_ADDR_FROM_TOP(argc), 0);
835 POPN(argc); /* TODO: should put before C/yield? */
836 return val;
840 /* svar */
/*
 * lfp_svar_place: locate (lazily creating) the special-variable storage
 * (e.g. $_, $~) for a local frame.  Normal frames keep it in the slot just
 * below lfp; top-level/thread-root frames use th->local_svar.  The storage
 * is an abused NODE (NEW_IF supplies three VALUE slots: u1/u2/u3).
 */
842 static inline NODE *
843 lfp_svar_place(rb_thread_t *th, VALUE *lfp)
845 VALUE *svar;
847 if (lfp && th->local_lfp != lfp) {
848 svar = &lfp[-1];
850 else {
851 svar = &th->local_svar;
853 if (NIL_P(*svar)) {
/* NODE used purely as a GC-visible 3-slot container, not as an AST node */
854 *svar = (VALUE)NEW_IF(Qnil, Qnil, Qnil);
856 return (NODE *)*svar;
/*
 * lfp_svar_get: read a frame-local special variable.  key 0 and 1 are the
 * two fixed slots (u1/u2, used for $_ and $~); any other key is looked up
 * in the auxiliary hash in u3 (nil → Qnil).
 */
859 static VALUE
860 lfp_svar_get(rb_thread_t *th, VALUE *lfp, VALUE key)
862 NODE *svar = lfp_svar_place(th, lfp);
864 switch (key) {
865 case 0:
866 return svar->u1.value;
867 case 1:
868 return svar->u2.value;
869 default: {
870 const VALUE hash = svar->u3.value;
872 if (hash == Qnil) {
873 return Qnil;
875 else {
876 return rb_hash_lookup(hash, key);
/*
 * lfp_svar_set: write a frame-local special variable; mirror of
 * lfp_svar_get.  keys 0/1 go to the fixed slots, anything else into the
 * auxiliary hash (created on first use).
 */
882 static void
883 lfp_svar_set(rb_thread_t *th, VALUE *lfp, VALUE key, VALUE val)
885 NODE *svar = lfp_svar_place(th, lfp);
887 switch (key) {
888 case 0:
889 svar->u1.value = val;
890 return;
891 case 1:
892 svar->u2.value = val;
893 return;
894 default: {
895 VALUE hash = svar->u3.value;
897 if (hash == Qnil) {
898 svar->u3.value = hash = rb_hash_new();
900 rb_hash_aset(hash, key, val);
/*
 * vm_getspecial: read a special variable for the getspecial instruction.
 * type == 0: plain svar lookup by key.  Otherwise the low bit of `type`
 * selects between named back-references ($&, $`, $', $+ — character code in
 * type >> 1) and numbered match groups ($1.. — group number in type >> 1),
 * both resolved against the frame's $~ (svar slot 1).
 */
905 static inline VALUE
906 vm_getspecial(rb_thread_t *th, VALUE *lfp, VALUE key, rb_num_t type)
908 VALUE val;
910 if (type == 0) {
911 VALUE k = key;
912 if (FIXNUM_P(key)) {
913 k = FIX2INT(key);
915 val = lfp_svar_get(th, lfp, k);
917 else {
/* $~ (last MatchData) lives in svar slot 1 */
918 VALUE backref = lfp_svar_get(th, lfp, 1);
920 if (type & 0x01) {
921 switch (type >> 1) {
922 case '&':
923 val = rb_reg_last_match(backref);
924 break;
925 case '`':
926 val = rb_reg_match_pre(backref);
927 break;
928 case '\'':
929 val = rb_reg_match_post(backref);
930 break;
931 case '+':
932 val = rb_reg_match_last(backref);
933 break;
934 default:
935 rb_bug("unexpected back-ref");
938 else {
939 val = rb_reg_nth_match(type >> 1, backref);
942 return val;
/*
 * vm_get_cref: find the cref (lexical class/module nesting) for the current
 * execution context.  Walks the dynamic frame chain outward: a cref cached
 * in a frame's dfp[-1] slot wins; reaching the local frame (lfp == dfp)
 * falls back to the iseq's statically recorded cref_stack.
 */
945 static NODE *
946 vm_get_cref(const rb_iseq_t *iseq, const VALUE *lfp, const VALUE *dfp)
948 NODE *cref = 0;
950 while (1) {
951 if (lfp == dfp) {
952 cref = iseq->cref_stack;
953 break;
955 else if (dfp[-1] != Qnil) {
956 cref = (NODE *)dfp[-1];
957 break;
959 dfp = GET_PREV_DFP(dfp);
962 if (cref == 0) {
963 rb_bug("vm_get_cref: unreachable");
965 return cref;
969 static inline void
970 vm_check_if_namespace(VALUE klass)
972 switch (TYPE(klass)) {
973 case T_CLASS:
974 case T_MODULE:
975 break;
976 default:
977 rb_raise(rb_eTypeError, "%s is not a class/module",
978 RSTRING_PTR(rb_inspect(klass)));
/*
 * vm_get_ev_const: resolve a constant for the VM.  With orig_klass == Qnil,
 * search the current lexical scope (cref chain) first — honoring autoload
 * stubs (Qundef triggers rb_autoload_load and a retry) — then fall back to
 * the ancestry of the innermost cref class (or the receiver's class at
 * top level).  With an explicit orig_klass, look up in that namespace.
 * When is_defined is nonzero, return a truthy "is it defined?" answer
 * instead of the constant's value.
 */
983 static inline VALUE
984 vm_get_ev_const(rb_thread_t *th, const rb_iseq_t *iseq,
985 VALUE orig_klass, ID id, int is_defined)
986 VALUE val;
988 if (orig_klass == Qnil) {
989 /* in current lexical scope */
990 const NODE *root_cref = vm_get_cref(iseq, th->cfp->lfp, th->cfp->dfp);
991 const NODE *cref = root_cref;
992 VALUE klass = orig_klass;
/* walk outward through the lexical nesting, innermost first */
994 while (cref && cref->nd_next) {
995 klass = cref->nd_clss;
996 cref = cref->nd_next;
998 if (!NIL_P(klass)) {
999 search_continue:
1000 if (RCLASS_IV_TBL(klass) &&
1001 st_lookup(RCLASS_IV_TBL(klass), id, &val)) {
/* Qundef marks an autoload stub: load the file and look again */
1002 if (val == Qundef) {
1003 rb_autoload_load(klass, id);
1004 goto search_continue;
1006 else {
1007 if (is_defined) {
1008 return 1;
1010 else {
1011 return val;
1018 /* search self */
1019 klass = root_cref->nd_clss;
1020 if (NIL_P(klass)) {
1021 klass = CLASS_OF(th->cfp->self);
1024 if (is_defined) {
1025 return rb_const_defined(klass, id);
1027 else {
1028 return rb_const_get(klass, id);
1031 else {
1032 vm_check_if_namespace(orig_klass);
1033 if (is_defined) {
1034 return rb_const_defined_from(orig_klass, id);
1036 else {
1037 return rb_const_get_from(orig_klass, id);
/*
 * vm_get_cvar_base: determine which class owns class-variable (@@var)
 * access in the current lexical scope.  Skips cref entries whose class is
 * nil or a singleton class; warns on top-level access; raises TypeError if
 * no usable class remains.
 */
1042 static inline VALUE
1043 vm_get_cvar_base(NODE *cref)
1045 VALUE klass;
1047 while (cref && cref->nd_next &&
1048 (NIL_P(cref->nd_clss) || FL_TEST(cref->nd_clss, FL_SINGLETON))) {
1049 cref = cref->nd_next;
1051 if (!cref->nd_next) {
1052 rb_warn("class variable access from toplevel");
1056 klass = cref->nd_clss;
1058 if (NIL_P(klass)) {
1059 rb_raise(rb_eTypeError, "no class variables available");
1061 return klass;
/*
 * vm_define_method: implement the definemethod instruction.  Registers
 * miseq as method `id` on the cref's class (or on obj's singleton class for
 * `def obj.m`), inheriting visibility from the cref.  module_function
 * additionally installs a public copy on the singleton class.  Bumps the
 * VM state version to invalidate inline method caches.
 */
1064 static inline void
1065 vm_define_method(rb_thread_t *th, VALUE obj, ID id, rb_iseq_t *miseq,
1066 rb_num_t is_singleton, NODE *cref)
1068 NODE *newbody;
1069 VALUE klass = cref->nd_clss;
1070 int noex = cref->nd_visi;
1072 if (NIL_P(klass)) {
1073 rb_raise(rb_eTypeError, "no class/module to add method");
1076 if (is_singleton) {
/* immediates cannot carry singleton classes */
1077 if (FIXNUM_P(obj) || SYMBOL_P(obj)) {
1078 rb_raise(rb_eTypeError,
1079 "can't define singleton method \"%s\" for %s",
1080 rb_id2name(id), rb_obj_classname(obj));
1083 if (OBJ_FROZEN(obj)) {
1084 rb_error_frozen("object");
1087 klass = rb_singleton_class(obj);
1088 noex = NOEX_PUBLIC;
1091 /* dup */
1092 COPY_CREF(miseq->cref_stack, cref);
1093 miseq->klass = klass;
1094 miseq->defined_method_id = id;
1095 newbody = NEW_NODE(RUBY_VM_METHOD_NODE, 0, miseq->self, 0);
1096 rb_add_method(klass, id, newbody, noex);
/* module_function: also expose a public singleton-class copy */
1098 if (!is_singleton && noex == NOEX_MODFUNC) {
1099 rb_add_method(rb_singleton_class(klass), id, newbody, NOEX_PUBLIC);
1101 INC_VM_STATE_VERSION();
/*
 * vm_method_search: method lookup with a per-callsite inline cache (IC).
 * The cache hit requires both the receiver class and the global VM state
 * version to match; otherwise do a full rb_method_node lookup and refill
 * the cache.  Compiled without caching when OPT_INLINE_METHOD_CACHE is off.
 */
1104 static inline NODE *
1105 vm_method_search(VALUE id, VALUE klass, IC ic)
1107 NODE *mn;
1109 #if OPT_INLINE_METHOD_CACHE
1111 if (LIKELY(klass == ic->ic_class) &&
1112 LIKELY(GET_VM_STATE_VERSION() == ic->ic_vmstat)) {
1113 mn = ic->ic_method;
1115 else {
1116 mn = rb_method_node(klass, id);
1117 ic->ic_class = klass;
1118 ic->ic_method = mn;
1119 ic->ic_vmstat = GET_VM_STATE_VERSION();
1122 #else
1123 mn = rb_method_node(klass, id);
1124 #endif
1125 return mn;
/*
 * vm_search_normal_superclass: starting point for `super` dispatch.  For a
 * Class, simply its superclass.  For a Module, find the module's include
 * point (its ICLASS proxy) in the receiver's ancestry and take the next
 * ancestor after it.
 */
1128 static inline VALUE
1129 vm_search_normal_superclass(VALUE klass, VALUE recv)
1131 if (BUILTIN_TYPE(klass) == T_CLASS) {
1132 klass = RCLASS_SUPER(klass);
1134 else if (BUILTIN_TYPE(klass) == T_MODULE) {
1135 VALUE k = CLASS_OF(recv);
1136 while (k) {
/* ICLASS whose klass field points back at the module = its include proxy */
1137 if (BUILTIN_TYPE(k) == T_ICLASS && RBASIC(k)->klass == klass) {
1138 klass = RCLASS_SUPER(k);
1139 break;
1141 k = RCLASS_SUPER(k);
1144 return klass;
/*
 * vm_search_superclass: compute the method id and class where `super`
 * should continue the search; results via *idp / *klassp.  Walks up parent
 * iseqs to the enclosing method.  For define_method-created methods, finds
 * the defining control frame by matching dfp chains and uses its recorded
 * method_id/method_class; zsuper (implicit args, sigval == Qfalse) is
 * rejected there because the original arguments are unavailable.
 */
1147 static void
1148 vm_search_superclass(rb_control_frame_t *reg_cfp, rb_iseq_t *ip,
1149 VALUE recv, VALUE sigval,
1150 ID *idp, VALUE *klassp)
1152 ID id;
1153 VALUE klass;
/* climb to the nearest enclosing iseq that has a class (method body) */
1155 while (ip && !ip->klass) {
1156 ip = ip->parent_iseq;
1159 if (ip == 0) {
1160 rb_raise(rb_eNoMethodError, "super called outside of method");
1163 id = ip->defined_method_id;
1165 if (ip != ip->local_iseq) {
1166 /* defined by Module#define_method() */
1167 rb_control_frame_t *lcfp = GET_CFP();
/* walk frames until we find the one executing ip, via its dfp chain */
1169 while (lcfp->iseq != ip) {
1170 VALUE *tdfp = GET_PREV_DFP(lcfp->dfp);
1171 while (1) {
1172 lcfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(lcfp);
1173 if (lcfp->dfp == tdfp) {
1174 break;
1179 id = lcfp->method_id;
1180 klass = vm_search_normal_superclass(lcfp->method_class, recv);
1182 if (sigval == Qfalse) {
1183 /* zsuper */
1184 rb_raise(rb_eRuntimeError, "implicit argument passing of super from method defined by define_method() is not supported. Specify all arguments explicitly.");
1187 else {
1188 klass = vm_search_normal_superclass(ip->klass, recv);
1191 *idp = id;
1192 *klassp = klass;
/*
 * vm_throw: implement the throw instruction (break/return/retry/next/redo
 * and exception re-raise).  throw_state packs: low byte = tag (TAG_BREAK,
 * TAG_RETURN, ...), bit 15 = "raise" flag, bits 16+ = nesting level.
 * state != 0: locate the catch point `pt` (a dfp, or (void*)1 for raise),
 * validate that it is still live on the stack (else LocalJumpError), set
 * th->state and return a THROW_OBJECT node the VM unwinds with.
 * state == 0: continue propagating an in-flight throw object `throwobj`,
 * recovering the tag from it.
 */
1195 static VALUE
1196 vm_throw(rb_thread_t *th, rb_control_frame_t *reg_cfp,
1197 rb_num_t throw_state, VALUE throwobj)
1199 rb_num_t state = throw_state & 0xff;
1200 rb_num_t flag = throw_state & 0x8000;
1201 rb_num_t level = throw_state >> 16;
1203 if (state != 0) {
1204 VALUE *pt = 0;
1205 int i;
1206 if (flag != 0) {
/* raise: sentinel target, handled by the exception machinery */
1207 pt = (void *) 1;
1209 else {
1210 if (state == TAG_BREAK) {
1211 rb_control_frame_t *cfp = GET_CFP();
1212 VALUE *dfp = GET_DFP();
1213 int is_orphan = 1;
1214 rb_iseq_t *base_iseq = GET_ISEQ();
/* climb block nesting until we reach the frame break targets */
1216 search_parent:
1217 if (cfp->iseq->type != ISEQ_TYPE_BLOCK) {
1218 dfp = GC_GUARDED_PTR_REF((VALUE *) *dfp);
1219 base_iseq = base_iseq->parent_iseq;
1221 while ((VALUE *) cfp < th->stack + th->stack_size) {
1222 if (cfp->dfp == dfp) {
1223 goto search_parent;
1225 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
1227 rb_bug("VM (throw): can't find break base.");
1230 if (VM_FRAME_TYPE(cfp) == VM_FRAME_MAGIC_LAMBDA) {
1231 /* lambda{... break ...} */
/* break inside a lambda behaves as return from the lambda */
1232 is_orphan = 0;
1233 pt = cfp->dfp;
1234 state = TAG_RETURN;
1236 else {
1237 dfp = GC_GUARDED_PTR_REF((VALUE *) *dfp);
/* target frame must still be live and have a BREAK catch entry
 * covering the current pc (otherwise the block escaped: orphan) */
1239 while ((VALUE *)cfp < th->stack + th->stack_size) {
1240 if (cfp->dfp == dfp) {
1241 VALUE epc = epc = cfp->pc - cfp->iseq->iseq_encoded;
1242 rb_iseq_t *iseq = cfp->iseq;
1243 int i;
1245 for (i=0; i<iseq->catch_table_size; i++) {
1246 struct iseq_catch_table_entry *entry = &iseq->catch_table[i];
1248 if (entry->type == CATCH_TYPE_BREAK &&
1249 entry->start < epc && entry->end >= epc) {
1250 if (entry->cont == epc) {
1251 goto found;
1253 else {
1254 break;
1258 break;
1260 found:
1261 pt = dfp;
1262 is_orphan = 0;
1263 break;
1265 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
1269 if (is_orphan) {
1270 vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
1273 else if (state == TAG_RETRY) {
/* walk `level` dfp links outward to the retry target */
1274 pt = GC_GUARDED_PTR_REF((VALUE *) * GET_DFP());
1275 for (i = 0; i < level; i++) {
1276 pt = GC_GUARDED_PTR_REF((VALUE *) * pt);
1279 else if (state == TAG_RETURN) {
1280 rb_control_frame_t *cfp = GET_CFP();
1281 VALUE *dfp = GET_DFP();
1282 VALUE * const lfp = GET_LFP();
1284 /* check orphan and get dfp */
1285 while ((VALUE *) cfp < th->stack + th->stack_size) {
1286 if (cfp->lfp == lfp) {
1287 if (VM_FRAME_TYPE(cfp) == VM_FRAME_MAGIC_LAMBDA) {
1288 VALUE *tdfp = dfp;
/* return from inside a lambda targets the lambda frame itself */
1290 while (lfp != tdfp) {
1291 if (cfp->dfp == tdfp) {
1292 /* in lambda */
1293 dfp = cfp->dfp;
1294 goto valid_return;
1296 tdfp = GC_GUARDED_PTR_REF((VALUE *)*dfp);
1301 if (cfp->dfp == lfp && cfp->iseq->type == ISEQ_TYPE_METHOD) {
1302 dfp = lfp;
1303 goto valid_return;
1306 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
/* enclosing method frame already gone: the proc outlived it */
1309 vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
1311 valid_return:
1312 pt = dfp;
1314 else {
1315 rb_bug("isns(throw): unsupport throw type");
1318 th->state = state;
1319 return (VALUE)NEW_THROW_OBJECT(throwobj, (VALUE) pt, state);
1321 else {
1322 /* continue throw */
1323 VALUE err = throwobj;
1325 if (FIXNUM_P(err)) {
1326 th->state = FIX2INT(err);
1328 else if (SYMBOL_P(err)) {
1329 th->state = TAG_THROW;
1331 else if (BUILTIN_TYPE(err) == T_NODE) {
1332 th->state = GET_THROWOBJ_STATE(err);
1334 else {
1335 th->state = TAG_RAISE;
1336 /*th->state = FIX2INT(rb_ivar_get(err, idThrowState));*/
1338 return err;
/*
 * vm_expandarray: implement the expandarray instruction (multiple
 * assignment).  Spreads `num` elements of ary onto the value stack, plus an
 * extra slot for the splat remainder when flag bit 0 is set.  flag bit 1
 * selects "post" order (elements taken from the tail, pushed reversed,
 * splat gets the head) versus normal order (splat gets the tail).  Missing
 * elements become nil; non-arrays are coerced via rb_ary_to_ary.
 */
1342 static inline void
1343 vm_expandarray(rb_control_frame_t *cfp, VALUE ary, int num, int flag)
1345 int is_splat = flag & 0x01;
1346 int space_size = num + is_splat;
1347 VALUE *base = cfp->sp, *ptr;
/* volatile local keeps the (possibly coerced) array alive for GC */
1348 volatile VALUE tmp_ary;
1349 int len;
1351 if (TYPE(ary) != T_ARRAY) {
1352 ary = rb_ary_to_ary(ary);
1355 cfp->sp += space_size;
1357 tmp_ary = ary;
1358 ptr = RARRAY_PTR(ary);
1359 len = RARRAY_LEN(ary);
1361 if (flag & 0x02) {
1362 /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
1363 int i = 0, j;
1365 if (len < num) {
/* pad with nils when the array is shorter than requested */
1366 for (i=0; i<num-len; i++) {
1367 *base++ = Qnil;
1370 for (j=0; i<num; i++, j++) {
1371 VALUE v = ptr[len - j - 1];
1372 *base++ = v;
1374 if (is_splat) {
/* splat receives everything before the consumed tail */
1375 *base = rb_ary_new4(len - j, ptr);
1378 else {
1379 /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
1380 int i;
1381 VALUE *bptr = &base[space_size - 1];
/* fill deepest-first so element 0 ends up on top */
1383 for (i=0; i<num; i++) {
1384 if (len <= i) {
1385 for (; i<num; i++) {
1386 *bptr-- = Qnil;
1388 break;
1390 *bptr-- = ptr[i];
1392 if (is_splat) {
1393 if (num > len) {
1394 *bptr = rb_ary_new();
1396 else {
1397 *bptr = rb_ary_new4(len - num, ptr + num);
1403 static inline int
1404 check_cfunc(const NODE *mn, const void *func)
1406 if (mn && nd_type(mn->nd_body) == NODE_CFUNC &&
1407 mn->nd_body->nd_cfnc == func) {
1408 return 1;
1410 else {
1411 return 0;
/*
 * opt_eq_func: fast path for the opt_eq instruction (recv == obj).
 * Specializes Fixnum==Fixnum (pointer compare), Float==Float (with NaN
 * handling) and String==String, each guarded by a "not redefined" check on
 * BOP_EQ.  Objects whose #== is the default rb_obj_equal compare by
 * identity.  Returns Qundef when no fast path applies, signalling the
 * caller to fall back to a full method dispatch.
 */
1415 static
1416 #ifndef NO_BIG_INLINE
1417 inline
1418 #endif
1419 VALUE
1420 opt_eq_func(VALUE recv, VALUE obj, IC ic)
1422 VALUE val = Qundef;
1424 if (FIXNUM_2_P(recv, obj) &&
1425 BASIC_OP_UNREDEFINED_P(BOP_EQ)) {
/* Fixnums are immediates: equal values have equal VALUE bits */
1426 if (recv == obj) {
1427 val = Qtrue;
1429 else {
1430 val = Qfalse;
1433 else if (!SPECIAL_CONST_P(recv) && !SPECIAL_CONST_P(obj)) {
1434 if (HEAP_CLASS_OF(recv) == rb_cFloat &&
1435 HEAP_CLASS_OF(obj) == rb_cFloat &&
1436 BASIC_OP_UNREDEFINED_P(BOP_EQ)) {
1437 double a = RFLOAT_VALUE(recv);
1438 double b = RFLOAT_VALUE(obj);
/* NaN != NaN, per IEEE 754 */
1440 if (isnan(a) || isnan(b)) {
1441 val = Qfalse;
1443 else if (a == b) {
1444 val = Qtrue;
1446 else {
1447 val = Qfalse;
1450 else if (HEAP_CLASS_OF(recv) == rb_cString &&
1451 HEAP_CLASS_OF(obj) == rb_cString &&
1452 BASIC_OP_UNREDEFINED_P(BOP_EQ)) {
1453 val = rb_str_equal(recv, obj);
1455 else {
/* default Object#== compares identity; use the inline cache to check */
1456 NODE *mn = vm_method_search(idEq, CLASS_OF(recv), ic);
1457 extern VALUE rb_obj_equal(VALUE obj1, VALUE obj2);
1459 if (check_cfunc(mn, rb_obj_equal)) {
1460 return recv == obj ? Qtrue : Qfalse;
1465 return val;