/**********************************************************************

  blockinlining.c -

  $Author$

  Copyright (C) 2004-2007 Koichi Sasada

**********************************************************************/

#include "ruby/ruby.h"
#include "ruby/node.h"
#include "vm_core.h"
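
/*
 * Overview (a sketch based on the code below, not a statement of the
 * original design notes):
 *
 * When OPT_BLOCKINLINING is enabled, iseq_special_block() builds -- via one
 * of the build_*_node() callbacks -- a specialized iseq whose body is an
 * open-coded `while` loop wrapped around the original block body, and caches
 * the result on the block's iseq.  The invoke_*_special_block() helpers then
 * swap the caller's block for that specialized iseq and drive it with
 * rb_yield_values().  They return Qundef when the optimization does not
 * apply, which the C implementations of Integer#times, Range#each and
 * Array#each are presumably expected to treat as "fall back to the ordinary
 * iteration loop".
 */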
static VALUE
iseq_special_block(rb_iseq_t *iseq, void *builder)
{
#if OPT_BLOCKINLINING
    VALUE parent = Qfalse;
    VALUE iseqval;

    if (iseq->argc > 1 || iseq->arg_simple == 0) {
        /* argument check */
        return 0;
    }

    if (iseq->cached_special_block_builder) {
        if (iseq->cached_special_block_builder == builder) {
            return iseq->cached_special_block;
        }
        else {
            return 0;
        }
    }
    else {
        iseq->cached_special_block_builder = (void *)1;
    }

    if (iseq->parent_iseq) {
        parent = iseq->parent_iseq->self;
    }

    iseqval = rb_iseq_new_with_bopt(iseq->node, iseq->name, iseq->filename,
                                    parent, iseq->type,
                                    GC_GUARDED_PTR(builder));
    if (0) {
        printf("%s\n", RSTRING_PTR(ruby_iseq_disasm(iseqval)));
    }
    iseq->cached_special_block = iseqval;
    iseq->cached_special_block_builder = builder;
    return iseqval;
#else
    return 0;
#endif
}
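
/*
 * The helpers below build the AST fragments used by the build_*_node()
 * builders: new_block() chains two statements into a NODE_BLOCK sequence,
 * and new_ary() conses `head` onto the argument list `tail` as a NODE_ARRAY,
 * so new_ary(NEW_DVAR(v), 0) is the one-element argument list used for the
 * generated `<`, `<=` and `[]` calls.
 */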
static NODE *
new_block(NODE * head, NODE * tail)
{
    head = NEW_BLOCK(head);
    tail = NEW_BLOCK(tail);
    head->nd_next = tail;
    return head;
}
static NODE *
new_ary(NODE * head, NODE * tail)
{
    head = NEW_ARRAY(head);
    head->nd_next = tail;
    return head;
}
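
/*
 * new_assign() rebuilds "store the current element into the block
 * parameter" for whatever lvalue form the original parameter had: a local
 * (NODE_LASGN), a global (NODE_GASGN) or a block-local (NODE_DASGN)
 * variable, or an attribute writer (NODE_ATTRASGN), the last presumably
 * covering 1.8-style block parameters such as `ary.each {|obj.attr| ... }`
 * (an assumption about the intended use; only these node types are
 * handled).  Any other node type is treated as an inliner bug via rb_bug().
 */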
static NODE *
new_assign(NODE * lnode, NODE * rhs)
{
    switch (nd_type(lnode)) {
      case NODE_LASGN:{
          return NEW_NODE(NODE_LASGN, lnode->nd_vid, rhs, lnode->nd_cnt);
          /* NEW_LASGN(lnode->nd_vid, rhs); */
      }
      case NODE_GASGN:{
          return NEW_GASGN(lnode->nd_vid, rhs);
      }
      case NODE_DASGN:{
          return NEW_DASGN(lnode->nd_vid, rhs);
      }
      case NODE_ATTRASGN:{
          NODE *args = 0;
          if (lnode->nd_args) {
              args = NEW_ARRAY(lnode->nd_args->nd_head);
              args->nd_next = NEW_ARRAY(rhs);
              args->nd_alen = 2;
          }
          else {
              args = NEW_ARRAY(rhs);
          }
          return NEW_ATTRASGN(lnode->nd_recv,
                              lnode->nd_mid,
                              args);
      }
      default:
        rb_bug("unimplemented (block inlining): %s",
               ruby_node_name(nd_type(lnode)));
    }
    return 0;
}
static NODE *
build_Integer_times_node(rb_iseq_t *iseq, NODE * node, NODE * lnode,
                         VALUE param_vars, VALUE local_vars)
{
    /* Special Block for Integer#times

       block with one parameter:
       {|e, _self|
           _e = e
           while _e < _self
             e = _e
             redo_point:
             BODY
             next_point:
             _e = _e.succ
           end
       }

       block with no parameter:
       {|e, _self|
           while e < _self
             BODY
             next_point:
             e = e.succ
           end
       }
     */
    ID _self;
    CONST_ID(_self, "#_self");

    if (iseq->argc == 0) {
        ID e;
        CONST_ID(e, "#e");
        rb_ary_push(param_vars, ID2SYM(e));
        rb_ary_push(param_vars, ID2SYM(_self));
        iseq->argc += 2;

        node =
            NEW_WHILE(NEW_CALL(NEW_DVAR(e), idLT, new_ary(NEW_DVAR(_self), 0)),
                      new_block(NEW_OPTBLOCK(node),
                                NEW_DASGN(e, NEW_CALL(NEW_DVAR(e), idSucc, 0))),
                      Qundef);
    }
    else {
        ID _e;
        ID e = SYM2ID(rb_ary_entry(param_vars, 0));
        NODE *assign;

        CONST_ID(_e, "#_e");
        rb_ary_push(param_vars, ID2SYM(_self));
        rb_ary_push(local_vars, ID2SYM(_e));
        iseq->argc++;

        if (nd_type(lnode) == NODE_DASGN_CURR) {
            assign = NEW_DASGN(e, NEW_DVAR(_e));
        }
        else {
            assign = new_assign(lnode, NEW_DVAR(_e));
        }

        node =
            new_block(NEW_DASGN(_e, NEW_DVAR(e)),
                      NEW_WHILE(NEW_CALL(NEW_DVAR(_e), idLT,
                                         new_ary(NEW_DVAR(_self), 0)),
                                new_block(assign,
                                          new_block(NEW_OPTBLOCK(node),
                                                    NEW_DASGN(_e,
                                                              NEW_CALL(NEW_DVAR(_e),
                                                                       idSucc, 0)))),
                                Qundef));
    }
    return node;
}
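
/*
 * Illustration (a sketch of the transformation; `_i` stands for the hidden
 * #_e local introduced above): with OPT_BLOCKINLINING enabled, a call such as
 *
 *     3.times {|i| puts i }
 *
 * is driven by invoke_Integer_times_special_block() below, which yields
 * (0, 3) to a specialized block roughly equivalent to
 *
 *     {|i, _self|
 *       _i = i
 *       while _i < _self
 *         i = _i
 *         puts i          # original block body
 *         _i = _i.succ
 *       end
 *     }
 *
 * so the whole loop runs inside a single block invocation instead of one
 * yield per iteration.
 */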
VALUE
invoke_Integer_times_special_block(VALUE num)
{
    rb_thread_t *th = GET_THREAD();
    rb_block_t *orig_block = GC_GUARDED_PTR_REF(th->cfp->lfp[0]);

    if (orig_block && BUILTIN_TYPE(orig_block->iseq) != T_NODE) {
        VALUE tsiseqval = iseq_special_block(orig_block->iseq,
                                             build_Integer_times_node);
        rb_iseq_t *tsiseq;
        VALUE argv[2], val;

        if (tsiseqval) {
            rb_block_t block = *orig_block;
            GetISeqPtr(tsiseqval, tsiseq);
            block.iseq = tsiseq;
            th->cfp->lfp[0] = GC_GUARDED_PTR(&block);
            argv[0] = INT2FIX(0);
            argv[1] = num;
            val = rb_yield_values(2, argv);
            if (val == Qundef) {
                return num;
            }
            else {
                return val;
            }
        }
    }
    return Qundef;
}
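
/*
 * Expected usage (a sketch under the assumption that this is wired up from
 * the C implementation of Integer#times; the actual call site is not part of
 * this file):
 *
 *     VALUE val = invoke_Integer_times_special_block(num);
 *     if (val != Qundef) {
 *         return val;    -- the inlined loop already ran
 *     }
 *     -- otherwise fall back to the ordinary rb_yield() loop
 *
 * The same Qundef convention applies to the Range#each and Array#each
 * helpers below.
 */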
static NODE *
build_Range_each_node(rb_iseq_t *iseq, NODE * node, NODE * lnode,
                      VALUE param_vars, VALUE local_vars, ID mid)
{
    /* Special Block for Range#each

       block with one parameter:
       {|e, _last|
           _e = e
           while _e < _last
             e = _e
             next_point:
             BODY
             redo_point:
             _e = _e.succ
           end
       }

       block with no parameter:
       {|e, _last|
           while e < _last
             BODY
             redo_point:
             e = e.succ
           end
       }

       `<` stands for the comparison `mid`: idLT for exclusive ranges,
       idLE for inclusive ones (see build_Range_each_node_LT/_LE below).
     */
    ID _last;
    CONST_ID(_last, "#_last");

    if (iseq->argc == 0) {
        ID e;
        CONST_ID(e, "#e");
        rb_ary_push(param_vars, ID2SYM(e));
        rb_ary_push(param_vars, ID2SYM(_last));
        iseq->argc += 2;

        node =
            NEW_WHILE(NEW_CALL(NEW_DVAR(e), mid, new_ary(NEW_DVAR(_last), 0)),
                      new_block(NEW_OPTBLOCK(node),
                                NEW_DASGN(e, NEW_CALL(NEW_DVAR(e), idSucc, 0))),
                      Qundef);
    }
    else {
        ID _e;
        ID e = SYM2ID(rb_ary_entry(param_vars, 0));
        NODE *assign;

        CONST_ID(_e, "#_e");
        rb_ary_push(param_vars, ID2SYM(_last));
        rb_ary_push(local_vars, ID2SYM(_e));
        iseq->argc++;

        if (nd_type(lnode) == NODE_DASGN_CURR) {
            assign = NEW_DASGN(e, NEW_DVAR(_e));
        }
        else {
            assign = new_assign(lnode, NEW_DVAR(_e));
        }

        node =
            new_block(NEW_DASGN(_e, NEW_DVAR(e)),
                      NEW_WHILE(NEW_CALL(NEW_DVAR(_e), mid,
                                         new_ary(NEW_DVAR(_last), 0)),
                                new_block(assign,
                                          new_block(NEW_OPTBLOCK(node),
                                                    NEW_DASGN(_e,
                                                              NEW_CALL(NEW_DVAR(_e),
                                                                       idSucc, 0)))),
                                Qundef));
    }
    return node;
}
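
/*
 * Illustration (a sketch, analogous to the Integer#times case): for an
 * inclusive range,
 *
 *     (1..5).each {|i| puts i }
 *
 * invoke_Range_each_special_block() (below) yields (1, 5) to a specialized
 * block built with the idLE builder, looping `while _i <= _last`; an
 * exclusive range such as (1...5) gets the idLT builder and loops
 * `while _i < _last`.  In both cases the element is advanced with #succ,
 * mirroring the builder comment above.
 */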
static NODE *
build_Range_each_node_LE(rb_iseq_t *iseq, NODE * node, NODE * lnode,
                         VALUE param_vars, VALUE local_vars)
{
    return build_Range_each_node(iseq, node, lnode,
                                 param_vars, local_vars, idLE);
}

static NODE *
build_Range_each_node_LT(rb_iseq_t *iseq, NODE * node, NODE * lnode,
                         VALUE param_vars, VALUE local_vars)
{
    return build_Range_each_node(iseq, node, lnode,
                                 param_vars, local_vars, idLT);
}
VALUE
invoke_Range_each_special_block(VALUE range,
                                VALUE beg, VALUE end, int excl)
{
    rb_thread_t *th = GET_THREAD();
    rb_block_t *orig_block = GC_GUARDED_PTR_REF(th->cfp->lfp[0]);

    if (BUILTIN_TYPE(orig_block->iseq) != T_NODE) {
        void *builder =
            excl ? build_Range_each_node_LT : build_Range_each_node_LE;
        VALUE tsiseqval = iseq_special_block(orig_block->iseq, builder);
        rb_iseq_t *tsiseq;
        VALUE argv[2];

        if (tsiseqval) {
            VALUE val;
            rb_block_t block = *orig_block;
            GetISeqPtr(tsiseqval, tsiseq);
            block.iseq = tsiseq;
            th->cfp->lfp[0] = GC_GUARDED_PTR(&block);
            argv[0] = beg;
            argv[1] = end;
            val = rb_yield_values(2, argv);
            if (val == Qundef) {
                return range;
            }
            else {
                return val;
            }
        }
    }
    return Qundef;
}
static NODE *
build_Array_each_node(rb_iseq_t *iseq, NODE * node, NODE * lnode,
                      VALUE param_vars, VALUE local_vars)
{
    /* Special block for Array#each

       ary.each{|e|
         BODY
       }
       =>
       {|e, _self|
           _i = 0
           while _i < _self.length
             e = _self[_i]
             redo_point:
             BODY
             next_point:
             _i = _i.succ
           end
       }

       ary.each{
         BODY
       }
       =>
       {|_i, _self|
           _i = 0
           while _i < _self.length
             redo_point:
             BODY
             next_point:
             _i = _i.succ
           end
       }
     */
    ID _self, _i;

    CONST_ID(_self, "#_self");
    CONST_ID(_i, "#_i");

    if (iseq->argc == 0) {
        ID _e;
        CONST_ID(_e, "#_e");
        rb_ary_push(param_vars, ID2SYM(_e));
        rb_ary_push(param_vars, ID2SYM(_self));
        iseq->argc += 2;
        rb_ary_push(local_vars, ID2SYM(_i));

        node =
            new_block(NEW_DASGN(_i, NEW_LIT(INT2FIX(0))),
                      NEW_WHILE(NEW_CALL(NEW_DVAR(_i), idLT,
                                         new_ary(NEW_CALL(NEW_DVAR(_self),
                                                          idLength, 0), 0)),
                                new_block(NEW_OPTBLOCK(node),
                                          NEW_DASGN(_i,
                                                    NEW_CALL(NEW_DVAR(_i),
                                                             idSucc, 0))),
                                Qundef));
    }
    else {
        ID e = SYM2ID(rb_ary_entry(param_vars, 0));
        NODE *assign;

        rb_ary_push(param_vars, ID2SYM(_self));
        iseq->argc++;
        rb_ary_push(local_vars, ID2SYM(_i));

        if (nd_type(lnode) == NODE_DASGN_CURR) {
            assign = NEW_DASGN(e,
                               NEW_CALL(NEW_DVAR(_self), idAREF,
                                        new_ary(NEW_DVAR(_i), 0)));
        }
        else {
            assign = new_assign(lnode,
                                NEW_CALL(NEW_DVAR(_self), idAREF,
                                         new_ary(NEW_DVAR(_i), 0)));
        }

        node =
            new_block(NEW_DASGN(_i, NEW_LIT(INT2FIX(0))),
                      NEW_WHILE(NEW_CALL(NEW_DVAR(_i), idLT,
                                         new_ary(NEW_CALL(NEW_DVAR(_self),
                                                          idLength, 0), 0)),
                                new_block(assign,
                                          new_block(NEW_OPTBLOCK(node),
                                                    NEW_DASGN(_i,
                                                              NEW_CALL(NEW_DVAR(_i),
                                                                       idSucc, 0)))),
                                Qundef));
    }
    return node;
}
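
/*
 * Illustration (a sketch): for
 *
 *     ary.each {|x| p x }
 *
 * invoke_Array_each_special_block() (below) passes a dummy first argument
 * and ary itself to a specialized block roughly equivalent to
 *
 *     {|x, _self|
 *       _i = 0
 *       while _i < _self.length
 *         x = _self[_i]
 *         p x             # original block body
 *         _i = _i.succ
 *       end
 *     }
 *
 * The first yielded value is only a placeholder; the specialized block
 * reassigns its first parameter from _self[_i] on every iteration.
 */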
VALUE
invoke_Array_each_special_block(VALUE ary)
{
    rb_thread_t *th = GET_THREAD();
    rb_block_t *orig_block = GC_GUARDED_PTR_REF(th->cfp->lfp[0]);

    if (BUILTIN_TYPE(orig_block->iseq) != T_NODE) {
        VALUE tsiseqval = iseq_special_block(orig_block->iseq,
                                             build_Array_each_node);
        rb_iseq_t *tsiseq;
        VALUE argv[2];

        if (tsiseqval) {
            VALUE val;
            rb_block_t block = *orig_block;
            GetISeqPtr(tsiseqval, tsiseq);
            block.iseq = tsiseq;
            th->cfp->lfp[0] = GC_GUARDED_PTR(&block);
            argv[0] = 0;        /* dummy; the specialized block reassigns its first parameter */
            argv[1] = ary;
            val = rb_yield_values(2, argv);
            if (val == Qundef) {
                return ary;
            }
            else {
                return val;
            }
        }
    }
    return Qundef;
}