/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
/*
 * JS bytecode generation.
 */
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsversion.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))
static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);
JS_FRIEND_API(void)
js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg, JSParseContext *pc,
                     JSArenaPool *codePool, JSArenaPool *notePool,
                     uintN lineno)
{
    memset(cg, 0, sizeof *cg);
    TREE_CONTEXT_INIT(&cg->treeContext, pc);
    cg->codePool = codePool;
    cg->notePool = notePool;
    cg->codeMark = JS_ARENA_MARK(codePool);
    cg->noteMark = JS_ARENA_MARK(notePool);
    cg->current = &cg->main;
    cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
    ATOM_LIST_INIT(&cg->atomList);
    cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
    ATOM_LIST_INIT(&cg->constList);
    ATOM_LIST_INIT(&cg->upvarList);
}
JS_FRIEND_API(void)
js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
{
    TREE_CONTEXT_FINISH(cx, &cg->treeContext);
    JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
    JS_ARENA_RELEASE(cg->notePool, cg->noteMark);

    /* NB: non-null only after OOM. */
    if (cg->spanDeps)
        JS_free(cx, cg->spanDeps);

    if (cg->upvarMap.vector)
        JS_free(cx, cg->upvarMap.vector);
}
static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = PTRDIFF(next, base, jsbytecode);
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
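
/*
 * A minimal standalone sketch of the growth policy above: the bytecode vector
 * starts at one BYTECODE_CHUNK and then rounds each requested length up to
 * the next power of two. The helper names (ceil_log2, grow_length) are
 * invented for this illustration and are not part of the engine.
 */
#if 0
#include <assert.h>
#include <stddef.h>

static size_t ceil_log2(size_t n)       /* smallest k with (1 << k) >= n */
{
    size_t k = 0;
    while (((size_t)1 << k) < n)
        k++;
    return k;
}

static size_t grow_length(size_t chunk, size_t length)
{
    return (length <= chunk) ? chunk : (size_t)1 << ceil_log2(length);
}

int main(void)
{
    assert(grow_length(256, 10) == 256);    /* small scripts stay one chunk */
    assert(grow_length(256, 257) == 512);   /* then double as needed */
    assert(grow_length(256, 1000) == 1024);
    return 0;
}
#endif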
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN depth;
    intN nuses, ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
    if (cs->format & JOF_TMPSLOT_MASK) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }
    nuses = cs->nuses;
    if (nuses < 0)
        nuses = js_GetVariableStackUseLength(op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        JSTokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->treeContext.parseContext->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->filename ? ts->filename : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastPob->object;
        JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}
ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}
ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}
ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}
ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[]  = "finally block";
const char js_script_str[]         = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->treeContext.topStmt)
        return js_script_str;
    return statementName[cg->treeContext.topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}
/*
 Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
 and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
 into unconditional (gotos and gosubs), and conditional jumps or branches
 (which pop a value, test it, and jump depending on its value).  Most jumps
 have just one immediate operand, a signed offset from the jump opcode's pc
 to the target bytecode.  The lookup and table switch opcodes may contain
 many jump offsets.

 Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
 fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
 suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
 the extended form of the JSOP_OR branch opcode).  The unextended or short
 formats have 16-bit signed immediate offset operands, the extended or long
 formats have 32-bit signed immediates.  The span-dependency problem consists
 of selecting as few long instructions as possible, or about as few -- since
 jumps can span other jumps, extending one jump may cause another to need to
 be extended.

 Most JS scripts are short, so need no extended jumps.  We optimize for this
 case by generating short jumps until we know a long jump is needed.  After
 that point, we keep generating short jumps, but each jump's 16-bit immediate
 offset operand is actually an unsigned index into cg->spanDeps, an array of
 JSSpanDep structs.  Each struct tells the top offset in the script of the
 opcode, the "before" offset of the jump (which will be the same as top for
 simplex jumps, but which will index further into the bytecode array for a
 non-initial jump offset in a lookup or table switch), the after "offset"
 adjusted during span-dependent instruction selection (initially the same
 value as the "before" offset), and the jump target (more below).

 Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
 ensure that all bytecode generated so far can be inspected to discover where
 the jump offset immediate operands lie within CG_CODE(cg).  But the bonus is
 that we generate span-dependency records sorted by their offsets, so we can
 binary-search when trying to find a JSSpanDep for a given bytecode offset,
 or the nearest JSSpanDep at or above a given pc.

 To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand.  This
 tells us that we need to binary-search for the cg->spanDeps entry by the
 jump opcode's bytecode offset (sd->before).

 Jump targets need to be maintained in a data structure that lets us look
 up an already-known target by its address (jumps may have a common target),
 and that also lets us update the addresses (script-relative, a.k.a. absolute
 offsets) of targets that come after a jump target (for when a jump below
 that target needs to be extended).  We use an AVL tree, implemented using
 recursion, but with some tricky optimizations to its height-balancing code
 (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).

 A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
 positive sign, even though they link "backward" (i.e., toward lower bytecode
 address).  We don't want to waste space and search time in the AVL tree for
 such temporary backpatch deltas, so we use a single-bit wildcard scheme to
 tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
 in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
 target, or is still awaiting backpatching.

 Note that backpatch chains would present a problem for BuildSpanDepTable,
 which inspects bytecode to build cg->spanDeps on demand, when the first
 short jump offset overflows.  To solve this temporary problem, we emit a
 proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
 nuses/ndefs counts help keep the stack balanced, but whose opcode format
 distinguishes its backpatch delta immediate operand from a normal jump
 offset.
 */
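
/*
 * A toy model of the scheme described above, with invented names: once long
 * jumps are possible, each short jump's 16-bit immediate holds an index into
 * a side table of span-dependency records, and indexes that don't fit are
 * replaced by a HUGE sentinel that forces a binary search by bytecode offset.
 */
#if 0
#include <assert.h>
#include <stdint.h>

#define DEMO_INDEX_HUGE 0xffffu         /* stand-in for SPANDEP_INDEX_HUGE */

static uint16_t encode_index(uint32_t index)
{
    return (index >= DEMO_INDEX_HUGE) ? DEMO_INDEX_HUGE : (uint16_t)index;
}

int main(void)
{
    assert(encode_index(42) == 42);                 /* stored inline */
    assert(encode_index(70000) == DEMO_INDEX_HUGE); /* overflow -> sentinel */
    return 0;
}
#endif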
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}
typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}
#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif
static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}
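
/*
 * The single-bit tagging mentioned in the long comment above can be sketched
 * in isolation: a pointer-sized word carries either a tagged jump-target
 * pointer or an untagged, non-negative backpatch delta. All names here are
 * invented for the demo; the real macros are SD_SET_TARGET, SD_SET_BPDELTA,
 * JT_HAS_TAG, and friends.
 */
#if 0
#include <assert.h>
#include <stdint.h>

#define DEMO_TAG ((uintptr_t)1)

static uintptr_t tag_ptr(void *p)        { return (uintptr_t)p | DEMO_TAG; }
static int       has_tag(uintptr_t w)    { return (w & DEMO_TAG) != 0; }
static void *    clr_tag(uintptr_t w)    { return (void *)(w & ~DEMO_TAG); }
static uintptr_t from_delta(uintptr_t d) { return d << 1; } /* bit 0 stays clear */
static uintptr_t to_delta(uintptr_t w)   { return w >> 1; }

int main(void)
{
    static int target;                   /* aligned, so bit 0 of &target is 0 */
    uintptr_t w1 = tag_ptr(&target);
    uintptr_t w2 = from_delta(7);

    assert(has_tag(w1) && clr_tag(w1) == &target);
    assert(!has_tag(w2) && to_delta(w2) == 7);
    return 0;
}
#endif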
#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) JS_realloc(cx, sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
    sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}
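
/*
 * The `(index & (index - 1)) == 0` test above fires exactly when index is a
 * power of two, so the table doubles only at 256, 512, 1024, ... entries. A
 * self-contained sketch of that policy, with invented names:
 */
#if 0
#include <assert.h>

static int is_pow2_or_zero(unsigned n) { return (n & (n - 1)) == 0; }

int main(void)
{
    unsigned capacity = 0, index;
    for (index = 0; index < 2048; index++) {
        if (is_pow2_or_zero(index) && (capacity == 0 || index >= 256)) {
            /* first allocation gets 256 entries; later ones double */
            capacity = (capacity == 0) ? 256 : index * 2;
        }
        assert(index < capacity);       /* every index fits before use */
    }
    return 0;
}
#endif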
static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        if (indexlen)
            pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
        njumps--;
    }
    return 1 + pc2;
}
static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}
static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}
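
/*
 * The fallback path above is a plain binary search over records sorted by
 * their "before" offsets. The same search in isolation (invented names):
 */
#if 0
#include <assert.h>

static int find(const int *before, int n, int offset)
{
    int lo = 0, hi = n - 1;
    while (lo <= hi) {
        int mid = (lo + hi) / 2;
        if (before[mid] == offset)
            return mid;
        if (before[mid] < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    return -1;
}

int main(void)
{
    static const int before[] = {3, 9, 14, 40, 77};
    assert(find(before, 5, 14) == 2);
    assert(find(before, 5, 15) == -1);
    return 0;
}
#endif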
static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}
static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}
static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, mid, hi;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}
static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}
static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }
                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }
                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        JSTokenStream *ts = &cg->treeContext.parseContext->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    JS_free(cx, cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}
static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}
static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}
JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}
JSBool
js_InStatement(JSTreeContext *tc, JSStmtType type)
{
    JSStmtInfo *stmt;

    for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return JS_TRUE;
    }
    return JS_FALSE;
}
*tc
, JSStmtInfo
*stmt
, JSStmtType type
,
1246 SET_STATEMENT_TOP(stmt
, top
);
1247 stmt
->u
.label
= NULL
;
1248 JS_ASSERT(!stmt
->u
.blockObj
);
1249 stmt
->down
= tc
->topStmt
;
1251 if (STMT_LINKS_SCOPE(stmt
)) {
1252 stmt
->downScope
= tc
->topScopeStmt
;
1253 tc
->topScopeStmt
= stmt
;
1255 stmt
->downScope
= NULL
;
void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    STOBJ_SET_PARENT(blockObj, tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->u.blockObj = blockObj;
}
/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}
/*
 * Macro to emit a bytecode followed by a uint16 immediate operand stored in
 * big-endian order, used for arg and var numbers as well as for atomIndexes.
 * NB: We use cx and cg from our caller's lexical environment, and return
 * false on error.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (js_Emit3(cx, cg, op, UINT16_HI(i), UINT16_LO(i)) < 0)             \
            return JS_FALSE;                                                  \
    JS_END_MACRO
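
/*
 * The immediate is stored big-endian: high byte first, then low byte. A
 * minimal round-trip sketch of the UINT16_HI/UINT16_LO split, with names
 * invented for the demo:
 */
#if 0
#include <assert.h>
#include <stdint.h>

static void put16(uint8_t *pc, uint16_t i)
{
    pc[0] = (uint8_t)(i >> 8);          /* like UINT16_HI */
    pc[1] = (uint8_t)(i & 0xff);        /* like UINT16_LO */
}

static uint16_t get16(const uint8_t *pc)
{
    return (uint16_t)((pc[0] << 8) | pc[1]);
}

int main(void)
{
    uint8_t buf[2];
    put16(buf, 0xBEEF);
    assert(get16(buf) == 0xBEEF);
    return 0;
}
#endif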
static JSBool
FlushPops(JSContext *cx, JSCodeGenerator *cg, intN *npops)
{
    JS_ASSERT(*npops != 0);
    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return JS_TRUE;
}
/*
 * Emit additional bytecode(s) for non-local jumps.
 */
static JSBool
EmitNonLocalJumpFixup(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt)
{
    intN depth, npops;
    JSStmtInfo *stmt;

    /*
     * The non-local jump fixup we emit will unbalance cg->stackDepth, because
     * the fixup replicates balanced code such as JSOP_LEAVEWITH emitted at the
     * end of a with statement, so we save cg->stackDepth here and restore it
     * just before a successful return.
     */
    depth = cg->stackDepth;
    npops = 0;

#define FLUSH_POPS() if (npops && !FlushPops(cx, cg, &npops)) return JS_FALSE

    for (stmt = cg->treeContext.topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(*stmt)) < 0)
                return JS_FALSE;
            break;

          case STMT_WITH:
            /* There's a With object on the stack that we need to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
                return JS_FALSE;
            break;

          case STMT_FOR_IN_LOOP:
            /*
             * The iterator and the object being iterated need to be popped.
             */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENDITER) < 0)
                return JS_FALSE;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->flags & SIF_SCOPE) {
            uintN i;

            /* There is a Block object with locals on the stack to pop. */
            FLUSH_POPS();
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            i = OBJ_BLOCK_COUNT(cx, stmt->u.blockObj);
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i);
        }
    }

    FLUSH_POPS();
    cg->stackDepth = depth;
    return JS_TRUE;

#undef FLUSH_POPS
}
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
         ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
{
    intN index;

    if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
        return -1;

    if (label)
        index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
    else if (noteType != SRC_NULL)
        index = js_NewSrcNote(cx, cg, noteType);
    else
        index = 0;
    if (index < 0)
        return -1;

    return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp);
}
static JSBool
BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last,
          jsbytecode *target, jsbytecode op)
{
    jsbytecode *pc, *stop;
    ptrdiff_t delta, span;

    pc = CG_CODE(cg, last);
    stop = CG_CODE(cg, -1);
    while (pc != stop) {
        delta = GetJumpOffset(cg, pc);
        span = PTRDIFF(target, pc, jsbytecode);
        CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span);

        /*
         * Set *pc after jump offset in case bpdelta didn't overflow, but span
         * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable
         * and need to see the JSOP_BACKPATCH* op at *pc).
         */
        *pc = op;
        pc -= delta;
    }
    return JS_TRUE;
}
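
/*
 * A chain of unresolved forward jumps can be modeled as offsets linked by
 * positive deltas, patched in one backward walk once the target is known.
 * Everything below is an invented miniature of that idea, not the real
 * bytecode encoding:
 */
#if 0
#include <assert.h>

int main(void)
{
    int delta[32] = {0};
    int jumps[] = {2, 7, 15};           /* "emit" jumps at these offsets */
    int last = -1, pc, target = 20, i;

    /* Each jump stores the delta back to the previous jump (0 ends chain). */
    for (i = 0; i < 3; i++) {
        delta[jumps[i]] = jumps[i] - (last < 0 ? jumps[i] : last);
        last = jumps[i];
    }

    /* Backpatch: walk the chain from the last jump toward the first. */
    pc = last;
    for (;;) {
        int d = delta[pc];              /* save link before overwriting it */
        delta[pc] = target - pc;        /* resolve to the real span */
        if (d == 0)
            break;
        pc -= d;
    }

    assert(delta[2] == 18 && delta[7] == 13 && delta[15] == 5);
    return 0;
}
#endif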
void
js_PopStatement(JSTreeContext *tc)
{
    JSStmtInfo *stmt;

    stmt = tc->topStmt;
    tc->topStmt = stmt->down;
    if (STMT_LINKS_SCOPE(stmt)) {
        tc->topScopeStmt = stmt->downScope;
        if (stmt->flags & SIF_SCOPE) {
            tc->blockChain = STOBJ_GET_PARENT(stmt->u.blockObj);
            JS_SCOPE_DEPTH_METERING(--tc->scopeDepth);
        }
    }
}
JSBool
js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg)
{
    JSStmtInfo *stmt;

    stmt = cg->treeContext.topStmt;
    if (!STMT_IS_TRYING(stmt) &&
        (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) ||
         !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update),
                    JSOP_GOTO))) {
        return JS_FALSE;
    }
    js_PopStatement(&cg->treeContext);
    return JS_TRUE;
}
JSBool
js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                             JSParseNode *pn)
{
    jsdouble dval;
    jsint ival;
    JSAtom *valueAtom;
    jsval v;
    JSAtomListElement *ale;

    /* XXX just do numbers for now */
    if (pn->pn_type == TOK_NUMBER) {
        dval = pn->pn_dval;
        if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
            v = INT_TO_JSVAL(ival);
        } else {
            /*
             * We atomize double to root a jsdouble instance that we wrap as
             * jsval and store in cg->constList. This works because atoms are
             * protected from GC during compilation.
             */
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        ale = js_IndexAtom(cx, atom, &cg->constList);
        if (!ale)
            return JS_FALSE;
        ALE_SET_VALUE(ale, v);
    }
    return JS_TRUE;
}
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp)
{
    JSStmtInfo *stmt;
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->u.blockObj;
        JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
        scope = OBJ_SCOPE(obj);
        sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            return stmt;
        }
    }

    if (slotp)
        *slotp = -1;
    return stmt;
}
/*
 * Check if the attributes describe a property holding a compile-time constant
 * or a permanent, read-only property without a getter.
 */
#define IS_CONSTANT_PROPERTY(attrs)                                           \
    (((attrs) & (JSPROP_READONLY | JSPROP_PERMANENT | JSPROP_GETTER)) ==      \
     (JSPROP_READONLY | JSPROP_PERMANENT))
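
/*
 * The mask test reads: READONLY and PERMANENT must both be set, and GETTER
 * must be clear. A tiny truth-table check with local stand-in flag values
 * (the DEMO_* names are invented for the illustration):
 */
#if 0
#include <assert.h>

#define DEMO_READONLY  0x01
#define DEMO_PERMANENT 0x02
#define DEMO_GETTER    0x04
#define DEMO_IS_CONST(attrs)                                                  \
    (((attrs) & (DEMO_READONLY | DEMO_PERMANENT | DEMO_GETTER)) ==            \
     (DEMO_READONLY | DEMO_PERMANENT))

int main(void)
{
    assert(DEMO_IS_CONST(DEMO_READONLY | DEMO_PERMANENT));
    assert(!DEMO_IS_CONST(DEMO_READONLY));                  /* not permanent */
    assert(!DEMO_IS_CONST(DEMO_READONLY | DEMO_PERMANENT | DEMO_GETTER));
    return 0;
}
#endif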
/*
 * The function sets vp to JSVAL_HOLE when the atom does not correspond to a
 * name defining a constant.
 */
static JSBool
LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
                          jsval *vp)
{
    JSBool ok;
    JSStmtInfo *stmt;
    JSAtomListElement *ale;
    JSObject *obj, *pobj;
    JSProperty *prop;
    uintN attrs;

    /*
     * Chase down the cg stack, but only until we reach the outermost cg.
     * This enables propagating consts from top-level into switch cases in a
     * function compiled along with the top-level script.
     */
    *vp = JSVAL_HOLE;
    do {
        if (cg->treeContext.flags & (TCF_IN_FUNCTION | TCF_COMPILE_N_GO)) {
            /* XXX this will need revising when 'let const' is added. */
            stmt = js_LexicalLookup(&cg->treeContext, atom, NULL);
            if (stmt)
                return JS_TRUE;

            ATOM_LIST_SEARCH(ale, &cg->constList, atom);
            if (ale) {
                JS_ASSERT(ALE_VALUE(ale) != JSVAL_HOLE);
                *vp = ALE_VALUE(ale);
                return JS_TRUE;
            }

            /*
             * Try looking in the variable object for a direct property that
             * is readonly and permanent. We know such a property can't be
             * shadowed by another property on obj's prototype chain, or a
             * with object or catch variable; nor can prop's value be changed,
             * nor can prop be deleted.
             */
            if (cg->treeContext.flags & TCF_IN_FUNCTION) {
                if (js_LookupLocal(cx, cg->treeContext.u.fun, atom, NULL) !=
                    JSLOCAL_NONE) {
                    break;
                }
            } else {
                JS_ASSERT(cg->treeContext.flags & TCF_COMPILE_N_GO);
                obj = cg->treeContext.u.scopeChain;
                ok = OBJ_LOOKUP_PROPERTY(cx, obj, ATOM_TO_JSID(atom), &pobj,
                                         &prop);
                if (!ok)
                    return JS_FALSE;
                if (pobj == obj) {
                    /*
                     * We're compiling code that will be executed immediately,
                     * not re-executed against a different scope chain and/or
                     * variable object. Therefore we can get constant values
                     * from our variable object here.
                     */
                    ok = OBJ_GET_ATTRIBUTES(cx, obj, ATOM_TO_JSID(atom), prop,
                                            &attrs);
                    if (ok && IS_CONSTANT_PROPERTY(attrs)) {
                        ok = OBJ_GET_PROPERTY(cx, obj, ATOM_TO_JSID(atom), vp);
                        JS_ASSERT_IF(ok, *vp != JSVAL_HOLE);
                    }
                }
                if (prop)
                    OBJ_DROP_PROPERTY(cx, pobj, prop);
                if (!ok)
                    return JS_FALSE;
                if (prop)
                    break;
            }
        }
    } while ((cg = cg->parent) != NULL);
    return JS_TRUE;
}
/*
 * Return JSOP_NOP to indicate that index fits 2 bytes and no index segment
 * reset instruction is necessary, JSOP_FALSE to indicate an error or either
 * JSOP_RESETBASE0 or JSOP_RESETBASE1 to indicate the reset bytecode to issue
 * after the main bytecode sequence.
 */
static JSOp
EmitBigIndexPrefix(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    uintN indexBase;

    /*
     * We have max 3 bytes for indexes and check for INDEX_LIMIT overflow only
     * for big indexes.
     */
    JS_STATIC_ASSERT(INDEX_LIMIT <= JS_BIT(24));
    JS_STATIC_ASSERT(INDEX_LIMIT >=
                     (JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 2) << 16);

    if (index < JS_BIT(16))
        return JSOP_NOP;
    indexBase = index >> 16;
    if (indexBase <= JSOP_INDEXBASE3 - JSOP_INDEXBASE1 + 1) {
        if (js_Emit1(cx, cg, (JSOp)(JSOP_INDEXBASE1 + indexBase - 1)) < 0)
            return JSOP_FALSE;
        return JSOP_RESETBASE0;
    }

    if (index >= INDEX_LIMIT) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                             JSMSG_TOO_MANY_LITERALS);
        return JSOP_FALSE;
    }

    if (js_Emit2(cx, cg, JSOP_INDEXBASE, (JSOp)indexBase) < 0)
        return JSOP_FALSE;
    return JSOP_RESETBASE;
}
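
/*
 * Index "segments" in a nutshell: the prefixed opcode keeps only the low 16
 * bits of the index, and the prefix carries index >> 16. A standalone round
 * trip of that split (helper names invented; segment 0 needs no prefix):
 */
#if 0
#include <assert.h>
#include <stdint.h>

static void split_index(uint32_t index, uint8_t *base, uint16_t *imm)
{
    *base = (uint8_t)(index >> 16);     /* segment selector */
    *imm = (uint16_t)(index & 0xffff);  /* 16-bit immediate operand */
}

int main(void)
{
    uint8_t base;
    uint16_t imm;

    split_index(0x0001abcd, &base, &imm);
    assert(base == 1 && imm == 0xabcd);
    assert(((uint32_t)base << 16 | imm) == 0x0001abcd);

    split_index(42, &base, &imm);
    assert(base == 0 && imm == 42);     /* fits: no prefix op needed */
    return 0;
}
#endif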
/*
 * Emit a bytecode and its 2-byte constant index immediate operand. If the
 * index requires more than 2 bytes, emit a prefix op whose 8-bit immediate
 * operand effectively extends the 16-bit immediate of the prefixed opcode,
 * by changing index "segment" (see jsinterp.c). We optimize segments 1-3
 * with single-byte JSOP_INDEXBASE[123] codes.
 *
 * Such prefixing currently requires a suffix to restore the "zero segment"
 * register setting, but this could be optimized further.
 */
static JSBool
EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
{
    JSOp bigSuffix;

    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;
    EMIT_UINT16_IMM_OP(op, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
/*
 * Slight sugar for EmitIndexOp, again accessing cx and cg from the macro
 * caller's lexical environment, and embedding a false return on error.
 */
#define EMIT_INDEX_OP(op, index)                                              \
    JS_BEGIN_MACRO                                                            \
        if (!EmitIndexOp(cx, op, index, cg))                                  \
            return JS_FALSE;                                                  \
    JS_END_MACRO
static JSBool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSAtomListElement *ale;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
    if (op == JSOP_GETPROP &&
        pn->pn_atom == cx->runtime->atomState.lengthAtom) {
        return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
    }
    ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
}
static uintN
IndexParsedObject(JSParsedObjectBox *pob, JSEmittedObjectList *list);

static JSBool
EmitObjectOp(JSContext *cx, JSParsedObjectBox *pob, JSOp op,
             JSCodeGenerator *cg)
{
    JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
    return EmitIndexOp(cx, op, IndexParsedObject(pob, &cg->objectList), cg);
}
/*
 * What good are ARGNO_LEN and SLOTNO_LEN, you ask? The answer is that, apart
 * from EmitSlotIndexOp, they abstract out the detail that both are 2, and in
 * other parts of the code there's no necessary relationship between the two.
 * The abstraction cracks here in order to share EmitSlotIndexOp code among
 * the JSOP_DEFLOCALFUN and JSOP_GET{ARG,VAR,LOCAL}PROP cases.
 */
JS_STATIC_ASSERT(ARGNO_LEN == 2);
JS_STATIC_ASSERT(SLOTNO_LEN == 2);

static JSBool
EmitSlotIndexOp(JSContext *cx, JSOp op, uintN slot, uintN index,
                JSCodeGenerator *cg)
{
    JSOp bigSuffix;
    ptrdiff_t off;
    jsbytecode *pc;

    JS_ASSERT(JOF_OPTYPE(op) == JOF_SLOTATOM ||
              JOF_OPTYPE(op) == JOF_SLOTOBJECT);
    bigSuffix = EmitBigIndexPrefix(cx, cg, index);
    if (bigSuffix == JSOP_FALSE)
        return JS_FALSE;

    /* Emit [op, slot, index]. */
    off = js_EmitN(cx, cg, op, 2 + INDEX_LEN);
    if (off < 0)
        return JS_FALSE;
    pc = CG_CODE(cg, off);
    SET_UINT16(pc, slot);
    pc += 2;
    SET_INDEX(pc, index);
    return bigSuffix == JSOP_NOP || js_Emit1(cx, cg, bigSuffix) >= 0;
}
/*
 * Adjust the slot for a block local to account for the number of variables
 * that share the same index space with locals. Due to the incremental code
 * generation for top-level script, we do the adjustment via code patching in
 * js_CompileScript; see comments there.
 *
 * The function returns -1 on failure.
 */
static jsint
AdjustBlockSlot(JSContext *cx, JSCodeGenerator *cg, jsint slot)
{
    JS_ASSERT((jsuint) slot < cg->maxStackDepth);
    if (cg->treeContext.flags & TCF_IN_FUNCTION) {
        slot += cg->treeContext.u.fun->u.i.nvars;
        if ((uintN) slot >= SLOTNO_LIMIT) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), NULL,
                                        JSREPORT_ERROR,
                                        JSMSG_TOO_MANY_LOCALS);
            slot = -1;
        }
    }
    return slot;
}
/*
 * This routine tries to optimize name gets and sets to stack slot loads and
 * stores, given the variables object and scope chain in cx's top frame, the
 * compile-time context in tc, and a TOK_NAME node pn. It returns false on
 * error, true on success.
 *
 * The caller can inspect pn->pn_slot for a non-negative slot number to tell
 * whether optimization occurred, in which case BindNameToSlot also updated
 * pn->pn_op. If pn->pn_slot is still -1 on return, pn->pn_op nevertheless
 * may have been optimized, e.g., from JSOP_NAME to JSOP_ARGUMENTS. Whether
 * or not pn->pn_op was modified, if this function finds an argument or local
 * variable name, pn->pn_const will be true for const properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
 * to update the TOK_FOR (for-in) and TOK_ASSIGN (op=, e.g. +=) special cases
 * in js_EmitTree.
 */
static JSBool
BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    JSTreeContext *tc;
    JSAtom *atom;
    JSStmtInfo *stmt;
    jsint slot;
    JSOp op;
    JSLocalKind localKind;
    uintN index;
    JSAtomListElement *ale;
    JSBool constOp;

    JS_ASSERT(pn->pn_type == TOK_NAME);
    if (pn->pn_slot >= 0 || pn->pn_op == JSOP_ARGUMENTS)
        return JS_TRUE;

    /* QNAME references can never be optimized to use arg/var storage. */
    if (pn->pn_op == JSOP_QNAMEPART)
        return JS_TRUE;

    /*
     * We can't optimize if we are compiling a with statement and its body,
     * or we're in a catch block whose exception variable has the same name
     * as this node. FIXME: we should be able to optimize catch vars to be
     * block-locals.
     */
    tc = &cg->treeContext;
    atom = pn->pn_atom;
    stmt = js_LexicalLookup(tc, atom, &slot);
    if (stmt) {
        if (stmt->type == STMT_WITH)
            return JS_TRUE;

        JS_ASSERT(stmt->flags & SIF_SCOPE);
        JS_ASSERT(slot >= 0);
        op = PN_OP(pn);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
          case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
          case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
          case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
          case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
          case JSOP_DELNAME:  op = JSOP_FALSE; break;
          default: JS_ASSERT(0);
        }
        if (op != pn->pn_op) {
            slot = AdjustBlockSlot(cx, cg, slot);
            if (slot < 0)
                return JS_FALSE;
            pn->pn_op = op;
            pn->pn_slot = slot;
        }
        return JS_TRUE;
    }

    /*
     * We can't optimize if var and closure (a local function not in a larger
     * expression and not at top-level within another's body) collide.
     * XXX suboptimal: keep track of colliding names and deoptimize only those
     */
    if (tc->flags & TCF_FUN_CLOSURE_VS_VAR)
        return JS_TRUE;

    if (!(tc->flags & TCF_IN_FUNCTION)) {
        JSStackFrame *caller;

        caller = tc->parseContext->callerFrame;
        if (caller) {
            JS_ASSERT(tc->flags & TCF_COMPILE_N_GO);
            JS_ASSERT(caller->script);
            if (!caller->fun || caller->varobj != tc->u.scopeChain)
                return JS_TRUE;

            /*
             * We are compiling eval or debug script inside a function frame
             * and the scope chain matches the function's variable object.
             * Optimize access to the function's arguments and variables, and
             * to the arguments object.
             */
            if (PN_OP(pn) != JSOP_NAME || cg->staticDepth > JS_DISPLAY_SIZE)
                goto arguments_check;
            localKind = js_LookupLocal(cx, caller->fun, atom, &index);
            if (localKind == JSLOCAL_NONE)
                goto arguments_check;

            ATOM_LIST_SEARCH(ale, &cg->upvarList, atom);
            if (!ale) {
                uint32 length, *vector;

                ale = js_IndexAtom(cx, atom, &cg->upvarList);
                if (!ale)
                    return JS_FALSE;
                JS_ASSERT(ALE_INDEX(ale) == cg->upvarList.count - 1);

                length = cg->upvarMap.length;
                JS_ASSERT(ALE_INDEX(ale) <= length);
                if (ALE_INDEX(ale) == length) {
                    length = 2 * JS_MAX(2, length);
                    vector = (uint32 *)
                             JS_realloc(cx, cg->upvarMap.vector,
                                        length * sizeof *vector);
                    if (!vector)
                        return JS_FALSE;
                    cg->upvarMap.vector = vector;
                    cg->upvarMap.length = length;
                }

                if (localKind != JSLOCAL_ARG)
                    index += caller->fun->nargs;
                if (index >= JS_BIT(16)) {
                    cg->treeContext.flags |= TCF_FUN_USES_NONLOCALS;
                    return JS_TRUE;
                }

                JS_ASSERT(cg->staticDepth > caller->fun->u.i.script->staticDepth);
                uintN skip = cg->staticDepth - caller->fun->u.i.script->staticDepth;
                cg->upvarMap.vector[ALE_INDEX(ale)] = MAKE_UPVAR_COOKIE(skip, index);
            }

            pn->pn_op = JSOP_GETUPVAR;
            pn->pn_slot = ALE_INDEX(ale);
            return JS_TRUE;
        }

        /*
         * We are optimizing global variables and there may be no pre-existing
         * global property named atom. If atom was declared via const or var,
         * optimize pn to access fp->vars using the appropriate JSOP_*GVAR op.
         */
        ATOM_LIST_SEARCH(ale, &tc->decls, atom);
        if (!ale) {
            /* Use precedes declaration, or name is never declared. */
            return JS_TRUE;
        }
        constOp = (ALE_JSOP(ale) == JSOP_DEFCONST);

        /* Index atom so we can map fast global number to name. */
        ale = js_IndexAtom(cx, atom, &cg->atomList);
        if (!ale)
            return JS_FALSE;

        /* Defend against tc->ngvars 16-bit overflow. */
        slot = ALE_INDEX(ale);
        if ((slot + 1) >> 16)
            return JS_TRUE;

        if ((uint16)(slot + 1) > tc->ngvars)
            tc->ngvars = (uint16)(slot + 1);

        op = PN_OP(pn);
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETGVAR; break;
          case JSOP_SETNAME:  op = JSOP_SETGVAR; break;
          case JSOP_SETCONST: /* NB: no change */ break;
          case JSOP_INCNAME:  op = JSOP_INCGVAR; break;
          case JSOP_NAMEINC:  op = JSOP_GVARINC; break;
          case JSOP_DECNAME:  op = JSOP_DECGVAR; break;
          case JSOP_NAMEDEC:  op = JSOP_GVARDEC; break;
          case JSOP_FORNAME:  /* NB: no change */ break;
          case JSOP_DELNAME:  /* NB: no change */ break;
          default: JS_NOT_REACHED("gvar");
        }
        pn->pn_const = constOp;
        if (op != pn->pn_op) {
            pn->pn_op = op;
            pn->pn_slot = slot;
        }
        return JS_TRUE;
    }

    if (tc->flags & TCF_IN_FUNCTION) {
        /*
         * We are compiling a function body and may be able to optimize name
         * to stack slot. Look for an argument or variable in the function and
         * rewrite pn_op and update pn accordingly.
         */
        localKind = js_LookupLocal(cx, tc->u.fun, atom, &index);
        if (localKind != JSLOCAL_NONE) {
            op = PN_OP(pn);
            if (localKind == JSLOCAL_ARG) {
                switch (op) {
                  case JSOP_NAME:     op = JSOP_GETARG; break;
                  case JSOP_SETNAME:  op = JSOP_SETARG; break;
                  case JSOP_INCNAME:  op = JSOP_INCARG; break;
                  case JSOP_NAMEINC:  op = JSOP_ARGINC; break;
                  case JSOP_DECNAME:  op = JSOP_DECARG; break;
                  case JSOP_NAMEDEC:  op = JSOP_ARGDEC; break;
                  case JSOP_FORNAME:  op = JSOP_FORARG; break;
                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
                  default: JS_NOT_REACHED("arg");
                }
                pn->pn_const = JS_FALSE;
            } else {
                JS_ASSERT(localKind == JSLOCAL_VAR ||
                          localKind == JSLOCAL_CONST);
                switch (op) {
                  case JSOP_NAME:     op = JSOP_GETLOCAL; break;
                  case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
                  case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
                  case JSOP_INCNAME:  op = JSOP_INCLOCAL; break;
                  case JSOP_NAMEINC:  op = JSOP_LOCALINC; break;
                  case JSOP_DECNAME:  op = JSOP_DECLOCAL; break;
                  case JSOP_NAMEDEC:  op = JSOP_LOCALDEC; break;
                  case JSOP_FORNAME:  op = JSOP_FORLOCAL; break;
                  case JSOP_DELNAME:  op = JSOP_FALSE; break;
                  default: JS_NOT_REACHED("local");
                }
                pn->pn_const = (localKind == JSLOCAL_CONST);
            }
            pn->pn_op = op;
            pn->pn_slot = index;
            return JS_TRUE;
        }
        tc->flags |= TCF_FUN_USES_NONLOCALS;
    }

  arguments_check:
    /*
     * Here we are either compiling a function body, or an eval or debug
     * script inside a function, and couldn't optimize pn, so it's not a
     * global or local slot name. We are also outside of any with blocks.
     * Check if we can optimize the predefined arguments variable.
     */
    JS_ASSERT((tc->flags & TCF_IN_FUNCTION) ||
              (tc->parseContext->callerFrame &&
               tc->parseContext->callerFrame->fun &&
               tc->parseContext->callerFrame->varobj == tc->u.scopeChain));
    if (pn->pn_op == JSOP_NAME &&
        atom == cx->runtime->atomState.argumentsAtom) {
        pn->pn_op = JSOP_ARGUMENTS;
    }
    return JS_TRUE;
}
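
/*
 * MAKE_UPVAR_COOKIE pairs a static-level skip count with a slot index. The
 * packing below (skip in the high 16 bits, slot in the low 16) is only an
 * assumption for illustration, and the DEMO_* names are invented:
 */
#if 0
#include <assert.h>
#include <stdint.h>

#define DEMO_MAKE_COOKIE(skip, slot) (((uint32_t)(skip) << 16) | (slot))
#define DEMO_COOKIE_SKIP(c)          ((uint16_t)((c) >> 16))
#define DEMO_COOKIE_SLOT(c)          ((uint16_t)((c) & 0xffff))

int main(void)
{
    uint32_t c = DEMO_MAKE_COOKIE(2, 5); /* 2 frames up, slot 5 */
    assert(DEMO_COOKIE_SKIP(c) == 2);
    assert(DEMO_COOKIE_SLOT(c) == 5);
    return 0;
}
#endif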
/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects. For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * stack-clearing JSOP_POP.
 */
static JSBool
CheckSideEffects(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
                 JSBool *answer)
{
    JSBool ok;
    JSFunction *fun;
    JSParseNode *pn2;

    ok = JS_TRUE;
    if (!pn || *answer)
        return ok;

    switch (pn->pn_arity) {
      case PN_FUNC:
        /*
         * A named function is presumed useful: we can't yet know that it is
         * not called. The side effects are the creation of a scope object
         * to parent this function object, and the binding of the function's
         * name in that scope object. See comments at case JSOP_NAMEDFUNOBJ:
         * in jsinterp.c.
         */
        fun = (JSFunction *) pn->pn_funpob->object;
        if (fun->atom)
            *answer = JS_TRUE;
        break;

      case PN_LIST:
        if (pn->pn_op == JSOP_NOP ||
            pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
            pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, cg, pn2, answer);
        } else {
            /*
             * All invocation operations (construct: TOK_NEW, call: TOK_LP)
             * are presumed to be useful, because they may have side effects
             * even if their main effect (their return value) is discarded.
             *
             * TOK_LB binary trees of 3 or more nodes are flattened into lists
             * to avoid too much recursion. All such lists must be presumed
             * to be useful because each index operation could invoke a getter
             * (the JSOP_ARGUMENTS special case below, in the PN_BINARY case,
             * does not apply here: arguments[i][j] might invoke a getter).
             *
             * Likewise, array and object initialisers may call prototype
             * setters (the __defineSetter__ built-in, and writable __proto__
             * on Array.prototype create this hazard). Initialiser list nodes
             * have JSOP_NEWINIT in their pn_op.
             */
            *answer = JS_TRUE;
        }
        break;

      case PN_TERNARY:
        ok = CheckSideEffects(cx, cg, pn->pn_kid1, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid2, answer) &&
             CheckSideEffects(cx, cg, pn->pn_kid3, answer);
        break;

      case PN_BINARY:
        if (pn->pn_type == TOK_ASSIGN) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            pn2 = pn->pn_left;
            if (pn2->pn_type != TOK_NAME) {
                *answer = JS_TRUE;
            } else {
                if (!BindNameToSlot(cx, cg, pn2))
                    return JS_FALSE;
                if (!CheckSideEffects(cx, cg, pn->pn_right, answer))
                    return JS_FALSE;
                if (!*answer &&
                    (pn->pn_op != JSOP_NOP ||
                     pn2->pn_slot < 0 ||
                     !pn2->pn_const)) {
                    *answer = JS_TRUE;
                }
            }
        } else {
            if (pn->pn_op == JSOP_OR || pn->pn_op == JSOP_AND ||
                pn->pn_op == JSOP_STRICTEQ || pn->pn_op == JSOP_STRICTNE) {
                /*
                 * ||, &&, ===, and !== do not convert their operands via
                 * toString or valueOf method calls.
                 */
                ok = CheckSideEffects(cx, cg, pn->pn_left, answer) &&
                     CheckSideEffects(cx, cg, pn->pn_right, answer);
            } else {
                /*
                 * We can't easily prove that neither operand ever denotes an
                 * object with a toString or valueOf method.
                 */
                *answer = JS_TRUE;
            }
        }
        break;

      case PN_UNARY:
        switch (pn->pn_type) {
          case TOK_RP:
            ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
            break;

          case TOK_DELETE:
            pn2 = pn->pn_kid;
            switch (pn2->pn_type) {
              case TOK_NAME:
              case TOK_DOT:
#if JS_HAS_XML_SUPPORT
              case TOK_DBLDOT:
#endif
#if JS_HAS_LVALUE_RETURN
              case TOK_LP:
#endif
              case TOK_LB:
                /* All these delete addressing modes have effects too. */
                *answer = JS_TRUE;
                break;
              default:
                ok = CheckSideEffects(cx, cg, pn2, answer);
                break;
            }
            break;

          case TOK_UNARYOP:
            if (pn->pn_op == JSOP_NOT) {
                /* ! does not convert its operand via toString or valueOf. */
                ok = CheckSideEffects(cx, cg, pn->pn_kid, answer);
                break;
            }
            /* FALL THROUGH */

          default:
            /*
             * All of TOK_INC, TOK_DEC, TOK_THROW, TOK_YIELD, and TOK_DEFSHARP
             * have direct effects. Of the remaining unary-arity node types,
             * we can't easily prove that the operand never denotes an object
             * with a toString or valueOf method.
             */
            *answer = JS_TRUE;
            break;
        }
        break;

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->pn_type == TOK_NAME && pn->pn_op != JSOP_NOP) {
            if (!BindNameToSlot(cx, cg, pn))
                return JS_FALSE;
            if (pn->pn_slot < 0 && pn->pn_op != JSOP_ARGUMENTS) {
                /*
                 * Not an argument or local variable use, so this expression
                 * could invoke a getter that has side effects.
                 */
                *answer = JS_TRUE;
            }
        }
        pn2 = pn->pn_expr;
        if (pn->pn_type == TOK_DOT) {
            if (pn2->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (!(pn2->pn_op == JSOP_ARGUMENTS &&
                  pn->pn_atom == cx->runtime->atomState.lengthAtom)) {
                /*
                 * Any dotted property reference could call a getter, except
                 * for arguments.length where arguments is unambiguous.
                 */
                *answer = JS_TRUE;
            }
        }
        ok = CheckSideEffects(cx, cg, pn2, answer);
        break;

      case PN_NULLARY:
        if (pn->pn_type == TOK_DEBUGGER)
            *answer = JS_TRUE;
        break;
    }
    return ok;
}
static JSBool
EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
           JSBool callContext)
{
    JSOp op;

    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;
    op = PN_OP(pn);

    if (callContext) {
        switch (op) {
          case JSOP_NAME:
            op = JSOP_CALLNAME;
            break;
          case JSOP_GETGVAR:
            op = JSOP_CALLGVAR;
            break;
          case JSOP_GETARG:
            op = JSOP_CALLARG;
            break;
          case JSOP_GETLOCAL:
            op = JSOP_CALLLOCAL;
            break;
          case JSOP_GETUPVAR:
            op = JSOP_CALLUPVAR;
            break;
          default:
            JS_ASSERT(op == JSOP_ARGUMENTS);
            break;
        }
    }

    if (op == JSOP_ARGUMENTS) {
        if (js_Emit1(cx, cg, op) < 0)
            return JS_FALSE;
        if (callContext && js_Emit1(cx, cg, JSOP_NULL) < 0)
            return JS_FALSE;
    } else {
        if (pn->pn_slot >= 0) {
            EMIT_UINT16_IMM_OP(op, pn->pn_slot);
        } else {
            if (!EmitAtomOp(cx, pn, op, cg))
                return JS_FALSE;
        }
    }

    return JS_TRUE;
}
#if JS_HAS_XML_SUPPORT
static JSBool
EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    JSParseNode *pn2;
    uintN oldflags;

    JS_ASSERT(pn->pn_type == TOK_UNARYOP);
    JS_ASSERT(pn->pn_op == JSOP_XMLNAME);
    JS_ASSERT(op == JSOP_XMLNAME || op == JSOP_CALLXMLNAME);

    pn2 = pn->pn_kid;
    oldflags = cg->treeContext.flags;
    cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
    if (!js_EmitTree(cx, cg, pn2))
        return JS_FALSE;
    cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return js_Emit1(cx, cg, op) >= 0;
}
#endif
static JSBool
EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
           JSBool callContext)
{
    JSParseNode *pn2, *pndot, *pnup, *pndown;
    ptrdiff_t top;

    pn2 = pn->pn_expr;
    if (callContext) {
        JS_ASSERT(pn->pn_type == TOK_DOT);
        JS_ASSERT(op == JSOP_GETPROP);
        op = JSOP_CALLPROP;
    } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
        if (pn2->pn_op == JSOP_THIS) {
            if (pn->pn_atom != cx->runtime->atomState.lengthAtom) {
                /* Fast path for gets of |this.foo|. */
                return EmitAtomOp(cx, pn, JSOP_GETTHISPROP, cg);
            }
        } else if (pn2->pn_type == TOK_NAME) {
            /*
             * Try to optimize:
             *  - arguments.length into JSOP_ARGCNT
             *  - argname.prop into JSOP_GETARGPROP
             *  - localname.prop into JSOP_GETLOCALPROP
             * but don't do this if the property is 'length' -- prefer to emit
             * JSOP_GETARG, etc., and then JSOP_LENGTH.
             */
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (pn->pn_atom == cx->runtime->atomState.lengthAtom) {
                if (pn2->pn_op == JSOP_ARGUMENTS)
                    return js_Emit1(cx, cg, JSOP_ARGCNT) >= 0;
            } else {
                switch (pn2->pn_op) {
                  case JSOP_GETARG:
                    op = JSOP_GETARGPROP;
                    goto do_indexconst;
                  case JSOP_GETLOCAL:
                    op = JSOP_GETLOCALPROP;
                  do_indexconst: {
                        JSAtomListElement *ale;
                        jsatomid atomIndex;

                        ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
                        if (!ale)
                            return JS_FALSE;
                        atomIndex = ALE_INDEX(ale);
                        return EmitSlotIndexOp(cx, op, pn2->pn_slot, atomIndex, cg);
                    }

                  default:;
                }
            }
        }
    }

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->pn_type == TOK_DOT) {
        pndot = pn2;
        pnup = NULL;
        top = CG_OFFSET(cg);
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (pndown->pn_type != TOK_DOT)
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!js_EmitTree(cx, cg, pndown))
            return JS_FALSE;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                               CG_OFFSET(cg) - pndown->pn_offset) < 0) {
                return JS_FALSE;
            }
            if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg))
                return JS_FALSE;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != NULL);
    } else {
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
    }

    if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                       CG_OFFSET(cg) - pn2->pn_offset) < 0) {
        return JS_FALSE;
    }

    return EmitAtomOp(cx, pn, op, cg);
}
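
/*
 * A sketch of the pn_expr reversal above: for |a.b.c.d| the parser links
 * d -> c -> b -> a via pn_expr. We flip the links so we can walk up from
 * |a|, emitting |a|, then getprop b, getprop c, getprop d, restoring each
 * link on the way back so the tree is unchanged when we return.
 */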
static JSBool
EmitElemOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
    ptrdiff_t top;
    JSParseNode *left, *right, *next, ltmp, rtmp;
    jsint slot;

    top = CG_OFFSET(cg);
    if (pn->pn_arity == PN_LIST) {
        /* Left-associative operator chain to avoid too much recursion. */
        JS_ASSERT(pn->pn_op == JSOP_GETELEM);
        JS_ASSERT(pn->pn_count >= 3);
        left = pn->pn_head;
        right = PN_LAST(pn);
        next = left->pn_next;
        JS_ASSERT(next != right);

        /*
         * Try to optimize arguments[0][j]... into JSOP_ARGSUB<0> followed by
         * one or more index expression and JSOP_GETELEM op pairs.
         */
        if (left->pn_type == TOK_NAME && next->pn_type == TOK_NUMBER) {
            if (!BindNameToSlot(cx, cg, left))
                return JS_FALSE;
            if (left->pn_op == JSOP_ARGUMENTS &&
                JSDOUBLE_IS_INT(next->pn_dval, slot) &&
                (jsuint)slot < JS_BIT(16)) {
                /*
                 * arguments[i]() requires the arguments object as "this".
                 * Check that we never generate a list for that usage.
                 */
                JS_ASSERT(op != JSOP_CALLELEM || next->pn_next);
                left->pn_offset = next->pn_offset = top;
                EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                left = next;
                next = left->pn_next;
            }
        }

        /*
         * Check whether we generated JSOP_ARGSUB, just above, and have only
         * one more index expression to emit. Given arguments[0][j], we must
         * skip the while loop altogether, falling through to emit code for j
         * (in the subtree referenced by right), followed by the annotated op,
         * at the bottom of this function.
         */
        JS_ASSERT(next != right || pn->pn_count == 3);
        if (left == pn->pn_head) {
            if (!js_EmitTree(cx, cg, left))
                return JS_FALSE;
        }
        while (next != right) {
            if (!js_EmitTree(cx, cg, next))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
                return JS_FALSE;
            next = next->pn_next;
        }
    } else {
        if (pn->pn_arity == PN_NAME) {
            /*
             * Set left and right so pn appears to be a TOK_LB node, instead
             * of a TOK_DOT node. See the TOK_FOR/IN case in js_EmitTree, and
             * EmitDestructuringOps nearer below. In the destructuring case,
             * the base expression (pn_expr) of the name may be null, which
             * means we have to emit a JSOP_BINDNAME.
             */
            left = pn->pn_expr;
            if (!left) {
                left = &ltmp;
                left->pn_type = TOK_STRING;
                left->pn_op = JSOP_BINDNAME;
                left->pn_arity = PN_NULLARY;
                left->pn_pos = pn->pn_pos;
                left->pn_atom = pn->pn_atom;
            }
            right = &rtmp;
            right->pn_type = TOK_STRING;
            JS_ASSERT(ATOM_IS_STRING(pn->pn_atom));
            right->pn_op = js_IsIdentifier(ATOM_TO_STRING(pn->pn_atom))
                           ? JSOP_QNAMEPART
                           : JSOP_STRING;
            right->pn_arity = PN_NULLARY;
            right->pn_pos = pn->pn_pos;
            right->pn_atom = pn->pn_atom;
        } else {
            JS_ASSERT(pn->pn_arity == PN_BINARY);
            left = pn->pn_left;
            right = pn->pn_right;
        }

        /* Try to optimize arguments[0] (e.g.) into JSOP_ARGSUB<0>. */
        if (op == JSOP_GETELEM &&
            left->pn_type == TOK_NAME &&
            right->pn_type == TOK_NUMBER) {
            if (!BindNameToSlot(cx, cg, left))
                return JS_FALSE;
            if (left->pn_op == JSOP_ARGUMENTS &&
                JSDOUBLE_IS_INT(right->pn_dval, slot) &&
                (jsuint)slot < JS_BIT(16)) {
                left->pn_offset = right->pn_offset = top;
                EMIT_UINT16_IMM_OP(JSOP_ARGSUB, (jsatomid)slot);
                return JS_TRUE;
            }
        }

        if (!js_EmitTree(cx, cg, left))
            return JS_FALSE;
    }

    /* The right side of the descendant operator is implicitly quoted. */
    JS_ASSERT(op != JSOP_DESCENDANTS || right->pn_type != TOK_STRING ||
              right->pn_op == JSOP_QNAMEPART);
    if (!js_EmitTree(cx, cg, right))
        return JS_FALSE;
    if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
        return JS_FALSE;
    return js_Emit1(cx, cg, op) >= 0;
}
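
/*
 * For illustration: inside a function, |arguments[0]| compiles to the
 * single op JSOP_ARGSUB<0> via the fast path above, while a general
 * |obj[i]| takes the long path: emit obj, emit i, then the annotated
 * JSOP_GETELEM.
 */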
static JSBool
EmitNumberOp(JSContext *cx, jsdouble dval, JSCodeGenerator *cg)
{
    jsint ival;
    uint32 u;
    ptrdiff_t off;
    jsbytecode *pc;
    JSAtom *atom;
    JSAtomListElement *ale;

    if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) {
        if (ival == 0)
            return js_Emit1(cx, cg, JSOP_ZERO) >= 0;
        if (ival == 1)
            return js_Emit1(cx, cg, JSOP_ONE) >= 0;
        if ((jsint)(int8)ival == ival)
            return js_Emit2(cx, cg, JSOP_INT8, (jsbytecode)(int8)ival) >= 0;

        u = (uint32)ival;
        if (u < JS_BIT(16)) {
            EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
        } else if (u < JS_BIT(24)) {
            off = js_EmitN(cx, cg, JSOP_UINT24, 3);
            if (off < 0)
                return JS_FALSE;
            pc = CG_CODE(cg, off);
            SET_UINT24(pc, u);
        } else {
            off = js_EmitN(cx, cg, JSOP_INT32, 4);
            if (off < 0)
                return JS_FALSE;
            pc = CG_CODE(cg, off);
            SET_INT32(pc, ival);
        }
        return JS_TRUE;
    }

    atom = js_AtomizeDouble(cx, dval);
    if (!atom)
        return JS_FALSE;

    ale = js_IndexAtom(cx, atom, &cg->atomList);
    if (!ale)
        return JS_FALSE;
    return EmitIndexOp(cx, JSOP_DOUBLE, ALE_INDEX(ale), cg);
}
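
/*
 * Summary of the encodings chosen above, by value (a reading aid; byte
 * counts assume the usual one-byte opcode):
 *
 *   0, 1                JSOP_ZERO, JSOP_ONE       1 byte
 *   -128..127           JSOP_INT8                 2 bytes
 *   0..2^16-1           JSOP_UINT16               3 bytes
 *   0..2^24-1           JSOP_UINT24               4 bytes
 *   other integers      JSOP_INT32                5 bytes
 *   non-integral dvals  JSOP_DOUBLE (atomized constant)
 */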
static JSBool
EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
           JSStmtInfo *stmtInfo)
{
    JSOp switchOp;
    JSBool ok, hasDefault, constPropagated;
    ptrdiff_t top, off, defaultOffset;
    JSParseNode *pn2, *pn3, *pn4;
    uint32 caseCount, tableLength;
    JSParseNode **table;
    jsdouble d;
    jsint i, low, high;
    jsval v;
    JSAtom *atom;
    JSAtomListElement *ale;
    intN noteIndex;
    size_t switchSize, tableSize;
    jsbytecode *pc, *savepc;
#if JS_HAS_BLOCK_SCOPE
    jsint count;
#endif

    /* Try for most optimal, fall back if not dense ints, and per ECMAv2. */
    switchOp = JSOP_TABLESWITCH;
    ok = JS_TRUE;
    hasDefault = constPropagated = JS_FALSE;
    defaultOffset = -1;

    /*
     * If the switch contains let variables scoped by its body, model the
     * resulting block on the stack first, before emitting the discriminant's
     * bytecode (in case the discriminant contains a stack-model dependency
     * such as a let expression).
     */
    pn2 = pn->pn_right;
#if JS_HAS_BLOCK_SCOPE
    if (pn2->pn_type == TOK_LEXICALSCOPE) {
        /*
         * Push the body's block scope before discriminant code-gen for proper
         * static block scope linkage in case the discriminant contains a let
         * expression. The block's locals must lie under the discriminant on
         * the stack so that case-dispatch bytecodes can find the discriminant
         * on top of stack.
         */
        count = OBJ_BLOCK_COUNT(cx, pn2->pn_pob->object);
        js_PushBlockScope(&cg->treeContext, stmtInfo, pn2->pn_pob->object, -1);
        stmtInfo->type = STMT_SWITCH;

        /* Emit JSOP_ENTERBLOCK before code to evaluate the discriminant. */
        if (!EmitObjectOp(cx, pn2->pn_pob, JSOP_ENTERBLOCK, cg))
            return JS_FALSE;

        /*
         * Pop the switch's statement info around discriminant code-gen. Note
         * how this leaves cg->treeContext.blockChain referencing the switch's
         * block scope object, which is necessary for correct block parenting
         * in the case where the discriminant contains a let expression.
         */
        cg->treeContext.topStmt = stmtInfo->down;
        cg->treeContext.topScopeStmt = stmtInfo->downScope;
    }
#endif

    /*
     * Emit code for the discriminant first (or nearly first, in the case of a
     * switch whose body is a block scope).
     */
    if (!js_EmitTree(cx, cg, pn->pn_left))
        return JS_FALSE;

    /* Switch bytecodes run from here till end of final case. */
    top = CG_OFFSET(cg);
#if !JS_HAS_BLOCK_SCOPE
    js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
#else
    if (pn2->pn_type == TOK_LC) {
        js_PushStatement(&cg->treeContext, stmtInfo, STMT_SWITCH, top);
    } else {
        /* Re-push the switch's statement info record. */
        cg->treeContext.topStmt = cg->treeContext.topScopeStmt = stmtInfo;

        /* Set the statement info record's idea of top. */
        stmtInfo->update = top;

        /* Advance pn2 to refer to the switch case list. */
        pn2 = pn2->pn_expr;
    }
#endif

    caseCount = pn2->pn_count;
    tableLength = 0;
    table = NULL;

    if (caseCount == 0 ||
        (caseCount == 1 &&
         (hasDefault = (pn2->pn_head->pn_type == TOK_DEFAULT)))) {
        caseCount = 0;
        low = 0;
        high = -1;
    } else {
#define INTMAP_LENGTH   256
        jsbitmap intmap_space[INTMAP_LENGTH];
        jsbitmap *intmap = NULL;
        int32 intmap_bitlen = 0;

        low  = JSVAL_INT_MAX;
        high = JSVAL_INT_MIN;

        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            if (pn3->pn_type == TOK_DEFAULT) {
                hasDefault = JS_TRUE;
                caseCount--;    /* one of the "cases" was the default */
                continue;
            }

            JS_ASSERT(pn3->pn_type == TOK_CASE);
            if (switchOp == JSOP_CONDSWITCH)
                continue;

            pn4 = pn3->pn_left;
            switch (pn4->pn_type) {
              case TOK_NUMBER:
                d = pn4->pn_dval;
                if (JSDOUBLE_IS_INT(d, i) && INT_FITS_IN_JSVAL(i)) {
                    pn3->pn_val = INT_TO_JSVAL(i);
                } else {
                    atom = js_AtomizeDouble(cx, d);
                    if (!atom)
                        return JS_FALSE;
                    pn3->pn_val = ATOM_KEY(atom);
                }
                break;
              case TOK_STRING:
                pn3->pn_val = ATOM_KEY(pn4->pn_atom);
                break;
              case TOK_NAME:
                if (!pn4->pn_expr) {
                    ok = LookupCompileTimeConstant(cx, cg, pn4->pn_atom, &v);
                    if (!ok)
                        return JS_FALSE;
                    if (v != JSVAL_HOLE) {
                        if (!JSVAL_IS_PRIMITIVE(v)) {
                            /*
                             * XXX JSOP_LOOKUPSWITCH does not support const-
                             * propagated object values, see bug 407186.
                             */
                            switchOp = JSOP_CONDSWITCH;
                            continue;
                        }
                        pn3->pn_val = v;
                        constPropagated = JS_TRUE;
                        break;
                    }
                }
                /* FALL THROUGH */
              case TOK_PRIMARY:
                if (pn4->pn_op == JSOP_TRUE) {
                    pn3->pn_val = JSVAL_TRUE;
                    break;
                }
                if (pn4->pn_op == JSOP_FALSE) {
                    pn3->pn_val = JSVAL_FALSE;
                    break;
                }
                /* FALL THROUGH */
              default:
                switchOp = JSOP_CONDSWITCH;
                continue;
            }

            JS_ASSERT(JSVAL_IS_PRIMITIVE(pn3->pn_val));

            if (switchOp != JSOP_TABLESWITCH)
                continue;
            if (!JSVAL_IS_INT(pn3->pn_val)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            i = JSVAL_TO_INT(pn3->pn_val);
            if ((jsuint)(i + (jsint)JS_BIT(15)) >= (jsuint)JS_BIT(16)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            if (i < low)
                low = i;
            if (i > high)
                high = i;

            /*
             * Check for duplicates, which require a JSOP_LOOKUPSWITCH.
             * We bias i by 65536 if it's negative, and hope that's a rare
             * case (because it requires a malloc'd bitmap).
             */
            if (i < 0)
                i += JS_BIT(16);
            if (i >= intmap_bitlen) {
                if (!intmap &&
                    i < (INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2)) {
                    intmap = intmap_space;
                    intmap_bitlen = INTMAP_LENGTH << JS_BITS_PER_WORD_LOG2;
                } else {
                    /* Just grab 8K for the worst-case bitmap. */
                    intmap_bitlen = JS_BIT(16);
                    intmap = (jsbitmap *)
                        JS_malloc(cx,
                                  (JS_BIT(16) >> JS_BITS_PER_WORD_LOG2)
                                  * sizeof(jsbitmap));
                    if (!intmap) {
                        JS_ReportOutOfMemory(cx);
                        return JS_FALSE;
                    }
                }
                memset(intmap, 0, intmap_bitlen >> JS_BITS_PER_BYTE_LOG2);
            }
            if (JS_TEST_BIT(intmap, i)) {
                switchOp = JSOP_LOOKUPSWITCH;
                continue;
            }
            JS_SET_BIT(intmap, i);
        }

        if (intmap && intmap != intmap_space)
            JS_free(cx, intmap);

        /*
         * Compute table length and select lookup instead if overlarge or
         * more than half-sparse.
         */
        if (switchOp == JSOP_TABLESWITCH) {
            tableLength = (uint32)(high - low + 1);
            if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount)
                switchOp = JSOP_LOOKUPSWITCH;
        } else if (switchOp == JSOP_LOOKUPSWITCH) {
            /*
             * Lookup switch supports only atom indexes below 64K limit.
             * Conservatively estimate the maximum possible index during
             * switch generation and use conditional switch if it exceeds
             * the limit.
             */
            if (caseCount + cg->atomList.count > JS_BIT(16))
                switchOp = JSOP_CONDSWITCH;
        }
    }
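
    /*
     * A reading aid for the selection above: |case 1: case 2: case 4:| is
     * dense enough for JSOP_TABLESWITCH; string cases or a sparse range such
     * as |case 1: case 1000000:| force JSOP_LOOKUPSWITCH; and any
     * non-constant case expression forces JSOP_CONDSWITCH, which evaluates
     * each case expression in order, per ECMA.
     */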
    /*
     * Emit a note with two offsets: first tells total switch code length,
     * second tells offset to first JSOP_CASE if condswitch.
     */
    noteIndex = js_NewSrcNote3(cx, cg, SRC_SWITCH, 0, 0);
    if (noteIndex < 0)
        return JS_FALSE;

    if (switchOp == JSOP_CONDSWITCH) {
        /*
         * 0 bytes of immediate for unoptimized ECMAv2 switch.
         */
        switchSize = 0;
    } else if (switchOp == JSOP_TABLESWITCH) {
        /*
         * 3 offsets (len, low, high) before the table, 1 per entry.
         */
        switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength));
    } else {
        /*
         * JSOP_LOOKUPSWITCH:
         * 1 offset (len) and 1 atom index (npairs) before the table,
         * 1 atom index and 1 jump offset per entry.
         */
        switchSize = (size_t)(JUMP_OFFSET_LEN + INDEX_LEN +
                              (INDEX_LEN + JUMP_OFFSET_LEN) * caseCount);
    }

    /*
     * Emit switchOp followed by switchSize bytes of jump or lookup table.
     *
     * If switchOp is JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH, it is crucial
     * to emit the immediate operand(s) by which bytecode readers such as
     * BuildSpanDepTable discover the length of the switch opcode *before*
     * calling js_SetJumpOffset (which may call BuildSpanDepTable). It's
     * also important to zero all unknown jump offset immediate operands,
     * so they can be converted to span dependencies with null targets to
     * be computed later (js_EmitN zeros switchSize bytes after switchOp).
     */
    if (js_EmitN(cx, cg, switchOp, switchSize) < 0)
        return JS_FALSE;

    off = -1;
    if (switchOp == JSOP_CONDSWITCH) {
        intN caseNoteIndex = -1;
        JSBool beforeCases = JS_TRUE;

        /* Emit code for evaluating cases and jumping to case statements. */
        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            pn4 = pn3->pn_left;
            if (pn4 && !js_EmitTree(cx, cg, pn4))
                return JS_FALSE;
            if (caseNoteIndex >= 0) {
                /* off is the previous JSOP_CASE's bytecode offset. */
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
                                         CG_OFFSET(cg) - off)) {
                    return JS_FALSE;
                }
            }
            if (!pn4) {
                JS_ASSERT(pn3->pn_type == TOK_DEFAULT);
                continue;
            }
            caseNoteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
            if (caseNoteIndex < 0)
                return JS_FALSE;
            off = EmitJump(cx, cg, JSOP_CASE, 0);
            if (off < 0)
                return JS_FALSE;
            pn3->pn_offset = off;
            if (beforeCases) {
                uintN noteCount, noteCountDelta;

                /* Switch note's second offset is to first JSOP_CASE. */
                noteCount = CG_NOTE_COUNT(cg);
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
                                         off - top)) {
                    return JS_FALSE;
                }
                noteCountDelta = CG_NOTE_COUNT(cg) - noteCount;
                if (noteCountDelta != 0)
                    caseNoteIndex += noteCountDelta;
                beforeCases = JS_FALSE;
            }
        }

        /*
         * If we didn't have an explicit default (which could fall in between
         * cases, preventing us from fusing this js_SetSrcNoteOffset with the
         * call in the loop above), link the last case to the implicit default
         * for the decompiler.
         */
        if (!hasDefault &&
            caseNoteIndex >= 0 &&
            !js_SetSrcNoteOffset(cx, cg, (uintN)caseNoteIndex, 0,
                                 CG_OFFSET(cg) - off)) {
            return JS_FALSE;
        }

        /* Emit default even if no explicit default statement. */
        defaultOffset = EmitJump(cx, cg, JSOP_DEFAULT, 0);
        if (defaultOffset < 0)
            return JS_FALSE;
    } else {
        pc = CG_CODE(cg, top + JUMP_OFFSET_LEN);

        if (switchOp == JSOP_TABLESWITCH) {
            /* Fill in switch bounds, which we know fit in 16-bit offsets. */
            SET_JUMP_OFFSET(pc, low);
            pc += JUMP_OFFSET_LEN;
            SET_JUMP_OFFSET(pc, high);
            pc += JUMP_OFFSET_LEN;

            /*
             * Use malloc to avoid arena bloat for programs with many switches.
             * We free table if non-null at label out, so all control flow must
             * exit this function through goto out or goto bad.
             */
            if (tableLength != 0) {
                tableSize = (size_t)tableLength * sizeof *table;
                table = (JSParseNode **) JS_malloc(cx, tableSize);
                if (!table)
                    return JS_FALSE;
                memset(table, 0, tableSize);
                for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                    if (pn3->pn_type == TOK_DEFAULT)
                        continue;
                    i = JSVAL_TO_INT(pn3->pn_val);
                    i -= low;
                    JS_ASSERT((uint32)i < tableLength);
                    table[i] = pn3;
                }
            }
        } else {
            JS_ASSERT(switchOp == JSOP_LOOKUPSWITCH);

            /* Fill in the number of cases. */
            SET_INDEX(pc, caseCount);
            pc += INDEX_LEN;
        }

        /*
         * After this point, all control flow involving JSOP_TABLESWITCH
         * must set ok and goto out to exit this function. To keep things
         * simple, all switchOp cases exit that way.
         */
        MUST_FLOW_THROUGH("out");
        if (cg->spanDeps) {
            /*
             * We have already generated at least one big jump so we must
             * explicitly add span dependencies for the switch jumps. When
             * called below, js_SetJumpOffset can only do it when patching
             * the first big jump or when cg->spanDeps is null.
             */
            if (!AddSwitchSpanDeps(cx, cg, CG_CODE(cg, top)))
                goto bad;
        }

        if (constPropagated) {
            /*
             * Skip switchOp, as we are not setting jump offsets in the two
             * for loops below. We'll restore CG_NEXT(cg) from savepc after,
             * unless there was an error.
             */
            savepc = CG_NEXT(cg);
            CG_NEXT(cg) = pc + 1;
            if (switchOp == JSOP_TABLESWITCH) {
                for (i = 0; i < (jsint)tableLength; i++) {
                    pn3 = table[i];
                    if (pn3 &&
                        (pn4 = pn3->pn_left) != NULL &&
                        pn4->pn_type == TOK_NAME) {
                        /* Note a propagated constant with the const's name. */
                        JS_ASSERT(!pn4->pn_expr);
                        ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
                        if (!ale)
                            goto bad;
                        CG_NEXT(cg) = pc;
                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
                                           ALE_INDEX(ale)) < 0) {
                            goto bad;
                        }
                    }
                    pc += JUMP_OFFSET_LEN;
                }
            } else {
                for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                    pn4 = pn3->pn_left;
                    if (pn4 && pn4->pn_type == TOK_NAME) {
                        /* Note a propagated constant with the const's name. */
                        JS_ASSERT(!pn4->pn_expr);
                        ale = js_IndexAtom(cx, pn4->pn_atom, &cg->atomList);
                        if (!ale)
                            goto bad;
                        CG_NEXT(cg) = pc;
                        if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
                                           ALE_INDEX(ale)) < 0) {
                            goto bad;
                        }
                    }
                    pc += INDEX_LEN + JUMP_OFFSET_LEN;
                }
            }
            CG_NEXT(cg) = savepc;
        }
    }

    /* Emit code for each case's statements, copying pn_offset up to pn3. */
    for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
        if (switchOp == JSOP_CONDSWITCH && pn3->pn_type != TOK_DEFAULT)
            CHECK_AND_SET_JUMP_OFFSET_AT_CUSTOM(cx, cg, pn3->pn_offset, goto bad);
        pn4 = pn3->pn_right;
        ok = js_EmitTree(cx, cg, pn4);
        if (!ok)
            goto out;
        pn3->pn_offset = pn4->pn_offset;
        if (pn3->pn_type == TOK_DEFAULT)
            off = pn3->pn_offset - top;
    }

    if (!hasDefault) {
        /* If no default case, offset for default is to end of switch. */
        off = CG_OFFSET(cg) - top;
    }

    /* We better have set "off" by now. */
    JS_ASSERT(off != -1);

    /* Set the default offset (to end of switch if no default). */
    if (switchOp == JSOP_CONDSWITCH) {
        pc = NULL;
        JS_ASSERT(defaultOffset != -1);
        ok = js_SetJumpOffset(cx, cg, CG_CODE(cg, defaultOffset),
                              off - (defaultOffset - top));
        if (!ok)
            goto out;
    } else {
        pc = CG_CODE(cg, top);
        ok = js_SetJumpOffset(cx, cg, pc, off);
        if (!ok)
            goto out;
        pc += JUMP_OFFSET_LEN;
    }

    /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
    off = CG_OFFSET(cg) - top;
    ok = js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, off);
    if (!ok)
        goto out;

    if (switchOp == JSOP_TABLESWITCH) {
        /* Skip over the already-initialized switch bounds. */
        pc += 2 * JUMP_OFFSET_LEN;

        /* Fill in the jump table, if there is one. */
        for (i = 0; i < (jsint)tableLength; i++) {
            pn3 = table[i];
            off = pn3 ? pn3->pn_offset - top : 0;
            ok = js_SetJumpOffset(cx, cg, pc, off);
            if (!ok)
                goto out;
            pc += JUMP_OFFSET_LEN;
        }
    } else if (switchOp == JSOP_LOOKUPSWITCH) {
        /* Skip over the already-initialized number of cases. */
        pc += INDEX_LEN;

        for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            if (pn3->pn_type == TOK_DEFAULT)
                continue;
            if (!js_AtomizePrimitiveValue(cx, pn3->pn_val, &atom))
                goto bad;
            ale = js_IndexAtom(cx, atom, &cg->atomList);
            if (!ale)
                goto bad;
            SET_INDEX(pc, ALE_INDEX(ale));
            pc += INDEX_LEN;

            off = pn3->pn_offset - top;
            ok = js_SetJumpOffset(cx, cg, pc, off);
            if (!ok)
                goto out;
            pc += JUMP_OFFSET_LEN;
        }
    }

out:
    if (table)
        JS_free(cx, table);
    if (ok) {
        ok = js_PopStatementCG(cx, cg);

#if JS_HAS_BLOCK_SCOPE
        if (ok && pn->pn_right->pn_type == TOK_LEXICALSCOPE)
            EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
#endif
    }
    return ok;

bad:
    ok = JS_FALSE;
    goto out;
}
JSBool
js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
{
    if (cg->treeContext.flags & TCF_FUN_IS_GENERATOR) {
        /* JSOP_GENERATOR must be the first instruction. */
        CG_SWITCH_TO_PROLOG(cg);
        JS_ASSERT(CG_NEXT(cg) == CG_BASE(cg));
        if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
            return JS_FALSE;
        CG_SWITCH_TO_MAIN(cg);
    }

    return js_EmitTree(cx, cg, body) &&
           js_Emit1(cx, cg, JSOP_STOP) >= 0 &&
           js_NewScriptFromCG(cx, cg);
}
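
/*
 * E.g., for |function gen() { yield 1; }| (with generators enabled), the
 * prolog starts with JSOP_GENERATOR as asserted above, so the interpreter
 * can create the generator object before any main-section code runs.
 */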
/* A macro for inlining at the top of js_EmitTree (whence it came). */
#define UPDATE_LINE_NUMBER_NOTES(cx, cg, pn)                                  \
    JS_BEGIN_MACRO                                                            \
        uintN line_ = (pn)->pn_pos.begin.lineno;                              \
        uintN delta_ = line_ - CG_CURRENT_LINE(cg);                           \
        if (delta_ != 0) {                                                    \
            /*                                                                \
             * Encode any change in the current source line number by using  \
             * either several SRC_NEWLINE notes or just one SRC_SETLINE note, \
             * whichever consumes less space.                                 \
             *                                                                \
             * NB: We handle backward line number deltas (possible with for  \
             * loops where the update part is emitted after the body, but its \
             * line number is <= any line number in the body) here by letting \
             * unsigned delta_ wrap to a very large number, which triggers a \
             * SRC_SETLINE.                                                   \
             */                                                               \
            CG_CURRENT_LINE(cg) = line_;                                      \
            if (delta_ >= (uintN)(2 + ((line_ > SN_3BYTE_OFFSET_MASK)<<1))) { \
                if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)line_) < 0)\
                    return JS_FALSE;                                          \
            } else {                                                          \
                do {                                                          \
                    if (js_NewSrcNote(cx, cg, SRC_NEWLINE) < 0)               \
                        return JS_FALSE;                                      \
                } while (--delta_ != 0);                                      \
            }                                                                 \
        }                                                                     \
    JS_END_MACRO
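
/*
 * Worked example of the delta_ threshold above: each SRC_NEWLINE note costs
 * one byte, so for delta_ == 1 a newline note wins; at delta_ >= 2 (or >= 4
 * once line_ no longer fits in a short offset, i.e. exceeds
 * SN_3BYTE_OFFSET_MASK) a single SRC_SETLINE note is no bigger, so we
 * switch to it.
 */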
/* A function, so that we avoid macro-bloating all the other callsites. */
static JSBool
UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);
    return JS_TRUE;
}

static JSBool
MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                 JSParseNode *pn, jsatomid *result)
{
    jsatomid atomIndex;
    JSAtomListElement *ale;

    if (pn->pn_slot >= 0) {
        atomIndex = (jsatomid) pn->pn_slot;
    } else {
        ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
        if (!ale)
            return JS_FALSE;
        atomIndex = ALE_INDEX(ale);
    }

    if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
        (!(cg->treeContext.flags & TCF_IN_FUNCTION) ||
         (cg->treeContext.flags & TCF_FUN_HEAVYWEIGHT))) {
        /* Emit a prolog bytecode to predefine the variable. */
        CG_SWITCH_TO_PROLOG(cg);
        if (!UpdateLineNumberNotes(cx, cg, pn))
            return JS_FALSE;
        EMIT_INDEX_OP(prologOp, atomIndex);
        CG_SWITCH_TO_MAIN(cg);
    }

    if (result)
        *result = atomIndex;
    return JS_TRUE;
}
#if JS_HAS_DESTRUCTURING

typedef JSBool
(*DestructuringDeclEmitter)(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                            JSParseNode *pn);

static JSBool
EmitDestructuringDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                      JSParseNode *pn)
{
    JS_ASSERT(pn->pn_type == TOK_NAME);
    if (!BindNameToSlot(cx, cg, pn))
        return JS_FALSE;

    JS_ASSERT(pn->pn_op != JSOP_ARGUMENTS);
    return MaybeEmitVarDecl(cx, cg, prologOp, pn, NULL);
}

static JSBool
EmitDestructuringDecls(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
                       JSParseNode *pn)
{
    JSParseNode *pn2, *pn3;
    DestructuringDeclEmitter emitter;

    if (pn->pn_type == TOK_RB) {
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            if (pn2->pn_type == TOK_COMMA)
                continue;
            emitter = (pn2->pn_type == TOK_NAME)
                      ? EmitDestructuringDecl
                      : EmitDestructuringDecls;
            if (!emitter(cx, cg, prologOp, pn2))
                return JS_FALSE;
        }
    } else {
        JS_ASSERT(pn->pn_type == TOK_RC);
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            pn3 = pn2->pn_right;
            emitter = (pn3->pn_type == TOK_NAME)
                      ? EmitDestructuringDecl
                      : EmitDestructuringDecls;
            if (!emitter(cx, cg, prologOp, pn3))
                return JS_FALSE;
        }
    }
    return JS_TRUE;
}
static JSBool
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn);

static JSBool
EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    jsuint slot;

    /* Skip any parenthesization. */
    while (pn->pn_type == TOK_RP)
        pn = pn->pn_kid;

    /*
     * Now emit the lvalue opcode sequence. If the lvalue is a nested
     * destructuring initialiser-form, call ourselves to handle it, then
     * pop the matched value. Otherwise emit an lvalue bytecode sequence
     * ending with a JSOP_ENUMELEM or equivalent op.
     */
    if (pn->pn_type == TOK_RB || pn->pn_type == TOK_RC) {
        if (!EmitDestructuringOpsHelper(cx, cg, pn))
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_POP) < 0)
            return JS_FALSE;
    } else {
        if (pn->pn_type == TOK_NAME && !BindNameToSlot(cx, cg, pn))
            return JS_FALSE;

        switch (pn->pn_op) {
          case JSOP_SETNAME:
            /*
             * NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
             * we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
             * So here and for JSOP_ENUMCONSTELEM, we use EmitElemOp.
             */
            if (!EmitElemOp(cx, pn, JSOP_ENUMELEM, cg))
                return JS_FALSE;
            break;

          case JSOP_SETCONST:
            if (!EmitElemOp(cx, pn, JSOP_ENUMCONSTELEM, cg))
                return JS_FALSE;
            break;

          case JSOP_SETLOCAL:
            slot = (jsuint) pn->pn_slot;
            EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, slot);
            break;

          case JSOP_SETARG:
          case JSOP_SETGVAR:
            slot = (jsuint) pn->pn_slot;
            EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
            break;

          default:
#if JS_HAS_LVALUE_RETURN || JS_HAS_XML_SUPPORT
          {
            ptrdiff_t top;

            top = CG_OFFSET(cg);
            if (!js_EmitTree(cx, cg, pn))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
                return JS_FALSE;
            break;
          }
#else
            JS_ASSERT(0);
#endif
        }
    }

    return JS_TRUE;
}
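
/*
 * For illustration: in |let [a, b] = rhs| the lvalues are names, so each
 * element lands in one of the slot-op cases above (e.g. JSOP_SETLOCALPOP),
 * whereas a nested pattern such as |[[a], b]| recurses through
 * EmitDestructuringOpsHelper and then pops the matched sub-value.
 */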
/*
 * Recursive helper for EmitDestructuringOps.
 *
 * Given a value to destructure on the stack, walk over an object or array
 * initialiser at pn, emitting bytecodes to match property values and store
 * them in the lvalues identified by the matched property names.
 */
static JSBool
EmitDestructuringOpsHelper(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    jsuint index;
    JSParseNode *pn2, *pn3;
    JSBool doElemOp;
    intN stackDepth = cg->stackDepth;

    JS_ASSERT(stackDepth != 0);
    JS_ASSERT(pn->pn_arity == PN_LIST);
    JS_ASSERT(pn->pn_type == TOK_RB || pn->pn_type == TOK_RC);

    if (pn->pn_count == 0) {
        /* Emit a DUP;POP sequence for the decompiler. */
        return js_Emit1(cx, cg, JSOP_DUP) >= 0 &&
               js_Emit1(cx, cg, JSOP_POP) >= 0;
    }

    index = 0;
    for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
        /*
         * Duplicate the value being destructured to use as a reference base.
         */
        if (js_Emit1(cx, cg, JSOP_DUP) < 0)
            return JS_FALSE;

        /*
         * Now push the property name currently being matched, which is either
         * the array initialiser's current index, or the current property name
         * "label" on the left of a colon in the object initialiser. Set pn3
         * to the lvalue node, which is in the value-initializing position.
         */
        doElemOp = JS_TRUE;
        if (pn->pn_type == TOK_RB) {
            if (!EmitNumberOp(cx, index, cg))
                return JS_FALSE;
            pn3 = pn2;
        } else {
            JS_ASSERT(pn->pn_type == TOK_RC);
            JS_ASSERT(pn2->pn_type == TOK_COLON);
            pn3 = pn2->pn_left;
            if (pn3->pn_type == TOK_NUMBER) {
                /*
                 * If we are emitting an object destructuring initialiser,
                 * annotate the index op with SRC_INITPROP so we know we are
                 * not decompiling an array initialiser.
                 */
                if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
                    return JS_FALSE;
                if (!EmitNumberOp(cx, pn3->pn_dval, cg))
                    return JS_FALSE;
            } else {
                JS_ASSERT(pn3->pn_type == TOK_STRING ||
                          pn3->pn_type == TOK_NAME);
                if (!EmitAtomOp(cx, pn3, JSOP_GETPROP, cg))
                    return JS_FALSE;
                doElemOp = JS_FALSE;
            }
            pn3 = pn2->pn_right;
        }

        if (doElemOp) {
            /*
             * Ok, get the value of the matching property name. This leaves
             * that value on top of the value being destructured, so the stack
             * is one deeper than when we started.
             */
            if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
                return JS_FALSE;
            JS_ASSERT(cg->stackDepth == stackDepth + 1);
        }

        /* Nullary comma node makes a hole in the array destructurer. */
        if (pn3->pn_type == TOK_COMMA && pn3->pn_arity == PN_NULLARY) {
            JS_ASSERT(pn->pn_type == TOK_RB);
            JS_ASSERT(pn2 == pn3);
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
        } else {
            if (!EmitDestructuringLHS(cx, cg, pn3))
                return JS_FALSE;
        }

        JS_ASSERT(cg->stackDepth == stackDepth);
        ++index;
    }

    return JS_TRUE;
}
static ptrdiff_t
OpToDeclType(JSOp op)
{
    switch (op) {
      case JSOP_NOP:
        return SRC_DECL_LET;
      case JSOP_DEFCONST:
        return SRC_DECL_CONST;
      case JSOP_DEFVAR:
        return SRC_DECL_VAR;
      default:
        return SRC_DECL_NONE;
    }
}

static JSBool
EmitDestructuringOps(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
                     JSParseNode *pn)
{
    /*
     * If we're called from a variable declaration, help the decompiler by
     * annotating the first JSOP_DUP that EmitDestructuringOpsHelper emits.
     * If the destructuring initialiser is empty, our helper will emit a
     * JSOP_DUP followed by a JSOP_POP for the decompiler.
     */
    if (js_NewSrcNote2(cx, cg, SRC_DESTRUCT, OpToDeclType(declOp)) < 0)
        return JS_FALSE;

    /*
     * Call our recursive helper to emit the destructuring assignments and
     * related stack manipulations.
     */
    return EmitDestructuringOpsHelper(cx, cg, pn);
}
static JSBool
EmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
                    JSParseNode *lhs, JSParseNode *rhs)
{
    jsuint depth, limit, i, nslots;
    JSParseNode *pn;
    jsint slot;

    depth = limit = (uintN) cg->stackDepth;
    for (pn = rhs->pn_head; pn; pn = pn->pn_next) {
        if (limit == JS_BIT(16)) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), rhs, JSREPORT_ERROR,
                                        JSMSG_ARRAY_INIT_TOO_BIG);
            return JS_FALSE;
        }

        if (pn->pn_type == TOK_COMMA) {
            if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
                return JS_FALSE;
        } else {
            JS_ASSERT(pn->pn_type != TOK_DEFSHARP);
            if (!js_EmitTree(cx, cg, pn))
                return JS_FALSE;
        }
        ++limit;
    }

    if (js_NewSrcNote2(cx, cg, SRC_GROUPASSIGN, OpToDeclType(declOp)) < 0)
        return JS_FALSE;

    i = depth;
    for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) {
        if (i < limit) {
            slot = AdjustBlockSlot(cx, cg, i);
            if (slot < 0)
                return JS_FALSE;
            EMIT_UINT16_IMM_OP(JSOP_GETLOCAL, slot);
        } else {
            if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
                return JS_FALSE;
        }
        if (pn->pn_type == TOK_COMMA && pn->pn_arity == PN_NULLARY) {
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
        } else {
            if (!EmitDestructuringLHS(cx, cg, pn))
                return JS_FALSE;
        }
    }

    nslots = limit - depth;
    EMIT_UINT16_IMM_OP(JSOP_POPN, nslots);
    cg->stackDepth = (uintN) depth;
    return JS_TRUE;
}
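
/*
 * For illustration: |[a, b] = [b, a]| qualifies as a group assignment. The
 * code above pushes b and a into fresh stack slots, then stores the first
 * temporary into a and the second into b, and finally JSOP_POPN drops the
 * temporaries, leaving the stack depth unchanged -- a swap with no
 * temporary array object.
 */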
/*
 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we
 * can emit a group assignment sequence, which results in 0 stack depth delta,
 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop.
 */
static JSBool
MaybeEmitGroupAssignment(JSContext *cx, JSCodeGenerator *cg, JSOp declOp,
                         JSParseNode *pn, JSOp *pop)
{
    JSParseNode *lhs, *rhs;

    JS_ASSERT(pn->pn_type == TOK_ASSIGN);
    JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_POPV);
    lhs = pn->pn_left;
    rhs = pn->pn_right;
    if (lhs->pn_type == TOK_RB && rhs->pn_type == TOK_RB &&
        lhs->pn_count <= rhs->pn_count &&
        (rhs->pn_count == 0 ||
         rhs->pn_head->pn_type != TOK_DEFSHARP)) {
        if (!EmitGroupAssignment(cx, cg, declOp, lhs, rhs))
            return JS_FALSE;
        *pop = JSOP_NOP;
    }
    return JS_TRUE;
}

#endif /* JS_HAS_DESTRUCTURING */
static JSBool
EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
              JSBool inLetHead, ptrdiff_t *headNoteIndex)
{
    JSTreeContext *tc;
    JSBool let, forInVar;
#if JS_HAS_BLOCK_SCOPE
    JSBool forInLet, popScope;
    JSStmtInfo *stmt, *scopeStmt;
#endif
    ptrdiff_t off, noteIndex, tmp;
    JSParseNode *pn2, *pn3;
    JSOp op;
    jsatomid atomIndex;
    uintN oldflags;

    /* Default in case of JS_HAS_BLOCK_SCOPE early return, below. */
    *headNoteIndex = -1;

    /*
     * Let blocks and expressions have a parenthesized head in which the new
     * scope is not yet open. Initializer evaluation uses the parent node's
     * lexical scope. If popScope is true below, then we hide the top lexical
     * block from any calls to BindNameToSlot hiding in pn2->pn_expr so that
     * it won't find any names in the new let block.
     *
     * The same goes for let declarations in the head of any kind of for loop.
     * Unlike a let declaration 'let x = i' within a block, where x is hoisted
     * to the start of the block, a 'for (let x = i...) ...' loop evaluates i
     * in the containing scope, and puts x in the loop body's scope.
     */
    tc = &cg->treeContext;
    let = (pn->pn_op == JSOP_NOP);
    forInVar = (pn->pn_extra & PNX_FORINVAR) != 0;
#if JS_HAS_BLOCK_SCOPE
    forInLet = let && forInVar;
    popScope = (inLetHead || (let && (tc->flags & TCF_IN_FOR_INIT)));
    JS_ASSERT(!popScope || let);
#endif

    off = noteIndex = -1;
    for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
#if JS_HAS_DESTRUCTURING
        if (pn2->pn_type != TOK_NAME) {
            if (pn2->pn_type == TOK_RB || pn2->pn_type == TOK_RC) {
                /*
                 * Emit variable binding ops, but not destructuring ops.
                 * The parser (see Variables, jsparse.c) has ensured that
                 * our caller will be the TOK_FOR/TOK_IN case in js_EmitTree,
                 * and that case will emit the destructuring code only after
                 * emitting an enumerating opcode and a branch that tests
                 * whether the enumeration ended.
                 */
                JS_ASSERT(forInVar);
                JS_ASSERT(pn->pn_count == 1);
                if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn2))
                    return JS_FALSE;
                break;
            }

            /*
             * A destructuring initialiser assignment preceded by var will
             * never occur to the left of 'in' in a for-in loop. As with 'for
             * (var x = i in o)...', this will cause the entire 'var [a, b] =
             * i' to be hoisted out of the loop.
             */
            JS_ASSERT(pn2->pn_type == TOK_ASSIGN);
            JS_ASSERT(!forInVar);
            if (pn->pn_count == 1) {
                /*
                 * If this is the only destructuring assignment in the list,
                 * try to optimize to a group assignment. If we're in a let
                 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
                 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
                 */
                JS_ASSERT(noteIndex < 0 && !pn2->pn_next);
                op = JSOP_POP;
                if (!MaybeEmitGroupAssignment(cx, cg,
                                              inLetHead ? JSOP_POP : PN_OP(pn),
                                              pn2, &op)) {
                    return JS_FALSE;
                }
                if (op == JSOP_NOP) {
                    pn->pn_extra = (pn->pn_extra & ~PNX_POPVAR) | PNX_GROUPINIT;
                    break;
                }
            }

            pn3 = pn2->pn_left;
            if (!EmitDestructuringDecls(cx, cg, PN_OP(pn), pn3))
                return JS_FALSE;

            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;

            /*
             * Veto pn->pn_op if inLetHead to avoid emitting a SRC_DESTRUCT
             * that's redundant with respect to the SRC_DECL/SRC_DECL_LET that
             * we will emit at the bottom of this function.
             */
            if (!EmitDestructuringOps(cx, cg,
                                      inLetHead ? JSOP_POP : PN_OP(pn),
                                      pn3)) {
                return JS_FALSE;
            }
            goto emit_note_pop;
        }
#endif

        JS_ASSERT(pn2->pn_type == TOK_NAME);
        if (!BindNameToSlot(cx, cg, pn2))
            return JS_FALSE;
        JS_ASSERT(pn2->pn_slot >= 0 || !let);

        op = PN_OP(pn2);
        if (op == JSOP_ARGUMENTS) {
            /* JSOP_ARGUMENTS => no initializer */
            JS_ASSERT(!pn2->pn_expr && !let);
            pn3 = NULL;
#ifdef __GNUC__
            atomIndex = 0;            /* quell GCC overwarning */
#endif
        } else {
            if (!MaybeEmitVarDecl(cx, cg, PN_OP(pn), pn2, &atomIndex))
                return JS_FALSE;

            pn3 = pn2->pn_expr;
            if (pn3) {
                JS_ASSERT(!forInVar);
                if (op == JSOP_SETNAME) {
                    JS_ASSERT(!let);
                    EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
                }
                if (pn->pn_op == JSOP_DEFCONST &&
                    !js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom,
                                                  pn3)) {
                    return JS_FALSE;
                }

#if JS_HAS_BLOCK_SCOPE
                /* Evaluate expr in the outer lexical scope if requested. */
                if (popScope) {
                    stmt = tc->topStmt;
                    scopeStmt = tc->topScopeStmt;

                    tc->topStmt = stmt->down;
                    tc->topScopeStmt = scopeStmt->downScope;
                }
#ifdef __GNUC__
                else stmt = scopeStmt = NULL;   /* quell GCC overwarning */
#endif
#endif

                oldflags = cg->treeContext.flags;
                cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
                if (!js_EmitTree(cx, cg, pn3))
                    return JS_FALSE;
                cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;

#if JS_HAS_BLOCK_SCOPE
                if (popScope) {
                    tc->topStmt = stmt;
                    tc->topScopeStmt = scopeStmt;
                }
#endif
            }
        }

        /*
         * The parser rewrites 'for (var x = i in o)' to hoist 'var x = i' --
         * likewise 'for (let x = i in o)' becomes 'i; for (let x in o)' using
         * a TOK_SEQ node to make the two statements appear as one. Therefore
         * if this declaration is part of a for-in loop head, we do not need to
         * emit op or any source note. Our caller, the TOK_FOR/TOK_IN case in
         * js_EmitTree, will annotate appropriately.
         */
        JS_ASSERT(pn3 == pn2->pn_expr);
        if (forInVar) {
            JS_ASSERT(pn->pn_count == 1);
            JS_ASSERT(!pn3);
            break;
        }

        if (pn2 == pn->pn_head &&
            !inLetHead &&
            js_NewSrcNote2(cx, cg, SRC_DECL,
                           (pn->pn_op == JSOP_DEFCONST)
                           ? SRC_DECL_CONST
                           : (pn->pn_op == JSOP_DEFVAR)
                           ? SRC_DECL_VAR
                           : SRC_DECL_LET) < 0) {
            return JS_FALSE;
        }
        if (op == JSOP_ARGUMENTS) {
            if (js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
        } else if (pn2->pn_slot >= 0) {
            EMIT_UINT16_IMM_OP(op, atomIndex);
        } else {
            EMIT_INDEX_OP(op, atomIndex);
        }

#if JS_HAS_DESTRUCTURING
    emit_note_pop:
#endif
        tmp = CG_OFFSET(cg);
        if (noteIndex >= 0) {
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
                return JS_FALSE;
        }
        if (!pn2->pn_next)
            break;
        off = tmp;
        noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
        if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
            return JS_FALSE;
    }

    /* If this is a let head, emit and return a srcnote on the pop. */
    if (inLetHead) {
        *headNoteIndex = js_NewSrcNote(cx, cg, SRC_DECL);
        if (*headNoteIndex < 0)
            return JS_FALSE;
        if (!(pn->pn_extra & PNX_POPVAR))
            return js_Emit1(cx, cg, JSOP_NOP) >= 0;
    }

    return !(pn->pn_extra & PNX_POPVAR) || js_Emit1(cx, cg, JSOP_POP) >= 0;
}
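
/*
 * For illustration: |var x = 1, y = 2;| emits the x initializer, then a
 * SRC_PCDELTA note and JSOP_POP between list members (per the loop tail
 * above), then the y initializer, with the head annotated SRC_DECL so the
 * decompiler can reconstruct a single comma-separated declaration.
 */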
#if defined DEBUG_brendanXXX || defined DEBUG_mrbkap
static JSBool
GettableNoteForNextOp(JSCodeGenerator *cg)
{
    ptrdiff_t offset, target;
    jssrcnote *sn, *end;

    offset = 0;
    target = CG_OFFSET(cg);
    for (sn = CG_NOTES(cg), end = sn + CG_NOTE_COUNT(cg); sn < end;
         sn = SN_NEXT(sn)) {
        if (offset == target && SN_IS_GETTABLE(sn))
            return JS_TRUE;
        offset += SN_DELTA(sn);
    }
    return JS_FALSE;
}
#endif

/* Top-level named functions need a nop for decompilation. */
static JSBool
EmitFunctionDefNop(JSContext *cx, JSCodeGenerator *cg, uintN index)
{
    return js_NewSrcNote2(cx, cg, SRC_FUNCDEF, (ptrdiff_t)index) >= 0 &&
           js_Emit1(cx, cg, JSOP_NOP) >= 0;
}

/* See the SRC_FOR source note offsetBias comments later in this file. */
JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);
JSBool
js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
{
    JSBool ok, useful, wantval;
    JSStmtInfo *stmt, stmtInfo;
    ptrdiff_t top, off, tmp, beq, jmp;
    JSParseNode *pn2, *pn3;
    JSAtom *atom;
    JSAtomListElement *ale;
    jsatomid atomIndex;
    uint32 index;
    ptrdiff_t noteIndex;
    JSSrcNoteType noteType;
    JSOp op;
    JSTokenType type;

    JS_CHECK_RECURSION(cx, return JS_FALSE);

    ok = JS_TRUE;
    pn->pn_offset = top = CG_OFFSET(cg);

    /* Emit notes to tell the current bytecode's source line number. */
    UPDATE_LINE_NUMBER_NOTES(cx, cg, pn);

    switch (pn->pn_type) {
      case TOK_FUNCTION:
      {
        JSFunction *fun;
        void *cg2mark;
        JSCodeGenerator *cg2;
        uintN slot;

#if JS_HAS_XML_SUPPORT
        if (pn->pn_arity == PN_NULLARY) {
            if (js_Emit1(cx, cg, JSOP_GETFUNNS) < 0)
                return JS_FALSE;
            break;
        }
#endif

        fun = (JSFunction *) pn->pn_funpob->object;
        if (fun->u.i.script) {
            /*
             * This second pass is needed to emit JSOP_NOP with a source note
             * for the already-emitted function definition prolog opcode. See
             * comments in the TOK_LC case.
             */
            JS_ASSERT(pn->pn_op == JSOP_NOP);
            JS_ASSERT(cg->treeContext.flags & TCF_IN_FUNCTION);
            JS_ASSERT(pn->pn_index != (uint32) -1);
            if (!EmitFunctionDefNop(cx, cg, pn->pn_index))
                return JS_FALSE;
            break;
        }

        /*
         * Limit static nesting depth to fit in 16 bits. See cg2->staticDepth
         * assignment below.
         */
        if (cg->staticDepth == JS_BITMASK(16)) {
            JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_TOO_DEEP,
                                 js_function_str);
            return JS_FALSE;
        }

        /* Generate code for the function's body. */
        cg2mark = JS_ARENA_MARK(cg->codePool);
        JS_ARENA_ALLOCATE_TYPE(cg2, JSCodeGenerator, cg->codePool);
        if (!cg2) {
            js_ReportOutOfScriptQuota(cx);
            return JS_FALSE;
        }
        js_InitCodeGenerator(cx, cg2, cg->treeContext.parseContext,
                             cg->codePool, cg->notePool,
                             pn->pn_pos.begin.lineno);
        cg2->treeContext.flags = (uint16) (pn->pn_flags | TCF_IN_FUNCTION);
        cg2->treeContext.u.fun = fun;
        cg2->staticDepth = cg->staticDepth + 1;

        /* We metered the max scope depth when parsed the function. */
        JS_SCOPE_DEPTH_METERING(cg2->treeContext.maxScopeDepth = (uintN) -1);
        if (!js_EmitFunctionScript(cx, cg2, pn->pn_body)) {
            pn = NULL;
        } else {
            /*
             * We need an activation object if an inner peeks out, or if such
             * inner-peeking caused one of our inners to become heavyweight.
             */
            if (cg2->treeContext.flags &
                (TCF_FUN_USES_NONLOCALS | TCF_FUN_HEAVYWEIGHT)) {
                cg->treeContext.flags |= TCF_FUN_HEAVYWEIGHT;
            }
        }

        js_FinishCodeGenerator(cx, cg2);
        JS_ARENA_RELEASE(cg->codePool, cg2mark);
        if (!pn)
            return JS_FALSE;

        /* Make the function object a literal in the outer script's pool. */
        index = IndexParsedObject(pn->pn_funpob, &cg->objectList);

        /* Emit a bytecode pointing to the closure object in its immediate. */
        if (pn->pn_op != JSOP_NOP) {
            if ((pn->pn_flags & TCF_GENEXP_LAMBDA) &&
                js_NewSrcNote(cx, cg, SRC_GENEXP) < 0) {
                return JS_FALSE;
            }
            EMIT_INDEX_OP(PN_OP(pn), index);
            break;
        }

        /*
         * For a script we emit the code as we parse. Thus the bytecode for
         * top-level functions should go in the prolog to predefine their
         * names in the variable object before the already-generated main code
         * is executed. This extra work for top-level scripts is not necessary
         * when we emit the code for a function. It is fully parsed prior to
         * invocation of the emitter and calls to js_EmitTree for function
         * definitions can be scheduled before generating the rest of code.
         */
        if (!(cg->treeContext.flags & TCF_IN_FUNCTION)) {
            JS_ASSERT(!cg->treeContext.topStmt);
            CG_SWITCH_TO_PROLOG(cg);
            EMIT_INDEX_OP(JSOP_DEFFUN, index);
            CG_SWITCH_TO_MAIN(cg);

            /* Emit NOP for the decompiler. */
            if (!EmitFunctionDefNop(cx, cg, index))
                return JS_FALSE;
        } else {
            JSLocalKind localKind =
                js_LookupLocal(cx, cg->treeContext.u.fun, fun->atom, &slot);
            JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
            JS_ASSERT(pn->pn_index == (uint32) -1);
            pn->pn_index = index;
            if (!EmitSlotIndexOp(cx, JSOP_DEFLOCALFUN, slot, index, cg))
                return JS_FALSE;
        }
        break;
      }
      case TOK_IF:
        /* Initialize so we can detect else-if chains and avoid recursion. */
        stmtInfo.type = STMT_IF;
        beq = jmp = -1;
        noteIndex = -1;

      if_again:
        /* Emit code for the condition before pushing stmtInfo. */
        if (!js_EmitTree(cx, cg, pn->pn_kid1))
            return JS_FALSE;
        top = CG_OFFSET(cg);
        if (stmtInfo.type == STMT_IF) {
            js_PushStatement(&cg->treeContext, &stmtInfo, STMT_IF, top);
        } else {
            /*
             * We came here from the goto further below that detects else-if
             * chains, so we must mutate stmtInfo back into a STMT_IF record.
             * Also (see below for why) we need a note offset for SRC_IF_ELSE
             * to help the decompiler. Actually, we need two offsets, one for
             * decompiling any else clause and the second for decompiling an
             * else-if chain without bracing, overindenting, or incorrectly
             * scoping let declarations.
             */
            JS_ASSERT(stmtInfo.type == STMT_ELSE);
            stmtInfo.type = STMT_IF;
            stmtInfo.update = top;
            if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
                return JS_FALSE;
            if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 1, top - jmp))
                return JS_FALSE;
        }

        /* Emit an annotated branch-if-false around the then part. */
        pn3 = pn->pn_kid3;
        noteIndex = js_NewSrcNote(cx, cg, pn3 ? SRC_IF_ELSE : SRC_IF);
        if (noteIndex < 0)
            return JS_FALSE;
        beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
        if (beq < 0)
            return JS_FALSE;

        /* Emit code for the then and optional else parts. */
        if (!js_EmitTree(cx, cg, pn->pn_kid2))
            return JS_FALSE;
        if (pn3) {
            /* Modify stmtInfo so we know we're in the else part. */
            stmtInfo.type = STMT_ELSE;

            /*
             * Emit a JSOP_BACKPATCH op to jump from the end of our then part
             * around the else part. The js_PopStatementCG call at the bottom
             * of this switch case will fix up the backpatch chain linked from
             * stmtInfo.breaks.
             */
            jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
            if (jmp < 0)
                return JS_FALSE;

            /* Ensure the branch-if-false comes here, then emit the else. */
            CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
            if (pn3->pn_type == TOK_IF) {
                pn = pn3;
                goto if_again;
            }

            if (!js_EmitTree(cx, cg, pn3))
                return JS_FALSE;

            /*
             * Annotate SRC_IF_ELSE with the offset from branch to jump, for
             * the decompiler's benefit. We can't just "back up" from the pc
             * of the else clause, because we don't know whether an extended
             * jump was required to leap from the end of the then clause over
             * the else clause.
             */
            if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
                return JS_FALSE;
        } else {
            /* No else part, fixup the branch-if-false to come here. */
            CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);
        }
        ok = js_PopStatementCG(cx, cg);
        break;

      case TOK_SWITCH:
        /* Out of line to avoid bloating js_EmitTree's stack frame size. */
        ok = EmitSwitch(cx, cg, pn, &stmtInfo);
        break;
      case TOK_WHILE:
        /*
         * Minimize bytecodes issued for one or more iterations by jumping to
         * the condition below the body and closing the loop if the condition
         * is true with a backward branch. For iteration count i:
         *
         *  i    test at the top                 test at the bottom
         *  =    ===============                 ==================
         *  0    ifeq-pass                       goto; ifne-fail
         *  1    ifeq-fail; goto; ifne-pass      goto; ifne-pass; ifne-fail
         *  2    2*(ifeq-fail; goto); ifeq-pass  goto; 2*ifne-pass; ifne-fail
         *  . . .
         *  N    N*(ifeq-fail; goto); ifeq-pass  goto; N*ifne-pass; ifne-fail
         *
         * SpiderMonkey, pre-mozilla.org, emitted while parsing and so used
         * test at the top. When JSParseNode trees were added during the ES3
         * work (1998-9), the code generation scheme was not optimized, and
         * the decompiler continued to take advantage of the branch and jump
         * that bracketed the body. But given the SRC_WHILE note, it is easy
         * to support the more efficient scheme.
         */
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WHILE_LOOP, top);
        noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
        if (noteIndex < 0)
            return JS_FALSE;
        jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
        if (jmp < 0)
            return JS_FALSE;
        top = CG_OFFSET(cg);
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;
        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
        if (!js_EmitTree(cx, cg, pn->pn_left))
            return JS_FALSE;
        beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
        if (beq < 0)
            return JS_FALSE;
        if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, beq - jmp))
            return JS_FALSE;
        ok = js_PopStatementCG(cx, cg);
        break;
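
        /*
         * A sketch of the while-loop layout emitted above:
         *
         *   goto L2          annotated SRC_WHILE
         *   L1: <body>
         *   L2: <condition>
         *   ifne L1
         *
         * so a false condition costs one branch and each iteration retests
         * at the bottom, matching the cost table in the comment above.
         */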
      case TOK_DO:
        /* Emit an annotated nop so we know to decompile a 'do' keyword. */
        noteIndex = js_NewSrcNote(cx, cg, SRC_WHILE);
        if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
            return JS_FALSE;

        /* Compile the loop body. */
        top = CG_OFFSET(cg);
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_DO_LOOP, top);
        if (!js_EmitTree(cx, cg, pn->pn_left))
            return JS_FALSE;

        /* Set loop and enclosing label update offsets, for continue. */
        stmt = &stmtInfo;
        do {
            stmt->update = CG_OFFSET(cg);
        } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);

        /* Compile the loop condition, now that continues know where to go. */
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;

        /*
         * Since we use JSOP_IFNE for other purposes as well as for do-while
         * loops, we must store 1 + (beq - top) in the SRC_WHILE note offset,
         * and the decompiler must get that delta and decompile recursively.
         */
        beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
        if (beq < 0)
            return JS_FALSE;
        if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, 1 + (beq - top)))
            return JS_FALSE;
        ok = js_PopStatementCG(cx, cg);
        break;
      case TOK_FOR:
        beq = 0;                /* suppress gcc warnings */
        pn2 = pn->pn_left;
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_FOR_LOOP, top);

        if (pn2->pn_type == TOK_IN) {
            /* Set stmtInfo type for later testing. */
            stmtInfo.type = STMT_FOR_IN_LOOP;

            /*
             * If the left part is 'var x', emit code to define x if necessary
             * using a prolog opcode, but do not emit a pop. If the left part
             * is 'var x = i', emit prolog code to define x if necessary; then
             * emit code to evaluate i, assign the result to x, and pop the
             * result off the stack.
             *
             * All the logic to do this is implemented in the outer switch's
             * TOK_VAR case, conditioned on pn_extra flags set by the parser.
             *
             * In the 'for (var x = i in o) ...' case, the js_EmitTree(...pn3)
             * called here will generate the proper note for the assignment
             * op that sets x = i, hoisting the initialized var declaration
             * out of the loop: 'var x = i; for (x in o) ...'.
             *
             * In the 'for (var x in o) ...' case, nothing but the prolog op
             * (if needed) should be generated here, we must emit the note
             * just before the JSOP_FOR* opcode in the switch on pn3->pn_type
             * a bit below, so nothing is hoisted: 'for (var x in o) ...'.
             *
             * A 'for (let x = i in o)' loop must not be hoisted, since in
             * this form the let variable is scoped by the loop body (but not
             * the head). The initializer expression i must be evaluated for
             * any side effects. So we hoist only i in the let case.
             */
            pn3 = pn2->pn_left;
            type = PN_TYPE(pn3);
            cg->treeContext.flags |= TCF_IN_FOR_INIT;
            if (TOKEN_TYPE_IS_DECL(type) && !js_EmitTree(cx, cg, pn3))
                return JS_FALSE;
            cg->treeContext.flags &= ~TCF_IN_FOR_INIT;

            /* Compile the object expression to the right of 'in'. */
            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;

            /*
             * Emit a bytecode to convert top of stack value to the iterator
             * object depending on the loop variant (for-in, for-each-in, or
             * destructuring for-in).
             */
            JS_ASSERT(pn->pn_op == JSOP_ITER);
            if (js_Emit2(cx, cg, JSOP_ITER, (uint8) pn->pn_iflags) < 0)
                return JS_FALSE;

            /* Annotate so the decompiler can find the loop-closing jump. */
            noteIndex = js_NewSrcNote(cx, cg, SRC_FOR_IN);
            if (noteIndex < 0)
                return JS_FALSE;

            /*
             * Jump down to the loop condition to minimize overhead assuming at
             * least one iteration, as the other loop forms do.
             */
            jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
            if (jmp < 0)
                return JS_FALSE;

            top = CG_OFFSET(cg);
            SET_STATEMENT_TOP(&stmtInfo, top);

#ifdef DEBUG
            intN loopDepth = cg->stackDepth;
#endif

            /*
             * Compile a JSOP_FOR* bytecode based on the left hand side.
             *
             * Initialize op to JSOP_SETNAME in case of |for ([a, b] in o)...|
             * or similar, to signify assignment, rather than declaration, to
             * the decompiler. EmitDestructuringOps takes a prolog bytecode
             * parameter and emits the appropriate source note, defaulting to
             * assignment, so JSOP_SETNAME is not critical here; many similar
             * ops could be used -- just not JSOP_NOP (which means 'let').
             */
            op = JSOP_SETNAME;
            switch (type) {
#if JS_HAS_BLOCK_SCOPE
              case TOK_LET:
#endif
              case TOK_VAR:
                JS_ASSERT(pn3->pn_arity == PN_LIST && pn3->pn_count == 1);
                pn3 = pn3->pn_head;
#if JS_HAS_DESTRUCTURING
                if (pn3->pn_type == TOK_ASSIGN) {
                    pn3 = pn3->pn_left;
                    JS_ASSERT(pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC);
                }
                if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
                    op = PN_OP(pn2->pn_left);
                    goto destructuring_for;
                }
#else
                JS_ASSERT(pn3->pn_type == TOK_NAME);
#endif
                /* FALL THROUGH */

              case TOK_NAME:
                /*
                 * Always annotate JSOP_FORLOCAL if given input of the form
                 * 'for (let x in * o)' -- the decompiler must not hoist the
                 * 'let x' out of the loop head, or x will be bound in the
                 * wrong scope. Likewise, but in this case only for the sake
                 * of higher decompilation fidelity only, do not hoist 'var x'
                 * when given 'for (var x in o)'.
                 */
                if ((
#if JS_HAS_BLOCK_SCOPE
                     type == TOK_LET ||
#endif
                     (type == TOK_VAR && !pn3->pn_expr)) &&
                    js_NewSrcNote2(cx, cg, SRC_DECL,
                                   (type == TOK_VAR)
                                   ? SRC_DECL_VAR
                                   : SRC_DECL_LET) < 0) {
                    return JS_FALSE;
                }
                if (pn3->pn_slot >= 0) {
                    op = PN_OP(pn3);
                    switch (op) {
                      case JSOP_GETARG:   /* FALL THROUGH */
                      case JSOP_SETARG:   op = JSOP_FORARG; break;
                      case JSOP_GETGVAR:  /* FALL THROUGH */
                      case JSOP_SETGVAR:  op = JSOP_FORNAME; break;
                      case JSOP_GETLOCAL: /* FALL THROUGH */
                      case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
                      default:            JS_ASSERT(0);
                    }
                } else {
                    pn3->pn_op = JSOP_FORNAME;
                    if (!BindNameToSlot(cx, cg, pn3))
                        return JS_FALSE;
                    op = PN_OP(pn3);
                }
                if (pn3->pn_slot >= 0) {
                    if (pn3->pn_const) {
                        JS_ASSERT(op == JSOP_FORLOCAL);
                        js_ReportCompileErrorNumber(cx, CG_TS(cg), pn3, JSREPORT_ERROR,
                                                    JSMSG_BAD_FOR_LEFTSIDE);
                        return JS_FALSE;
                    }
                    atomIndex = (jsatomid) pn3->pn_slot;
                    EMIT_UINT16_IMM_OP(op, atomIndex);
                } else {
                    if (!EmitAtomOp(cx, pn3, op, cg))
                        return JS_FALSE;
                }
                break;

              case TOK_DOT:
                /*
                 * 'for (o.p in q)' can use JSOP_FORPROP only if evaluating 'o'
                 * has no side effects.
                 */
                useful = JS_FALSE;
                if (!CheckSideEffects(cx, cg, pn3->pn_expr, &useful))
                    return JS_FALSE;
                if (!useful) {
                    if (!EmitPropOp(cx, pn3, JSOP_FORPROP, cg, JS_FALSE))
                        return JS_FALSE;
                    break;
                }
                /* FALL THROUGH */

#if JS_HAS_DESTRUCTURING
              destructuring_for:
#endif
              default:
                if (js_Emit1(cx, cg, JSOP_FORELEM) < 0)
                    return JS_FALSE;
                JS_ASSERT(cg->stackDepth >= 3);

#if JS_HAS_DESTRUCTURING
                if (pn3->pn_type == TOK_RB || pn3->pn_type == TOK_RC) {
                    if (!EmitDestructuringOps(cx, cg, op, pn3))
                        return JS_FALSE;
                    if (js_Emit1(cx, cg, JSOP_POP) < 0)
                        return JS_FALSE;
                } else
#endif
#if JS_HAS_LVALUE_RETURN
                if (pn3->pn_type == TOK_LP) {
                    JS_ASSERT(pn3->pn_op == JSOP_SETCALL);
                    if (!js_EmitTree(cx, cg, pn3))
                        return JS_FALSE;
                    if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
                        return JS_FALSE;
                } else
#endif
#if JS_HAS_XML_SUPPORT
                if (pn3->pn_type == TOK_UNARYOP) {
                    JS_ASSERT(pn3->pn_op == JSOP_BINDXMLNAME);
                    if (!js_EmitTree(cx, cg, pn3))
                        return JS_FALSE;
                    if (js_Emit1(cx, cg, JSOP_ENUMELEM) < 0)
                        return JS_FALSE;
                } else
#endif
                if (!EmitElemOp(cx, pn3, JSOP_ENUMELEM, cg))
                    return JS_FALSE;
                break;
            }

            /* The stack should be balanced around the JSOP_FOR* opcode sequence. */
            JS_ASSERT(cg->stackDepth == loopDepth);

            /* Set the first srcnote offset so we can find the start of the loop body. */
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, CG_OFFSET(cg) - jmp))
                return JS_FALSE;

            /* Emit code for the loop body. */
            if (!js_EmitTree(cx, cg, pn->pn_right))
                return JS_FALSE;

            /* Set loop and enclosing "update" offsets, for continue. */
            stmt = &stmtInfo;
            do {
                stmt->update = CG_OFFSET(cg);
            } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);

            /*
             * Fixup the goto that starts the loop to jump down to JSOP_NEXTITER.
             */
            CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
            if (js_Emit1(cx, cg, JSOP_NEXTITER) < 0)
                return JS_FALSE;
            beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
            if (beq < 0)
                return JS_FALSE;

            /* Set the second srcnote offset so we can find the closing jump. */
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1, beq - jmp))
                return JS_FALSE;
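
            /*
             * A sketch of the for-in layout emitted above:
             *
             *   <object expr>; iter
             *   goto L2
             *   L1: <JSOP_FOR* store>; <body>
             *   L2: nextiter
             *   ifne L1
             *
             * with the SRC_FOR_IN note's two offsets locating the body start
             * and the loop-closing branch.
             */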
        } else {
            /* C-style for (init; cond; update) ... loop. */
            op = JSOP_POP;
            pn3 = pn2->pn_kid1;
            if (!pn3) {
                /* No initializer: emit an annotated nop for the decompiler. */
                op = JSOP_NOP;
            } else {
                cg->treeContext.flags |= TCF_IN_FOR_INIT;
#if JS_HAS_DESTRUCTURING
                if (pn3->pn_type == TOK_ASSIGN &&
                    !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
                    return JS_FALSE;
                }
#endif
                if (op == JSOP_POP) {
                    if (!js_EmitTree(cx, cg, pn3))
                        return JS_FALSE;
                    if (TOKEN_TYPE_IS_DECL(pn3->pn_type)) {
                        /*
                         * Check whether a destructuring-initialized var decl
                         * was optimized to a group assignment. If so, we do
                         * not need to emit a pop below, so switch to a nop,
                         * just for the decompiler.
                         */
                        JS_ASSERT(pn3->pn_arity == PN_LIST);
                        if (pn3->pn_extra & PNX_GROUPINIT)
                            op = JSOP_NOP;
                    }
                }
                cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
            }

            /*
             * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
             * Use tmp to hold the biased srcnote "top" offset, which differs
             * from the top local variable by the length of the JSOP_GOTO{,X}
             * emitted in between tmp and top if this loop has a condition.
             */
            noteIndex = js_NewSrcNote(cx, cg, SRC_FOR);
            if (noteIndex < 0 || js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
            tmp = CG_OFFSET(cg);

            if (pn2->pn_kid2) {
                /* Goto the loop condition, which branches back to iterate. */
                jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
                if (jmp < 0)
                    return JS_FALSE;
            }

            top = CG_OFFSET(cg);
            SET_STATEMENT_TOP(&stmtInfo, top);

            /* Emit code for the loop body. */
            if (!js_EmitTree(cx, cg, pn->pn_right))
                return JS_FALSE;

            /* Set the second note offset so we can find the update part. */
            JS_ASSERT(noteIndex != -1);
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 1,
                                     CG_OFFSET(cg) - tmp)) {
                return JS_FALSE;
            }

            /* Set loop and enclosing "update" offsets, for continue. */
            stmt = &stmtInfo;
            do {
                stmt->update = CG_OFFSET(cg);
            } while ((stmt = stmt->down) != NULL && stmt->type == STMT_LABEL);

            /* Check for update code to do before the condition (if any). */
            pn3 = pn2->pn_kid3;
            if (pn3) {
                op = JSOP_POP;
#if JS_HAS_DESTRUCTURING
                if (pn3->pn_type == TOK_ASSIGN &&
                    !MaybeEmitGroupAssignment(cx, cg, op, pn3, &op)) {
                    return JS_FALSE;
                }
#endif
                if (op == JSOP_POP && !js_EmitTree(cx, cg, pn3))
                    return JS_FALSE;

                /* Always emit the POP or NOP, to help the decompiler. */
                if (js_Emit1(cx, cg, op) < 0)
                    return JS_FALSE;

                /* Restore the absolute line number for source note readers. */
                off = (ptrdiff_t) pn->pn_pos.end.lineno;
                if (CG_CURRENT_LINE(cg) != (uintN) off) {
                    if (js_NewSrcNote2(cx, cg, SRC_SETLINE, off) < 0)
                        return JS_FALSE;
                    CG_CURRENT_LINE(cg) = (uintN) off;
                }
            }

            /* Set the first note offset so we can find the loop condition. */
            if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
                                     CG_OFFSET(cg) - tmp)) {
                return JS_FALSE;
            }

            if (pn2->pn_kid2) {
                /* Fix up the goto from top to target the loop condition. */
                JS_ASSERT(jmp >= 0);
                CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);

                if (!js_EmitTree(cx, cg, pn2->pn_kid2))
                    return JS_FALSE;

                /* The third note offset helps us find the loop-closing jump. */
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 2,
                                         CG_OFFSET(cg) - tmp)) {
                    return JS_FALSE;
                }

                beq = EmitJump(cx, cg, JSOP_IFNE, top - CG_OFFSET(cg));
                if (beq < 0)
                    return JS_FALSE;
            } else {
                /* No loop condition -- emit the loop-closing jump. */
                jmp = EmitJump(cx, cg, JSOP_GOTO, top - CG_OFFSET(cg));
                if (jmp < 0)
                    return JS_FALSE;
            }
        }

        /* Now fixup all breaks and continues (before for/in's JSOP_ENDITER). */
        if (!js_PopStatementCG(cx, cg))
            return JS_FALSE;

        if (pn2->pn_type == TOK_IN) {
            /*
             * JSOP_ENDITER must have a slot to save an exception thrown from
             * the body of for-in loop when closing the iterator object, and
             * fortunately it does: the slot that was set by JSOP_NEXTITER to
             * the return value of iterator.next().
             */
            JS_ASSERT(js_CodeSpec[JSOP_ENDITER].nuses == 2);
            if (!NewTryNote(cx, cg, JSTRY_ITER, cg->stackDepth, top, CG_OFFSET(cg)) ||
                js_Emit1(cx, cg, JSOP_ENDITER) < 0) {
                return JS_FALSE;
            }
        }
        break;
      case TOK_BREAK:
        stmt = cg->treeContext.topStmt;
        atom = pn->pn_atom;
        if (atom) {
            ale = js_IndexAtom(cx, atom, &cg->atomList);
            if (!ale)
                return JS_FALSE;
            while (stmt->type != STMT_LABEL || stmt->u.label != atom)
                stmt = stmt->down;
            noteType = SRC_BREAK2LABEL;
        } else {
            ale = NULL;
            while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
                stmt = stmt->down;
            noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
        }

        if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
            return JS_FALSE;
        break;
      case TOK_CONTINUE:
        stmt = cg->treeContext.topStmt;
        atom = pn->pn_atom;
        if (atom) {
            /* Find the loop statement enclosed by the matching label. */
            JSStmtInfo *loop = NULL;
            ale = js_IndexAtom(cx, atom, &cg->atomList);
            if (!ale)
                return JS_FALSE;
            while (stmt->type != STMT_LABEL || stmt->u.label != atom) {
                if (STMT_IS_LOOP(stmt))
                    loop = stmt;
                stmt = stmt->down;
            }
            stmt = loop;
            noteType = SRC_CONT2LABEL;
        } else {
            ale = NULL;
            while (!STMT_IS_LOOP(stmt))
                stmt = stmt->down;
            noteType = SRC_CONTINUE;
        }

        if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
            return JS_FALSE;
        break;
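
        /*
         * Example: in "outer: for (;;) { for (;;) continue outer; }" the
         * walk above passes the inner loop's record, remembering each loop
         * seen, until it reaches the STMT_LABEL record for "outer"; the
         * loop seen last before the label -- the one the label immediately
         * encloses -- becomes the continue target.
         */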
      case TOK_WITH:
        if (!js_EmitTree(cx, cg, pn->pn_left))
            return JS_FALSE;
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_WITH,
                         CG_OFFSET(cg));
        if (js_Emit1(cx, cg, JSOP_ENTERWITH) < 0)
            return JS_FALSE;
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_LEAVEWITH) < 0)
            return JS_FALSE;
        ok = js_PopStatementCG(cx, cg);
        break;
      case TOK_TRY:
      {
        ptrdiff_t tryStart, tryEnd, catchJump, finallyStart;
        intN depth;
        JSParseNode *lastCatch;

        catchJump = -1;

        /*
         * Push stmtInfo to track jumps-over-catches and gosubs-to-finally
         * for later fixup.
         *
         * When a finally block is 'active' (STMT_FINALLY on the treeContext),
         * non-local jumps (including jumps-over-catches) result in a GOSUB
         * being written into the bytecode stream and fixed-up later (cf.
         * EmitBackPatchOp and BackPatch).
         */
        js_PushStatement(&cg->treeContext, &stmtInfo,
                         pn->pn_kid3 ? STMT_FINALLY : STMT_TRY,
                         CG_OFFSET(cg));

        /*
         * Since an exception can be thrown at any place inside the try block,
         * we need to restore the stack and the scope chain before we transfer
         * the control to the exception handler.
         *
         * For that we store in a try note associated with the catch or
         * finally block the stack depth upon the try entry. The interpreter
         * uses this depth to properly unwind the stack and the scope chain.
         */
        depth = cg->stackDepth;

        /* Mark try location for decompilation, then emit try block. */
        if (js_Emit1(cx, cg, JSOP_TRY) < 0)
            return JS_FALSE;
        tryStart = CG_OFFSET(cg);
        if (!js_EmitTree(cx, cg, pn->pn_kid1))
            return JS_FALSE;
        JS_ASSERT(depth == cg->stackDepth);

        /* GOSUB to finally, if present. */
        if (pn->pn_kid3) {
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                return JS_FALSE;
            jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &GOSUBS(stmtInfo));
            if (jmp < 0)
                return JS_FALSE;
        }

        /* Emit (hidden) jump over catch and/or finally. */
        if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
            return JS_FALSE;
        jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
        if (jmp < 0)
            return JS_FALSE;

        tryEnd = CG_OFFSET(cg);

        /* If this try has a catch block, emit it. */
        pn2 = pn->pn_kid2;
        lastCatch = NULL;
        if (pn2) {
            jsint count = 0;    /* previous catch block's population */

            /*
             * The emitted code for a catch block looks like:
             *
             * [throwing]                          only if 2nd+ catch block
             * [leaveblock]                        only if 2nd+ catch block
             * enterblock                          with SRC_CATCH
             * exception
             * [dup]                               only if catchguard
             * setlocalpop <slot>                  or destructuring code
             * [< catchguard code >]               if there's a catchguard
             * [ifeq <offset to next catch block>]         " "
             * [pop]                               only if catchguard
             * < catch block contents >
             * leaveblock
             * goto <end of catch blocks>          non-local; finally applies
             *
             * If there's no catch block without a catchguard, the last
             * <offset to next catch block> points to rethrow code.  This
             * code will [gosub] to the finally code if appropriate, and is
             * also used for the catch-all trynote for capturing exceptions
             * thrown from catch{} blocks.
             */
            for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
                ptrdiff_t guardJump, catchNote;

                JS_ASSERT(cg->stackDepth == depth);
                guardJump = GUARDJUMP(stmtInfo);
                if (guardJump != -1) {
                    /* Fix up and clean up previous catch block. */
                    CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, guardJump);

                    /*
                     * Account for JSOP_ENTERBLOCK (whose block object count
                     * is saved below) and pushed exception object that we
                     * still have after jumping from the previous guard.
                     */
                    cg->stackDepth = depth + count + 1;

                    /*
                     * Move exception back to cx->exception to prepare for
                     * the next catch. We hide [throwing] from the decompiler
                     * since it compensates for the hidden JSOP_DUP at the
                     * start of the previous guarded catch.
                     */
                    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
                        js_Emit1(cx, cg, JSOP_THROWING) < 0) {
                        return JS_FALSE;
                    }
                    if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                        return JS_FALSE;
                    EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, count);
                    JS_ASSERT(cg->stackDepth == depth);
                }

                /*
                 * Annotate the JSOP_ENTERBLOCK that's about to be generated
                 * by the call to js_EmitTree immediately below.  Save this
                 * source note's index in stmtInfo for use by the TOK_CATCH:
                 * case, where the length of the catch guard is set as the
                 * note's offset.
                 */
                catchNote = js_NewSrcNote2(cx, cg, SRC_CATCH, 0);
                if (catchNote < 0)
                    return JS_FALSE;
                CATCHNOTE(stmtInfo) = catchNote;

                /*
                 * Emit the lexical scope and catch body.  Save the catch's
                 * block object population via count, for use when targeting
                 * guardJump at the next catch (the guard mismatch case).
                 */
                JS_ASSERT(pn3->pn_type == TOK_LEXICALSCOPE);
                count = OBJ_BLOCK_COUNT(cx, pn3->pn_pob->object);
                if (!js_EmitTree(cx, cg, pn3))
                    return JS_FALSE;

                /* gosub <finally>, if required */
                if (pn->pn_kid3) {
                    jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH,
                                          &GOSUBS(stmtInfo));
                    if (jmp < 0)
                        return JS_FALSE;
                    JS_ASSERT(cg->stackDepth == depth);
                }

                /*
                 * Jump over the remaining catch blocks.  This will get fixed
                 * up to jump to after catch/finally.
                 */
                if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
                    return JS_FALSE;
                jmp = EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, &catchJump);
                if (jmp < 0)
                    return JS_FALSE;

                /*
                 * Save a pointer to the last catch node to handle try-finally
                 * and try-catch(guard)-finally special cases.
                 */
                lastCatch = pn3->pn_expr;
            }
        }

        /*
         * Last catch guard jumps to the rethrow code sequence if none of the
         * guards match. Target guardJump at the beginning of the rethrow
         * sequence, just in case a guard expression throws and leaves the
         * stack unbalanced.
         */
        if (lastCatch && lastCatch->pn_kid2) {
            CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, GUARDJUMP(stmtInfo));

            /* Sync the stack to take into account pushed exception. */
            JS_ASSERT(cg->stackDepth == depth);
            cg->stackDepth = depth + 1;

            /*
             * Rethrow the exception, delegating execution of the finally,
             * if any, to the exception handler.
             */
            if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0 ||
                js_Emit1(cx, cg, JSOP_THROW) < 0) {
                return JS_FALSE;
            }
        }

        JS_ASSERT(cg->stackDepth == depth);

        /* Emit finally handler if any. */
        finallyStart = 0;   /* to quell GCC uninitialized warnings */
        if (pn->pn_kid3) {
            /*
             * Fix up the gosubs that might have been emitted before non-local
             * jumps to the finally code.
             */
            if (!BackPatch(cx, cg, GOSUBS(stmtInfo), CG_NEXT(cg), JSOP_GOSUB))
                return JS_FALSE;

            finallyStart = CG_OFFSET(cg);

            /* Indicate that we're emitting a subroutine body. */
            stmtInfo.type = STMT_SUBROUTINE;
            if (!UpdateLineNumberNotes(cx, cg, pn->pn_kid3))
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_FINALLY) < 0 ||
                !js_EmitTree(cx, cg, pn->pn_kid3) ||
                js_Emit1(cx, cg, JSOP_RETSUB) < 0) {
                return JS_FALSE;
            }
            JS_ASSERT(cg->stackDepth == depth);
        }
        if (!js_PopStatementCG(cx, cg))
            return JS_FALSE;

        if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
            js_Emit1(cx, cg, JSOP_NOP) < 0) {
            return JS_FALSE;
        }

        /* Fix up the end-of-try/catch jumps to come here. */
        if (!BackPatch(cx, cg, catchJump, CG_NEXT(cg), JSOP_GOTO))
            return JS_FALSE;

        /*
         * Add the try note last, to let post-order give us the right ordering
         * (first to last for a given nesting level, inner to outer by level).
         */
        if (pn->pn_kid2 &&
            !NewTryNote(cx, cg, JSTRY_CATCH, depth, tryStart, tryEnd)) {
            return JS_FALSE;
        }

        /*
         * If we've got a finally, mark try+catch region with additional
         * trynote to catch exceptions (re)thrown from a catch block or
         * for the try{}finally{} case.
         */
        if (pn->pn_kid3 &&
            !NewTryNote(cx, cg, JSTRY_FINALLY, depth, tryStart,
                        finallyStart)) {
            return JS_FALSE;
        }
        break;
      }
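
        /*
         * Sketch of the emitted code for "try { A } finally { B }" (offsets
         * illustrative only):
         *
         *    try
         *    <A>
         *    gosub F           hidden
         *    goto E            hidden; jump over the finally
         *  F: finally
         *    <B>
         *    retsub
         *  E: nop              SRC_ENDBRACE
         *
         * Both the gosub and the goto start life as JSOP_BACKPATCH ops
         * threaded through GOSUBS/catchJump until BackPatch rewrites them.
         */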
      case TOK_CATCH:
      {
        ptrdiff_t catchStart, guardJump;
        JSObject *blockObj;

        /*
         * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
         * and save the block object atom.
         */
        stmt = cg->treeContext.topStmt;
        JS_ASSERT(stmt->type == STMT_BLOCK && (stmt->flags & SIF_SCOPE));
        stmt->type = STMT_CATCH;
        catchStart = stmt->update;
        blockObj = stmt->u.blockObj;

        /* Go up one statement info record to the TRY or FINALLY record. */
        stmt = stmt->down;
        JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);

        /* Pick up the pending exception and bind it to the catch variable. */
        if (js_Emit1(cx, cg, JSOP_EXCEPTION) < 0)
            return JS_FALSE;

        /*
         * Dup the exception object if there is a guard for rethrowing to use
         * it later when rethrowing or in other catches.
         */
        if (pn->pn_kid2 && js_Emit1(cx, cg, JSOP_DUP) < 0)
            return JS_FALSE;

        pn2 = pn->pn_kid1;
        switch (pn2->pn_type) {
#if JS_HAS_DESTRUCTURING
          case TOK_RB:
          case TOK_RC:
            if (!EmitDestructuringOps(cx, cg, JSOP_NOP, pn2))
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
            break;
#endif

          case TOK_NAME:
            /* Inline BindNameToSlot for pn2. */
            JS_ASSERT(pn2->pn_slot == -1);
            pn2->pn_slot = AdjustBlockSlot(cx, cg,
                                           OBJ_BLOCK_DEPTH(cx, blockObj));
            if (pn2->pn_slot < 0)
                return JS_FALSE;
            EMIT_UINT16_IMM_OP(JSOP_SETLOCALPOP, pn2->pn_slot);
            break;

          default:
            JS_ASSERT(0);
        }

        /* Emit the guard expression, if there is one. */
        if (pn->pn_kid2) {
            if (!js_EmitTree(cx, cg, pn->pn_kid2))
                return JS_FALSE;
            if (!js_SetSrcNoteOffset(cx, cg, CATCHNOTE(*stmt), 0,
                                     CG_OFFSET(cg) - catchStart)) {
                return JS_FALSE;
            }
            /* ifeq <next block> */
            guardJump = EmitJump(cx, cg, JSOP_IFEQ, 0);
            if (guardJump < 0)
                return JS_FALSE;
            GUARDJUMP(*stmt) = guardJump;

            /* Pop duplicated exception object as we no longer need it. */
            if (js_Emit1(cx, cg, JSOP_POP) < 0)
                return JS_FALSE;
        }

        /* Emit the catch body. */
        if (!js_EmitTree(cx, cg, pn->pn_kid3))
            return JS_FALSE;

        /*
         * Annotate the JSOP_LEAVEBLOCK that will be emitted as we unwind via
         * our TOK_LEXICALSCOPE parent, so the decompiler knows to pop.
         */
        off = cg->stackDepth;
        if (js_NewSrcNote2(cx, cg, SRC_CATCH, off) < 0)
            return JS_FALSE;
        break;
      }
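
      /*
       * A guarded catch uses the SpiderMonkey extension syntax, e.g.
       * "try { A } catch (e if e instanceof TypeError) { B }": the guard
       * expression "e instanceof TypeError" is what pn_kid2 holds, the
       * dup/ifeq/pop trio above implements the match test, and a failed
       * guard falls through to the next catch block or the rethrow code.
       */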
      case TOK_VAR:
        if (!EmitVariables(cx, cg, pn, JS_FALSE, &noteIndex))
            return JS_FALSE;
        break;
      case TOK_RETURN:
        /* Push a return value */
        pn2 = pn->pn_kid;
        if (pn2) {
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
        } else {
            if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
                return JS_FALSE;
        }

        /*
         * EmitNonLocalJumpFixup may add fixup bytecode to close open try
         * blocks having finally clauses and to exit intermingled let blocks.
         * We can't simply transfer control flow to our caller in that case,
         * because we must gosub to those finally clauses from inner to outer,
         * with the correct stack pointer (i.e., after popping any with,
         * for/in, etc., slots nested inside the finally's try).
         *
         * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
         * extra JSOP_RETRVAL after the fixups.
         */
        top = CG_OFFSET(cg);
        if (js_Emit1(cx, cg, JSOP_RETURN) < 0)
            return JS_FALSE;
        if (!EmitNonLocalJumpFixup(cx, cg, NULL))
            return JS_FALSE;
        if (top + JSOP_RETURN_LENGTH != CG_OFFSET(cg)) {
            CG_BASE(cg)[top] = JSOP_SETRVAL;
            if (js_Emit1(cx, cg, JSOP_RETRVAL) < 0)
                return JS_FALSE;
        }
        break;
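
        /*
         * For example, "function f() { try { return 1; } finally { g(); } }"
         * cannot simply emit return: the finally must run first.  The fixup
         * rewrites the return bytecode to setrval, lets the gosub emitted by
         * EmitNonLocalJumpFixup run the finally, and appends retrval to
         * deliver the saved return value (a sketch of the mechanism above).
         */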
#if JS_HAS_GENERATORS
      case TOK_YIELD:
        if (!(cg->treeContext.flags & TCF_IN_FUNCTION)) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
                                        JSMSG_BAD_RETURN_OR_YIELD,
                                        js_yield_str);
            return JS_FALSE;
        }
        if (pn->pn_kid) {
            if (!js_EmitTree(cx, cg, pn->pn_kid))
                return JS_FALSE;
        } else {
            if (js_Emit1(cx, cg, JSOP_PUSH) < 0)
                return JS_FALSE;
        }
        if (pn->pn_hidden && js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0)
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_YIELD) < 0)
            return JS_FALSE;
        break;
#endif /* JS_HAS_GENERATORS */
      case TOK_LC:
#if JS_HAS_XML_SUPPORT
        if (pn->pn_arity == PN_UNARY) {
            if (!js_EmitTree(cx, cg, pn->pn_kid))
                return JS_FALSE;
            if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
                return JS_FALSE;
            break;
        }
#endif

        JS_ASSERT(pn->pn_arity == PN_LIST);

        noteIndex = -1;
        tmp = CG_OFFSET(cg);
        if (pn->pn_extra & PNX_NEEDBRACES) {
            noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
            if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_NOP) < 0)
                return JS_FALSE;
        }

        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_BLOCK, top);
        if (pn->pn_extra & PNX_FUNCDEFS) {
            /*
             * This block contains top-level function definitions. To ensure
             * that we emit the bytecode defining them prior to the rest of
             * the code in the block, we use a separate pass over functions.
             * During the main pass later the emitter will add JSOP_NOP with
             * source notes for the function to preserve each original
             * function's position when decompiling.
             *
             * Currently this is used only for functions, as compile-as-we-go
             * mode for scripts does not allow separate emitter passes.
             */
            JS_ASSERT(cg->treeContext.flags & TCF_IN_FUNCTION);
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
                if (pn2->pn_type == TOK_FUNCTION) {
                    if (pn2->pn_op == JSOP_NOP) {
                        if (!js_EmitTree(cx, cg, pn2))
                            return JS_FALSE;
                    } else {
                        /*
                         * JSOP_DEFFUN in a top-level block with function
                         * definitions appears, for example, when "if (true)"
                         * is optimized away from "if (true) function x() {}".
                         */
                        JS_ASSERT(pn2->pn_op == JSOP_DEFFUN);
                    }
                }
            }
        }
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
        }

        if (noteIndex >= 0 &&
            !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
                                 CG_OFFSET(cg) - tmp)) {
            return JS_FALSE;
        }

        ok = js_PopStatementCG(cx, cg);
        break;
      case TOK_SEQ:
        JS_ASSERT(pn->pn_arity == PN_LIST);
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_SEQ, top);
        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
        }
        ok = js_PopStatementCG(cx, cg);
        break;
      case TOK_SEMI:
        pn2 = pn->pn_kid;
        if (pn2) {
            /*
             * Top-level or called-from-a-native JS_Execute/EvaluateScript,
             * debugger, and eval frames may need the value of the ultimate
             * expression statement as the script's result, despite the fact
             * that it appears useless to the compiler.
             *
             * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
             * calling JS_Compile* to suppress JSOP_POPV.
             */
            useful = wantval =
                !(cg->treeContext.flags &
                  (TCF_IN_FUNCTION | TCF_NO_SCRIPT_RVAL));
            if (!useful) {
                if (!CheckSideEffects(cx, cg, pn2, &useful))
                    return JS_FALSE;
            }

            /*
             * Don't eliminate apparently useless expressions if they are
             * labeled expression statements.  The tc->topStmt->update test
             * catches the case where we are nesting in js_EmitTree for a
             * labeled compound statement.
             */
            if (!useful &&
                (!cg->treeContext.topStmt ||
                 cg->treeContext.topStmt->type != STMT_LABEL ||
                 cg->treeContext.topStmt->update < CG_OFFSET(cg))) {
                CG_CURRENT_LINE(cg) = pn2->pn_pos.begin.lineno;
                if (!js_ReportCompileErrorNumber(cx, CG_TS(cg), pn2,
                                                 JSREPORT_WARNING |
                                                 JSREPORT_STRICT,
                                                 JSMSG_USELESS_EXPR)) {
                    return JS_FALSE;
                }
            } else {
                op = wantval ? JSOP_POPV : JSOP_POP;
#if JS_HAS_DESTRUCTURING
                if (!wantval &&
                    pn2->pn_type == TOK_ASSIGN &&
                    !MaybeEmitGroupAssignment(cx, cg, op, pn2, &op)) {
                    return JS_FALSE;
                }
#endif
                if (op != JSOP_NOP) {
                    if (!js_EmitTree(cx, cg, pn2))
                        return JS_FALSE;
                    if (js_Emit1(cx, cg, op) < 0)
                        return JS_FALSE;
                }
            }
        }
        break;
      case TOK_COLON:
        /* Emit an annotated nop so we know to decompile a label. */
        atom = pn->pn_atom;
        ale = js_IndexAtom(cx, atom, &cg->atomList);
        if (!ale)
            return JS_FALSE;
        pn2 = pn->pn_expr;
        noteType = (pn2->pn_type == TOK_LC ||
                    (pn2->pn_type == TOK_LEXICALSCOPE &&
                     pn2->pn_expr->pn_type == TOK_LC))
                   ? SRC_LABELBRACE
                   : SRC_LABEL;
        noteIndex = js_NewSrcNote2(cx, cg, noteType,
                                   (ptrdiff_t) ALE_INDEX(ale));
        if (noteIndex < 0 ||
            js_Emit1(cx, cg, JSOP_NOP) < 0) {
            return JS_FALSE;
        }

        /* Emit code for the labeled statement. */
        js_PushStatement(&cg->treeContext, &stmtInfo, STMT_LABEL,
                         CG_OFFSET(cg));
        stmtInfo.u.label = atom;
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
        if (!js_PopStatementCG(cx, cg))
            return JS_FALSE;

        /* If the statement was compound, emit a note for the end brace. */
        if (noteType == SRC_LABELBRACE) {
            if (js_NewSrcNote(cx, cg, SRC_ENDBRACE) < 0 ||
                js_Emit1(cx, cg, JSOP_NOP) < 0) {
                return JS_FALSE;
            }
        }
        break;
      case TOK_COMMA:
        /*
         * Emit SRC_PCDELTA notes on each JSOP_POP between comma operands.
         * These notes help the decompiler bracket the bytecodes generated
         * from each sub-expression that follows a comma.
         */
        off = noteIndex = -1;
        for (pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            tmp = CG_OFFSET(cg);
            if (noteIndex >= 0) {
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
                    return JS_FALSE;
            }
            if (!pn2->pn_next)
                break;
            off = tmp;
            noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
            if (noteIndex < 0 ||
                js_Emit1(cx, cg, JSOP_POP) < 0) {
                return JS_FALSE;
            }
        }
        break;
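
      /*
       * E.g. for "a, b, c" the loop above produces <a> pop <b> pop <c>, a
       * SRC_PCDELTA note on each pop recording how far the next operand's
       * bytecode extends, so the decompiler can split the expression back
       * at the commas (offsets here are illustrative).
       */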
      case TOK_ASSIGN:
        /*
         * Check left operand type and generate specialized code for it.
         * Specialize to avoid ECMA "reference type" values on the operand
         * stack, which impose pervasive runtime "GetValue" costs.
         */
        pn2 = pn->pn_left;
        JS_ASSERT(pn2->pn_type != TOK_RP);
        atomIndex = (jsatomid) -1; /* quell GCC overwarning */
        switch (pn2->pn_type) {
          case TOK_NAME:
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            if (pn2->pn_slot >= 0) {
                atomIndex = (jsatomid) pn2->pn_slot;
            } else {
                ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
                if (!ale)
                    return JS_FALSE;
                atomIndex = ALE_INDEX(ale);
                EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
            }
            break;
          case TOK_DOT:
            if (!js_EmitTree(cx, cg, pn2->pn_expr))
                return JS_FALSE;
            ale = js_IndexAtom(cx, pn2->pn_atom, &cg->atomList);
            if (!ale)
                return JS_FALSE;
            atomIndex = ALE_INDEX(ale);
            break;
          case TOK_LB:
            JS_ASSERT(pn2->pn_arity == PN_BINARY);
            if (!js_EmitTree(cx, cg, pn2->pn_left))
                return JS_FALSE;
            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;
            break;
#if JS_HAS_DESTRUCTURING
          case TOK_RB:
          case TOK_RC:
            break;
#endif
#if JS_HAS_LVALUE_RETURN
          case TOK_LP:
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            break;
#endif
#if JS_HAS_XML_SUPPORT
          case TOK_UNARYOP:
            JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
            if (!js_EmitTree(cx, cg, pn2->pn_kid))
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
                return JS_FALSE;
            break;
#endif
          default:
            JS_ASSERT(0);
        }

        op = PN_OP(pn);
#if JS_HAS_GETTER_SETTER
        if (op == JSOP_GETTER || op == JSOP_SETTER) {
            if (pn2->pn_type == TOK_NAME && PN_OP(pn2) != JSOP_SETNAME) {
                /*
                 * x getter = y where x is a local or let variable is not
                 * supported.
                 */
                js_ReportCompileErrorNumber(cx,
                                            TS(cg->treeContext.parseContext),
                                            pn2, JSREPORT_ERROR,
                                            JSMSG_BAD_GETTER_OR_SETTER,
                                            (op == JSOP_GETTER)
                                            ? js_getter_str
                                            : js_setter_str);
                return JS_FALSE;
            }

            /* We'll emit these prefix bytecodes after emitting the r.h.s. */
        }
#endif
        /* If += or similar, dup the left operand and get its value. */
        if (op != JSOP_NOP) {
            switch (pn2->pn_type) {
              case TOK_NAME:
                if (pn2->pn_op != JSOP_SETNAME) {
                    EMIT_UINT16_IMM_OP((pn2->pn_op == JSOP_SETGVAR)
                                       ? JSOP_GETGVAR
                                       : (pn2->pn_op == JSOP_SETARG)
                                       ? JSOP_GETARG
                                       : JSOP_GETLOCAL,
                                       atomIndex);
                    break;
                }
                if (js_Emit1(cx, cg, JSOP_DUP) < 0)
                    return JS_FALSE;
                EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
                break;
              case TOK_DOT:
                if (js_Emit1(cx, cg, JSOP_DUP) < 0)
                    return JS_FALSE;
                if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
                    if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
                        return JS_FALSE;
                } else {
                    EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
                }
                break;
              case TOK_LB:
#if JS_HAS_LVALUE_RETURN
              case TOK_LP:
#endif
#if JS_HAS_XML_SUPPORT
              case TOK_UNARYOP:
#endif
                if (js_Emit1(cx, cg, JSOP_DUP2) < 0)
                    return JS_FALSE;
                if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
                    return JS_FALSE;
                break;
              default:;
            }
        }

        /* Now emit the right operand (it may affect the namespace). */
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;

        /* If += etc., emit the binary operator with a decompiler note. */
        if (op != JSOP_NOP) {
            /*
             * Take care to avoid SRC_ASSIGNOP if the left-hand side is a
             * const declared in a function (i.e., with non-negative pn_slot
             * and when pn_const is true), as in this case (just a bit further
             * below) we will avoid emitting the assignment op.
             */
            if (pn2->pn_type != TOK_NAME ||
                pn2->pn_slot < 0 ||
                !pn2->pn_const) {
                if (js_NewSrcNote(cx, cg, SRC_ASSIGNOP) < 0)
                    return JS_FALSE;
            }
            if (js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
        }

        /* Left parts such as a.b.c and a[b].c need a decompiler note. */
        if (pn2->pn_type != TOK_NAME &&
#if JS_HAS_DESTRUCTURING
            pn2->pn_type != TOK_RB &&
            pn2->pn_type != TOK_RC &&
#endif
            js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0) {
            return JS_FALSE;
        }

        /* Finally, emit the specialized assignment bytecode. */
        switch (pn2->pn_type) {
          case TOK_NAME:
            if (pn2->pn_slot >= 0) {
                if (!pn2->pn_const)
                    EMIT_UINT16_IMM_OP(PN_OP(pn2), atomIndex);
                break;
            }
            /* FALL THROUGH */
          case TOK_DOT:
            EMIT_INDEX_OP(PN_OP(pn2), atomIndex);
            break;
          case TOK_LB:
#if JS_HAS_LVALUE_RETURN
          case TOK_LP:
#endif
            if (js_Emit1(cx, cg, JSOP_SETELEM) < 0)
                return JS_FALSE;
            break;
#if JS_HAS_DESTRUCTURING
          case TOK_RB:
          case TOK_RC:
            if (!EmitDestructuringOps(cx, cg, JSOP_SETNAME, pn2))
                return JS_FALSE;
            break;
#endif
#if JS_HAS_XML_SUPPORT
          case TOK_UNARYOP:
            if (js_Emit1(cx, cg, JSOP_SETXMLNAME) < 0)
                return JS_FALSE;
            break;
#endif
          default:
            JS_ASSERT(0);
        }
        break;
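
      /*
       * E.g. "a.b += c" emits, in sketch form (operand details elided):
       *
       *    <a>               left object
       *    dup               keep the base for the final setprop
       *    getprop b         current value of a.b
       *    <c>
       *    add               annotated with SRC_ASSIGNOP
       *    setprop b         the specialized assignment bytecode
       *
       * while plain "a.b = c" skips the dup/getprop/add prelude entirely.
       */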
      case TOK_HOOK:
        /* Emit the condition, then branch if false to the else part. */
        if (!js_EmitTree(cx, cg, pn->pn_kid1))
            return JS_FALSE;
        noteIndex = js_NewSrcNote(cx, cg, SRC_COND);
        if (noteIndex < 0)
            return JS_FALSE;
        beq = EmitJump(cx, cg, JSOP_IFEQ, 0);
        if (beq < 0 || !js_EmitTree(cx, cg, pn->pn_kid2))
            return JS_FALSE;

        /* Jump around else, fixup the branch, emit else, fixup jump. */
        jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
        if (jmp < 0)
            return JS_FALSE;
        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, beq);

        /*
         * Because each branch pushes a single value, but our stack budgeting
         * analysis ignores branches, we now have to adjust cg->stackDepth to
         * ignore the value pushed by the first branch.  Execution will follow
         * only one path, so we must decrement cg->stackDepth.
         *
         * Failing to do this will foil code, such as the try/catch/finally
         * exception handling code generator, that samples cg->stackDepth for
         * use at runtime (JSOP_SETSP), or in let expression and block code
         * generation, which must use the stack depth to compute local stack
         * indexes correctly.
         */
        JS_ASSERT(cg->stackDepth > 0);
        cg->stackDepth--;
        if (!js_EmitTree(cx, cg, pn->pn_kid3))
            return JS_FALSE;
        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
        if (!js_SetSrcNoteOffset(cx, cg, noteIndex, 0, jmp - beq))
            return JS_FALSE;
        break;
      case TOK_OR:
      case TOK_AND:
        /*
         * JSOP_OR converts the operand on the stack to boolean, and if true,
         * leaves the original operand value on the stack and jumps; otherwise
         * it pops and falls into the next bytecode, which evaluates the right
         * operand.  The jump goes around the right operand evaluation.
         *
         * JSOP_AND converts the operand on the stack to boolean, and if false,
         * leaves the original operand value on the stack and jumps; otherwise
         * it pops and falls into the right operand's bytecode.
         */
        if (pn->pn_arity == PN_BINARY) {
            if (!js_EmitTree(cx, cg, pn->pn_left))
                return JS_FALSE;
            top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
            if (top < 0)
                return JS_FALSE;
            if (!js_EmitTree(cx, cg, pn->pn_right))
                return JS_FALSE;
            off = CG_OFFSET(cg);
            pc = CG_CODE(cg, top);
            CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
            *pc = pn->pn_op;
        } else {
            JS_ASSERT(pn->pn_arity == PN_LIST);
            JS_ASSERT(pn->pn_head->pn_next->pn_next);

            /* Left-associative operator chain: avoid too much recursion. */
            pn2 = pn->pn_head;
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            top = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
            if (top < 0)
                return JS_FALSE;

            /* Emit nodes between the head and the tail. */
            jmp = top;
            while ((pn2 = pn2->pn_next)->pn_next) {
                if (!js_EmitTree(cx, cg, pn2))
                    return JS_FALSE;
                off = EmitJump(cx, cg, JSOP_BACKPATCH_POP, 0);
                if (off < 0)
                    return JS_FALSE;
                if (!SetBackPatchDelta(cx, cg, CG_CODE(cg, jmp), off - jmp))
                    return JS_FALSE;
                jmp = off;
            }
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;

            pn2 = pn->pn_head;
            off = CG_OFFSET(cg);
            do {
                pc = CG_CODE(cg, top);
                tmp = GetJumpOffset(cg, pc);
                CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, off - top);
                *pc = pn->pn_op;
                top += tmp;
            } while ((pn2 = pn2->pn_next)->pn_next);
        }
        break;
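
        /*
         * For "a || b || c" the chain above first emits each operand
         * followed by a backpatch op, threading the not-yet-known jump
         * targets through the opcodes' offset fields; once the tail is
         * emitted, the do/while walks that thread, fixes each offset to
         * point past the whole chain, and morphs the backpatch ops into
         * JSOP_OR (a sketch; EmitBackPatchOp and BackPatch use the same
         * trick for gosubs).
         */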
      case TOK_BITOR:
      case TOK_BITXOR:
      case TOK_BITAND:
      case TOK_EQOP:
      case TOK_RELOP:
      case TOK_IN:
      case TOK_INSTANCEOF:
      case TOK_SHOP:
      case TOK_PLUS:
      case TOK_MINUS:
      case TOK_STAR:
      case TOK_DIVOP:
        if (pn->pn_arity == PN_LIST) {
            /* Left-associative operator chain: avoid too much recursion. */
            pn2 = pn->pn_head;
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            op = PN_OP(pn);
            while ((pn2 = pn2->pn_next) != NULL) {
                if (!js_EmitTree(cx, cg, pn2))
                    return JS_FALSE;
                if (js_Emit1(cx, cg, op) < 0)
                    return JS_FALSE;
            }
        } else {
#if JS_HAS_XML_SUPPORT
          case TOK_DBLCOLON:
            if (pn->pn_arity == PN_NAME) {
                if (!js_EmitTree(cx, cg, pn->pn_expr))
                    return JS_FALSE;
                if (!EmitAtomOp(cx, pn, PN_OP(pn), cg))
                    return JS_FALSE;
                break;
            }

            /*
             * Binary :: has a right operand that brackets arbitrary code,
             * possibly including a let (a = b) ... expression.  We must clear
             * TCF_IN_FOR_INIT to avoid mis-compiling such beasts.
             */
            oldflags = cg->treeContext.flags;
            cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
#endif

            /* Binary operators that evaluate both operands unconditionally. */
            if (!js_EmitTree(cx, cg, pn->pn_left))
                return JS_FALSE;
            if (!js_EmitTree(cx, cg, pn->pn_right))
                return JS_FALSE;
#if JS_HAS_XML_SUPPORT
            cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
#endif
            if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
                return JS_FALSE;
        }
        break;
#if JS_HAS_XML_SUPPORT
      case TOK_AT:
      case TOK_DEFAULT:
        JS_ASSERT(pn->pn_arity == PN_UNARY);
        /* FALL THROUGH */
#endif
      case TOK_UNARYOP:
        /* Unary op, including unary +/-. */
        op = PN_OP(pn);
#if JS_HAS_XML_SUPPORT
        if (op == JSOP_XMLNAME) {
            if (!EmitXMLName(cx, pn, op, cg))
                return JS_FALSE;
            break;
        }
#endif
        pn2 = pn->pn_kid;
        if (op == JSOP_TYPEOF) {
            for (pn3 = pn2; pn3->pn_type == TOK_RP; pn3 = pn3->pn_kid)
                continue;
            if (pn3->pn_type != TOK_NAME)
                op = JSOP_TYPEOFEXPR;
        }
        oldflags = cg->treeContext.flags;
        cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
        cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
        if (js_Emit1(cx, cg, op) < 0)
            return JS_FALSE;
        break;
      case TOK_INC:
      case TOK_DEC:
        /* Emit lvalue-specialized code for ++/-- operators. */
        pn2 = pn->pn_kid;
        JS_ASSERT(pn2->pn_type != TOK_RP);
        op = PN_OP(pn);
        switch (pn2->pn_type) {
          default:
            JS_ASSERT(pn2->pn_type == TOK_NAME);
            pn2->pn_op = op;
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            op = PN_OP(pn2);
            if (pn2->pn_slot >= 0) {
                if (pn2->pn_const) {
                    /* Incrementing a declared const: just get its value. */
                    op = (JOF_OPTYPE(op) == JOF_ATOM)
                         ? JSOP_GETGVAR
                         : JSOP_GETLOCAL;
                }
                atomIndex = (jsatomid) pn2->pn_slot;
                EMIT_UINT16_IMM_OP(op, atomIndex);
            } else {
                if (!EmitAtomOp(cx, pn2, op, cg))
                    return JS_FALSE;
            }
            break;
          case TOK_DOT:
            if (!EmitPropOp(cx, pn2, op, cg, JS_FALSE))
                return JS_FALSE;
            break;
          case TOK_LB:
            if (!EmitElemOp(cx, pn2, op, cg))
                return JS_FALSE;
            break;
#if JS_HAS_LVALUE_RETURN
          case TOK_LP:
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                               CG_OFFSET(cg) - pn2->pn_offset) < 0) {
                return JS_FALSE;
            }
            if (js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
            break;
#endif
#if JS_HAS_XML_SUPPORT
          case TOK_UNARYOP:
            JS_ASSERT(pn2->pn_op == JSOP_SETXMLNAME);
            if (!js_EmitTree(cx, cg, pn2->pn_kid))
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_BINDXMLNAME) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, op) < 0)
                return JS_FALSE;
            break;
#endif
        }
        break;
      case TOK_DELETE:
        /*
         * Under ECMA 3, deleting a non-reference returns true -- but alas we
         * must evaluate the operand if it appears it might have side effects.
         */
        pn2 = pn->pn_kid;
        switch (pn2->pn_type) {
          case TOK_NAME:
            pn2->pn_op = JSOP_DELNAME;
            if (!BindNameToSlot(cx, cg, pn2))
                return JS_FALSE;
            op = PN_OP(pn2);
            if (op == JSOP_FALSE) {
                if (js_Emit1(cx, cg, op) < 0)
                    return JS_FALSE;
            } else {
                if (!EmitAtomOp(cx, pn2, op, cg))
                    return JS_FALSE;
            }
            break;
          case TOK_DOT:
            if (!EmitPropOp(cx, pn2, JSOP_DELPROP, cg, JS_FALSE))
                return JS_FALSE;
            break;
#if JS_HAS_XML_SUPPORT
          case TOK_DBLDOT:
            if (!EmitElemOp(cx, pn2, JSOP_DELDESC, cg))
                return JS_FALSE;
            break;
#endif
#if JS_HAS_LVALUE_RETURN
          case TOK_LP:
            top = CG_OFFSET(cg);
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_DELELEM) < 0)
                return JS_FALSE;
            break;
#endif
          case TOK_LB:
            if (!EmitElemOp(cx, pn2, JSOP_DELELEM, cg))
                return JS_FALSE;
            break;
          default:
            /*
             * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
             * to foo(), true (a comma expression, requiring SRC_PCDELTA).
             */
            useful = JS_FALSE;
            if (!CheckSideEffects(cx, cg, pn2, &useful))
                return JS_FALSE;
            if (!useful) {
                off = noteIndex = -1;
            } else {
                if (!js_EmitTree(cx, cg, pn2))
                    return JS_FALSE;
                off = CG_OFFSET(cg);
                noteIndex = js_NewSrcNote2(cx, cg, SRC_PCDELTA, 0);
                if (noteIndex < 0 || js_Emit1(cx, cg, JSOP_POP) < 0)
                    return JS_FALSE;
            }
            if (js_Emit1(cx, cg, JSOP_TRUE) < 0)
                return JS_FALSE;
            if (noteIndex >= 0) {
                tmp = CG_OFFSET(cg);
                if (!js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0, tmp-off))
                    return JS_FALSE;
            }
        }
        break;
#if JS_HAS_XML_SUPPORT
      case TOK_FILTER:
        if (!js_EmitTree(cx, cg, pn->pn_left))
            return JS_FALSE;
        jmp = js_Emit3(cx, cg, JSOP_FILTER, 0, 0);
        if (jmp < 0)
            return JS_FALSE;
        top = CG_OFFSET(cg);
        if (!js_EmitTree(cx, cg, pn->pn_right))
            return JS_FALSE;
        CHECK_AND_SET_JUMP_OFFSET_AT(cx, cg, jmp);
        if (EmitJump(cx, cg, JSOP_ENDFILTER, top - CG_OFFSET(cg)) < 0)
            return JS_FALSE;
        break;
#endif
      case TOK_DOT:
        /*
         * Pop a stack operand, convert it to object, get a property named by
         * this bytecode's immediate-indexed atom operand, and push its value
         * (not a reference to it).
         */
        ok = EmitPropOp(cx, pn, PN_OP(pn), cg, JS_FALSE);
        break;
      case TOK_LB:
#if JS_HAS_XML_SUPPORT
      case TOK_DBLDOT:
#endif
        /*
         * Pop two operands, convert the left one to object and the right one
         * to property name (atom or tagged int), get the named property, and
         * push its value.  Set the "obj" register to the result of ToObject
         * on the left operand.
         */
        ok = EmitElemOp(cx, pn, PN_OP(pn), cg);
        break;
      case TOK_NEW:
      case TOK_LP:
        /*
         * Emit function call or operator new (constructor call) code.
         * First, emit code for the left operand to evaluate the callable or
         * constructable object expression.
         */
        pn2 = pn->pn_head;
        switch (pn2->pn_type) {
          case TOK_NAME:
            if (!EmitNameOp(cx, cg, pn2, JS_TRUE))
                return JS_FALSE;
            break;
          case TOK_DOT:
            if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, JS_TRUE))
                return JS_FALSE;
            break;
          case TOK_LB:
            JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
            if (!EmitElemOp(cx, pn2, JSOP_CALLELEM, cg))
                return JS_FALSE;
            break;
#if JS_HAS_XML_SUPPORT
          case TOK_UNARYOP:
            if (pn2->pn_op == JSOP_XMLNAME) {
                if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
                    return JS_FALSE;
                break;
            }
            /* FALL THROUGH */
#endif
          default:
            /*
             * Push null as a placeholder for the global object, per ECMA-262
             * 11.2.3 step 6.  We use JSOP_NULLTHIS to distinguish this opcode
             * from JSOP_NULL (see jstracer.cpp for one use-case).
             */
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            if (js_Emit1(cx, cg, JSOP_NULLTHIS) < 0)
                return JS_FALSE;
        }

        /* Remember start of callable-object bytecode for decompilation hint. */
        off = top;

        /*
         * Emit code for each argument in order, then emit the JSOP_*CALL or
         * JSOP_NEW bytecode with a two-byte immediate telling how many args
         * were pushed on the operand stack.
         */
        oldflags = cg->treeContext.flags;
        cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
        for (pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
            if (!js_EmitTree(cx, cg, pn3))
                return JS_FALSE;
        }
        cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
        if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
            return JS_FALSE;

        argc = pn->pn_count - 1;
        if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
            return JS_FALSE;
        if (PN_OP(pn) == JSOP_EVAL)
            EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
        break;
      case TOK_LEXICALSCOPE:
      {
        JSParsedObjectBox *pob;
        uintN count;

        pob = pn->pn_pob;
        js_PushBlockScope(&cg->treeContext, &stmtInfo, pob->object,
                          CG_OFFSET(cg));

        /*
         * If this lexical scope is not for a catch block, let block or let
         * expression, or any kind of for loop (where the scope starts in the
         * head after the first part if for (;;), else in the body if for-in);
         * and if our container is top-level but not a function body, or else
         * a block statement; then emit a SRC_BRACE note.  All other container
         * statements get braces by default from the decompiler.
         */
        noteIndex = -1;
        type = PN_TYPE(pn->pn_expr);
        if (type != TOK_CATCH && type != TOK_LET && type != TOK_FOR &&
            (!(stmt = stmtInfo.down)
             ? !(cg->treeContext.flags & TCF_IN_FUNCTION)
             : stmt->type == STMT_BLOCK)) {
#if defined DEBUG_brendanXXX || defined DEBUG_mrbkap
            /* There must be no source note already output for the next op. */
            JS_ASSERT(CG_NOTE_COUNT(cg) == 0 ||
                      CG_LAST_NOTE_OFFSET(cg) != CG_OFFSET(cg) ||
                      !GettableNoteForNextOp(cg));
#endif
            noteIndex = js_NewSrcNote2(cx, cg, SRC_BRACE, 0);
            if (noteIndex < 0)
                return JS_FALSE;
        }

        JS_ASSERT(CG_OFFSET(cg) == top);
        if (!EmitObjectOp(cx, pob, JSOP_ENTERBLOCK, cg))
            return JS_FALSE;

        if (!js_EmitTree(cx, cg, pn->pn_expr))
            return JS_FALSE;

        op = PN_OP(pn);
        if (op == JSOP_LEAVEBLOCKEXPR) {
            if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - top) < 0)
                return JS_FALSE;
        } else {
            if (noteIndex >= 0 &&
                !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
                                     CG_OFFSET(cg) - top)) {
                return JS_FALSE;
            }
        }

        /* Emit the JSOP_LEAVEBLOCK or JSOP_LEAVEBLOCKEXPR opcode. */
        count = OBJ_BLOCK_COUNT(cx, pob->object);
        EMIT_UINT16_IMM_OP(op, count);

        ok = js_PopStatementCG(cx, cg);
        break;
      }
#if JS_HAS_BLOCK_SCOPE
      case TOK_LET:
        /* Let statements have their variable declarations on the left. */
        if (pn->pn_arity == PN_BINARY) {
            pn2 = pn->pn_right;
            pn = pn->pn_left;
        } else {
            pn2 = NULL;
        }

        /* Non-null pn2 means that pn is the variable list from a let head. */
        JS_ASSERT(pn->pn_arity == PN_LIST);
        if (!EmitVariables(cx, cg, pn, pn2 != NULL, &noteIndex))
            return JS_FALSE;

        /* Thus non-null pn2 is the body of the let block or expression. */
        tmp = CG_OFFSET(cg);
        if (pn2 && !js_EmitTree(cx, cg, pn2))
            return JS_FALSE;

        if (noteIndex >= 0 &&
            !js_SetSrcNoteOffset(cx, cg, (uintN)noteIndex, 0,
                                 CG_OFFSET(cg) - tmp)) {
            return JS_FALSE;
        }
        break;
#endif /* JS_HAS_BLOCK_SCOPE */
#if JS_HAS_GENERATORS
      case TOK_ARRAYPUSH: {
        jsint slot;

        /*
         * The array object's stack index is in cg->arrayCompDepth. See below
         * under the array initialiser code generator for array comprehension
         * special casing.
         */
        if (!js_EmitTree(cx, cg, pn->pn_kid))
            return JS_FALSE;
        slot = cg->arrayCompDepth;
        slot = AdjustBlockSlot(cx, cg, slot);
        if (slot < 0)
            return JS_FALSE;
        EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
        break;
      }
#endif
      case TOK_RB:
#if JS_HAS_GENERATORS
      case TOK_ARRAYCOMP:
#endif
        /*
         * Emit code for [a, b, c] of the form:
         *
         *   t = new Array; t[0] = a; t[1] = b; t[2] = c; t;
         *
         * but use a stack slot for t and avoid dup'ing and popping it using
         * the JSOP_NEWINIT and JSOP_INITELEM bytecodes.
         *
         * If no sharp variable is defined and the initialiser is not for an
         * array comprehension, use JSOP_NEWARRAY.
         */
        pn2 = pn->pn_head;
        op = JSOP_NEWINIT;      // FIXME: 260106 patch disabled for now

#if JS_HAS_SHARP_VARS
        if (pn2 && pn2->pn_type == TOK_DEFSHARP)
            op = JSOP_NEWINIT;
#endif
#if JS_HAS_GENERATORS
        if (pn->pn_type == TOK_ARRAYCOMP)
            op = JSOP_NEWINIT;
#endif

        if (op == JSOP_NEWINIT &&
            js_Emit2(cx, cg, op, (jsbytecode) JSProto_Array) < 0) {
            return JS_FALSE;
        }

#if JS_HAS_SHARP_VARS
        if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
            EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
            pn2 = pn2->pn_next;
        }
#endif

#if JS_HAS_GENERATORS
        if (pn->pn_type == TOK_ARRAYCOMP) {
            uintN saveDepth;

            /*
             * Pass the new array's stack index to the TOK_ARRAYPUSH case by
             * storing it in pn->pn_extra, then simply traverse the TOK_FOR
             * node and its kids under pn2 to generate this comprehension.
             */
            JS_ASSERT(cg->stackDepth > 0);
            saveDepth = cg->arrayCompDepth;
            cg->arrayCompDepth = (uint32) (cg->stackDepth - 1);
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            cg->arrayCompDepth = saveDepth;

            /* Emit the usual op needed for decompilation. */
            if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
                return JS_FALSE;
            break;
        }
#endif /* JS_HAS_GENERATORS */

        for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
            if (op == JSOP_NEWINIT && !EmitNumberOp(cx, atomIndex, cg))
                return JS_FALSE;
            if (pn2->pn_type == TOK_COMMA) {
                if (js_Emit1(cx, cg, JSOP_HOLE) < 0)
                    return JS_FALSE;
            } else {
                if (!js_EmitTree(cx, cg, pn2))
                    return JS_FALSE;
            }
            if (op == JSOP_NEWINIT && js_Emit1(cx, cg, JSOP_INITELEM) < 0)
                return JS_FALSE;
        }

        if (pn->pn_extra & PNX_ENDCOMMA) {
            /* Emit a source note so we know to decompile an extra comma. */
            if (js_NewSrcNote(cx, cg, SRC_CONTINUE) < 0)
                return JS_FALSE;
        }

        if (op == JSOP_NEWARRAY) {
            JS_ASSERT(atomIndex == pn->pn_count);
            off = js_EmitN(cx, cg, op, 3);
            if (off < 0)
                return JS_FALSE;
            pc = CG_CODE(cg, off);
            SET_UINT24(pc, atomIndex);
            UpdateDepth(cx, cg, off);
        } else {
            /* Emit an op for sharp array cleanup and decompilation. */
            if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
                return JS_FALSE;
        }
        break;
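
        /*
         * E.g. "[1, , 3]" under JSOP_NEWINIT emits, in sketch form:
         *
         *    newinit Array
         *    <0> <1>  initelem     t[0] = 1
         *    <1> hole initelem     the elision keeps the index sequence
         *    <2> <3>  initelem     t[2] = 3
         *    endinit
         *
         * each index being pushed by EmitNumberOp before its element value.
         */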
      case TOK_RC:
#if JS_HAS_DESTRUCTURING_SHORTHAND
        if (pn->pn_extra & PNX_SHORTHAND) {
            js_ReportCompileErrorNumber(cx, CG_TS(cg), pn, JSREPORT_ERROR,
                                        JSMSG_BAD_OBJECT_INIT);
            return JS_FALSE;
        }
#endif
        /*
         * Emit code for {p:a, '%q':b, 2:c} of the form:
         *
         *   t = new Object; t.p = a; t['%q'] = b; t[2] = c; t;
         *
         * but use a stack slot for t and avoid dup'ing and popping it via
         * the JSOP_NEWINIT and JSOP_INITELEM/JSOP_INITPROP bytecodes.
         */
        if (js_Emit2(cx, cg, JSOP_NEWINIT, (jsbytecode) JSProto_Object) < 0)
            return JS_FALSE;

        pn2 = pn->pn_head;
#if JS_HAS_SHARP_VARS
        if (pn2 && pn2->pn_type == TOK_DEFSHARP) {
            EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid)pn2->pn_num);
            pn2 = pn2->pn_next;
        }
#endif

        for (; pn2; pn2 = pn2->pn_next) {
            /* Emit an index for t[2], else map an atom for t.p or t['%q']. */
            pn3 = pn2->pn_left;
            if (pn3->pn_type == TOK_NUMBER) {
                ale = NULL;     /* quell GCC overwarning */
                if (!EmitNumberOp(cx, pn3->pn_dval, cg))
                    return JS_FALSE;
            } else {
                JS_ASSERT(pn3->pn_type == TOK_NAME ||
                          pn3->pn_type == TOK_STRING);
                ale = js_IndexAtom(cx, pn3->pn_atom, &cg->atomList);
                if (!ale)
                    return JS_FALSE;
            }

            /* Emit code for the property initializer. */
            if (!js_EmitTree(cx, cg, pn2->pn_right))
                return JS_FALSE;

#if JS_HAS_GETTER_SETTER
            op = PN_OP(pn2);
            if (op == JSOP_GETTER || op == JSOP_SETTER) {
                if (js_Emit1(cx, cg, op) < 0)
                    return JS_FALSE;
            }
#endif
            /* Annotate JSOP_INITELEM so we decompile 2:c and not just c. */
            if (pn3->pn_type == TOK_NUMBER) {
                if (js_NewSrcNote(cx, cg, SRC_INITPROP) < 0)
                    return JS_FALSE;
                if (js_Emit1(cx, cg, JSOP_INITELEM) < 0)
                    return JS_FALSE;
            } else {
                EMIT_INDEX_OP(JSOP_INITPROP, ALE_INDEX(ale));
            }
        }

        /* Emit an op for sharpArray cleanup and decompilation. */
        if (js_Emit1(cx, cg, JSOP_ENDINIT) < 0)
            return JS_FALSE;
        break;
#if JS_HAS_SHARP_VARS
      case TOK_DEFSHARP:
        if (!js_EmitTree(cx, cg, pn->pn_kid))
            return JS_FALSE;
        EMIT_UINT16_IMM_OP(JSOP_DEFSHARP, (jsatomid) pn->pn_num);
        break;

      case TOK_USESHARP:
        EMIT_UINT16_IMM_OP(JSOP_USESHARP, (jsatomid) pn->pn_num);
        break;
#endif /* JS_HAS_SHARP_VARS */
      case TOK_RP:
        /*
         * The node for (e) has e as its kid, enabling users who want to nest
         * assignment expressions in conditions to avoid the error correction
         * done by Condition (from x = y to x == y) by double-parenthesizing.
         */
        oldflags = cg->treeContext.flags;
        cg->treeContext.flags &= ~TCF_IN_FOR_INIT;
        if (!js_EmitTree(cx, cg, pn->pn_kid))
            return JS_FALSE;
        cg->treeContext.flags |= oldflags & TCF_IN_FOR_INIT;
        break;
      case TOK_NAME:
        if (!EmitNameOp(cx, cg, pn, JS_FALSE))
            return JS_FALSE;
        break;
#if JS_HAS_XML_SUPPORT
      case TOK_XMLATTR:
      case TOK_XMLSPACE:
      case TOK_XMLTEXT:
      case TOK_XMLCDATA:
      case TOK_XMLCOMMENT:
#endif
      case TOK_STRING:
        ok = EmitAtomOp(cx, pn, PN_OP(pn), cg);
        break;

      case TOK_NUMBER:
        ok = EmitNumberOp(cx, pn->pn_dval, cg);
        break;

      case TOK_REGEXP:
        /*
         * If the regexp's script is one-shot, we can avoid the extra
         * fork-on-exec costs of JSOP_REGEXP by selecting JSOP_OBJECT.
         * Otherwise, to avoid incorrect proto, parent, and lastIndex
         * sharing among threads and sequentially across re-execution,
         * select JSOP_REGEXP.
         */
        JS_ASSERT(pn->pn_op == JSOP_REGEXP);
        if (cg->treeContext.flags & TCF_COMPILE_N_GO) {
            ok = EmitObjectOp(cx, pn->pn_pob, JSOP_OBJECT, cg);
        } else {
            ok = EmitIndexOp(cx, JSOP_REGEXP,
                             IndexParsedObject(pn->pn_pob, &cg->regexpList),
                             cg);
        }
        break;
#if JS_HAS_XML_SUPPORT
      case TOK_ANYNAME:
#endif
      case TOK_PRIMARY:
        if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
            return JS_FALSE;
        break;

#if JS_HAS_DEBUGGER_KEYWORD
      case TOK_DEBUGGER:
        if (js_Emit1(cx, cg, JSOP_DEBUGGER) < 0)
            return JS_FALSE;
        break;
#endif /* JS_HAS_DEBUGGER_KEYWORD */
#if JS_HAS_XML_SUPPORT
      case TOK_XMLELEM:
      case TOK_XMLLIST:
        if (pn->pn_op == JSOP_XMLOBJECT) {
            ok = EmitObjectOp(cx, pn->pn_pob, PN_OP(pn), cg);
            break;
        }

        JS_ASSERT(pn->pn_type == TOK_XMLLIST || pn->pn_count != 0);
        switch (pn->pn_head ? pn->pn_head->pn_type : TOK_XMLLIST) {
          case TOK_XMLETAGO:
            JS_ASSERT(0);
            /* FALL THROUGH */
          case TOK_XMLPTAGC:
          case TOK_XMLSTAGO:
            break;
          default:
            if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
                return JS_FALSE;
        }

        for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
            if (pn2->pn_type == TOK_LC &&
                js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
                return JS_FALSE;
            }
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
                return JS_FALSE;
        }

        if (pn->pn_extra & PNX_XMLROOT) {
            if (pn->pn_count == 0) {
                JS_ASSERT(pn->pn_type == TOK_XMLLIST);
                atom = cx->runtime->atomState.emptyAtom;
                ale = js_IndexAtom(cx, atom, &cg->atomList);
                if (!ale)
                    return JS_FALSE;
                EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
            }
            if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
                return JS_FALSE;
        } else {
            JS_ASSERT(pn->pn_count != 0);
        }
        break;

      case TOK_XMLPTAGC:
      case TOK_XMLSTAGO:
      case TOK_XMLETAGO:
      {
        uint32 i;

        if (pn->pn_op == JSOP_XMLOBJECT) {
            ok = EmitObjectOp(cx, pn->pn_pob, PN_OP(pn), cg);
            break;
        }

        JS_ASSERT(pn->pn_arity == PN_LIST);
        if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
            return JS_FALSE;

        ale = js_IndexAtom(cx,
                           (pn->pn_type == TOK_XMLETAGO)
                           ? cx->runtime->atomState.etagoAtom
                           : cx->runtime->atomState.stagoAtom,
                           &cg->atomList);
        if (!ale)
            return JS_FALSE;
        EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));

        JS_ASSERT(pn->pn_count != 0);
        pn2 = pn->pn_head;
        if (pn2->pn_type == TOK_LC && js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0)
            return JS_FALSE;
        if (!js_EmitTree(cx, cg, pn2))
            return JS_FALSE;
        if (js_Emit1(cx, cg, JSOP_ADD) < 0)
            return JS_FALSE;

        for (pn2 = pn2->pn_next, i = 0; pn2; pn2 = pn2->pn_next, i++) {
            if (pn2->pn_type == TOK_LC &&
                js_Emit1(cx, cg, JSOP_STARTXMLEXPR) < 0) {
                return JS_FALSE;
            }
            if (!js_EmitTree(cx, cg, pn2))
                return JS_FALSE;
            if ((i & 1) && pn2->pn_type == TOK_LC) {
                if (js_Emit1(cx, cg, JSOP_TOATTRVAL) < 0)
                    return JS_FALSE;
            }
            if (js_Emit1(cx, cg,
                         (i & 1) ? JSOP_ADDATTRVAL : JSOP_ADDATTRNAME) < 0) {
                return JS_FALSE;
            }
        }

        ale = js_IndexAtom(cx,
                           (pn->pn_type == TOK_XMLPTAGC)
                           ? cx->runtime->atomState.ptagcAtom
                           : cx->runtime->atomState.tagcAtom,
                           &cg->atomList);
        if (!ale)
            return JS_FALSE;
        EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
        if (js_Emit1(cx, cg, JSOP_ADD) < 0)
            return JS_FALSE;

        if ((pn->pn_extra & PNX_XMLROOT) && js_Emit1(cx, cg, PN_OP(pn)) < 0)
            return JS_FALSE;
        break;
      }

      case TOK_XMLNAME:
        if (pn->pn_arity == PN_LIST) {
            JS_ASSERT(pn->pn_count != 0);
            for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
                if (!js_EmitTree(cx, cg, pn2))
                    return JS_FALSE;
                if (pn2 != pn->pn_head && js_Emit1(cx, cg, JSOP_ADD) < 0)
                    return JS_FALSE;
            }
        } else {
            JS_ASSERT(pn->pn_arity == PN_NULLARY);
            ok = (pn->pn_op == JSOP_OBJECT)
                 ? EmitObjectOp(cx, pn->pn_pob, PN_OP(pn), cg)
                 : EmitAtomOp(cx, pn, PN_OP(pn), cg);
        }
        break;

      case TOK_XMLPI:
        ale = js_IndexAtom(cx, pn->pn_atom2, &cg->atomList);
        if (!ale)
            return JS_FALSE;
        if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
            return JS_FALSE;
        if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
            return JS_FALSE;
        break;
#endif /* JS_HAS_XML_SUPPORT */

      default:
        JS_ASSERT(0);
    }
    if (ok && --cg->emitLevel == 0 && cg->spanDeps)
        ok = OptimizeSpanDeps(cx, cg);

    return ok;
}
/*
 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR and SRC_DECL.
 */
JS_FRIEND_DATA(JSSrcNoteSpec) js_SrcNoteSpec[] = {
    {"null",            0,      0,      0},
    {"if",              0,      0,      0},
    {"if-else",         2,      0,      1},
    {"for",             3,      1,      1},
    {"while",           1,      0,      1},
    {"continue",        0,      0,      0},
    {"decl",            1,      1,      1},
    {"pcdelta",         1,      0,      1},
    {"assignop",        0,      0,      0},
    {"cond",            1,      0,      1},
    {"brace",           1,      0,      1},
    {"hidden",          0,      0,      0},
    {"pcbase",          1,      0,     -1},
    {"label",           1,      0,      0},
    {"labelbrace",      1,      0,      0},
    {"endbrace",        0,      0,      0},
    {"break2label",     1,      0,      0},
    {"cont2label",      1,      0,      0},
    {"switch",          2,      0,      1},
    {"funcdef",         1,      0,      0},
    {"catch",           1,      0,      1},
    {"extended",       -1,      0,      0},
    {"newline",         0,      0,      0},
    {"setline",         1,      0,      0},
    {"xdelta",          0,      0,      0},
};
static intN
AllocSrcNote(JSContext *cx, JSCodeGenerator *cg)
{
    intN index;
    JSArenaPool *pool;
    size_t size;

    index = CG_NOTE_COUNT(cg);
    if (((uintN)index & CG_NOTE_MASK(cg)) == 0) {
        pool = cg->notePool;
        size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
        if (!CG_NOTES(cg)) {
            /* Allocate the first note array lazily; leave noteMask alone. */
            JS_ARENA_ALLOCATE_CAST(CG_NOTES(cg), jssrcnote *, pool, size);
        } else {
            /* Grow by doubling note array size; update noteMask on success. */
            JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
            if (CG_NOTES(cg))
                CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
        }
        if (!CG_NOTES(cg)) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
    }

    CG_NOTE_COUNT(cg) = index + 1;
    return index;
}
intN
js_NewSrcNote(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type)
{
    intN index, n;
    jssrcnote *sn;
    ptrdiff_t offset, delta, xdelta;

    /*
     * Claim a note slot in CG_NOTES(cg) by growing it if necessary and then
     * incrementing CG_NOTE_COUNT(cg).
     */
    index = AllocSrcNote(cx, cg);
    if (index < 0)
        return -1;
    sn = &CG_NOTES(cg)[index];

    /*
     * Compute delta from the last annotated bytecode's offset.  If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
    offset = CG_OFFSET(cg);
    delta = offset - CG_LAST_NOTE_OFFSET(cg);
    CG_LAST_NOTE_OFFSET(cg) = offset;
    if (delta >= SN_DELTA_LIMIT) {
        do {
            xdelta = JS_MIN(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(sn, xdelta);
            delta -= xdelta;
            index = AllocSrcNote(cx, cg);
            if (index < 0)
                return -1;
            sn = &CG_NOTES(cg)[index];
        } while (delta >= SN_DELTA_LIMIT);
    }

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity.  Usually, we won't need more, but if an offset
     * does take two bytes, js_SetSrcNoteOffset will grow CG_NOTES(cg).
     */
    SN_MAKE_NOTE(sn, type, delta);
    for (n = (intN)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (js_NewSrcNote(cx, cg, SRC_NULL) < 0)
            return -1;
    }
    return index;
}
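
/*
 * Worked example, assuming the encoding in jsopcode.h (3-bit note delta,
 * 6-bit extended delta): if 300 bytecodes separate this note from the last
 * annotated one, the loop above emits xdelta notes carrying 63+63+63+63+48
 * = 300, leaving a remaining delta of 0 (< SN_DELTA_LIMIT) to store in the
 * real note itself.
 */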
intN
js_NewSrcNote2(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
               ptrdiff_t offset)
{
    intN index;

    index = js_NewSrcNote(cx, cg, type);
    if (index >= 0) {
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset))
            return -1;
    }
    return index;
}
intN
js_NewSrcNote3(JSContext *cx, JSCodeGenerator *cg, JSSrcNoteType type,
               ptrdiff_t offset1, ptrdiff_t offset2)
{
    intN index;

    index = js_NewSrcNote(cx, cg, type);
    if (index >= 0) {
        if (!js_SetSrcNoteOffset(cx, cg, index, 0, offset1))
            return -1;
        if (!js_SetSrcNoteOffset(cx, cg, index, 1, offset2))
            return -1;
    }
    return index;
}
static JSBool
GrowSrcNotes(JSContext *cx, JSCodeGenerator *cg)
{
    JSArenaPool *pool;
    size_t size;

    /* Grow by doubling note array size; update noteMask on success. */
    pool = cg->notePool;
    size = SRCNOTE_SIZE(CG_NOTE_MASK(cg) + 1);
    JS_ARENA_GROW_CAST(CG_NOTES(cg), jssrcnote *, pool, size, size);
    if (!CG_NOTES(cg)) {
        js_ReportOutOfScriptQuota(cx);
        return JS_FALSE;
    }
    CG_NOTE_MASK(cg) = (CG_NOTE_MASK(cg) << 1) | 1;
    return JS_TRUE;
}
jssrcnote *
js_AddToSrcNoteDelta(JSContext *cx, JSCodeGenerator *cg, jssrcnote *sn,
                     ptrdiff_t delta)
{
    ptrdiff_t base, limit, newdelta, diff;
    intN index;

    /*
     * Called only from OptimizeSpanDeps and js_FinishTakingSrcNotes to add to
     * main script note deltas, and only by a small positive amount.
     */
    JS_ASSERT(cg->current == &cg->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    base = SN_DELTA(sn);
    limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    newdelta = base + delta;
    if (newdelta < limit) {
        SN_SET_DELTA(sn, newdelta);
    } else {
        index = sn - cg->main.notes;
        if ((cg->main.noteCount & cg->main.noteMask) == 0) {
            if (!GrowSrcNotes(cx, cg))
                return NULL;
            sn = cg->main.notes + index;
        }
        diff = cg->main.noteCount - index;
        cg->main.noteCount++;
        memmove(sn + 1, sn, SRCNOTE_SIZE(diff));
        SN_MAKE_XDELTA(sn, delta);
        sn++;
    }
    return sn;
}
JS_FRIEND_API(uintN)
js_SrcNoteLength(jssrcnote *sn)
{
    uintN arity;
    jssrcnote *base;

    arity = (intN)js_SrcNoteSpec[SN_TYPE(sn)].arity;
    for (base = sn++; arity; sn++, arity--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }
    return sn - base;
}
JS_FRIEND_API(ptrdiff_t)
js_GetSrcNoteOffset(jssrcnote *sn, uintN which)
{
    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }
    if (*sn & SN_3BYTE_OFFSET_FLAG) {
        return (ptrdiff_t)(((uint32)(sn[0] & SN_3BYTE_OFFSET_MASK) << 16)
                           | (sn[1] << 8)
                           | sn[2]);
    }
    return (ptrdiff_t)*sn;
}
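
/*
 * Offsets that fit in one byte are stored as-is; larger ones set
 * SN_3BYTE_OFFSET_FLAG in the first byte and spread the value big-endian
 * over three bytes.  E.g. an offset of 0x1abcd is stored (illustratively)
 * as (flag | 0x01), 0xab, 0xcd and reassembled by the shifts above.
 */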
JSBool
js_SetSrcNoteOffset(JSContext *cx, JSCodeGenerator *cg, uintN index,
                    uintN which, ptrdiff_t offset)
{
    jssrcnote *sn;
    ptrdiff_t diff;

    if ((jsuword)offset >= (jsuword)((ptrdiff_t)SN_3BYTE_OFFSET_FLAG << 16)) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    sn = &CG_NOTES(cg)[index];
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((intN) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_3BYTE_OFFSET_FLAG)
            sn += 2;
    }

    /* See if the new offset requires three bytes. */
    if (offset > (ptrdiff_t)SN_3BYTE_OFFSET_MASK) {
        /* Maybe this offset was already set to a three-byte value. */
        if (!(*sn & SN_3BYTE_OFFSET_FLAG)) {
            /* Losing, need to insert another two bytes for this offset. */
            index = PTRDIFF(sn, CG_NOTES(cg), jssrcnote);

            /*
             * Simultaneously test to see if the source note array must grow
             * to accommodate either the first or second byte of additional
             * storage required by this 3-byte offset.
             */
            if (((CG_NOTE_COUNT(cg) + 1) & CG_NOTE_MASK(cg)) <= 1) {
                if (!GrowSrcNotes(cx, cg))
                    return JS_FALSE;
                sn = CG_NOTES(cg) + index;
            }
            CG_NOTE_COUNT(cg) += 2;

            diff = CG_NOTE_COUNT(cg) - (index + 3);
            JS_ASSERT(diff >= 0);
            if (diff > 0)
                memmove(sn + 3, sn + 1, SRCNOTE_SIZE(diff));
        }
        *sn++ = (jssrcnote)(SN_3BYTE_OFFSET_FLAG | (offset >> 16));
        *sn++ = (jssrcnote)(offset >> 8);
    }
    *sn = (jssrcnote)offset;
    return JS_TRUE;
}
#ifdef DEBUG_notme
#define DEBUG_srcnotesize
#endif

#ifdef DEBUG_srcnotesize
#define NBINS 10
static uint32 hist[NBINS];

void DumpSrcNoteSizeHist()
{
    static FILE *fp;
    int i, n;

    if (!fp) {
        fp = fopen("/tmp/srcnotes.hist", "w");
        if (!fp)
            return;
        setvbuf(fp, NULL, _IONBF, 0);
    }
    fprintf(fp, "SrcNote size histogram:\n");
    for (i = 0; i < NBINS; i++) {
        fprintf(fp, "%4u %4u ", JS_BIT(i), hist[i]);
        for (n = (int) JS_HOWMANY(hist[i], 10); n > 0; --n)
            fputc('*', fp);
        fputc('\n', fp);
    }
    fputc('\n', fp);
}
#endif
/*
 * Fill in the storage at notes with prolog and main srcnotes; the space at
 * notes was allocated using the CG_COUNT_FINAL_SRCNOTES macro from jsemit.h.
 * SO DON'T CHANGE THIS FUNCTION WITHOUT AT LEAST CHECKING WHETHER jsemit.h's
 * CG_COUNT_FINAL_SRCNOTES MACRO NEEDS CORRESPONDING CHANGES!
 */
JSBool
js_FinishTakingSrcNotes(JSContext *cx, JSCodeGenerator *cg, jssrcnote *notes)
{
    uintN prologCount, mainCount, totalCount;
    ptrdiff_t offset, delta;
    jssrcnote *sn;

    JS_ASSERT(cg->current == &cg->main);

    prologCount = cg->prolog.noteCount;
    if (prologCount && cg->prolog.currentLine != cg->firstLine) {
        CG_SWITCH_TO_PROLOG(cg);
        if (js_NewSrcNote2(cx, cg, SRC_SETLINE, (ptrdiff_t)cg->firstLine) < 0)
            return JS_FALSE;
        prologCount = cg->prolog.noteCount;
        CG_SWITCH_TO_MAIN(cg);
    } else {
        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        offset = CG_PROLOG_OFFSET(cg) - cg->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && cg->main.noteCount != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            sn = cg->main.notes;
            delta = SN_IS_XDELTA(sn)
                    ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                    : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
            if (offset < delta)
                delta = offset;
            for (;;) {
                if (!js_AddToSrcNoteDelta(cx, cg, sn, delta))
                    return JS_FALSE;
                offset -= delta;
                if (offset == 0)
                    break;
                delta = JS_MIN(offset, SN_XDELTA_MASK);
                sn = cg->main.notes;
            }
        }
    }

    mainCount = cg->main.noteCount;
    totalCount = prologCount + mainCount;
    if (prologCount)
        memcpy(notes, cg->prolog.notes, SRCNOTE_SIZE(prologCount));
    memcpy(notes + prologCount, cg->main.notes, SRCNOTE_SIZE(mainCount));
    SN_MAKE_TERMINATOR(&notes[totalCount]);

#ifdef DEBUG_srcnotesize
  { int bin = JS_CeilingLog2(totalCount);
    if (bin >= NBINS)
        bin = NBINS - 1;
    ++hist[bin];
  }
#endif
    return JS_TRUE;
}
static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end)
{
    JSTryNode *tryNode;

    JS_ASSERT((uintN)(uint16)stackDepth == stackDepth);
    JS_ASSERT(start <= end);
    JS_ASSERT((size_t)(uint32)start == start);
    JS_ASSERT((size_t)(uint32)end == end);

    JS_ARENA_ALLOCATE_TYPE(tryNode, JSTryNode, &cx->tempPool);
    if (!tryNode) {
        js_ReportOutOfScriptQuota(cx);
        return JS_FALSE;
    }

    tryNode->note.kind = kind;
    tryNode->note.stackDepth = (uint16)stackDepth;
    tryNode->note.start = (uint32)start;
    tryNode->note.length = (uint32)(end - start);
    tryNode->prev = cg->lastTryNode;
    cg->lastTryNode = tryNode;
    cg->ntrynotes++;
    return JS_TRUE;
}
void
js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
{
    JSTryNode *tryNode;
    JSTryNote *tn;

    JS_ASSERT(array->length > 0 && array->length == cg->ntrynotes);
    tn = array->vector + array->length;
    tryNode = cg->lastTryNode;
    do {
        *--tn = tryNode->note;
    } while ((tryNode = tryNode->prev) != NULL);
    JS_ASSERT(tn == array->vector);
}
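
/*
 * Because NewTryNote prepends to the cg->lastTryNode list and this loop
 * copies it back-to-front, nested handlers come out inner-before-outer.
 * E.g. for "try { try {} catch (e) {} } finally {}" (a sketch), the inner
 * JSTRY_CATCH note precedes the outer JSTRY_FINALLY note in the array,
 * which is the ordering the interpreter's handler search relies on.
 */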
/*
 * Find the index of the given object for code generator.
 *
 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects. We also add the object to a
 * list to convert the list to a fixed-size array when we complete code
 * generation, see FinishParsedObjects below.
 *
 * Most of the objects go to JSCodeGenerator.objectList, but for regexps we
 * use a separate JSCodeGenerator.regexpList. In this way the emitted index
 * can be directly used to store and fetch a reference to a cloned RegExp
 * object that shares the same JSRegExp private data created for the object
 * literal in pob. We need clones to hold lastIndex and other direct
 * properties that should not be shared among threads sharing a precompiled
 * function or script.
 *
 * If the code being compiled is function code, allocate a reserved slot in
 * the cloned function object that shares its precompiled script with other
 * cloned function objects and with the compiler-created clone-parent. There
 * are nregexps = JS_SCRIPT_REGEXPS(script)->length such reserved slots in
 * each function object cloned from fun->object. NB: during compilation, a
 * funobj slots element must never be allocated, because js_AllocSlot could
 * hand out one of the slots that should be given to a regexp clone.
 *
 * If the code being compiled is global code, the cloned regexps are stored
 * in fp->vars slots after cg->treeContext.ngvars, and to protect regexp
 * slots from GC we set fp->nvars to ngvars + nregexps.
 *
 * The slots initially contain undefined or null. We populate them lazily
 * when JSOP_REGEXP is executed for the first time.
 *
 * Why clone regexp objects?  ECMA specifies that when a regular expression
 * literal is scanned, a RegExp object is created.  In the spec, compilation
 * and execution happen indivisibly, but in this implementation and many of
 * its embeddings, code is precompiled early and re-executed in multiple
 * threads, or using multiple global objects, or both, for efficiency.
 *
 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
 * objects, which makes for collisions on the lastIndex property (especially
 * for global regexps) and on any ad-hoc properties.  Also, __proto__ and
 * __parent__ refer to the pre-compilation prototype and global objects, a
 * pigeon-hole problem for instanceof tests.
 */
static uintN
IndexParsedObject(JSParsedObjectBox *pob, JSEmittedObjectList *list)
{
    JS_ASSERT(!pob->emitLink);
    pob->emitLink = list->lastPob;
    list->lastPob = pob;
    return list->length++;
}
void
FinishParsedObjects(JSEmittedObjectList *emittedList, JSObjectArray *array)
{
    JSObject **cursor;
    JSParsedObjectBox *pob;

    JS_ASSERT(emittedList->length <= INDEX_LIMIT);
    JS_ASSERT(emittedList->length == array->length);

    cursor = array->vector + array->length;
    pob = emittedList->lastPob;
    do {
        --cursor;
        JS_ASSERT(!*cursor);
        *cursor = pob->object;
    } while ((pob = pob->emitLink) != NULL);
    JS_ASSERT(cursor == array->vector);
}