1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "jsstddef.h" // always first
43 #include "jsbit.h" // low-level (NSPR-based) headers next
45 #include <math.h> // standard headers next
48 #define alloca _alloca
51 #include "nanojit/avmplus.h" // nanojit
52 #include "nanojit/nanojit.h"
53 #include "jsarray.h" // higher-level library and API headers
68 #include "jsautooplen.h" // generated headers last
70 /* Number of iterations of a loop before we start tracing. */
73 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
76 /* Max call depths for inlining. */
77 #define MAX_CALLDEPTH 5
79 /* Max number of type mismatchs before we trash the tree. */
80 #define MAX_MISMATCH 5
82 /* Max native stack size. */
83 #define MAX_NATIVE_STACK_SLOTS 1024
85 /* Max call stack size. */
86 #define MAX_CALL_STACK_ENTRIES 64
89 #define ABORT_TRACE(msg) do { debug_only_v(fprintf(stdout, "abort: %d: %s\n", __LINE__, msg);) return false; } while (0)
91 #define ABORT_TRACE(msg) return false
97 recorderStarted
, recorderAborted
, traceCompleted
, sideExitIntoInterpreter
,
98 typeMapMismatchAtEntry
, returnToDifferentLoopHeader
, traceTriggered
,
99 globalShapeMismatchAtEntry
, treesTrashed
, slotPromoted
,
100 unstableLoopVariable
, breakLoopExits
;
102 #define AUDIT(x) (stat.x++)
104 #define AUDIT(x) ((void)0)
107 #define INS_CONST(c) addName(lir->insImm(c), #c)
108 #define INS_CONSTPTR(p) addName(lir->insImmPtr((void*) (p)), #p)
110 using namespace avmplus
;
111 using namespace nanojit
;
114 static avmplus::AvmCore s_core
= avmplus::AvmCore();
115 static avmplus::AvmCore
* core
= &s_core
;
117 /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
118 static bool nesting_enabled
= true;
119 static bool oracle_enabled
= true;
120 static bool did_we_check_sse2
= false;
123 static bool verbose_debug
= getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
124 #define debug_only_v(x) if (verbose_debug) { x; }
126 #define debug_only_v(x)
129 /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
130 case cause performance regressions. */
131 static Oracle oracle
;
144 Tracker::getPageBase(const void* v
) const
146 return jsuword(v
) & ~jsuword(NJ_PAGE_SIZE
-1);
149 struct Tracker::Page
*
150 Tracker::findPage(const void* v
) const
152 jsuword base
= getPageBase(v
);
153 struct Tracker::Page
* p
= pagelist
;
155 if (p
->base
== base
) {
163 struct Tracker::Page
*
164 Tracker::addPage(const void* v
) {
165 jsuword base
= getPageBase(v
);
166 struct Tracker::Page
* p
= (struct Tracker::Page
*)
167 GC::Alloc(sizeof(*p
) - sizeof(p
->map
) + (NJ_PAGE_SIZE
>> 2) * sizeof(LIns
*));
179 pagelist
= pagelist
->next
;
185 Tracker::has(const void *v
) const
187 return get(v
) != NULL
;
190 #if defined NANOJIT_64BIT
191 #define PAGEMASK 0x7ff
193 #define PAGEMASK 0xfff
197 Tracker::get(const void* v
) const
199 struct Tracker::Page
* p
= findPage(v
);
202 return p
->map
[(jsuword(v
) & PAGEMASK
) >> 2];
206 Tracker::set(const void* v
, LIns
* i
)
208 struct Tracker::Page
* p
= findPage(v
);
211 p
->map
[(jsuword(v
) & PAGEMASK
) >> 2] = i
;
214 static inline bool isNumber(jsval v
)
216 return JSVAL_IS_INT(v
) || JSVAL_IS_DOUBLE(v
);
219 static inline jsdouble
asNumber(jsval v
)
221 JS_ASSERT(isNumber(v
));
222 if (JSVAL_IS_DOUBLE(v
))
223 return *JSVAL_TO_DOUBLE(v
);
224 return (jsdouble
)JSVAL_TO_INT(v
);
227 static inline bool isInt32(jsval v
)
231 jsdouble d
= asNumber(v
);
233 return JSDOUBLE_IS_INT(d
, i
);
236 static inline uint8
getCoercedType(jsval v
)
238 return isInt32(v
) ? JSVAL_INT
: (uint8
) JSVAL_TAG(v
);
241 /* Tell the oracle that a certain global variable should not be demoted. */
243 Oracle::markGlobalSlotUndemotable(JSScript
* script
, unsigned slot
)
245 _dontDemote
.set(&gc
, (slot
% ORACLE_SIZE
));
248 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
250 Oracle::isGlobalSlotUndemotable(JSScript
* script
, unsigned slot
) const
252 return !oracle_enabled
|| _dontDemote
.get(slot
% ORACLE_SIZE
);
255 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
257 Oracle::markStackSlotUndemotable(JSScript
* script
, jsbytecode
* ip
, unsigned slot
)
259 uint32 hash
= uint32(intptr_t(ip
)) + (slot
<< 5);
261 _dontDemote
.set(&gc
, hash
);
264 /* Consult with the oracle whether we shouldn't demote a certain slot. */
266 Oracle::isStackSlotUndemotable(JSScript
* script
, jsbytecode
* ip
, unsigned slot
) const
268 uint32 hash
= uint32(intptr_t(ip
)) + (slot
<< 5);
270 return !oracle_enabled
|| _dontDemote
.get(hash
);
273 /* Clear the oracle. */
280 static LIns
* demote(LirWriter
*out
, LInsp i
)
283 return callArgN(i
, 0);
284 if (i
->isop(LIR_i2f
) || i
->isop(LIR_u2f
))
288 AvmAssert(i
->isconstq());
289 double cf
= i
->constvalf();
290 int32_t ci
= cf
> 0x7fffffff ? uint32_t(cf
) : int32_t(cf
);
291 return out
->insImm(ci
);
294 static bool isPromoteInt(LIns
* i
)
297 return i
->isop(LIR_i2f
) || i
->isconst() ||
298 (i
->isconstq() && ((d
= i
->constvalf()) == (jsdouble
)(jsint
)d
) && !JSDOUBLE_IS_NEGZERO(d
));
301 static bool isPromoteUint(LIns
* i
)
304 return i
->isop(LIR_u2f
) || i
->isconst() ||
305 (i
->isconstq() && ((d
= i
->constvalf()) == (jsdouble
)(jsuint
)d
));
308 static bool isPromote(LIns
* i
)
310 return isPromoteInt(i
) || isPromoteUint(i
);
313 static bool isconst(LIns
* i
, int32_t c
)
315 return i
->isconst() && i
->constval() == c
;
318 static bool overflowSafe(LIns
* i
)
321 return (i
->isop(LIR_and
) && ((c
= i
->oprnd2())->isconst()) &&
322 ((c
->constval() & 0xc0000000) == 0)) ||
323 (i
->isop(LIR_rsh
) && ((c
= i
->oprnd2())->isconst()) &&
324 ((c
->constval() > 0)));
327 class FuncFilter
: public LirWriter
329 TraceRecorder
& recorder
;
331 FuncFilter(LirWriter
* out
, TraceRecorder
& _recorder
):
332 LirWriter(out
), recorder(_recorder
)
336 LInsp
ins1(LOpcode v
, LInsp s0
)
340 if (isPromoteInt(s0
)) {
341 LIns
* result
= out
->ins1(LIR_neg
, demote(out
, s0
));
342 out
->insGuard(LIR_xt
, out
->ins1(LIR_ov
, result
),
343 recorder
.snapshot(OVERFLOW_EXIT
));
344 return out
->ins1(LIR_i2f
, result
);
349 return out
->ins1(v
, s0
);
352 LInsp
ins2(LOpcode v
, LInsp s0
, LInsp s1
)
354 if (s0
== s1
&& v
== LIR_feq
) {
356 // double(int) and double(uint) cannot be nan
359 if (s0
->isop(LIR_fmul
) || s0
->isop(LIR_fsub
) || s0
->isop(LIR_fadd
)) {
360 LInsp lhs
= s0
->oprnd1();
361 LInsp rhs
= s0
->oprnd2();
362 if (isPromote(lhs
) && isPromote(rhs
)) {
363 // add/sub/mul promoted ints can't be nan
367 } else if (LIR_feq
<= v
&& v
<= LIR_fge
) {
368 if (isPromoteInt(s0
) && isPromoteInt(s1
)) {
369 // demote fcmp to cmp
370 v
= LOpcode(v
+ (LIR_eq
- LIR_feq
));
371 return out
->ins2(v
, demote(out
, s0
), demote(out
, s1
));
372 } else if (isPromoteUint(s0
) && isPromoteUint(s1
)) {
374 v
= LOpcode(v
+ (LIR_eq
- LIR_feq
));
376 v
= LOpcode(v
+ (LIR_ult
- LIR_lt
)); // cmp -> ucmp
377 return out
->ins2(v
, demote(out
, s0
), demote(out
, s1
));
379 } else if (v
== LIR_fadd
|| v
== LIR_fsub
) {
380 /* demoting multiplication seems to be tricky since it can quickly overflow the
381 value range of int32 */
382 if (isPromoteInt(s0
) && isPromoteInt(s1
)) {
384 v
= (LOpcode
)((int)v
& ~LIR64
);
387 LIns
* result
= out
->ins2(v
, d0
= demote(out
, s0
), d1
= demote(out
, s1
));
388 if (!overflowSafe(d0
) || !overflowSafe(d1
)) {
389 out
->insGuard(LIR_xt
, out
->ins1(LIR_ov
, result
),
390 recorder
.snapshot(OVERFLOW_EXIT
));
392 return out
->ins1(LIR_i2f
, result
);
394 } else if (v
== LIR_or
&&
395 s0
->isop(LIR_lsh
) && isconst(s0
->oprnd2(), 16) &&
396 s1
->isop(LIR_and
) && isconst(s1
->oprnd2(), 0xffff)) {
397 LIns
* msw
= s0
->oprnd1();
398 LIns
* lsw
= s1
->oprnd1();
401 if (lsw
->isop(LIR_add
) &&
402 lsw
->oprnd1()->isop(LIR_and
) &&
403 lsw
->oprnd2()->isop(LIR_and
) &&
404 isconst(lsw
->oprnd1()->oprnd2(), 0xffff) &&
405 isconst(lsw
->oprnd2()->oprnd2(), 0xffff) &&
406 msw
->isop(LIR_add
) &&
407 msw
->oprnd1()->isop(LIR_add
) &&
408 msw
->oprnd2()->isop(LIR_rsh
) &&
409 msw
->oprnd1()->oprnd1()->isop(LIR_rsh
) &&
410 msw
->oprnd1()->oprnd2()->isop(LIR_rsh
) &&
411 isconst(msw
->oprnd2()->oprnd2(), 16) &&
412 isconst(msw
->oprnd1()->oprnd1()->oprnd2(), 16) &&
413 isconst(msw
->oprnd1()->oprnd2()->oprnd2(), 16) &&
414 (x
= lsw
->oprnd1()->oprnd1()) == msw
->oprnd1()->oprnd1()->oprnd1() &&
415 (y
= lsw
->oprnd2()->oprnd1()) == msw
->oprnd1()->oprnd2()->oprnd1() &&
416 lsw
== msw
->oprnd2()->oprnd1()) {
417 return out
->ins2(LIR_add
, x
, y
);
420 return out
->ins2(v
, s0
, s1
);
423 LInsp
insCall(uint32_t fid
, LInsp args
[])
427 case F_DoubleToUint32
:
429 return out
->insImm(js_DoubleToECMAUint32(s0
->constvalf()));
430 if (s0
->isop(LIR_i2f
) || s0
->isop(LIR_u2f
)) {
434 case F_DoubleToInt32
:
436 return out
->insImm(js_DoubleToECMAInt32(s0
->constvalf()));
437 if (s0
->isop(LIR_fadd
) || s0
->isop(LIR_fsub
) || s0
->isop(LIR_fmul
)) {
438 LInsp lhs
= s0
->oprnd1();
439 LInsp rhs
= s0
->oprnd2();
440 if (isPromote(lhs
) && isPromote(rhs
)) {
441 LOpcode op
= LOpcode(s0
->opcode() & ~LIR64
);
442 return out
->ins2(op
, demote(out
, lhs
), demote(out
, rhs
));
445 if (s0
->isop(LIR_i2f
) || s0
->isop(LIR_u2f
)) {
448 if (s0
->isCall() && s0
->fid() == F_UnboxDouble
) {
449 LIns
* args2
[] = { callArgN(s0
, 0) };
450 return out
->insCall(F_UnboxInt32
, args2
);
452 if (s0
->isCall() && s0
->fid() == F_StringToNumber
) {
453 // callArgN's ordering is that as seen by the builtin, not as stored in args here.
455 LIns
* args2
[] = { callArgN(s0
, 1), callArgN(s0
, 0) };
456 return out
->insCall(F_StringToInt32
, args2
);
460 JS_ASSERT(s0
->isQuad());
461 if (s0
->isop(LIR_i2f
)) {
462 LIns
* args2
[] = { s0
->oprnd1(), args
[1] };
463 return out
->insCall(F_BoxInt32
, args2
);
465 if (s0
->isCall() && s0
->fid() == F_UnboxDouble
)
466 return callArgN(s0
, 0);
469 return out
->insCall(fid
, args
);
473 /* In debug mode vpname contains a textual description of the type of the
474 slot during the forall iteration over al slots. */
476 #define DEF_VPNAME const char* vpname; unsigned vpnum
477 #define SET_VPNAME(name) do { vpname = name; vpnum = 0; } while(0)
478 #define INC_VPNUM() do { ++vpnum; } while(0)
480 #define DEF_VPNAME do {} while (0)
483 #define SET_VPNAME(name) ((void)0)
484 #define INC_VPNUM() ((void)0)
487 /* Iterate over all interned global variables. */
488 #define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code) \
491 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain); \
494 SET_VPNAME("global"); \
495 for (n = 0; n < ngslots; ++n) { \
496 vp = &STOBJ_GET_SLOT(globalObj, gslots[n]); \
502 /* Iterate over all slots in the frame, consisting of args, vars, and stack
503 (except for the top-level frame which does not have args or vars. */
504 #define FORALL_FRAME_SLOTS(fp, depth, code) \
510 SET_VPNAME("callee"); \
511 vp = &fp->argv[-2]; \
513 SET_VPNAME("this"); \
514 vp = &fp->argv[-1]; \
516 SET_VPNAME("argv"); \
517 vp = &fp->argv[0]; vpstop = &fp->argv[fp->fun->nargs]; \
518 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
520 SET_VPNAME("vars"); \
521 vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed]; \
522 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
524 SET_VPNAME("stack"); \
525 vp = StackBase(fp); vpstop = fp->regs->sp; \
526 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
527 if (fsp < fspstop - 1) { \
528 JSStackFrame* fp2 = fsp[1]; \
529 int missing = fp2->fun->nargs - fp2->argc; \
531 SET_VPNAME("missing"); \
533 vpstop = vp + missing; \
534 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
539 /* Iterate over all slots in each pending frame. */
540 #define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code) \
544 JSStackFrame* currentFrame = cx->fp; \
545 JSStackFrame* entryFrame; \
546 JSStackFrame* fp = currentFrame; \
547 for (n = 0; n < callDepth; ++n) { fp = fp->down; } \
549 unsigned frames = callDepth+1; \
550 JSStackFrame** fstack = \
551 (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*)); \
552 JSStackFrame** fspstop = &fstack[frames]; \
553 JSStackFrame** fsp = fspstop-1; \
555 for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; } \
557 for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) { \
559 FORALL_FRAME_SLOTS(fp, depth, code); \
563 #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code) \
565 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code); \
566 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code); \
569 /* Calculate the total number of native frame slots we need from this frame
570 all the way back to the entry frame, including the current stack usage. */
572 js_NativeStackSlots(JSContext
*cx
, unsigned callDepth
)
574 JSStackFrame
* fp
= cx
->fp
;
577 unsigned int origCallDepth
= callDepth
;
580 unsigned operands
= fp
->regs
->sp
- StackBase(fp
);
581 JS_ASSERT(operands
<= unsigned(fp
->script
->nslots
- fp
->script
->nfixed
));
584 slots
+= fp
->script
->nfixed
;
585 if (callDepth
-- == 0) {
587 slots
+= 2/*callee,this*/ + fp
->fun
->nargs
;
590 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, origCallDepth
, m
++);
591 JS_ASSERT(m
== slots
);
595 JSStackFrame
* fp2
= fp
;
597 int missing
= fp2
->fun
->nargs
- fp2
->argc
;
601 JS_NOT_REACHED("js_NativeStackSlots");
604 /* Capture the type map for the selected slots of the global object. */
606 TypeMap::captureGlobalTypes(JSContext
* cx
, SlotList
& slots
)
608 unsigned ngslots
= slots
.length();
609 uint16
* gslots
= slots
.data();
613 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
614 uint8 type
= getCoercedType(*vp
);
615 if ((type
== JSVAL_INT
) && oracle
.isGlobalSlotUndemotable(cx
->fp
->script
, gslots
[n
]))
621 /* Capture the type map for the currently pending stack frames. */
623 TypeMap::captureStackTypes(JSContext
* cx
, unsigned callDepth
)
625 setLength(js_NativeStackSlots(cx
, callDepth
));
628 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
629 uint8 type
= getCoercedType(*vp
);
630 if ((type
== JSVAL_INT
) &&
631 oracle
.isStackSlotUndemotable(cx
->fp
->script
, cx
->fp
->regs
->pc
, unsigned(m
- map
))) {
638 /* Compare this type map to another one and see whether they match. */
640 TypeMap::matches(TypeMap
& other
) const
642 if (length() != other
.length())
644 return !memcmp(data(), other
.data(), length());
647 /* Use the provided storage area to create a new type map that contains the partial type map
648 with the rest of it filled up from the complete type map. */
650 mergeTypeMaps(uint8
** partial
, unsigned* plength
, uint8
* complete
, unsigned clength
, uint8
* mem
)
652 unsigned l
= *plength
;
653 JS_ASSERT(l
< clength
);
654 memcpy(mem
, *partial
, l
* sizeof(uint8
));
655 memcpy(mem
+ l
, complete
+ l
, (clength
- l
) * sizeof(uint8
));
661 js_TrashTree(JSContext
* cx
, Fragment
* f
);
663 TraceRecorder::TraceRecorder(JSContext
* cx
, GuardRecord
* _anchor
, Fragment
* _fragment
,
664 TreeInfo
* ti
, unsigned ngslots
, uint8
* globalTypeMap
, uint8
* stackTypeMap
,
665 GuardRecord
* innermostNestedGuard
)
667 JS_ASSERT(!_fragment
->vmprivate
&& ti
);
670 this->traceMonitor
= &JS_TRACE_MONITOR(cx
);
671 this->globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
672 this->anchor
= _anchor
;
673 this->fragment
= _fragment
;
674 this->lirbuf
= _fragment
->lirbuf
;
676 this->callDepth
= _fragment
->calldepth
;
677 JS_ASSERT(!_anchor
|| _anchor
->calldepth
== _fragment
->calldepth
);
678 this->atoms
= cx
->fp
->script
->atomMap
.vector
;
679 this->trashTree
= false;
680 this->deepAborted
= false;
681 this->whichTreeToTrash
= _fragment
->root
;
683 debug_only_v(printf("recording starting from %s:%u@%u\n", cx
->fp
->script
->filename
,
684 js_PCToLineNumber(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
),
685 cx
->fp
->regs
->pc
- cx
->fp
->script
->code
););
687 lir
= lir_buf_writer
= new (&gc
) LirBufWriter(lirbuf
);
690 lir
= verbose_filter
= new (&gc
) VerboseWriter(&gc
, lir
, lirbuf
->names
);
692 lir
= cse_filter
= new (&gc
) CseFilter(lir
, &gc
);
693 lir
= expr_filter
= new (&gc
) ExprFilter(lir
);
694 lir
= func_filter
= new (&gc
) FuncFilter(lir
, *this);
695 lir
->ins0(LIR_trace
);
697 if (!nanojit::AvmCore::config
.tree_opt
|| fragment
->root
== fragment
) {
698 lirbuf
->state
= addName(lir
->insParam(0), "state");
699 lirbuf
->param1
= addName(lir
->insParam(1), "param1");
701 lirbuf
->sp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, (int)offsetof(InterpState
, sp
)), "sp");
702 lirbuf
->rp
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, rp
)), "rp");
703 cx_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, cx
)), "cx");
704 gp_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, gp
)), "gp");
705 eos_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eos
)), "eos");
706 eor_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
, offsetof(InterpState
, eor
)), "eor");
708 /* read into registers all values on the stack and all globals we know so far */
709 import(treeInfo
, lirbuf
->sp
, ngslots
, callDepth
, globalTypeMap
, stackTypeMap
);
711 /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
712 is what we expect it to be. */
713 if (_anchor
&& _anchor
->exit
->exitType
== NESTED_EXIT
) {
714 LIns
* nested_ins
= addName(lir
->insLoad(LIR_ldp
, lirbuf
->state
,
715 offsetof(InterpState
, nestedExit
)), "nestedExit");
716 guard(true, lir
->ins2(LIR_eq
, nested_ins
, INS_CONSTPTR(innermostNestedGuard
)), NESTED_EXIT
);
720 TraceRecorder::~TraceRecorder()
723 if (fragment
->root
== fragment
&& !fragment
->root
->code()) {
724 JS_ASSERT(!fragment
->root
->vmprivate
);
728 js_TrashTree(cx
, whichTreeToTrash
);
730 delete verbose_filter
;
735 delete lir_buf_writer
;
738 /* Add debug information to a LIR instruction as we emit it. */
740 TraceRecorder::addName(LIns
* ins
, const char* name
)
743 lirbuf
->names
->addName(ins
, name
);
748 /* Determine the current call depth (starting with the entry frame.) */
750 TraceRecorder::getCallDepth() const
755 /* Determine whether we should unroll a loop (only do so at most once for every loop). */
757 TraceRecorder::trackLoopEdges()
759 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
760 if (inlinedLoopEdges
.contains(pc
))
762 inlinedLoopEdges
.add(pc
);
766 /* Determine the offset in the native global frame for a jsval we track */
768 TraceRecorder::nativeGlobalOffset(jsval
* p
) const
770 JS_ASSERT(isGlobal(p
));
771 if (size_t(p
- globalObj
->fslots
) < JS_INITIAL_NSLOTS
)
772 return size_t(p
- globalObj
->fslots
) * sizeof(double);
773 return ((p
- globalObj
->dslots
) + JS_INITIAL_NSLOTS
) * sizeof(double);
776 /* Determine whether a value is a global stack slot */
778 TraceRecorder::isGlobal(jsval
* p
) const
780 return ((size_t(p
- globalObj
->fslots
) < JS_INITIAL_NSLOTS
) ||
781 (size_t(p
- globalObj
->dslots
) < (STOBJ_NSLOTS(globalObj
) - JS_INITIAL_NSLOTS
)));
784 /* Determine the offset in the native stack for a jsval we track */
786 TraceRecorder::nativeStackOffset(jsval
* p
) const
789 size_t slow_offset
= 0;
790 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
791 if (vp
== p
) goto done
;
792 slow_offset
+= sizeof(double)
796 * If it's not in a pending frame, it must be on the stack of the current frame above
797 * sp but below fp->slots + script->nslots.
799 JS_ASSERT(size_t(p
- cx
->fp
->slots
) < cx
->fp
->script
->nslots
);
800 slow_offset
+= size_t(p
- cx
->fp
->regs
->sp
) * sizeof(double);
803 #define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
805 #define RETURN(offset) { return offset; }
808 JSStackFrame
* currentFrame
= cx
->fp
;
809 JSStackFrame
* entryFrame
;
810 JSStackFrame
* fp
= currentFrame
;
811 for (unsigned n
= 0; n
< callDepth
; ++n
) { fp
= fp
->down
; }
813 unsigned frames
= callDepth
+1;
814 JSStackFrame
** fstack
= (JSStackFrame
**)alloca(frames
* sizeof (JSStackFrame
*));
815 JSStackFrame
** fspstop
= &fstack
[frames
];
816 JSStackFrame
** fsp
= fspstop
-1;
818 for (;; fp
= fp
->down
) { *fsp
-- = fp
; if (fp
== entryFrame
) break; }
819 for (fsp
= fstack
; fsp
< fspstop
; ++fsp
) {
823 if (size_t(p
- &fp
->argv
[-2]) < size_t(2/*callee,this*/ + fp
->fun
->nargs
))
824 RETURN(offset
+ size_t(p
- &fp
->argv
[-2]) * sizeof(double));
825 offset
+= (2/*callee,this*/ + fp
->fun
->nargs
) * sizeof(double);
827 if (size_t(p
- &fp
->slots
[0]) < fp
->script
->nfixed
)
828 RETURN(offset
+ size_t(p
- &fp
->slots
[0]) * sizeof(double));
829 offset
+= fp
->script
->nfixed
* sizeof(double);
831 jsval
* spbase
= StackBase(fp
);
832 if (size_t(p
- spbase
) < size_t(fp
->regs
->sp
- spbase
))
833 RETURN(offset
+ size_t(p
- spbase
) * sizeof(double));
834 offset
+= size_t(fp
->regs
->sp
- spbase
) * sizeof(double);
835 if (fsp
< fspstop
- 1) {
836 JSStackFrame
* fp2
= fsp
[1];
837 int missing
= fp2
->fun
->nargs
- fp2
->argc
;
839 if (size_t(p
- fp
->regs
->sp
) < size_t(missing
))
840 RETURN(offset
+ size_t(p
- fp
->regs
->sp
) * sizeof(double));
841 offset
+= size_t(missing
) * sizeof(double);
847 * If it's not in a pending frame, it must be on the stack of the current frame above
848 * sp but below fp->slots + script->nslots.
850 JS_ASSERT(size_t(p
- currentFrame
->slots
) < currentFrame
->script
->nslots
);
851 offset
+= size_t(p
- currentFrame
->regs
->sp
) * sizeof(double);
856 /* Track the maximum number of native frame slots we need during
859 TraceRecorder::trackNativeStackUse(unsigned slots
)
861 if (slots
> treeInfo
->maxNativeStackSlots
)
862 treeInfo
->maxNativeStackSlots
= slots
;
865 /* Unbox a jsval into a slot. Slots are wide enough to hold double values
866 directly (instead of storing a pointer to them). */
868 ValueToNative(JSContext
* cx
, jsval v
, uint8 type
, double* slot
)
870 unsigned tag
= JSVAL_TAG(v
);
875 *(jsint
*)slot
= JSVAL_TO_INT(v
);
876 else if ((tag
== JSVAL_DOUBLE
) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v
), i
))
878 else if (v
== JSVAL_VOID
)
881 debug_only_v(printf("int != tag%lu(value=%lu) ", JSVAL_TAG(v
), v
);)
884 debug_only_v(printf("int<%d> ", *(jsint
*)slot
);)
890 else if (tag
== JSVAL_DOUBLE
)
891 d
= *JSVAL_TO_DOUBLE(v
);
892 else if (v
== JSVAL_VOID
)
895 debug_only_v(printf("double != tag%lu ", JSVAL_TAG(v
));)
898 *(jsdouble
*)slot
= d
;
899 debug_only_v(printf("double<%g> ", d
);)
902 if (tag
!= JSVAL_BOOLEAN
) {
903 debug_only_v(printf("bool != tag%u ", tag
);)
906 *(JSBool
*)slot
= JSVAL_TO_BOOLEAN(v
);
907 debug_only_v(printf("boolean<%d> ", *(bool*)slot
);)
910 if (v
== JSVAL_VOID
) {
911 *(JSString
**)slot
= ATOM_TO_STRING(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_VOID
]);
914 if (tag
!= JSVAL_STRING
) {
915 debug_only_v(printf("string != tag%u ", tag
);)
918 *(JSString
**)slot
= JSVAL_TO_STRING(v
);
919 debug_only_v(printf("string<%p> ", *(JSString
**)slot
);)
922 JS_ASSERT(type
== JSVAL_OBJECT
);
923 if (v
== JSVAL_VOID
) {
924 *(JSObject
**)slot
= NULL
;
927 if (tag
!= JSVAL_OBJECT
) {
928 debug_only_v(printf("object != tag%u ", tag
);)
931 *(JSObject
**)slot
= JSVAL_TO_OBJECT(v
);
932 debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v
),
935 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v
))->name
);)
940 /* Box a value from the native stack back into the jsval format. Integers
941 that are too large to fit into a jsval are automatically boxed into
942 heap-allocated doubles. */
944 NativeToValue(JSContext
* cx
, jsval
& v
, uint8 type
, double* slot
)
950 v
= BOOLEAN_TO_JSVAL(*(bool*)slot
);
951 debug_only_v(printf("boolean<%d> ", *(bool*)slot
);)
955 debug_only_v(printf("int<%d> ", i
);)
957 if (INT_FITS_IN_JSVAL(i
)) {
965 debug_only_v(printf("double<%g> ", d
);)
966 if (JSDOUBLE_IS_INT(d
, i
))
969 /* Its safe to trigger the GC here since we rooted all strings/objects and all the
970 doubles we already processed. */
971 return js_NewDoubleInRootedValue(cx
, d
, &v
) ? true : false;
973 v
= STRING_TO_JSVAL(*(JSString
**)slot
);
974 debug_only_v(printf("string<%p> ", *(JSString
**)slot
);)
977 JS_ASSERT(type
== JSVAL_OBJECT
);
978 v
= OBJECT_TO_JSVAL(*(JSObject
**)slot
);
979 debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v
),
982 : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v
))->name
);)
988 /* Attempt to unbox the given list of interned globals onto the native global frame, checking
989 along the way that the supplied type-mao holds. */
991 BuildNativeGlobalFrame(JSContext
* cx
, unsigned ngslots
, uint16
* gslots
, uint8
* mp
, double* np
)
993 debug_only_v(printf("global: ");)
994 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
995 if (!ValueToNative(cx
, *vp
, *mp
, np
+ gslots
[n
]))
999 debug_only_v(printf("\n");)
1003 /* Attempt to unbox the given JS frame onto a native frame, checking along the way that the
1004 supplied type-map holds. */
1006 BuildNativeStackFrame(JSContext
* cx
, unsigned callDepth
, uint8
* mp
, double* np
)
1008 debug_only_v(printf("stack: ");)
1009 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
1010 debug_only_v(printf("%s%u=", vpname
, vpnum
);)
1011 if (!ValueToNative(cx
, *vp
, *mp
, np
))
1015 debug_only_v(printf("\n");)
1019 /* Box the given native frame into a JS frame. This only fails due to a hard error
1020 (out of memory for example). */
1022 FlushNativeGlobalFrame(JSContext
* cx
, unsigned ngslots
, uint16
* gslots
, uint8
* mp
, double* np
)
1024 uint8
* mp_base
= mp
;
1025 /* Root all string and object references first (we don't need to call the GC for this). */
1026 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
1027 if ((*mp
== JSVAL_STRING
|| *mp
== JSVAL_OBJECT
) &&
1028 !NativeToValue(cx
, *vp
, *mp
, np
+ gslots
[n
])) {
1033 /* Now do this again but this time for all values (properly quicker than actually checking
1034 the type and excluding strings and objects). The GC might kick in when we store doubles,
1035 but everything is rooted now (all strings/objects and all doubles we already boxed). */
1037 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
1038 if (!NativeToValue(cx
, *vp
, *mp
, np
+ gslots
[n
]))
1042 debug_only_v(printf("\n");)
1043 return mp
- mp_base
;
1046 /* Box the given native stack frame into the virtual machine stack. This only fails due to a
1047 hard error (out of memory for example). */
1049 FlushNativeStackFrame(JSContext
* cx
, unsigned callDepth
, uint8
* mp
, double* np
, jsval
* stopAt
)
1051 uint8
* mp_base
= mp
;
1052 double* np_base
= np
;
1053 /* Root all string and object references first (we don't need to call the GC for this). */
1054 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
1055 if (vp
== stopAt
) goto skip1
;
1056 if ((*mp
== JSVAL_STRING
|| *mp
== JSVAL_OBJECT
) && !NativeToValue(cx
, *vp
, *mp
, np
))
1061 // Restore thisp from the now-restored argv[-1] in each pending frame.
1062 unsigned n
= callDepth
;
1063 for (JSStackFrame
* fp
= cx
->fp
; n
-- != 0; fp
= fp
->down
)
1064 fp
->thisp
= JSVAL_TO_OBJECT(fp
->argv
[-1]);
1066 /* Now do this again but this time for all values (properly quicker than actually checking
1067 the type and excluding strings and objects). The GC might kick in when we store doubles,
1068 but everything is rooted now (all strings/objects and all doubles we already boxed). */
1071 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
1072 if (vp
== stopAt
) goto skip2
;
1073 debug_only_v(printf("%s%u=", vpname
, vpnum
);)
1074 if (!NativeToValue(cx
, *vp
, *mp
, np
))
1079 debug_only_v(printf("\n");)
1080 return mp
- mp_base
;
1083 /* Emit load instructions onto the trace that read the initial stack state. */
1085 TraceRecorder::import(LIns
* base
, ptrdiff_t offset
, jsval
* p
, uint8
& t
,
1086 const char *prefix
, uintN index
, JSStackFrame
*fp
)
1089 if (t
== JSVAL_INT
) { /* demoted */
1090 JS_ASSERT(isInt32(*p
));
1091 /* Ok, we have a valid demotion attempt pending, so insert an integer
1092 read and promote it to double since all arithmetic operations expect
1093 to see doubles on entry. The first op to use this slot will emit a
1094 f2i cast which will cancel out the i2f we insert here. */
1095 ins
= lir
->insLoadi(base
, offset
);
1096 ins
= lir
->ins1(LIR_i2f
, ins
);
1098 JS_ASSERT(isNumber(*p
) == (t
== JSVAL_DOUBLE
));
1099 if (t
== JSVAL_DOUBLE
) {
1100 ins
= lir
->insLoad(LIR_ldq
, base
, offset
);
1102 ins
= lir
->insLoad(LIR_ldp
, base
, offset
);
1105 tracker
.set(p
, ins
);
1108 JS_ASSERT(strlen(prefix
) < 10);
1110 jsuword
* localNames
= NULL
;
1111 const char* funName
= NULL
;
1112 if (*prefix
== 'a' || *prefix
== 'v') {
1113 mark
= JS_ARENA_MARK(&cx
->tempPool
);
1114 if (JS_GET_LOCAL_NAME_COUNT(fp
->fun
) != 0)
1115 localNames
= js_GetLocalNameArray(cx
, fp
->fun
, &cx
->tempPool
);
1116 funName
= fp
->fun
->atom
? js_AtomToPrintableString(cx
, fp
->fun
->atom
) : "<anonymous>";
1118 if (!strcmp(prefix
, "argv")) {
1119 if (index
< fp
->fun
->nargs
) {
1120 JSAtom
*atom
= JS_LOCAL_NAME_TO_ATOM(localNames
[index
]);
1121 JS_snprintf(name
, sizeof name
, "$%s.%s", funName
, js_AtomToPrintableString(cx
, atom
));
1123 JS_snprintf(name
, sizeof name
, "$%s.<arg%d>", funName
, index
);
1125 } else if (!strcmp(prefix
, "vars")) {
1126 JSAtom
*atom
= JS_LOCAL_NAME_TO_ATOM(localNames
[fp
->fun
->nargs
+ index
]);
1127 JS_snprintf(name
, sizeof name
, "$%s.%s", funName
, js_AtomToPrintableString(cx
, atom
));
1129 JS_snprintf(name
, sizeof name
, "$%s%d", prefix
, index
);
1133 JS_ARENA_RELEASE(&cx
->tempPool
, mark
);
1136 static const char* typestr
[] = {
1137 "object", "int", "double", "3", "string", "5", "boolean", "any"
1139 debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n", p
, name
, typestr
[t
& 7], t
>> 3););
1144 TraceRecorder::import(TreeInfo
* treeInfo
, LIns
* sp
, unsigned ngslots
, unsigned callDepth
,
1145 uint8
* globalTypeMap
, uint8
* stackTypeMap
)
1147 /* If we get a partial list that doesn't have all the types (i.e. recording from a side
1148 exit that was recorded but we added more global slots later), merge the missing types
1149 from the entry type map. This is safe because at the loop edge we verify that we
1150 have compatible types for all globals (entry type and loop edge type match). While
1151 a different trace of the tree might have had a guard with a different type map for
1152 these slots we just filled in here (the guard we continue from didn't know about them),
1153 since we didn't take that particular guard the only way we could have ended up here
1154 is if that other trace had at its end a compatible type distribution with the entry
1155 map. Since thats exactly what we used to fill in the types our current side exit
1156 didn't provide, this is always safe to do. */
1158 if (ngslots
< (length
= traceMonitor
->globalTypeMap
->length()))
1159 mergeTypeMaps(&globalTypeMap
, &ngslots
,
1160 traceMonitor
->globalTypeMap
->data(), length
,
1161 (uint8
*)alloca(sizeof(uint8
) * length
));
1162 JS_ASSERT(ngslots
== traceMonitor
->globalTypeMap
->length());
1164 /* the first time we compile a tree this will be empty as we add entries lazily */
1165 uint16
* gslots
= traceMonitor
->globalSlots
->data();
1166 uint8
* m
= globalTypeMap
;
1167 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
1168 import(gp_ins
, nativeGlobalOffset(vp
), vp
, *m
, vpname
, vpnum
, NULL
);
1171 ptrdiff_t offset
= -treeInfo
->nativeStackBase
;
1173 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
1174 import(sp
, offset
, vp
, *m
, vpname
, vpnum
, fp
);
1175 m
++; offset
+= sizeof(double);
1179 /* Lazily import a global slot if we don't already have it in the tracker. */
1181 TraceRecorder::lazilyImportGlobalSlot(unsigned slot
)
1183 if (slot
!= uint16(slot
)) /* we use a table of 16-bit ints, bail out if that's not enough */
1185 jsval
* vp
= &STOBJ_GET_SLOT(globalObj
, slot
);
1186 if (tracker
.has(vp
))
1187 return true; /* we already have it */
1188 unsigned index
= traceMonitor
->globalSlots
->length();
1189 /* If this the first global we are adding, remember the shape of the global object. */
1191 traceMonitor
->globalShape
= OBJ_SCOPE(JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
))->shape
;
1192 /* Add the slot to the list of interned global slots. */
1193 traceMonitor
->globalSlots
->add(slot
);
1194 uint8 type
= getCoercedType(*vp
);
1195 if ((type
== JSVAL_INT
) && oracle
.isGlobalSlotUndemotable(cx
->fp
->script
, slot
))
1196 type
= JSVAL_DOUBLE
;
1197 traceMonitor
->globalTypeMap
->add(type
);
1198 import(gp_ins
, slot
*sizeof(double), vp
, type
, "global", index
, NULL
);
1202 /* Write back a value onto the stack or global frames. */
1204 TraceRecorder::writeBack(LIns
* i
, LIns
* base
, ptrdiff_t offset
)
1206 /* Sink all type casts targeting the stack into the side exit by simply storing the original
1207 (uncasted) value. Each guard generates the side exit map based on the types of the
1208 last stores to every stack location, so its safe to not perform them on-trace. */
1209 if (isPromoteInt(i
))
1210 i
= ::demote(lir
, i
);
1211 return lir
->insStorei(i
, base
, offset
);
1214 /* Update the tracker, then issue a write back store. */
1216 TraceRecorder::set(jsval
* p
, LIns
* i
, bool initializing
)
1218 JS_ASSERT(initializing
|| tracker
.has(p
));
1220 /* If we are writing to this location for the first time, calculate the offset into the
1221 native frame manually, otherwise just look up the last load or store associated with
1222 the same source address (p) and use the same offset/base. */
1224 if ((x
= nativeFrameTracker
.get(p
)) == NULL
) {
1226 x
= writeBack(i
, gp_ins
, nativeGlobalOffset(p
));
1228 x
= writeBack(i
, lirbuf
->sp
, -treeInfo
->nativeStackBase
+ nativeStackOffset(p
));
1229 nativeFrameTracker
.set(p
, x
);
1231 #define ASSERT_VALID_CACHE_HIT(base, offset) \
1232 JS_ASSERT(base == lirbuf->sp || base == gp_ins); \
1233 JS_ASSERT(offset == ((base == lirbuf->sp) \
1234 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
1235 : nativeGlobalOffset(p))); \
1237 if (x->isop(LIR_st) || x->isop(LIR_stq)) {
1238 ASSERT_VALID_CACHE_HIT(x
->oprnd2(), x
->oprnd3()->constval());
1239 writeBack(i
, x
->oprnd2(), x
->oprnd3()->constval());
1241 JS_ASSERT(x
->isop(LIR_sti
) || x
->isop(LIR_stqi
));
1242 ASSERT_VALID_CACHE_HIT(x
->oprnd2(), x
->immdisp());
1243 writeBack(i
, x
->oprnd2(), x
->immdisp());
1246 #undef ASSERT_VALID_CACHE_HIT
1250 TraceRecorder::get(jsval
* p
)
1252 return tracker
.get(p
);
1255 /* Determine whether a bytecode location (pc) terminates a loop or is a path within the loop. */
1257 js_IsLoopExit(JSContext
* cx
, JSScript
* script
, jsbytecode
* header
, jsbytecode
* pc
)
1266 JS_ASSERT(js_CodeSpec
[*pc
].length
== 1);
1275 * Forward jumps are usually intra-branch, but for-in loops jump to the trailing enditer to
1276 * clean up, so check for that case here.
1278 if (pc
[GET_JUMP_OFFSET(pc
)] == JSOP_ENDITER
)
1280 return pc
+ GET_JUMP_OFFSET(pc
) == header
;
1288 JSObject
* callee
; // callee function object
1289 jsbytecode
* callpc
; // pc of JSOP_CALL in caller script
1292 uint16 spdist
; // distance from fp->slots to fp->regs->sp at JSOP_CALL
1293 uint16 argc
; // actual argument count, may be < fun->nargs
1295 uint32 word
; // for spdist/argc LIR store in record_JSOP_CALL
1299 /* Promote slots if necessary to match the called tree' type map and report error if thats
1302 TraceRecorder::adjustCallerTypes(Fragment
* f
)
1304 JSTraceMonitor
* tm
= traceMonitor
;
1305 uint8
* m
= tm
->globalTypeMap
->data();
1306 uint16
* gslots
= traceMonitor
->globalSlots
->data();
1307 unsigned ngslots
= traceMonitor
->globalSlots
->length();
1308 JSScript
* script
= ((TreeInfo
*)f
->vmprivate
)->script
;
1309 uint8
* map
= ((TreeInfo
*)f
->vmprivate
)->stackTypeMap
.data();
1311 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
1313 bool isPromote
= isPromoteInt(i
);
1314 if (isPromote
&& *m
== JSVAL_DOUBLE
)
1315 lir
->insStorei(get(vp
), gp_ins
, nativeGlobalOffset(vp
));
1316 else if (!isPromote
&& *m
== JSVAL_INT
) {
1317 oracle
.markGlobalSlotUndemotable(script
, nativeGlobalOffset(vp
)/sizeof(double));
1323 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, 0,
1325 bool isPromote
= isPromoteInt(i
);
1326 if (isPromote
&& *m
== JSVAL_DOUBLE
)
1327 lir
->insStorei(get(vp
), lirbuf
->sp
,
1328 -treeInfo
->nativeStackBase
+ nativeStackOffset(vp
));
1329 else if (!isPromote
&& *m
== JSVAL_INT
) {
1330 oracle
.markStackSlotUndemotable(script
, (jsbytecode
*)f
->ip
, unsigned(m
- map
));
1335 JS_ASSERT(f
== f
->root
);
1338 whichTreeToTrash
= f
;
1343 /* Find a peer fragment that we can call, considering our current type distribution. */
1344 bool TraceRecorder::selectCallablePeerFragment(Fragment
** first
)
1346 /* Until we have multiple trees per start point this is always the first fragment. */
1347 return (*first
)->code();
1351 TraceRecorder::snapshot(ExitType exitType
)
1353 JSStackFrame
* fp
= cx
->fp
;
1354 if (exitType
== BRANCH_EXIT
&&
1355 js_IsLoopExit(cx
, fp
->script
, (jsbytecode
*)fragment
->root
->ip
, fp
->regs
->pc
))
1356 exitType
= LOOP_EXIT
;
1357 /* Generate the entry map and stash it in the trace. */
1358 unsigned stackSlots
= js_NativeStackSlots(cx
, callDepth
);
1359 /* It's sufficient to track the native stack use here since all stores above the
1360 stack watermark defined by guards are killed. */
1361 trackNativeStackUse(stackSlots
+ 1);
1362 /* reserve space for the type map */
1363 unsigned ngslots
= traceMonitor
->globalSlots
->length();
1364 LIns
* data
= lir_buf_writer
->skip((stackSlots
+ ngslots
) * sizeof(uint8
));
1365 /* setup side exit structure */
1366 memset(&exit
, 0, sizeof(exit
));
1367 exit
.from
= fragment
;
1368 exit
.calldepth
= callDepth
;
1369 exit
.numGlobalSlots
= ngslots
;
1370 exit
.numStackSlots
= stackSlots
;
1371 exit
.numStackSlotsBelowCurrentFrame
= cx
->fp
->callee
1372 ? nativeStackOffset(&cx
->fp
->argv
[-2])/sizeof(double)
1374 exit
.exitType
= exitType
;
1375 /* If we take a snapshot on a goto, advance to the target address. This avoids inner
1376 trees returning on a break goto, which the outer recorder then would confuse with
1377 a break in the outer tree. */
1378 jsbytecode
* pc
= fp
->regs
->pc
;
1379 JS_ASSERT(!(((*pc
== JSOP_GOTO
) || (*pc
== JSOP_GOTOX
)) && (exitType
!= LOOP_EXIT
)));
1380 if (*pc
== JSOP_GOTO
)
1381 pc
+= GET_JUMP_OFFSET(pc
);
1382 else if (*pc
== JSOP_GOTOX
)
1383 pc
+= GET_JUMPX_OFFSET(pc
);
1384 exit
.ip_adj
= pc
- (jsbytecode
*)fragment
->root
->ip
;
1385 exit
.sp_adj
= (stackSlots
* sizeof(double)) - treeInfo
->nativeStackBase
;
1386 exit
.rp_adj
= exit
.calldepth
* sizeof(FrameInfo
);
1387 uint8
* m
= exit
.typeMap
= (uint8
*)data
->payload();
1388 /* Determine the type of a store by looking at the current type of the actual value the
1389 interpreter is using. For numbers we have to check what kind of store we used last
1390 (integer or double) to figure out what the side exit show reflect in its typemap. */
1391 FORALL_SLOTS(cx
, ngslots
, traceMonitor
->globalSlots
->data(), callDepth
,
1394 ? (isPromoteInt(i
) ? JSVAL_INT
: JSVAL_DOUBLE
)
1396 JS_ASSERT((*m
!= JSVAL_INT
) || isInt32(*vp
));
1399 JS_ASSERT(unsigned(m
- exit
.typeMap
) == ngslots
+ stackSlots
);
1403 /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected). */
1405 TraceRecorder::guard(bool expected
, LIns
* cond
, ExitType exitType
)
1407 return lir
->insGuard(expected
? LIR_xf
: LIR_xt
,
1409 snapshot(exitType
));
1412 /* Try to match the type of a slot to type t. checkType is used to verify that the type of
1413 values flowing into the loop edge is compatible with the type we expect in the loop header. */
1415 TraceRecorder::checkType(jsval
& v
, uint8 t
, bool& unstable
)
1417 if (t
== JSVAL_INT
) { /* initially all whole numbers cause the slot to be demoted */
1419 return false; /* not a number? type mismatch */
1421 if (!i
->isop(LIR_i2f
)) {
1422 debug_only_v(printf("int slot is !isInt32, slot #%d, triggering re-compilation\n",
1424 ? nativeStackOffset(&v
)
1425 : nativeGlobalOffset(&v
)););
1426 AUDIT(slotPromoted
);
1428 return true; /* keep checking types, but request re-compilation */
1430 /* Looks good, slot is an int32, the last instruction should be i2f. */
1431 JS_ASSERT(isInt32(v
) && i
->isop(LIR_i2f
));
1432 /* We got the final LIR_i2f as we expected. Overwrite the value in that
1433 slot with the argument of i2f since we want the integer store to flow along
1434 the loop edge, not the casted value. */
1435 set(&v
, i
->oprnd1());
1438 if (t
== JSVAL_DOUBLE
) {
1440 return false; /* not a number? type mismatch */
1442 /* We sink i2f conversions into the side exit, but at the loop edge we have to make
1443 sure we promote back to double if at loop entry we want a double. */
1444 if (isPromoteInt(i
))
1445 set(&v
, lir
->ins1(LIR_i2f
, i
));
1448 /* for non-number types we expect a precise match of the type */
1450 if (JSVAL_TAG(v
) != t
) {
1451 debug_only_v(printf("Type mismatch: val %c, map %c ", "OID?S?B"[JSVAL_TAG(v
)],
1455 return JSVAL_TAG(v
) == t
;
1458 /* Make sure that the current values in the given stack frame and all stack frames
1459 up and including entryFrame are type-compatible with the entry map. */
1461 TraceRecorder::verifyTypeStability()
1463 unsigned ngslots
= traceMonitor
->globalSlots
->length();
1464 uint16
* gslots
= traceMonitor
->globalSlots
->data();
1465 uint8
* typemap
= traceMonitor
->globalTypeMap
->data();
1466 JS_ASSERT(traceMonitor
->globalTypeMap
->length() == ngslots
);
1467 bool recompile
= false;
1469 FORALL_GLOBAL_SLOTS(cx
, ngslots
, gslots
,
1470 bool demote
= false;
1471 if (!checkType(*vp
, *m
, demote
))
1474 oracle
.markGlobalSlotUndemotable(cx
->fp
->script
, gslots
[n
]);
1479 typemap
= treeInfo
->stackTypeMap
.data();
1481 FORALL_SLOTS_IN_PENDING_FRAMES(cx
, callDepth
,
1482 bool demote
= false;
1483 if (!checkType(*vp
, *m
, demote
))
1486 oracle
.markStackSlotUndemotable(cx
->fp
->script
, (jsbytecode
*)fragment
->ip
,
1487 unsigned(m
- typemap
));
1497 /* Check whether the current pc location is the loop header of the loop this recorder records. */
1499 TraceRecorder::isLoopHeader(JSContext
* cx
) const
1501 return cx
->fp
->regs
->pc
== fragment
->root
->ip
;
1504 /* Compile the current fragment. */
1506 TraceRecorder::compile(Fragmento
* fragmento
)
1508 if (treeInfo
->maxNativeStackSlots
>= MAX_NATIVE_STACK_SLOTS
) {
1509 debug_only_v(printf("Trace rejected: excessive stack use.\n"));
1510 fragment
->blacklist();
1513 ::compile(fragmento
->assm(), fragment
);
1515 fragment
->addLink(anchor
);
1516 fragmento
->assm()->patch(anchor
);
1518 JS_ASSERT(fragment
->code());
1519 JS_ASSERT(!fragment
->vmprivate
);
1520 if (fragment
== fragment
->root
)
1521 fragment
->vmprivate
= treeInfo
;
1522 /* :TODO: windows support */
1523 #if defined DEBUG && !defined WIN32
1524 char* label
= (char*)malloc(strlen(cx
->fp
->script
->filename
) + 64);
1525 sprintf(label
, "%s:%u", cx
->fp
->script
->filename
,
1526 js_PCToLineNumber(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
));
1527 fragmento
->labels
->add(fragment
, sizeof(Fragment
), 0, label
);
1530 AUDIT(traceCompleted
);
1533 /* Complete and compile a trace and link it to the existing tree if appropriate. */
1535 TraceRecorder::closeLoop(Fragmento
* fragmento
)
1537 if (!verifyTypeStability()) {
1538 AUDIT(unstableLoopVariable
);
1539 debug_only_v(printf("Trace rejected: unstable loop variables.\n");)
1542 SideExit
*exit
= snapshot(LOOP_EXIT
);
1543 exit
->target
= fragment
->root
;
1544 if (fragment
== fragment
->root
) {
1545 fragment
->lastIns
= lir
->insGuard(LIR_loop
, lir
->insImm(1), exit
);
1547 fragment
->lastIns
= lir
->insGuard(LIR_x
, lir
->insImm(1), exit
);
1552 /* Emit an always-exit guard and compile the tree (used for break statements. */
1554 TraceRecorder::endLoop(Fragmento
* fragmento
)
1556 SideExit
*exit
= snapshot(LOOP_EXIT
);
1557 fragment
->lastIns
= lir
->insGuard(LIR_x
, lir
->insImm(1), exit
);
1561 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
1563 TraceRecorder::prepareTreeCall(Fragment
* inner
)
1565 TreeInfo
* ti
= (TreeInfo
*)inner
->vmprivate
;
1566 inner_sp_ins
= lirbuf
->sp
;
1567 /* The inner tree expects to be called from the current frame. If the outer tree (this
1568 trace) is currently inside a function inlining code (calldepth > 0), we have to advance
1569 the native stack pointer such that we match what the inner trace expects to see. We
1570 move it back when we come out of the inner tree call. */
1571 if (callDepth
> 0) {
1572 /* Calculate the amount we have to lift the native stack pointer by to compensate for
1573 any outer frames that the inner tree doesn't expect but the outer tree has. */
1574 ptrdiff_t sp_adj
= nativeStackOffset(&cx
->fp
->argv
[-2]);
1575 /* Calculate the amount we have to lift the call stack by */
1576 ptrdiff_t rp_adj
= callDepth
* sizeof(FrameInfo
);
1577 /* Guard that we have enough stack space for the tree we are trying to call on top
1578 of the new value for sp. */
1579 debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
1580 sp_adj
, treeInfo
->nativeStackBase
, ti
->nativeStackBase
));
1581 LIns
* sp_top
= lir
->ins2i(LIR_piadd
, lirbuf
->sp
,
1582 - treeInfo
->nativeStackBase
/* rebase sp to beginning of outer tree's stack */
1583 + sp_adj
/* adjust for stack in outer frame inner tree can't see */
1584 + ti
->maxNativeStackSlots
* sizeof(double)); /* plus the inner tree's stack */
1585 guard(true, lir
->ins2(LIR_lt
, sp_top
, eos_ins
), OOM_EXIT
);
1586 /* Guard that we have enough call stack space. */
1587 LIns
* rp_top
= lir
->ins2i(LIR_piadd
, lirbuf
->rp
, rp_adj
+
1588 ti
->maxCallDepth
* sizeof(FrameInfo
));
1589 guard(true, lir
->ins2(LIR_lt
, rp_top
, eor_ins
), OOM_EXIT
);
1590 /* We have enough space, so adjust sp and rp to their new level. */
1591 lir
->insStorei(inner_sp_ins
= lir
->ins2i(LIR_piadd
, lirbuf
->sp
,
1592 - treeInfo
->nativeStackBase
/* rebase sp to beginning of outer tree's stack */
1593 + sp_adj
/* adjust for stack in outer frame inner tree can't see */
1594 + ti
->nativeStackBase
), /* plus the inner tree's stack base */
1595 lirbuf
->state
, offsetof(InterpState
, sp
));
1596 lir
->insStorei(lir
->ins2i(LIR_piadd
, lirbuf
->rp
, rp_adj
),
1597 lirbuf
->state
, offsetof(InterpState
, rp
));
1601 /* Record a call to an inner tree. */
1603 TraceRecorder::emitTreeCall(Fragment
* inner
, GuardRecord
* lr
)
1605 JS_ASSERT(lr
->exit
->exitType
== LOOP_EXIT
&& !lr
->calldepth
);
1606 TreeInfo
* ti
= (TreeInfo
*)inner
->vmprivate
;
1607 /* Invoke the inner tree. */
1608 LIns
* args
[] = { INS_CONSTPTR(inner
), lirbuf
->state
}; /* reverse order */
1609 LIns
* ret
= lir
->insCall(F_CallTree
, args
);
1610 /* Read back all registers, in case the called tree changed any of them. */
1611 SideExit
* exit
= lr
->exit
;
1612 import(ti
, inner_sp_ins
, exit
->numGlobalSlots
, exit
->calldepth
,
1613 exit
->typeMap
, exit
->typeMap
+ exit
->numGlobalSlots
);
1614 /* Store the guard pointer in case we exit on an unexpected guard */
1615 lir
->insStorei(ret
, lirbuf
->state
, offsetof(InterpState
, nestedExit
));
1616 /* Restore sp and rp to their original values (we still have them in a register). */
1617 if (callDepth
> 0) {
1618 lir
->insStorei(lirbuf
->sp
, lirbuf
->state
, offsetof(InterpState
, sp
));
1619 lir
->insStorei(lirbuf
->rp
, lirbuf
->state
, offsetof(InterpState
, rp
));
1621 /* Guard that we come out of the inner tree along the same side exit we came out when
1622 we called the inner tree at recording time. */
1623 guard(true, lir
->ins2(LIR_eq
, ret
, INS_CONSTPTR(lr
)), NESTED_EXIT
);
1624 /* Register us as a dependent tree of the inner tree. */
1625 ((TreeInfo
*)inner
->vmprivate
)->dependentTrees
.addUnique(fragment
->root
);
1628 /* Add a if/if-else control-flow merge point to the list of known merge points. */
1630 TraceRecorder::trackCfgMerges(jsbytecode
* pc
)
1632 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
1633 JS_ASSERT((*pc
== JSOP_IFEQ
) || (*pc
== JSOP_IFEQX
));
1634 jssrcnote
* sn
= js_GetSrcNote(cx
->fp
->script
, pc
);
1636 if (SN_TYPE(sn
) == SRC_IF
) {
1637 cfgMerges
.add((*pc
== JSOP_IFEQ
)
1638 ? pc
+ GET_JUMP_OFFSET(pc
)
1639 : pc
+ GET_JUMPX_OFFSET(pc
));
1640 } else if (SN_TYPE(sn
) == SRC_IF_ELSE
)
1641 cfgMerges
.add(pc
+ js_GetSrcNoteOffset(sn
, 0));
1645 /* Emit code for a fused IFEQ/IFNE. */
1647 TraceRecorder::fuseIf(jsbytecode
* pc
, bool cond
, LIns
* x
)
1649 if (*pc
== JSOP_IFEQ
) {
1650 guard(cond
, x
, BRANCH_EXIT
);
1652 } else if (*pc
== JSOP_IFNE
) {
1653 guard(cond
, x
, BRANCH_EXIT
);
1658 nanojit::StackFilter::getTop(LInsp guard
)
1660 if (sp
== frag
->lirbuf
->sp
)
1661 return guard
->exit()->sp_adj
+ sizeof(double);
1662 JS_ASSERT(sp
== frag
->lirbuf
->rp
);
1663 return guard
->exit()->rp_adj
+ sizeof(FrameInfo
);
1666 #if defined NJ_VERBOSE
1668 nanojit::LirNameMap::formatGuard(LIns
*i
, char *out
)
1673 x
= (SideExit
*)i
->exit();
1674 ip
= intptr_t(x
->from
->ip
) + x
->ip_adj
;
1676 "%s: %s %s -> %s sp%+ld rp%+ld",
1678 lirNames
[i
->opcode()],
1679 i
->oprnd1()->isCond() ? formatRef(i
->oprnd1()) : "",
1680 labels
->format((void *)ip
),
1681 (long int)x
->sp_adj
,
1688 nanojit::Assembler::initGuardRecord(LIns
*guard
, GuardRecord
*rec
)
1692 exit
= guard
->exit();
1694 rec
->calldepth
= exit
->calldepth
;
1696 verbose_only(rec
->sid
= exit
->sid
);
1700 nanojit::Assembler::asm_bailout(LIns
*guard
, Register state
)
1702 /* we adjust ip/sp/rp when exiting from the tree in the recovery code */
1706 nanojit::Fragment::onDestroy()
1712 delete (TreeInfo
*)vmprivate
;
1716 js_DeleteRecorder(JSContext
* cx
)
1718 /* Aborting and completing a trace end up here. */
1719 JS_ASSERT(cx
->executingTrace
);
1720 cx
->executingTrace
= false;
1722 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
1723 delete tm
->recorder
;
1724 tm
->recorder
= NULL
;
1728 js_StartRecorder(JSContext
* cx
, GuardRecord
* anchor
, Fragment
* f
, TreeInfo
* ti
,
1729 unsigned ngslots
, uint8
* globalTypeMap
, uint8
* stackTypeMap
,
1730 GuardRecord
* expectedInnerExit
)
1733 * Emulate on-trace semantics and avoid rooting headaches while recording,
1734 * by suppressing last-ditch GC attempts while recording a trace. This does
1735 * means that trace recording must not nest or the following assertion will
1738 JS_ASSERT(!cx
->executingTrace
);
1739 cx
->executingTrace
= true;
1741 /* start recording if no exception during construction */
1742 JS_TRACE_MONITOR(cx
).recorder
= new (&gc
) TraceRecorder(cx
, anchor
, f
, ti
,
1743 ngslots
, globalTypeMap
, stackTypeMap
,
1746 js_AbortRecording(cx
, NULL
, "setting up recorder failed");
1749 /* clear any leftover error state */
1750 JS_TRACE_MONITOR(cx
).fragmento
->assm()->setError(None
);
1755 js_TrashTree(JSContext
* cx
, Fragment
* f
)
1757 JS_ASSERT((!f
->code()) == (!f
->vmprivate
));
1758 JS_ASSERT(f
== f
->root
);
1761 AUDIT(treesTrashed
);
1762 debug_only_v(printf("Trashing tree info.\n");)
1763 Fragmento
* fragmento
= JS_TRACE_MONITOR(cx
).fragmento
;
1764 TreeInfo
* ti
= (TreeInfo
*)f
->vmprivate
;
1765 f
->vmprivate
= NULL
;
1766 f
->releaseCode(fragmento
);
1767 Fragment
** data
= ti
->dependentTrees
.data();
1768 unsigned length
= ti
->dependentTrees
.length();
1769 for (unsigned n
= 0; n
< length
; ++n
)
1770 js_TrashTree(cx
, data
[n
]);
1772 JS_ASSERT(!f
->code() && !f
->vmprivate
);
1776 js_SynthesizeFrame(JSContext
* cx
, const FrameInfo
& fi
)
1778 JS_ASSERT(HAS_FUNCTION_CLASS(fi
.callee
));
1780 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, fi
.callee
);
1781 JS_ASSERT(FUN_INTERPRETED(fun
));
1783 JSArena
* a
= cx
->stackPool
.current
;
1784 void* newmark
= (void*) a
->avail
;
1785 JSScript
* script
= fun
->u
.i
.script
;
1787 // Assert that we have a correct sp distance from cx->fp->slots in fi.
1788 JS_ASSERT(js_ReconstructStackDepth(cx
, cx
->fp
->script
, fi
.callpc
) ==
1789 uintN(fi
.s
.spdist
- cx
->fp
->script
->nfixed
));
1791 uintN nframeslots
= JS_HOWMANY(sizeof(JSInlineFrame
), sizeof(jsval
));
1792 size_t nbytes
= (nframeslots
+ script
->nslots
) * sizeof(jsval
);
1794 /* Allocate the inline frame with its vars and operands. */
1796 if (a
->avail
+ nbytes
<= a
->limit
) {
1797 newsp
= (jsval
*) a
->avail
;
1800 JS_ARENA_ALLOCATE_CAST(newsp
, jsval
*, &cx
->stackPool
, nbytes
);
1802 js_ReportOutOfScriptQuota(cx
);
1807 /* Claim space for the stack frame and initialize it. */
1808 JSInlineFrame
* newifp
= (JSInlineFrame
*) newsp
;
1809 newsp
+= nframeslots
;
1811 newifp
->frame
.callobj
= NULL
;
1812 newifp
->frame
.argsobj
= NULL
;
1813 newifp
->frame
.varobj
= NULL
;
1814 newifp
->frame
.script
= script
;
1815 newifp
->frame
.callee
= fi
.callee
;
1816 newifp
->frame
.fun
= fun
;
1818 newifp
->frame
.argc
= fi
.s
.argc
;
1819 newifp
->callerRegs
.pc
= fi
.callpc
;
1820 newifp
->callerRegs
.sp
= cx
->fp
->slots
+ fi
.s
.spdist
;
1821 newifp
->frame
.argv
= newifp
->callerRegs
.sp
- JS_MAX(fun
->nargs
, fi
.s
.argc
);
1822 JS_ASSERT(newifp
->frame
.argv
>= StackBase(cx
->fp
));
1824 newifp
->frame
.rval
= JSVAL_VOID
;
1825 newifp
->frame
.down
= cx
->fp
;
1826 newifp
->frame
.annotation
= NULL
;
1827 newifp
->frame
.scopeChain
= OBJ_GET_PARENT(cx
, fi
.callee
);
1828 newifp
->frame
.sharpDepth
= 0;
1829 newifp
->frame
.sharpArray
= NULL
;
1830 newifp
->frame
.flags
= 0;
1831 newifp
->frame
.dormantNext
= NULL
;
1832 newifp
->frame
.xmlNamespace
= NULL
;
1833 newifp
->frame
.blockChain
= NULL
;
1834 newifp
->mark
= newmark
;
1835 newifp
->frame
.thisp
= NULL
; // will be set by js_ExecuteTree -> FlushNativeStackFrame
1837 newifp
->frame
.regs
= cx
->fp
->regs
;
1838 newifp
->frame
.regs
->pc
= script
->code
;
1839 newifp
->frame
.regs
->sp
= newsp
+ script
->nfixed
;
1840 newifp
->frame
.slots
= newsp
;
1841 if (script
->staticDepth
< JS_DISPLAY_SIZE
) {
1842 JSStackFrame
**disp
= &cx
->display
[script
->staticDepth
];
1843 newifp
->frame
.displaySave
= *disp
;
1844 *disp
= &newifp
->frame
;
1847 newifp
->frame
.pcDisabledSave
= 0;
1850 cx
->fp
->regs
= &newifp
->callerRegs
;
1851 cx
->fp
= &newifp
->frame
;
1853 // FIXME: we must count stack slots from caller's operand stack up to (but not including)
1854 // callee's, including missing arguments. Could we shift everything down to the caller's
1855 // fp->slots (where vars start) and avoid some of the complexity?
1856 return (fi
.s
.spdist
- cx
->fp
->down
->script
->nfixed
) +
1857 ((fun
->nargs
> cx
->fp
->argc
) ? fun
->nargs
- cx
->fp
->argc
: 0) +
1862 js_RecordTree(JSContext
* cx
, JSTraceMonitor
* tm
, Fragment
* f
)
1864 /* Make sure the global type map didn't change on us. */
1865 uint32 globalShape
= OBJ_SCOPE(JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
))->shape
;
1866 if (tm
->globalShape
!= globalShape
) {
1867 debug_only_v(printf("Global shape mismatch (%u vs. %u) in RecordTree, flushing cache.\n",
1868 globalShape
, tm
->globalShape
);)
1869 js_FlushJITCache(cx
);
1873 current
.captureGlobalTypes(cx
, *tm
->globalSlots
);
1874 if (!current
.matches(*tm
->globalTypeMap
)) {
1875 js_FlushJITCache(cx
);
1876 debug_only_v(printf("Global type map mismatch in RecordTree, flushing cache.\n");)
1880 AUDIT(recorderStarted
);
1882 /* Try to find an unused peer fragment, or allocate a new one. */
1883 while (f
->code() && f
->peer
)
1886 f
= JS_TRACE_MONITOR(cx
).fragmento
->newLoop(f
->ip
);
1890 /* allocate space to store the LIR for this tree */
1892 f
->lirbuf
= new (&gc
) LirBuffer(tm
->fragmento
, builtins
);
1894 f
->lirbuf
->names
= new (&gc
) LirNameMap(&gc
, builtins
, tm
->fragmento
->labels
);
1898 JS_ASSERT(!f
->code() && !f
->vmprivate
);
1900 /* setup the VM-private treeInfo structure for this fragment */
1901 TreeInfo
* ti
= new (&gc
) TreeInfo(f
);
1903 /* capture the coerced type of each active slot in the stack type map */
1904 ti
->stackTypeMap
.captureStackTypes(cx
, 0/*callDepth*/);
1906 /* determine the native frame layout at the entry point */
1907 unsigned entryNativeStackSlots
= ti
->stackTypeMap
.length();
1908 JS_ASSERT(entryNativeStackSlots
== js_NativeStackSlots(cx
, 0/*callDepth*/));
1909 ti
->nativeStackBase
= (entryNativeStackSlots
-
1910 (cx
->fp
->regs
->sp
- StackBase(cx
->fp
))) * sizeof(double);
1911 ti
->maxNativeStackSlots
= entryNativeStackSlots
;
1912 ti
->maxCallDepth
= 0;
1913 ti
->script
= cx
->fp
->script
;
1915 /* recording primary trace */
1916 return js_StartRecorder(cx
, NULL
, f
, ti
,
1917 tm
->globalSlots
->length(), tm
->globalTypeMap
->data(),
1918 ti
->stackTypeMap
.data(), NULL
);
1922 js_AttemptToExtendTree(JSContext
* cx
, GuardRecord
* anchor
, GuardRecord
* exitedFrom
)
1924 Fragment
* f
= anchor
->from
->root
;
1925 JS_ASSERT(f
->vmprivate
);
1927 debug_only_v(printf("trying to attach another branch to the tree\n");)
1930 if (!(c
= anchor
->target
)) {
1931 c
= JS_TRACE_MONITOR(cx
).fragmento
->createBranch(anchor
, anchor
->exit
);
1932 c
->spawnedFrom
= anchor
->guard
;
1934 anchor
->exit
->target
= c
;
1939 if (++c
->hits() >= HOTEXIT
) {
1940 /* start tracing secondary trace from this point */
1941 c
->lirbuf
= f
->lirbuf
;
1943 uint8
* globalTypeMap
;
1944 uint8
* stackTypeMap
;
1946 if (exitedFrom
== NULL
) {
1947 /* If we are coming straight from a simple side exit, just use that exit's type map
1948 as starting point. */
1949 SideExit
* e
= anchor
->exit
;
1950 ngslots
= e
->numGlobalSlots
;
1951 globalTypeMap
= e
->typeMap
;
1952 stackTypeMap
= globalTypeMap
+ ngslots
;
1954 /* If we side-exited on a loop exit and continue on a nesting guard, the nesting
1955 guard (anchor) has the type information for everything below the current scope,
1956 and the actual guard we exited from has the types for everything in the current
1957 scope (and whatever it inlined). We have to merge those maps here. */
1958 SideExit
* e1
= anchor
->exit
;
1959 SideExit
* e2
= exitedFrom
->exit
;
1960 fullMap
.add(e1
->typeMap
+ e1
->numGlobalSlots
, e1
->numStackSlotsBelowCurrentFrame
);
1961 fullMap
.add(e2
->typeMap
+ e2
->numGlobalSlots
, e2
->numStackSlots
);
1962 ngslots
= e2
->numGlobalSlots
;
1963 globalTypeMap
= e2
->typeMap
;
1964 stackTypeMap
= fullMap
.data();
1966 return js_StartRecorder(cx
, anchor
, c
, (TreeInfo
*)f
->vmprivate
,
1967 ngslots
, globalTypeMap
, stackTypeMap
, exitedFrom
);
1973 js_ExecuteTree(JSContext
* cx
, Fragment
** treep
, uintN
& inlineCallCount
,
1974 GuardRecord
** innermostNestedGuardp
);
1977 js_RecordLoopEdge(JSContext
* cx
, TraceRecorder
* r
, jsbytecode
* oldpc
, uintN
& inlineCallCount
)
1979 #ifdef JS_THREADSAFE
1980 if (OBJ_SCOPE(JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
))->title
.ownercx
!= cx
) {
1981 debug_only_v(printf("Global object not owned by this context.\n"););
1982 return false; /* we stay away from shared global objects */
1985 Fragmento
* fragmento
= JS_TRACE_MONITOR(cx
).fragmento
;
1986 /* If we hit our own loop header, close the loop and compile the trace. */
1987 if (r
->isLoopHeader(cx
)) {
1988 if (fragmento
->assm()->error()) {
1989 js_AbortRecording(cx
, oldpc
, "Error during recording");
1990 /* If we ran out of memory, flush the code cache and abort. */
1991 if (fragmento
->assm()->error() == OutOMem
)
1992 js_FlushJITCache(cx
);
1993 return false; /* done recording */
1995 r
->closeLoop(fragmento
);
1996 js_DeleteRecorder(cx
);
1997 return false; /* done recording */
1999 /* does this branch go to an inner loop? */
2000 Fragment
* f
= fragmento
->getLoop(cx
->fp
->regs
->pc
);
2001 if (nesting_enabled
&&
2002 f
&& /* must have a fragment at that location */
2003 r
->selectCallablePeerFragment(&f
) && /* is there a potentially matching peer fragment? */
2004 r
->adjustCallerTypes(f
)) { /* make sure we can make our arguments fit */
2005 r
->prepareTreeCall(f
);
2006 GuardRecord
* innermostNestedGuard
= NULL
;
2007 GuardRecord
* lr
= js_ExecuteTree(cx
, &f
, inlineCallCount
, &innermostNestedGuard
);
2009 /* js_ExecuteTree might have flushed the cache and aborted us already. */
2010 if (JS_TRACE_MONITOR(cx
).recorder
)
2011 js_AbortRecording(cx
, oldpc
, "Couldn't call inner tree");
2014 switch (lr
->exit
->exitType
) {
2016 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
2017 if (innermostNestedGuard
) {
2018 js_AbortRecording(cx
, oldpc
,
2019 "Inner tree took different side exit, abort recording");
2020 return js_AttemptToExtendTree(cx
, innermostNestedGuard
, lr
);
2022 /* emit a call to the inner tree and continue recording the outer tree trace */
2023 r
->emitTreeCall(f
, lr
);
2026 /* abort recording the outer tree, extend the inner tree */
2027 js_AbortRecording(cx
, oldpc
, "Inner tree is trying to grow, abort outer recording");
2028 return js_AttemptToExtendTree(cx
, lr
, NULL
);
2030 debug_only_v(printf("exit_type=%d\n", lr
->exit
->exitType
);)
2031 js_AbortRecording(cx
, oldpc
, "Inner tree not suitable for calling");
2035 /* try to unroll the inner loop a bit, maybe it connects back to our loop header eventually */
2036 if ((!f
|| !f
->code()) && r
->trackLoopEdges())
2038 /* not returning to our own loop header, not an inner loop we can call, abort trace */
2039 AUDIT(returnToDifferentLoopHeader
);
2040 debug_only_v(printf("loop edge %d -> %d, header %d\n",
2041 oldpc
- cx
->fp
->script
->code
,
2042 cx
->fp
->regs
->pc
- cx
->fp
->script
->code
,
2043 (jsbytecode
*)r
->getFragment()->root
->ip
- cx
->fp
->script
->code
));
2044 js_AbortRecording(cx
, oldpc
, "Loop edge does not return to header");
2048 static inline GuardRecord
*
2049 js_ExecuteTree(JSContext
* cx
, Fragment
** treep
, uintN
& inlineCallCount
,
2050 GuardRecord
** innermostNestedGuardp
)
2052 Fragment
* f
= *treep
;
2054 /* if we don't have a compiled tree available for this location, bail out */
2056 JS_ASSERT(!f
->vmprivate
);
2059 JS_ASSERT(f
->vmprivate
);
2061 AUDIT(traceTriggered
);
2063 /* execute previously recorded trace */
2064 TreeInfo
* ti
= (TreeInfo
*)f
->vmprivate
;
2066 debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u\n",
2067 cx
->fp
->script
->filename
,
2068 js_PCToLineNumber(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
),
2069 cx
->fp
->regs
->pc
- cx
->fp
->script
->code
, ti
->maxNativeStackSlots
););
2071 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
2072 unsigned ngslots
= tm
->globalSlots
->length();
2073 uint16
* gslots
= tm
->globalSlots
->data();
2074 JSObject
* globalObj
= JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
);
2075 unsigned globalFrameSize
= STOBJ_NSLOTS(globalObj
);
2076 double* global
= (double*)alloca((globalFrameSize
+1) * sizeof(double));
2077 debug_only(*(uint64
*)&global
[globalFrameSize
] = 0xdeadbeefdeadbeefLL
;)
2078 double* stack
= (double*)alloca(MAX_NATIVE_STACK_SLOTS
* sizeof(double));
2080 /* If any of our trees uses globals, the shape of the global object must not change and
2081 the global type map must remain applicable at all times (we expect absolute type
2082 stability for globals). */
2084 (OBJ_SCOPE(globalObj
)->shape
!= tm
->globalShape
||
2085 !BuildNativeGlobalFrame(cx
, ngslots
, gslots
, tm
->globalTypeMap
->data(), global
))) {
2086 AUDIT(globalShapeMismatchAtEntry
);
2087 debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
2088 OBJ_SCOPE(globalObj
)->shape
, tm
->globalShape
);)
2089 const void* ip
= f
->ip
;
2090 js_FlushJITCache(cx
);
2091 *treep
= tm
->fragmento
->newLoop(ip
);
2095 if (!BuildNativeStackFrame(cx
, 0/*callDepth*/, ti
->stackTypeMap
.data(), stack
)) {
2096 AUDIT(typeMapMismatchAtEntry
);
2097 debug_only_v(printf("type-map mismatch.\n");)
2098 if (++ti
->mismatchCount
> MAX_MISMATCH
) {
2099 debug_only_v(printf("excessive mismatches, flushing tree.\n"));
2100 js_TrashTree(cx
, f
);
2106 ti
->mismatchCount
= 0;
2108 double* entry_sp
= &stack
[ti
->nativeStackBase
/sizeof(double)];
2109 FrameInfo
* callstack
= (FrameInfo
*) alloca(MAX_CALL_STACK_ENTRIES
* sizeof(FrameInfo
));
2112 state
.sp
= (void*)entry_sp
;
2113 state
.eos
= ((double*)state
.sp
) + MAX_NATIVE_STACK_SLOTS
;
2114 state
.rp
= callstack
;
2115 state
.eor
= callstack
+ MAX_CALL_STACK_ENTRIES
;
2119 state
.nestedExit
= NULL
;
2121 union { NIns
*code
; GuardRecord
* (FASTCALL
*func
)(InterpState
*, Fragment
*); } u
;
2124 #if defined(DEBUG) && defined(NANOJIT_IA32)
2125 uint64 start
= rdtsc();
2129 * We may be called from js_MonitorLoopEdge while not recording, or while
2130 * recording. Rather than over-generalize by using a counter instead of a
2131 * flag, we simply sample and update cx->executingTrace if necessary.
2133 bool executingTrace
= cx
->executingTrace
;
2134 if (!executingTrace
)
2135 cx
->executingTrace
= true;
2138 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
2139 SIMULATE_FASTCALL(lr
, &state
, NULL
, u
.func
);
2141 lr
= u
.func(&state
, NULL
);
2144 if (!executingTrace
)
2145 cx
->executingTrace
= false;
2147 /* If we bail out on a nested exit, the compiled code returns the outermost nesting
2148 guard but what we are really interested in is the innermost guard that we hit
2149 instead of the guard we were expecting there. */
2151 if (lr
->exit
->exitType
== NESTED_EXIT
) {
2152 /* Unwind all frames held by nested outer trees (since the innermost tree's frame which
2153 we restore below doesn't contain such frames. */
2155 if (innermostNestedGuardp
)
2156 *innermostNestedGuardp
= lr
;
2157 debug_only_v(printf("processing tree call guard %p, calldepth=%d\n",
2158 lr
, lr
->calldepth
);)
2159 unsigned calldepth
= lr
->calldepth
;
2160 if (calldepth
> 0) {
2161 /* We found a nesting guard that holds a frame, write it back. */
2162 for (unsigned i
= 0; i
< calldepth
; ++i
)
2163 js_SynthesizeFrame(cx
, callstack
[i
]);
2164 /* Restore the native stack excluding the current frame, which the next tree
2165 call guard or the innermost tree exit guard will restore. */
2166 slots
= FlushNativeStackFrame(cx
, calldepth
,
2167 lr
->exit
->typeMap
+ lr
->exit
->numGlobalSlots
,
2168 stack
, &cx
->fp
->argv
[-2]);
2171 callstack
+= calldepth
;
2172 inlineCallCount
+= calldepth
;
2175 JS_ASSERT(lr
->guard
->oprnd1()->oprnd2()->isconstp());
2176 lr
= (GuardRecord
*)lr
->guard
->oprnd1()->oprnd2()->constvalp();
2177 } while (lr
->exit
->exitType
== NESTED_EXIT
);
2179 /* We restored the nested frames, now we just need to deal with the innermost guard. */
2180 lr
= state
.nestedExit
;
2184 /* sp_adj and ip_adj are relative to the tree we exit out of, not the tree we
2185 entered into (which might be different in the presence of nested trees). */
2186 ti
= (TreeInfo
*)lr
->from
->root
->vmprivate
;
2188 /* We already synthesized the frames around the innermost guard. Here we just deal
2189 with additional frames inside the tree we are bailing out from. */
2190 unsigned calldepth
= lr
->calldepth
;
2191 unsigned calldepth_slots
= 0;
2192 for (unsigned n
= 0; n
< calldepth
; ++n
)
2193 calldepth_slots
+= js_SynthesizeFrame(cx
, callstack
[n
]);
2195 /* Adjust sp and pc relative to the tree we exited from (not the tree we entered
2196 into). These are our final values for sp and pc since js_SynthesizeFrame has
2197 already taken care of all frames in between. */
2198 SideExit
* e
= lr
->exit
;
2199 JSStackFrame
* fp
= cx
->fp
;
2201 /* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
2202 is whatever slots frames around us consume. */
2203 fp
->regs
->pc
= (jsbytecode
*)lr
->from
->root
->ip
+ e
->ip_adj
;
2204 fp
->regs
->sp
= StackBase(fp
) + (e
->sp_adj
/ sizeof(double)) - calldepth_slots
;
2205 JS_ASSERT(fp
->slots
+ fp
->script
->nfixed
+
2206 js_ReconstructStackDepth(cx
, cx
->fp
->script
, fp
->regs
->pc
) == fp
->regs
->sp
);
2208 #if defined(DEBUG) && defined(NANOJIT_IA32)
2209 if (verbose_debug
) {
2210 printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, ip=%p, "
2212 fp
->script
->filename
, js_PCToLineNumber(cx
, fp
->script
, fp
->regs
->pc
),
2213 fp
->regs
->pc
- fp
->script
->code
,
2214 js_CodeName
[*fp
->regs
->pc
],
2217 fp
->regs
->sp
- StackBase(fp
), lr
->jmp
,
2222 /* If this trace is part of a tree, later branches might have added additional globals for
2223 with we don't have any type information available in the side exit. We merge in this
2224 information from the entry type-map. See also comment in the constructor of TraceRecorder
2225 why this is always safe to do. */
2226 unsigned exit_gslots
= e
->numGlobalSlots
;
2227 JS_ASSERT(ngslots
== tm
->globalTypeMap
->length());
2228 JS_ASSERT(ngslots
>= exit_gslots
);
2229 uint8
* globalTypeMap
= e
->typeMap
;
2230 if (exit_gslots
< ngslots
)
2231 mergeTypeMaps(&globalTypeMap
, &exit_gslots
, tm
->globalTypeMap
->data(), ngslots
,
2232 (uint8
*)alloca(sizeof(uint8
) * ngslots
));
2233 JS_ASSERT(exit_gslots
== tm
->globalTypeMap
->length());
2235 /* write back interned globals */
2236 slots
= FlushNativeGlobalFrame(cx
, exit_gslots
, gslots
, globalTypeMap
, global
);
2239 JS_ASSERT(globalFrameSize
== STOBJ_NSLOTS(globalObj
));
2240 JS_ASSERT(*(uint64
*)&global
[globalFrameSize
] == 0xdeadbeefdeadbeefLL
);
2242 /* write back native stack frame */
2243 slots
= FlushNativeStackFrame(cx
, e
->calldepth
, e
->typeMap
+ e
->numGlobalSlots
, stack
, NULL
);
2246 JS_ASSERT(unsigned(slots
) == e
->numStackSlots
);
2248 AUDIT(sideExitIntoInterpreter
);
2250 if (!lr
) /* did the tree actually execute? */
2253 /* Adjust inlineCallCount (we already compensated for any outer nested frames). */
2254 inlineCallCount
+= lr
->calldepth
;
2260 js_MonitorLoopEdge(JSContext
* cx
, jsbytecode
* oldpc
, uintN
& inlineCallCount
)
2262 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
2264 /* Is the recorder currently active? */
2266 if (js_RecordLoopEdge(cx
, tm
->recorder
, oldpc
, inlineCallCount
))
2268 /* recording was aborted, treat like a regular loop edge hit */
2270 JS_ASSERT(!tm
->recorder
);
2272 /* check if our quick cache has an entry for this ip, otherwise ask fragmento. */
2273 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
2275 JSFragmentCacheEntry
* cacheEntry
= &tm
->fcache
[jsuword(pc
) & JS_FRAGMENT_CACHE_MASK
];
2276 if (cacheEntry
->pc
== pc
) {
2277 f
= cacheEntry
->fragment
;
2279 f
= tm
->fragmento
->getLoop(pc
);
2281 f
= tm
->fragmento
->newLoop(pc
);
2282 cacheEntry
->pc
= pc
;
2283 cacheEntry
->fragment
= f
;
2286 /* If there is a chance that js_ExecuteTree will actually succeed, invoke it (either the
2287 first fragment must contain some code, or at least it must have a peer fragment). */
2288 GuardRecord
* lr
= NULL
;
2289 GuardRecord
* innermostNestedGuard
= NULL
;
2290 if (f
->code() || f
->peer
)
2291 lr
= js_ExecuteTree(cx
, &f
, inlineCallCount
, &innermostNestedGuard
);
2293 JS_ASSERT(!tm
->recorder
);
2294 /* If we don't have compiled code for this entry point (none recorded or we trashed it),
2295 count the number of hits and trigger the recorder if appropriate. */
2296 if (!f
->code() && (++f
->hits() >= HOTLOOP
))
2297 return js_RecordTree(cx
, tm
, f
);
2300 /* If we exit on a branch, or on a tree call guard, try to grow the inner tree (in case
2301 of a branch exit), or the tree nested around the tree we exited from (in case of the
2302 tree call guard). */
2303 SideExit
* exit
= lr
->exit
;
2304 switch (exit
->exitType
) {
2306 return js_AttemptToExtendTree(cx
, lr
, NULL
);
2308 if (innermostNestedGuard
)
2309 return js_AttemptToExtendTree(cx
, innermostNestedGuard
, lr
);
2312 /* No, this was an unusual exit (i.e. out of memory/GC), so just resume interpretation. */
2318 js_MonitorRecording(JSContext
* cx
)
2320 TraceRecorder
*tr
= JS_TRACE_MONITOR(cx
).recorder
;
2321 if (tr
->wasDeepAborted()) {
2322 js_AbortRecording(cx
, NULL
, "deep abort requested");
2326 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
2327 /* If we hit a break, end the loop and generate an always taken loop exit guard. For other
2328 downward gotos (like if/else) continue recording. */
2329 if ((*pc
== JSOP_GOTO
) || (*pc
== JSOP_GOTOX
)) {
2330 jssrcnote
* sn
= js_GetSrcNote(cx
->fp
->script
, pc
);
2331 if ((sn
!= NULL
) && (SN_TYPE(sn
) == SRC_BREAK
)) {
2332 AUDIT(breakLoopExits
);
2333 tr
->endLoop(JS_TRACE_MONITOR(cx
).fragmento
);
2334 js_DeleteRecorder(cx
);
2335 return false; /* done recording */
2338 /* If its not a break, continue recording and follow the trace. */
2343 js_AbortRecording(JSContext
* cx
, jsbytecode
* abortpc
, const char* reason
)
2345 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
2346 JS_ASSERT(tm
->recorder
!= NULL
);
2347 Fragment
* f
= tm
->recorder
->getFragment();
2348 JS_ASSERT(!f
->vmprivate
);
2349 /* Abort the trace and blacklist its starting point. */
2350 AUDIT(recorderAborted
);
2352 debug_only_v(if (!abortpc
) abortpc
= cx
->fp
->regs
->pc
;
2353 printf("Abort recording (line %d, pc %d): %s.\n",
2354 js_PCToLineNumber(cx
, cx
->fp
->script
, abortpc
),
2355 abortpc
- cx
->fp
->script
->code
, reason
);)
2358 js_DeleteRecorder(cx
);
2359 /* If this is the primary trace and we didn't succeed compiling, trash the TreeInfo object. */
2360 if (!f
->code() && (f
->root
== f
))
2361 js_TrashTree(cx
, f
);
2364 #if defined NANOJIT_IA32
2369 #if defined _MSC_VER
2378 #elif defined __GNUC__
2380 "mov $0x01, %%eax\n"
2385 : /* We have no inputs */
2386 : /* We don't clobber anything */
2389 return (features
& (1<<26)) != 0;
2394 js_InitJIT(JSTraceMonitor
*tm
)
2396 #if defined NANOJIT_IA32
2397 if (!did_we_check_sse2
) {
2398 avmplus::AvmCore::sse2_available
= js_CheckForSSE2();
2399 did_we_check_sse2
= true;
2402 if (!tm
->fragmento
) {
2403 JS_ASSERT(!tm
->globalSlots
&& !tm
->globalTypeMap
);
2404 Fragmento
* fragmento
= new (&gc
) Fragmento(core
, 24);
2405 verbose_only(fragmento
->labels
= new (&gc
) LabelMap(core
, NULL
);)
2406 fragmento
->assm()->setCallTable(builtins
);
2407 tm
->fragmento
= fragmento
;
2408 tm
->globalSlots
= new (&gc
) SlotList();
2409 tm
->globalTypeMap
= new (&gc
) TypeMap();
2412 debug_only(memset(&stat
, 0, sizeof(stat
)));
2417 js_FinishJIT(JSTraceMonitor
*tm
)
2420 printf("recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
2421 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
2423 stat
.recorderStarted
, stat
.recorderAborted
,
2424 stat
.traceCompleted
, stat
.returnToDifferentLoopHeader
, stat
.treesTrashed
,
2425 stat
.slotPromoted
, stat
.unstableLoopVariable
, stat
.breakLoopExits
);
2426 printf("monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
2427 "global mismatch(%llu)\n", stat
.traceTriggered
, stat
.sideExitIntoInterpreter
,
2428 stat
.typeMapMismatchAtEntry
, stat
.globalShapeMismatchAtEntry
);
2430 if (tm
->fragmento
!= NULL
) {
2431 JS_ASSERT(tm
->globalSlots
&& tm
->globalTypeMap
);
2432 verbose_only(delete tm
->fragmento
->labels
;)
2433 delete tm
->fragmento
;
2434 tm
->fragmento
= NULL
;
2435 delete tm
->globalSlots
;
2436 tm
->globalSlots
= NULL
;
2437 delete tm
->globalTypeMap
;
2438 tm
->globalTypeMap
= NULL
;
2443 js_FlushJITOracle(JSContext
* cx
)
2449 js_FlushJITCache(JSContext
* cx
)
2451 debug_only_v(printf("Flushing cache.\n"););
2452 JSTraceMonitor
* tm
= &JS_TRACE_MONITOR(cx
);
2454 js_AbortRecording(cx
, NULL
, "flush cache");
2455 Fragmento
* fragmento
= tm
->fragmento
;
2457 fragmento
->clearFrags();
2459 JS_ASSERT(fragmento
->labels
);
2460 delete fragmento
->labels
;
2461 fragmento
->labels
= new (&gc
) LabelMap(core
, NULL
);
2464 memset(&tm
->fcache
, 0, sizeof(tm
->fcache
));
2466 tm
->globalShape
= OBJ_SCOPE(JS_GetGlobalForObject(cx
, cx
->fp
->scopeChain
))->shape
;
2467 tm
->globalSlots
->clear();
2468 tm
->globalTypeMap
->clear();
2473 TraceRecorder::argval(unsigned n
) const
2475 JS_ASSERT(n
< cx
->fp
->fun
->nargs
);
2476 return cx
->fp
->argv
[n
];
2480 TraceRecorder::varval(unsigned n
) const
2482 JS_ASSERT(n
< cx
->fp
->script
->nslots
);
2483 return cx
->fp
->slots
[n
];
2487 TraceRecorder::stackval(int n
) const
2489 jsval
* sp
= cx
->fp
->regs
->sp
;
2490 JS_ASSERT(size_t((sp
+ n
) - StackBase(cx
->fp
)) < StackDepth(cx
->fp
->script
));
2495 TraceRecorder::scopeChain() const
2497 return lir
->insLoad(LIR_ldp
,
2498 lir
->insLoad(LIR_ldp
, cx_ins
, offsetof(JSContext
, fp
)),
2499 offsetof(JSStackFrame
, scopeChain
));
2503 FrameInRange(JSStackFrame
* fp
, JSStackFrame
*target
, unsigned callDepth
)
2505 while (fp
!= target
) {
2506 if (callDepth
-- == 0)
2508 if (!(fp
= fp
->down
))
2515 TraceRecorder::activeCallOrGlobalSlot(JSObject
* obj
, jsval
*& vp
)
2517 JS_ASSERT(obj
!= globalObj
);
2519 JSAtom
* atom
= atoms
[GET_INDEX(cx
->fp
->regs
->pc
)];
2522 if (js_FindProperty(cx
, ATOM_TO_JSID(atom
), &obj
, &obj2
, &prop
) < 0 || !prop
)
2523 ABORT_TRACE("failed to find name in non-global scope chain");
2525 if (obj
== globalObj
) {
2526 JSScopeProperty
* sprop
= (JSScopeProperty
*) prop
;
2527 if (obj2
!= obj
|| !SPROP_HAS_VALID_SLOT(sprop
, OBJ_SCOPE(obj
))) {
2528 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
2529 ABORT_TRACE("prototype or slotless globalObj property");
2532 if (!lazilyImportGlobalSlot(sprop
->slot
))
2533 ABORT_TRACE("lazy import of global slot failed");
2534 vp
= &STOBJ_GET_SLOT(obj
, sprop
->slot
);
2535 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
2539 if (obj
== obj2
&& OBJ_GET_CLASS(cx
, obj
) == &js_CallClass
) {
2540 JSStackFrame
* cfp
= (JSStackFrame
*) JS_GetPrivate(cx
, obj
);
2541 if (cfp
&& FrameInRange(cx
->fp
, cfp
, callDepth
)) {
2542 JSScopeProperty
* sprop
= (JSScopeProperty
*) prop
;
2543 uintN slot
= sprop
->shortid
;
2546 if (sprop
->getter
== js_GetCallArg
) {
2547 JS_ASSERT(slot
< cfp
->fun
->nargs
);
2548 vp
= &cfp
->argv
[slot
];
2549 } else if (sprop
->getter
== js_GetCallVar
) {
2550 JS_ASSERT(slot
< cfp
->script
->nslots
);
2551 vp
= &cfp
->slots
[slot
];
2553 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
2555 ABORT_TRACE("dynamic property of Call object");
2560 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
2561 ABORT_TRACE("fp->scopeChain is not global or active call object");
2565 TraceRecorder::arg(unsigned n
)
2567 return get(&argval(n
));
2571 TraceRecorder::arg(unsigned n
, LIns
* i
)
2577 TraceRecorder::var(unsigned n
)
2579 return get(&varval(n
));
2583 TraceRecorder::var(unsigned n
, LIns
* i
)
2589 TraceRecorder::stack(int n
)
2591 return get(&stackval(n
));
2595 TraceRecorder::stack(int n
, LIns
* i
)
2597 set(&stackval(n
), i
, n
>= 0);
2600 LIns
* TraceRecorder::f2i(LIns
* f
)
2602 return lir
->insCall(F_DoubleToInt32
, &f
);
2606 TraceRecorder::ifop()
2608 jsval
& v
= stackval(-1);
2609 if (JSVAL_TAG(v
) == JSVAL_BOOLEAN
) {
2610 guard(JSVAL_TO_BOOLEAN(v
) != 1,
2611 lir
->ins_eq0(lir
->ins2i(LIR_eq
, get(&v
), 1)),
2613 } else if (JSVAL_IS_OBJECT(v
)) {
2614 guard(JSVAL_IS_NULL(v
), lir
->ins_eq0(get(&v
)), BRANCH_EXIT
);
2615 } else if (isNumber(v
)) {
2616 jsdouble d
= asNumber(v
);
2619 guard(d
== 0 || JSDOUBLE_IS_NaN(d
),
2620 lir
->ins2(LIR_feq
, get(&v
), lir
->insImmq(u
.u64
)),
2622 } else if (JSVAL_IS_STRING(v
)) {
2623 guard(JSSTRING_LENGTH(JSVAL_TO_STRING(v
)) == 0,
2624 lir
->ins_eq0(lir
->ins2(LIR_piand
,
2625 lir
->insLoad(LIR_ldp
,
2627 (int)offsetof(JSString
, length
)),
2628 INS_CONSTPTR(JSSTRING_LENGTH_MASK
))),
2631 JS_NOT_REACHED("ifop");
2637 TraceRecorder::switchop()
2639 jsval
& v
= stackval(-1);
2641 jsdouble d
= asNumber(v
);
2645 addName(lir
->ins2(LIR_feq
, get(&v
), lir
->insImmq(u
.u64
)),
2646 "guard(switch on numeric)"),
2648 } else if (JSVAL_IS_STRING(v
)) {
2649 LIns
* args
[] = { get(&v
), INS_CONSTPTR(JSVAL_TO_STRING(v
)) };
2651 addName(lir
->ins_eq0(lir
->ins_eq0(lir
->insCall(F_EqualStrings
, args
))),
2652 "guard(switch on string)"),
2654 } else if (JSVAL_IS_BOOLEAN(v
)) {
2656 addName(lir
->ins2(LIR_eq
, get(&v
), lir
->insImm(JSVAL_TO_BOOLEAN(v
))),
2657 "guard(switch on boolean)"),
2660 ABORT_TRACE("switch on object, null, or undefined");
2666 TraceRecorder::inc(jsval
& v
, jsint incr
, bool pre
)
2668 LIns
* v_ins
= get(&v
);
2669 if (!inc(v
, v_ins
, incr
, pre
))
2676 * On exit, v_ins is the incremented unboxed value, and the appropriate
2677 * value (pre- or post-increment as described by pre) is stacked.
2680 TraceRecorder::inc(jsval
& v
, LIns
*& v_ins
, jsint incr
, bool pre
)
2683 ABORT_TRACE("can only inc numbers");
2686 u
.d
= jsdouble(incr
);
2688 LIns
* v_after
= lir
->ins2(LIR_fadd
, v_ins
, lir
->insImmq(u
.u64
));
2690 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->fp
->regs
->pc
];
2691 JS_ASSERT(cs
.ndefs
== 1);
2692 stack(-cs
.nuses
, pre
? v_after
: v_ins
);
2698 TraceRecorder::incProp(jsint incr
, bool pre
)
2700 jsval
& l
= stackval(-1);
2701 if (JSVAL_IS_PRIMITIVE(l
))
2702 ABORT_TRACE("incProp on primitive");
2704 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
2705 LIns
* obj_ins
= get(&l
);
2709 if (!prop(obj
, obj_ins
, slot
, v_ins
))
2712 if (slot
== SPROP_INVALID_SLOT
)
2713 ABORT_TRACE("incProp on invalid slot");
2715 jsval
& v
= STOBJ_GET_SLOT(obj
, slot
);
2716 if (!inc(v
, v_ins
, incr
, pre
))
2719 if (!box_jsval(v
, v_ins
))
2722 LIns
* dslots_ins
= NULL
;
2723 stobj_set_slot(obj_ins
, slot
, dslots_ins
, v_ins
);
2728 TraceRecorder::incElem(jsint incr
, bool pre
)
2730 jsval
& r
= stackval(-1);
2731 jsval
& l
= stackval(-2);
2735 if (!elem(l
, r
, vp
, v_ins
, addr_ins
))
2737 if (!inc(*vp
, v_ins
, incr
, pre
))
2739 if (!box_jsval(*vp
, v_ins
))
2741 lir
->insStorei(v_ins
, addr_ins
, 0);
2746 TraceRecorder::cmp(LOpcode op
, int flags
)
2748 jsval
& r
= stackval(-1);
2749 jsval
& l
= stackval(-2);
2751 bool negate
= !!(flags
& CMP_NEGATE
);
2753 if (JSVAL_IS_STRING(l
) && JSVAL_IS_STRING(r
)) {
2755 LIns
* args
[] = { get(&r
), get(&l
) };
2756 x
= lir
->ins1(LIR_i2f
, lir
->insCall(F_CompareStrings
, args
));
2757 x
= lir
->ins2i(op
, x
, 0);
2758 jsint result
= js_CompareStrings(JSVAL_TO_STRING(l
), JSVAL_TO_STRING(r
));
2773 JS_NOT_REACHED("unexpected comparison op for strings");
2776 } else if (isNumber(l
) || isNumber(r
)) {
2777 jsval tmp
[2] = {l
, r
};
2778 JSAutoTempValueRooter
tvr(cx
, 2, tmp
);
2780 // TODO: coerce non-numbers to numbers if it's not string-on-string above
2781 LIns
* l_ins
= get(&l
);
2782 LIns
* r_ins
= get(&r
);
2785 LIns
* args
[] = { l_ins
, cx_ins
};
2786 if (JSVAL_IS_STRING(l
)) {
2787 l_ins
= lir
->insCall(F_StringToNumber
, args
);
2788 } else if (JSVAL_TAG(l
) == JSVAL_BOOLEAN
) {
2790 * What I really want here is for undefined to be type-specialized
2791 * differently from real booleans. Failing that, I want to be able
2792 * to cmov on quads. Failing that, I want to have small forward
2793 * branched. Failing that, I want to be able to ins_choose on quads
2794 * without cmov. Failing that, eat flaming builtin!
2796 l_ins
= lir
->insCall(F_BooleanToNumber
, args
);
2797 } else if (!isNumber(l
)) {
2798 ABORT_TRACE("unsupported LHS type for cmp vs number");
2800 lnum
= js_ValueToNumber(cx
, &tmp
[0]);
2804 if (JSVAL_IS_STRING(r
)) {
2805 r_ins
= lir
->insCall(F_StringToNumber
, args
);
2806 } else if (JSVAL_TAG(r
) == JSVAL_BOOLEAN
) {
2807 // See above for the sob story.
2808 r_ins
= lir
->insCall(F_BooleanToNumber
, args
);
2809 } else if (!isNumber(r
)) {
2810 ABORT_TRACE("unsupported RHS type for cmp vs number");
2812 rnum
= js_ValueToNumber(cx
, &tmp
[1]);
2814 x
= lir
->ins2(op
, l_ins
, r_ins
);
2817 x
= lir
->ins_eq0(x
);
2826 cond
= lnum
<= rnum
;
2829 cond
= lnum
>= rnum
;
2832 JS_ASSERT(op
== LIR_feq
);
2833 cond
= (lnum
== rnum
) ^ negate
;
2836 } else if (JSVAL_IS_BOOLEAN(l
) && JSVAL_IS_BOOLEAN(r
)) {
2837 x
= lir
->ins2(op
, lir
->ins1(LIR_i2f
, get(&l
)), lir
->ins1(LIR_i2f
, get(&r
)));
2839 x
= lir
->ins_eq0(x
);
2841 // The well-known values of JSVAL_TRUE and JSVAL_FALSE make this very easy.
2842 // In particular: JSVAL_TO_BOOLEAN(0) < JSVAL_TO_BOOLEAN(1) so all of these comparisons do
2858 JS_ASSERT(op
== LIR_feq
);
2859 cond
= (l
== r
) ^ negate
;
2863 ABORT_TRACE("unsupported operand types for cmp");
2866 if (flags
& CMP_CASE
) {
2867 guard(cond
, x
, BRANCH_EXIT
);
2871 /* The interpreter fuses comparisons and the following branch,
2872 so we have to do that here as well. */
2873 if (flags
& CMP_TRY_BRANCH_AFTER_COND
)
2874 fuseIf(cx
->fp
->regs
->pc
+ 1, cond
, x
);
2876 /* We update the stack after the guard. This is safe since
2877 the guard bails out at the comparison and the interpreter
2878 will therefore re-execute the comparison. This way the
2879 value of the condition doesn't have to be calculated and
2880 saved on the stack in most cases. */
2885 // FIXME: we currently compare only like operand types; if for JSOP_EQ and
2886 // JSOP_NE we ever evolve to handle conversions then we must insist on like
2887 // "types" here (care required for 0 == -1, e.g.).
2889 TraceRecorder::equal(int flags
)
2891 jsval
& r
= stackval(-1);
2892 jsval
& l
= stackval(-2);
2893 bool negate
= !!(flags
& CMP_NEGATE
);
2894 if (JSVAL_IS_STRING(l
) && JSVAL_IS_STRING(r
)) {
2895 LIns
* args
[] = { get(&r
), get(&l
) };
2896 bool cond
= js_EqualStrings(JSVAL_TO_STRING(l
), JSVAL_TO_STRING(r
)) ^ negate
;
2897 LIns
* x
= lir
->ins_eq0(lir
->insCall(F_EqualStrings
, args
));
2899 x
= lir
->ins_eq0(x
);
2901 if (flags
& CMP_CASE
) {
2902 guard(cond
, x
, BRANCH_EXIT
);
2906 /* The interpreter fuses comparisons and the following branch,
2907 so we have to do that here as well. */
2908 if (flags
& CMP_TRY_BRANCH_AFTER_COND
)
2909 fuseIf(cx
->fp
->regs
->pc
+ 1, cond
, x
);
2911 /* We update the stack after the guard. This is safe since
2912 the guard bails out at the comparison and the interpreter
2913 will therefore re-execute the comparison. This way the
2914 value of the condition doesn't have to be calculated and
2915 saved on the stack in most cases. */
2919 if (JSVAL_IS_OBJECT(l
) && JSVAL_IS_OBJECT(r
)) {
2920 bool cond
= (l
== r
) ^ negate
;
2921 LIns
* x
= lir
->ins2(LIR_eq
, get(&l
), get(&r
));
2923 x
= lir
->ins_eq0(x
);
2925 if (flags
& CMP_CASE
) {
2926 guard(cond
, x
, BRANCH_EXIT
);
2930 /* The interpreter fuses comparisons and the following branch,
2931 so we have to do that here as well. */
2932 if (flags
& CMP_TRY_BRANCH_AFTER_COND
)
2933 fuseIf(cx
->fp
->regs
->pc
+ 1, cond
, x
);
2935 /* We update the stack after the guard. This is safe since
2936 the guard bails out at the comparison and the interpreter
2937 will therefore re-execute the comparison. This way the
2938 value of the condition doesn't have to be calculated and
2939 saved on the stack in most cases. */
2943 return cmp(LIR_feq
, flags
);
2947 TraceRecorder::unary(LOpcode op
)
2949 jsval
& v
= stackval(-1);
2950 bool intop
= !(op
& LIR64
);
2955 a
= lir
->ins1(op
, a
);
2957 a
= lir
->ins1(LIR_i2f
, a
);
2965 TraceRecorder::binary(LOpcode op
)
2967 jsval
& r
= stackval(-1);
2968 jsval
& l
= stackval(-2);
2969 bool intop
= !(op
& LIR64
);
2972 bool leftNumber
= isNumber(l
), rightNumber
= isNumber(r
);
2973 if ((op
>= LIR_sub
&& op
<= LIR_ush
) || // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
2974 (op
>= LIR_fsub
&& op
<= LIR_fdiv
)) { // fsub, fmul, fdiv
2976 if (JSVAL_IS_STRING(l
)) {
2979 a
= lir
->insCall(F_StringToNumber
, args
);
2982 if (JSVAL_IS_STRING(r
)) {
2985 b
= lir
->insCall(F_StringToNumber
, args
);
2989 if (leftNumber
&& rightNumber
) {
2991 LIns
*args
[] = { a
};
2992 a
= lir
->insCall(op
== LIR_ush
? F_DoubleToUint32
: F_DoubleToInt32
, args
);
2995 a
= lir
->ins2(op
, a
, b
);
2997 a
= lir
->ins1(op
== LIR_ush
? LIR_u2f
: LIR_i2f
, a
);
3004 JS_STATIC_ASSERT(offsetof(JSObjectOps
, newObjectMap
) == 0);
3007 TraceRecorder::map_is_native(JSObjectMap
* map
, LIns
* map_ins
, LIns
*& ops_ins
, size_t op_offset
)
3009 ops_ins
= addName(lir
->insLoad(LIR_ldp
, map_ins
, offsetof(JSObjectMap
, ops
)), "ops");
3010 LIns
* n
= lir
->insLoad(LIR_ldp
, ops_ins
, op_offset
);
3012 #define OP(ops) (*(JSObjectOp*) ((char*)(ops) + op_offset))
3014 if (OP(map
->ops
) == OP(&js_ObjectOps
)) {
3015 guard(true, addName(lir
->ins2(LIR_eq
, n
, INS_CONSTPTR(OP(&js_ObjectOps
))),
3016 "guard(native-map)"),
3022 ABORT_TRACE("non-native map");
3026 TraceRecorder::test_property_cache(JSObject
* obj
, LIns
* obj_ins
, JSObject
*& obj2
, jsuword
& pcval
)
3028 // Mimic the interpreter's special case for dense arrays by skipping up one
3029 // hop along the proto chain when accessing a named (not indexed) property,
3030 // typically to find Array.prototype methods.
3031 JSObject
* aobj
= obj
;
3032 if (OBJ_IS_DENSE_ARRAY(cx
, obj
)) {
3033 aobj
= OBJ_GET_PROTO(cx
, obj
);
3034 obj_ins
= stobj_get_fslot(obj_ins
, JSSLOT_PROTO
);
3037 LIns
* map_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, (int)offsetof(JSObject
, map
));
3040 // Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops
3041 // (newObjectMap == js_ObjectOps.newObjectMap) which is required to use
3042 // native objects (those whose maps are scopes), or even more narrow
3043 // conditions required because the cache miss case will call a particular
3044 // object-op (js_GetProperty, js_SetProperty).
3046 // We parameterize using offsetof and guard on match against the hook at
3047 // the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
3048 // guards the js_SetProperty case.
3049 uint32 format
= js_CodeSpec
[*cx
->fp
->regs
->pc
].format
;
3050 uint32 mode
= JOF_MODE(format
);
3051 size_t op_offset
= 0;
3052 if (mode
== JOF_PROP
|| mode
== JOF_VARPROP
) {
3053 JS_ASSERT(!(format
& JOF_SET
));
3054 op_offset
= offsetof(JSObjectOps
, getProperty
);
3056 JS_ASSERT(mode
== JOF_NAME
);
3059 if (!map_is_native(aobj
->map
, map_ins
, ops_ins
, op_offset
))
3063 JSPropCacheEntry
* entry
;
3064 PROPERTY_CACHE_TEST(cx
, cx
->fp
->regs
->pc
, aobj
, obj2
, entry
, atom
);
3066 // Miss: pre-fill the cache for the interpreter, as well as for our needs.
3067 // FIXME: 452357 - correctly propagate exceptions into the interpreter from
3068 // js_FindPropertyHelper, js_LookupPropertyWithFlags, and elsewhere.
3069 jsid id
= ATOM_TO_JSID(atom
);
3071 if (JOF_OPMODE(*cx
->fp
->regs
->pc
) == JOF_NAME
) {
3072 JS_ASSERT(aobj
== obj
);
3073 if (js_FindPropertyHelper(cx
, id
, &obj
, &obj2
, &prop
, &entry
) < 0)
3074 ABORT_TRACE("failed to find name");
3076 int protoIndex
= js_LookupPropertyWithFlags(cx
, aobj
, id
, 0, &obj2
, &prop
);
3078 ABORT_TRACE("failed to lookup property");
3081 js_FillPropertyCache(cx
, aobj
, OBJ_SCOPE(aobj
)->shape
, 0, protoIndex
, obj2
,
3082 (JSScopeProperty
*) prop
, &entry
);
3087 // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
3088 // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
3089 // the global it's assigning does not yet exist, create it.
3091 if (JSOp(*cx
->fp
->regs
->pc
) == JSOP_SETNAME
) {
3092 jsval v
= JSVAL_VOID
;
3093 if (!js_SetPropertyHelper(cx
, obj
, id
, &v
, &entry
))
3095 if (!entry
|| !PCVAL_IS_SPROP(entry
->vword
))
3096 ABORT_TRACE("can't create cacheable global for JSOP_SETNAME");
3097 JSScopeProperty
* sprop
= PCVAL_TO_SPROP(entry
->vword
);
3098 if (!SPROP_HAS_VALID_SLOT(sprop
, OBJ_SCOPE(obj
)))
3099 ABORT_TRACE("can't create slot-ful global for JSOP_SETNAME");
3100 pcval
= SLOT_TO_PCVAL(sprop
->slot
);
3102 // Use PCVAL_NULL to return "no such property" to our caller.
3108 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
3110 ABORT_TRACE("failed to fill property cache");
3113 #ifdef JS_THREADSAFE
3114 // There's a potential race in any JS_THREADSAFE embedding that's nuts
3115 // enough to share mutable objects on the scope or proto chain, but we
3116 // don't care about such insane embeddings. Anyway, the (scope, proto)
3117 // entry->vcap coordinates must reach obj2 from aobj at this point.
3118 JS_ASSERT(cx
->requestDepth
);
3121 // Emit guard(s), common code for both hit and miss cases.
3122 // Check for first-level cache hit and guard on kshape if possible.
3123 // Otherwise guard on key object exact match.
3124 if (PCVCAP_TAG(entry
->vcap
) <= 1) {
3125 if (aobj
!= globalObj
) {
3126 LIns
* shape_ins
= addName(lir
->insLoad(LIR_ld
, map_ins
, offsetof(JSScope
, shape
)),
3128 guard(true, addName(lir
->ins2i(LIR_eq
, shape_ins
, entry
->kshape
), "guard(kshape)"),
3133 JSOp op
= JSOp(*cx
->fp
->regs
->pc
);
3134 ptrdiff_t pcoff
= (op
== JSOP_GETARGPROP
) ? ARGNO_LEN
:
3135 (op
== JSOP_GETLOCALPROP
) ? SLOTNO_LEN
: 0;
3136 jsatomid index
= js_GetIndexFromBytecode(cx
, cx
->fp
->script
, cx
->fp
->regs
->pc
, pcoff
);
3137 JS_ASSERT(entry
->kpc
== (jsbytecode
*) atoms
[index
]);
3138 JS_ASSERT(entry
->kshape
== jsuword(aobj
));
3140 if (aobj
!= globalObj
) {
3141 guard(true, addName(lir
->ins2i(LIR_eq
, obj_ins
, entry
->kshape
), "guard(kobj)"),
3146 // For any hit that goes up the scope and or proto chains, we will need to
3147 // guard on the shape of the object containing the property.
3148 if (PCVCAP_TAG(entry
->vcap
) >= 1) {
3149 jsuword vcap
= entry
->vcap
;
3150 uint32 vshape
= PCVCAP_SHAPE(vcap
);
3151 JS_ASSERT(OBJ_SCOPE(obj2
)->shape
== vshape
);
3153 LIns
* obj2_ins
= INS_CONSTPTR(obj2
);
3154 map_ins
= lir
->insLoad(LIR_ldp
, obj2_ins
, (int)offsetof(JSObject
, map
));
3155 if (!map_is_native(obj2
->map
, map_ins
, ops_ins
))
3158 LIns
* shape_ins
= addName(lir
->insLoad(LIR_ld
, map_ins
, offsetof(JSScope
, shape
)),
3161 addName(lir
->ins2i(LIR_eq
, shape_ins
, vshape
), "guard(vshape)"),
3165 pcval
= entry
->vword
;
3170 TraceRecorder::test_property_cache_direct_slot(JSObject
* obj
, LIns
* obj_ins
, uint32
& slot
)
3176 * Property cache ensures that we are dealing with an existing property,
3177 * and guards the shape for us.
3179 if (!test_property_cache(obj
, obj_ins
, obj2
, pcval
))
3182 /* No such property means invalid slot, which callers must check for first. */
3183 if (PCVAL_IS_NULL(pcval
)) {
3184 slot
= SPROP_INVALID_SLOT
;
3188 /* Insist if setting on obj being the directly addressed object. */
3189 uint32 setflags
= (js_CodeSpec
[*cx
->fp
->regs
->pc
].format
& (JOF_SET
| JOF_INCDEC
));
3190 if (setflags
&& obj2
!= obj
)
3191 ABORT_TRACE("JOF_SET opcode hit prototype chain");
3193 /* Don't trace getter or setter calls, our caller wants a direct slot. */
3194 if (PCVAL_IS_SPROP(pcval
)) {
3195 JSScopeProperty
* sprop
= PCVAL_TO_SPROP(pcval
);
3197 if (setflags
&& !SPROP_HAS_STUB_SETTER(sprop
))
3198 ABORT_TRACE("non-stub setter");
3199 if (setflags
!= JOF_SET
&& !SPROP_HAS_STUB_GETTER(sprop
))
3200 ABORT_TRACE("non-stub getter");
3201 if (!SPROP_HAS_VALID_SLOT(sprop
, OBJ_SCOPE(obj
)))
3202 ABORT_TRACE("no valid slot");
3205 if (!PCVAL_IS_SLOT(pcval
))
3206 ABORT_TRACE("PCE is not a slot");
3207 slot
= PCVAL_TO_SLOT(pcval
);
3213 TraceRecorder::stobj_set_slot(LIns
* obj_ins
, unsigned slot
, LIns
*& dslots_ins
, LIns
* v_ins
)
3215 if (slot
< JS_INITIAL_NSLOTS
) {
3216 addName(lir
->insStorei(v_ins
, obj_ins
,
3217 offsetof(JSObject
, fslots
) + slot
* sizeof(jsval
)),
3218 "set_slot(fslots)");
3221 dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
3222 addName(lir
->insStorei(v_ins
, dslots_ins
,
3223 (slot
- JS_INITIAL_NSLOTS
) * sizeof(jsval
)),
3229 TraceRecorder::stobj_get_fslot(LIns
* obj_ins
, unsigned slot
)
3231 JS_ASSERT(slot
< JS_INITIAL_NSLOTS
);
3232 return lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, fslots
) + slot
* sizeof(jsval
));
3236 TraceRecorder::stobj_get_slot(LIns
* obj_ins
, unsigned slot
, LIns
*& dslots_ins
)
3238 if (slot
< JS_INITIAL_NSLOTS
)
3239 return stobj_get_fslot(obj_ins
, slot
);
3242 dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
3243 return lir
->insLoad(LIR_ldp
, dslots_ins
, (slot
- JS_INITIAL_NSLOTS
) * sizeof(jsval
));
3247 TraceRecorder::native_set(LIns
* obj_ins
, JSScopeProperty
* sprop
, LIns
*& dslots_ins
, LIns
* v_ins
)
3249 if (SPROP_HAS_STUB_SETTER(sprop
) && sprop
->slot
!= SPROP_INVALID_SLOT
) {
3250 stobj_set_slot(obj_ins
, sprop
->slot
, dslots_ins
, v_ins
);
3253 ABORT_TRACE("unallocated or non-stub sprop");
3257 TraceRecorder::native_get(LIns
* obj_ins
, LIns
* pobj_ins
, JSScopeProperty
* sprop
,
3258 LIns
*& dslots_ins
, LIns
*& v_ins
)
3260 if (!SPROP_HAS_STUB_GETTER(sprop
))
3263 if (sprop
->slot
!= SPROP_INVALID_SLOT
)
3264 v_ins
= stobj_get_slot(pobj_ins
, sprop
->slot
, dslots_ins
);
3266 v_ins
= INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
));
3270 // So box_jsval can emit no LIR_or at all to tag an object jsval.
3271 JS_STATIC_ASSERT(JSVAL_OBJECT
== 0);
3274 TraceRecorder::box_jsval(jsval v
, LIns
*& v_ins
)
3277 LIns
* args
[] = { v_ins
, cx_ins
};
3278 v_ins
= lir
->insCall(F_BoxDouble
, args
);
3279 guard(false, lir
->ins2(LIR_eq
, v_ins
, INS_CONST(JSVAL_ERROR_COOKIE
)),
3283 switch (JSVAL_TAG(v
)) {
3285 v_ins
= lir
->ins2i(LIR_pior
, lir
->ins2i(LIR_pilsh
, v_ins
, JSVAL_TAGBITS
), JSVAL_BOOLEAN
);
3290 v_ins
= lir
->ins2(LIR_pior
, v_ins
, INS_CONST(JSVAL_STRING
));
3297 TraceRecorder::unbox_jsval(jsval v
, LIns
*& v_ins
)
3300 // JSVAL_IS_NUMBER(v)
3302 lir
->ins_eq0(lir
->ins2(LIR_pior
,
3303 lir
->ins2(LIR_piand
, v_ins
, INS_CONST(JSVAL_INT
)),
3305 lir
->ins2(LIR_piand
, v_ins
,
3306 INS_CONST(JSVAL_TAGMASK
)),
3309 LIns
* args
[] = { v_ins
};
3310 v_ins
= lir
->insCall(F_UnboxDouble
, args
);
3313 switch (JSVAL_TAG(v
)) {
3317 lir
->ins2(LIR_piand
, v_ins
, INS_CONST(JSVAL_TAGMASK
)),
3320 v_ins
= lir
->ins2i(LIR_ush
, v_ins
, JSVAL_TAGBITS
);
3325 lir
->ins2(LIR_piand
, v_ins
, INS_CONST(JSVAL_TAGMASK
)),
3332 lir
->ins2(LIR_piand
, v_ins
, INS_CONST(JSVAL_TAGMASK
)),
3335 v_ins
= lir
->ins2(LIR_piand
, v_ins
, INS_CONST(~JSVAL_TAGMASK
));
3342 TraceRecorder::getThis(LIns
*& this_ins
)
3344 if (cx
->fp
->callee
) { /* in a function */
3345 if (JSVAL_IS_NULL(cx
->fp
->argv
[-1]))
3347 this_ins
= get(&cx
->fp
->argv
[-1]);
3348 guard(false, lir
->ins_eq0(this_ins
), MISMATCH_EXIT
);
3349 } else { /* in global code */
3350 JS_ASSERT(!JSVAL_IS_NULL(cx
->fp
->argv
[-1]));
3351 this_ins
= scopeChain();
3357 TraceRecorder::guardClass(JSObject
* obj
, LIns
* obj_ins
, JSClass
* clasp
)
3359 if (STOBJ_GET_CLASS(obj
) != clasp
)
3362 LIns
* class_ins
= stobj_get_fslot(obj_ins
, JSSLOT_CLASS
);
3363 class_ins
= lir
->ins2(LIR_piand
, class_ins
, lir
->insImm(~3));
3366 JS_snprintf(namebuf
, sizeof namebuf
, "guard(class is %s)", clasp
->name
);
3367 guard(true, addName(lir
->ins2(LIR_eq
, class_ins
, INS_CONSTPTR(clasp
)), namebuf
),
3373 TraceRecorder::guardDenseArray(JSObject
* obj
, LIns
* obj_ins
)
3375 return guardClass(obj
, obj_ins
, &js_ArrayClass
);
3379 TraceRecorder::guardDenseArrayIndex(JSObject
* obj
, jsint idx
, LIns
* obj_ins
,
3380 LIns
* dslots_ins
, LIns
* idx_ins
)
3382 jsuint length
= ARRAY_DENSE_LENGTH(obj
);
3383 if (!((jsuint
)idx
< length
&& idx
< obj
->fslots
[JSSLOT_ARRAY_LENGTH
]))
3386 LIns
* length_ins
= stobj_get_fslot(obj_ins
, JSSLOT_ARRAY_LENGTH
);
3388 // guard(index >= 0)
3389 guard(true, lir
->ins2i(LIR_ge
, idx_ins
, 0), MISMATCH_EXIT
);
3391 // guard(index < length)
3392 guard(true, lir
->ins2(LIR_lt
, idx_ins
, length_ins
), MISMATCH_EXIT
);
3394 // guard(index < capacity)
3395 guard(false, lir
->ins_eq0(dslots_ins
), MISMATCH_EXIT
);
3397 lir
->ins2(LIR_lt
, idx_ins
, lir
->insLoad(LIR_ldp
, dslots_ins
, 0 - sizeof(jsval
))),
3403 TraceRecorder::clearFrameSlotsFromCache()
3405 /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
3406 VM stack might map to different locations on the native stack depending on the
3407 number of arguments (i.e.) of the next call, so we have to make sure we map
3408 those in to the cache with the right offsets. */
3409 JSStackFrame
* fp
= cx
->fp
;
3414 vpstop
= &fp
->argv
[fp
->fun
->nargs
];
3416 nativeFrameTracker
.set(vp
++, (LIns
*)0);
3419 vpstop
= &fp
->slots
[fp
->script
->nslots
];
3421 nativeFrameTracker
.set(vp
++, (LIns
*)0);
3425 TraceRecorder::record_EnterFrame()
3427 if (++callDepth
>= MAX_CALLDEPTH
)
3428 ABORT_TRACE("exceeded maximum call depth");
3429 debug_only_v(printf("EnterFrame %s, callDepth=%d\n",
3430 js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
),
3432 JSStackFrame
* fp
= cx
->fp
;
3433 LIns
* void_ins
= INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
));
3435 jsval
* vp
= &fp
->argv
[fp
->argc
];
3436 jsval
* vpstop
= vp
+ (fp
->fun
->nargs
- fp
->argc
);
3437 while (vp
< vpstop
) {
3438 if (vp
>= fp
->down
->regs
->sp
)
3439 nativeFrameTracker
.set(vp
, (LIns
*)0);
3440 set(vp
++, void_ins
, true);
3444 vpstop
= vp
+ fp
->script
->nfixed
;
3446 set(vp
++, void_ins
, true);
3451 TraceRecorder::record_LeaveFrame()
3455 printf("LeaveFrame (back to %s), callDepth=%d\n",
3456 js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
),
3459 if (callDepth
-- <= 0)
3460 ABORT_TRACE("returned out of a loop we started tracing");
3462 // LeaveFrame gets called after the interpreter popped the frame and
3463 // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
3464 atoms
= cx
->fp
->script
->atomMap
.vector
;
3465 set(&stackval(-1), rval_ins
, true);
3469 bool TraceRecorder::record_JSOP_INTERRUPT()
3475 TraceRecorder::record_JSOP_PUSH()
3477 stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
)));
3482 TraceRecorder::record_JSOP_POPV()
3484 // We should not have to implement JSOP_POPV or JSOP_STOP's rval setting.
3488 bool TraceRecorder::record_JSOP_ENTERWITH()
3492 bool TraceRecorder::record_JSOP_LEAVEWITH()
3498 TraceRecorder::record_JSOP_RETURN()
3500 jsval
& rval
= stackval(-1);
3501 JSStackFrame
*fp
= cx
->fp
;
3502 if (cx
->fp
->flags
& JSFRAME_CONSTRUCTING
) {
3503 if (JSVAL_IS_PRIMITIVE(rval
)) {
3504 JS_ASSERT(OBJECT_TO_JSVAL(fp
->thisp
) == fp
->argv
[-1]);
3505 rval_ins
= get(&fp
->argv
[-1]);
3508 rval_ins
= get(&rval
);
3510 debug_only_v(printf("returning from %s\n", js_AtomToPrintableString(cx
, cx
->fp
->fun
->atom
)););
3511 clearFrameSlotsFromCache();
3516 TraceRecorder::record_JSOP_GOTO()
3522 TraceRecorder::record_JSOP_IFEQ()
3524 trackCfgMerges(cx
->fp
->regs
->pc
);
3529 TraceRecorder::record_JSOP_IFNE()
3535 TraceRecorder::record_JSOP_ARGUMENTS()
3541 TraceRecorder::record_JSOP_DUP()
3543 stack(0, get(&stackval(-1)));
3548 TraceRecorder::record_JSOP_DUP2()
3550 stack(0, get(&stackval(-2)));
3551 stack(1, get(&stackval(-1)));
3556 TraceRecorder::record_JSOP_SETCONST()
3562 TraceRecorder::record_JSOP_BITOR()
3564 return binary(LIR_or
);
3568 TraceRecorder::record_JSOP_BITXOR()
3570 return binary(LIR_xor
);
3574 TraceRecorder::record_JSOP_BITAND()
3576 return binary(LIR_and
);
3580 TraceRecorder::record_JSOP_EQ()
3582 return equal(CMP_TRY_BRANCH_AFTER_COND
);
3586 TraceRecorder::record_JSOP_NE()
3588 return equal(CMP_NEGATE
| CMP_TRY_BRANCH_AFTER_COND
);
3592 TraceRecorder::record_JSOP_LT()
3594 return cmp(LIR_flt
, CMP_TRY_BRANCH_AFTER_COND
);
3598 TraceRecorder::record_JSOP_LE()
3600 return cmp(LIR_fle
, CMP_TRY_BRANCH_AFTER_COND
);
3604 TraceRecorder::record_JSOP_GT()
3606 return cmp(LIR_fgt
, CMP_TRY_BRANCH_AFTER_COND
);
3610 TraceRecorder::record_JSOP_GE()
3612 return cmp(LIR_fge
, CMP_TRY_BRANCH_AFTER_COND
);
3616 TraceRecorder::record_JSOP_LSH()
3618 return binary(LIR_lsh
);
3622 TraceRecorder::record_JSOP_RSH()
3624 return binary(LIR_rsh
);
3628 TraceRecorder::record_JSOP_URSH()
3630 return binary(LIR_ush
);
3634 TraceRecorder::record_JSOP_ADD()
3636 jsval
& r
= stackval(-1);
3637 jsval
& l
= stackval(-2);
3638 if (JSVAL_IS_STRING(l
)) {
3639 LIns
* args
[] = { NULL
, get(&l
), cx_ins
};
3640 if (JSVAL_IS_STRING(r
)) {
3643 LIns
* args2
[] = { get(&r
), cx_ins
};
3644 if (JSVAL_IS_NUMBER(r
)) {
3645 args
[0] = lir
->insCall(F_NumberToString
, args2
);
3646 } else if (JSVAL_IS_OBJECT(r
)) {
3647 args
[0] = lir
->insCall(F_ObjectToString
, args2
);
3649 ABORT_TRACE("untraceable right operand to string-JSOP_ADD");
3651 guard(false, lir
->ins_eq0(args
[0]), OOM_EXIT
);
3653 LIns
* concat
= lir
->insCall(F_ConcatStrings
, args
);
3654 guard(false, lir
->ins_eq0(concat
), OOM_EXIT
);
3658 return binary(LIR_fadd
);
3662 TraceRecorder::record_JSOP_SUB()
3664 return binary(LIR_fsub
);
3668 TraceRecorder::record_JSOP_MUL()
3670 return binary(LIR_fmul
);
3674 TraceRecorder::record_JSOP_DIV()
3676 return binary(LIR_fdiv
);
3680 TraceRecorder::record_JSOP_MOD()
3682 jsval
& r
= stackval(-1);
3683 jsval
& l
= stackval(-2);
3684 if (isNumber(l
) && isNumber(r
)) {
3685 LIns
* args
[] = { get(&r
), get(&l
) };
3686 set(&l
, lir
->insCall(F_dmod
, args
));
3693 TraceRecorder::record_JSOP_NOT()
3695 jsval
& v
= stackval(-1);
3696 if (JSVAL_IS_BOOLEAN(v
) || JSVAL_IS_OBJECT(v
)) {
3697 set(&v
, lir
->ins_eq0(get(&v
)));
3704 TraceRecorder::record_JSOP_BITNOT()
3706 return unary(LIR_not
);
3710 TraceRecorder::record_JSOP_NEG()
3712 return unary(LIR_fneg
);
3715 enum JSTNErrType
{ INFALLIBLE
, FAIL_NULL
, FAIL_NEG
, FAIL_VOID
};
3716 struct JSTraceableNative
{
3717 JSFastNative native
;
3720 const char *argtypes
;
3721 JSTNErrType errtype
;
3725 js_Array(JSContext
* cx
, JSObject
* obj
, uintN argc
, jsval
* argv
, jsval
* rval
);
3728 js_Object(JSContext
*cx
, JSObject
*obj
, uintN argc
, jsval
*argv
, jsval
*rval
);
3731 TraceRecorder::record_JSOP_NEW()
3733 /* Get immediate argc and find the constructor function. */
3734 jsbytecode
*pc
= cx
->fp
->regs
->pc
;
3735 unsigned argc
= GET_ARGC(pc
);
3736 jsval
& fval
= stackval(0 - (2 + argc
));
3737 JS_ASSERT(&fval
>= StackBase(cx
->fp
));
3739 jsval
& tval
= stackval(0 - (argc
+ 1));
3740 LIns
* this_ins
= get(&tval
);
3741 if (this_ins
->isconstp() && !this_ins
->constvalp() && !guardShapelessCallee(fval
))
3745 * Require that the callee be a function object, to avoid guarding on its
3746 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
3747 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
3748 * the property cache and guard on the object (this) in which the callee
3749 * was found. So it's sufficient to test here that the particular function
3750 * is interpreted, not guard on that condition.
3752 * Bytecode sequences that push shapeless callees must guard on the callee
3753 * class being Function and the function being interpreted.
3755 JS_ASSERT(VALUE_IS_FUNCTION(cx
, fval
));
3756 JSFunction
*fun
= GET_FUNCTION_PRIVATE(cx
, JSVAL_TO_OBJECT(fval
));
3758 if (FUN_INTERPRETED(fun
)) {
3759 LIns
* args
[] = { get(&fval
), cx_ins
};
3760 LIns
* tv_ins
= lir
->insCall(F_FastNewObject
, args
);
3761 guard(false, lir
->ins_eq0(tv_ins
), OOM_EXIT
);
3762 jsval
& tv
= stackval(0 - (1 + argc
));
3764 return interpretedFunctionCall(fval
, fun
, argc
);
3767 static JSTraceableNative knownNatives
[] = {
3768 { (JSFastNative
)js_Array
, F_FastNewArray
, "pC", "", FAIL_NULL
},
3769 { (JSFastNative
)js_Array
, F_Array_1int
, "pC", "i", FAIL_NULL
},
3770 { (JSFastNative
)js_Array
, F_Array_2obj
, "pC", "oo", FAIL_NULL
},
3771 { (JSFastNative
)js_Array
, F_Array_3num
, "pC", "ddd", FAIL_NULL
},
3772 { (JSFastNative
)js_Object
, F_FastNewObject
, "fC", "", FAIL_NULL
},
3775 for (uintN i
= 0; i
< JS_ARRAY_LENGTH(knownNatives
); i
++) {
3776 JSTraceableNative
* known
= &knownNatives
[i
];
3777 if ((JSFastNative
)fun
->u
.n
.native
!= known
->native
)
3780 uintN knownargc
= strlen(known
->argtypes
);
3781 if (argc
!= knownargc
)
3784 intN prefixc
= strlen(known
->prefix
);
3786 LIns
** argp
= &args
[argc
+ prefixc
- 1];
3790 memset(args
, 0xCD, sizeof(args
));
3793 #define HANDLE_PREFIX(i) \
3795 argtype = known->prefix[i]; \
3796 if (argtype == 'C') { \
3798 } else if (argtype == 'T') { \
3800 } else if (argtype == 'f') { \
3801 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval)); \
3802 } else if (argtype == 'p') { \
3803 JSObject* ctor = JSVAL_TO_OBJECT(fval); \
3805 if (!OBJ_GET_PROPERTY(cx, ctor, \
3806 ATOM_TO_JSID(cx->runtime->atomState \
3807 .classPrototypeAtom), \
3809 ABORT_TRACE("error getting prototype from constructor"); \
3811 if (!JSVAL_IS_OBJECT(pval)) \
3812 ABORT_TRACE("got primitive prototype from constructor"); \
3813 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(pval)); \
3815 JS_NOT_REACHED("unknown prefix arg type"); \
3833 JS_NOT_REACHED("illegal number of prefix args");
3836 #undef HANDLE_PREFIX
3838 #define HANDLE_ARG(i) \
3840 jsval& arg = stackval(-(i + 1)); \
3841 argtype = known->argtypes[i]; \
3842 if (argtype == 'd' || argtype == 'i') { \
3843 if (!isNumber(arg)) \
3844 continue; /* might have another specialization for arg */ \
3845 *argp = get(&arg); \
3846 if (argtype == 'i') \
3847 *argp = f2i(*argp); \
3848 } else if (argtype == 'o') { \
3849 if (!JSVAL_IS_OBJECT(arg)) \
3850 continue; /* might have another specialization for arg */ \
3851 *argp = get(&arg); \
3853 continue; /* might have another specialization for arg */ \
3858 switch (knownargc
) {
3874 JS_NOT_REACHED("illegal number of args to traceable native");
3880 JS_ASSERT(args
[0] != (LIns
*)0xcdcdcdcd);
3883 LIns
* res_ins
= lir
->insCall(known
->builtin
, args
);
3884 switch (known
->errtype
) {
3886 guard(false, lir
->ins_eq0(res_ins
), OOM_EXIT
);
3890 res_ins
= lir
->ins1(LIR_i2f
, res_ins
);
3893 guard(false, lir
->ins2(LIR_flt
, res_ins
, lir
->insImmq(u
.u64
)), OOM_EXIT
);
3897 guard(false, lir
->ins2i(LIR_eq
, res_ins
, JSVAL_TO_BOOLEAN(JSVAL_VOID
)), OOM_EXIT
);
3901 set(&fval
, res_ins
);
3906 ABORT_TRACE("can't trace native constructor");
3908 ABORT_TRACE("can't trace unknown constructor");
3912 TraceRecorder::record_JSOP_DELNAME()
3918 TraceRecorder::record_JSOP_DELPROP()
3924 TraceRecorder::record_JSOP_DELELEM()
3930 TraceRecorder::record_JSOP_TYPEOF()
3932 jsval
& r
= stackval(-1);
3934 if (JSVAL_IS_STRING(r
)) {
3935 type
= INS_CONSTPTR(ATOM_TO_STRING(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_STRING
]));
3936 } else if (isNumber(r
)) {
3937 type
= INS_CONSTPTR(ATOM_TO_STRING(cx
->runtime
->atomState
.typeAtoms
[JSTYPE_NUMBER
]));
3939 LIns
* args
[] = { get(&r
), cx_ins
};
3940 if (JSVAL_TAG(r
) == JSVAL_BOOLEAN
) {
3941 // We specialize identically for boolean and undefined. We must not have a hole here.
3942 // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
3943 JS_ASSERT(JSVAL_TO_BOOLEAN(r
) <= 2);
3944 type
= lir
->insCall(F_TypeOfBoolean
, args
);
3946 JS_ASSERT(JSVAL_IS_OBJECT(r
));
3947 type
= lir
->insCall(F_TypeOfObject
, args
);
3955 TraceRecorder::record_JSOP_VOID()
3957 stack(-1, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
)));
3962 js_num_parseFloat(JSContext
* cx
, uintN argc
, jsval
* vp
);
3965 js_num_parseInt(JSContext
* cx
, uintN argc
, jsval
* vp
);
3968 TraceRecorder::record_JSOP_INCNAME()
3974 TraceRecorder::record_JSOP_INCPROP()
3980 TraceRecorder::record_JSOP_INCELEM()
3986 TraceRecorder::record_JSOP_DECNAME()
3992 TraceRecorder::record_JSOP_DECPROP()
3998 TraceRecorder::record_JSOP_DECELEM()
4004 TraceRecorder::incName(jsint incr
, bool pre
)
4009 LIns
* v_ins
= get(vp
);
4010 if (!inc(*vp
, v_ins
, incr
, pre
))
4017 TraceRecorder::record_JSOP_NAMEINC()
4019 return incName(1, false);
4023 TraceRecorder::record_JSOP_PROPINC()
4025 return incProp(1, false);
4028 // XXX consolidate with record_JSOP_GETELEM code...
4030 TraceRecorder::record_JSOP_ELEMINC()
4032 return incElem(1, false);
4036 TraceRecorder::record_JSOP_NAMEDEC()
4038 return incName(-1, true);
4042 TraceRecorder::record_JSOP_PROPDEC()
4044 return incProp(-1, false);
4048 TraceRecorder::record_JSOP_ELEMDEC()
4050 return incElem(-1, false);
4054 TraceRecorder::record_JSOP_GETPROP()
4056 return getProp(stackval(-1));
4060 TraceRecorder::record_JSOP_SETPROP()
4062 jsval
& r
= stackval(-1);
4063 jsval
& l
= stackval(-2);
4065 if (JSVAL_IS_PRIMITIVE(l
))
4066 ABORT_TRACE("primitive this for SETPROP");
4068 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
4070 if (obj
->map
->ops
->setProperty
!= js_SetProperty
)
4071 ABORT_TRACE("non-native JSObjectOps::setProperty");
4073 LIns
* obj_ins
= get(&l
);
4075 JSPropertyCache
* cache
= &JS_PROPERTY_CACHE(cx
);
4076 uint32 kshape
= OBJ_SCOPE(obj
)->shape
;
4077 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
4079 JSPropCacheEntry
* entry
= &cache
->table
[PROPERTY_CACHE_HASH_PC(pc
, kshape
)];
4080 if (entry
->kpc
!= pc
|| entry
->kshape
!= kshape
)
4081 ABORT_TRACE("cache miss");
4082 if (!PCVAL_IS_SPROP(entry
->vword
))
4083 ABORT_TRACE("hit non-sprop cache value");
4085 LIns
* map_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, (int)offsetof(JSObject
, map
));
4087 if (!map_is_native(obj
->map
, map_ins
, ops_ins
, offsetof(JSObjectOps
, setProperty
)))
4090 // The global object's shape is guarded at trace entry.
4091 if (obj
!= globalObj
) {
4092 LIns
* shape_ins
= addName(lir
->insLoad(LIR_ld
, map_ins
, offsetof(JSScope
, shape
)), "shape");
4093 guard(true, addName(lir
->ins2i(LIR_eq
, shape_ins
, kshape
), "guard(shape)"),
4097 JSScope
* scope
= OBJ_SCOPE(obj
);
4098 JSScopeProperty
* sprop
= PCVAL_TO_SPROP(entry
->vword
);
4099 if (scope
->object
!= obj
|| !SCOPE_HAS_PROPERTY(scope
, sprop
)) {
4100 LIns
* args
[] = { INS_CONSTPTR(sprop
), obj_ins
, cx_ins
};
4101 LIns
* ok_ins
= lir
->insCall(F_AddProperty
, args
);
4102 guard(false, lir
->ins_eq0(ok_ins
), MISMATCH_EXIT
);
4105 LIns
* dslots_ins
= NULL
;
4106 LIns
* v_ins
= get(&r
);
4107 LIns
* boxed_ins
= v_ins
;
4108 if (!box_jsval(r
, boxed_ins
))
4110 if (!native_set(obj_ins
, sprop
, dslots_ins
, boxed_ins
))
4112 if (*pc
== JSOP_SETPROP
&& pc
[JSOP_SETPROP_LENGTH
] != JSOP_POP
)
4118 TraceRecorder::record_JSOP_GETELEM()
4120 jsval
& r
= stackval(-1);
4121 jsval
& l
= stackval(-2);
4123 if (JSVAL_IS_STRING(l
) && JSVAL_IS_INT(r
)) {
4126 i
= JSVAL_TO_INT(r
);
4127 if ((size_t)i
>= JSSTRING_LENGTH(JSVAL_TO_STRING(l
)))
4128 ABORT_TRACE("Invalid string index in JSOP_GETELEM");
4130 LIns
* args
[] = { f2i(get(&r
)), get(&l
), cx_ins
};
4131 LIns
* unitstr_ins
= lir
->insCall(F_String_getelem
, args
);
4132 guard(false, lir
->ins_eq0(unitstr_ins
), MISMATCH_EXIT
);
4133 set(&l
, unitstr_ins
);
4137 if (!JSVAL_IS_PRIMITIVE(l
) && JSVAL_IS_STRING(r
)) {
4141 if (!js_ValueToStringId(cx
, r
, &id
))
4143 r
= ID_TO_VALUE(id
);
4144 if (!OBJ_GET_PROPERTY(cx
, JSVAL_TO_OBJECT(l
), id
, &v
))
4147 LIns
* args
[] = { get(&r
), get(&l
), cx_ins
};
4148 LIns
* v_ins
= lir
->insCall(F_Any_getelem
, args
);
4149 guard(false, lir
->ins2(LIR_eq
, v_ins
, INS_CONST(JSVAL_ERROR_COOKIE
)),
4151 if (!unbox_jsval(v
, v_ins
))
4152 ABORT_TRACE("JSOP_GETELEM");
4160 if (!elem(l
, r
, vp
, v_ins
, addr_ins
))
4167 TraceRecorder::record_JSOP_SETELEM()
4169 jsval
& v
= stackval(-1);
4170 jsval
& r
= stackval(-2);
4171 jsval
& l
= stackval(-3);
4173 /* no guards for type checks, trace specialized this already */
4174 if (JSVAL_IS_PRIMITIVE(l
))
4175 ABORT_TRACE("left JSOP_SETELEM operand is not an object");
4176 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
4177 LIns
* obj_ins
= get(&l
);
4179 if (JSVAL_IS_STRING(r
)) {
4180 LIns
* v_ins
= get(&v
);
4181 LIns
* unboxed_v_ins
= v_ins
;
4182 if (!box_jsval(v
, v_ins
))
4183 ABORT_TRACE("boxing string-indexed JSOP_SETELEM value");
4184 LIns
* args
[] = { v_ins
, get(&r
), get(&l
), cx_ins
};
4185 LIns
* ok_ins
= lir
->insCall(F_Any_setelem
, args
);
4186 guard(false, lir
->ins_eq0(ok_ins
), MISMATCH_EXIT
);
4187 set(&l
, unboxed_v_ins
);
4190 if (!JSVAL_IS_INT(r
))
4191 ABORT_TRACE("non-string, non-int JSOP_SETELEM index");
4193 /* make sure the object is actually a dense array */
4194 if (!guardDenseArray(obj
, obj_ins
))
4195 ABORT_TRACE("not a dense array");
4197 /* check that the index is within bounds */
4198 LIns
* idx_ins
= f2i(get(&r
));
4200 /* we have to check that its really an integer, but this check will to go away
4201 once we peel the loop type down to integer for this slot */
4202 guard(true, lir
->ins2(LIR_feq
, get(&r
), lir
->ins1(LIR_i2f
, idx_ins
)), MISMATCH_EXIT
);
4203 /* ok, box the value we are storing, store it and we are done */
4204 LIns
* v_ins
= get(&v
);
4205 LIns
* boxed_ins
= v_ins
;
4206 if (!box_jsval(v
, boxed_ins
))
4207 ABORT_TRACE("boxing failed");
4208 LIns
* args
[] = { boxed_ins
, idx_ins
, obj_ins
, cx_ins
};
4209 LIns
* res_ins
= lir
->insCall(F_Array_dense_setelem
, args
);
4210 guard(false, lir
->ins_eq0(res_ins
), MISMATCH_EXIT
);
4212 jsbytecode
* pc
= cx
->fp
->regs
->pc
;
4213 if (*pc
== JSOP_SETELEM
&& pc
[JSOP_SETELEM_LENGTH
] != JSOP_POP
)
4219 TraceRecorder::record_JSOP_CALLNAME()
4221 JSObject
* obj
= cx
->fp
->scopeChain
;
4222 if (obj
!= globalObj
) {
4224 if (!activeCallOrGlobalSlot(obj
, vp
))
4227 stack(1, INS_CONSTPTR(NULL
));
4231 LIns
* obj_ins
= scopeChain();
4234 if (!test_property_cache(obj
, obj_ins
, obj2
, pcval
))
4237 if (PCVAL_IS_NULL(pcval
) || !PCVAL_IS_OBJECT(pcval
))
4238 ABORT_TRACE("callee is not an object");
4239 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval
)));
4241 stack(0, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval
)));
4247 TraceRecorder::record_JSOP_GETUPVAR()
4249 ABORT_TRACE("GETUPVAR");
4253 TraceRecorder::record_JSOP_CALLUPVAR()
4255 ABORT_TRACE("CALLUPVAR");
4259 TraceRecorder::guardShapelessCallee(jsval
& callee
)
4261 if (!VALUE_IS_FUNCTION(cx
, callee
))
4262 ABORT_TRACE("shapeless callee is not a function");
4265 addName(lir
->ins2(LIR_eq
, get(&callee
), INS_CONSTPTR(JSVAL_TO_OBJECT(callee
))),
4266 "guard(shapeless callee)"),
4272 TraceRecorder::interpretedFunctionCall(jsval
& fval
, JSFunction
* fun
, uintN argc
)
4274 JSStackFrame
* fp
= cx
->fp
;
4276 // TODO: track the copying via the tracker...
4277 if (argc
< fun
->nargs
&&
4278 jsuword(fp
->regs
->sp
+ (fun
->nargs
- argc
)) > cx
->stackPool
.current
->limit
) {
4279 ABORT_TRACE("can't trace calls with too few args requiring argv move");
4283 JSVAL_TO_OBJECT(fval
),
4285 { { fp
->regs
->sp
- fp
->slots
, argc
} }
4288 unsigned callDepth
= getCallDepth();
4289 if (callDepth
>= treeInfo
->maxCallDepth
)
4290 treeInfo
->maxCallDepth
= callDepth
+ 1;
4292 lir
->insStorei(INS_CONSTPTR(fi
.callee
), lirbuf
->rp
,
4293 callDepth
* sizeof(FrameInfo
) + offsetof(FrameInfo
, callee
));
4294 lir
->insStorei(INS_CONSTPTR(fi
.callpc
), lirbuf
->rp
,
4295 callDepth
* sizeof(FrameInfo
) + offsetof(FrameInfo
, callpc
));
4296 lir
->insStorei(INS_CONST(fi
.word
), lirbuf
->rp
,
4297 callDepth
* sizeof(FrameInfo
) + offsetof(FrameInfo
, word
));
4299 atoms
= fun
->u
.i
.script
->atomMap
.vector
;
4303 #define KNOWN_NATIVE_DECL(name) JSBool name(JSContext* cx, uintN argc, jsval* vp);
4305 KNOWN_NATIVE_DECL(js_fun_apply
)
4306 KNOWN_NATIVE_DECL(js_math_ceil
)
4307 KNOWN_NATIVE_DECL(js_math_cos
)
4308 KNOWN_NATIVE_DECL(js_math_floor
)
4309 KNOWN_NATIVE_DECL(js_math_pow
)
4310 KNOWN_NATIVE_DECL(js_math_random
)
4311 KNOWN_NATIVE_DECL(js_math_sin
)
4312 KNOWN_NATIVE_DECL(js_math_sqrt
)
4313 KNOWN_NATIVE_DECL(js_num_toString
)
4314 KNOWN_NATIVE_DECL(js_str_charAt
)
4315 KNOWN_NATIVE_DECL(js_str_charCodeAt
)
4316 KNOWN_NATIVE_DECL(js_str_concat
)
4317 KNOWN_NATIVE_DECL(js_str_fromCharCode
)
4318 KNOWN_NATIVE_DECL(js_str_substring
)
4321 TraceRecorder::record_JSOP_CALL()
4323 jsbytecode
*pc
= cx
->fp
->regs
->pc
;
4324 uintN argc
= GET_ARGC(pc
);
4325 jsval
& fval
= stackval(0 - (argc
+ 2));
4326 JS_ASSERT(&fval
>= StackBase(cx
->fp
));
4328 jsval
& tval
= stackval(0 - (argc
+ 1));
4329 LIns
* this_ins
= get(&tval
);
4330 if (this_ins
->isconstp() && !this_ins
->constvalp() && !guardShapelessCallee(fval
))
4334 * Require that the callee be a function object, to avoid guarding on its
4335 * class here. We know if the callee and this were pushed by JSOP_CALLNAME
4336 * or JSOP_CALLPROP that callee is a *particular* function, since these hit
4337 * the property cache and guard on the object (this) in which the callee
4338 * was found. So it's sufficient to test here that the particular function
4339 * is interpreted, not guard on that condition.
4341 * Bytecode sequences that push shapeless callees must guard on the callee
4342 * class being Function and the function being interpreted.
4344 JS_ASSERT(VALUE_IS_FUNCTION(cx
, fval
));
4345 JSFunction
* fun
= GET_FUNCTION_PRIVATE(cx
, JSVAL_TO_OBJECT(fval
));
4347 if (FUN_INTERPRETED(fun
))
4348 return interpretedFunctionCall(fval
, fun
, argc
);
4350 if (FUN_SLOW_NATIVE(fun
))
4351 ABORT_TRACE("slow native");
4353 static JSTraceableNative knownNatives
[] = {
4354 { js_array_join
, F_Array_p_join
, "TC", "s", FAIL_NULL
},
4355 { js_math_sin
, F_Math_sin
, "", "d", INFALLIBLE
},
4356 { js_math_cos
, F_Math_cos
, "", "d", INFALLIBLE
},
4357 { js_math_pow
, F_Math_pow
, "", "dd", INFALLIBLE
},
4358 { js_math_sqrt
, F_Math_sqrt
, "", "d", INFALLIBLE
},
4359 { js_math_floor
, F_Math_floor
, "", "d", INFALLIBLE
},
4360 { js_math_ceil
, F_Math_ceil
, "", "d", INFALLIBLE
},
4361 { js_math_random
, F_Math_random
, "R", "", INFALLIBLE
},
4362 { js_num_parseInt
, F_ParseInt
, "C", "s", INFALLIBLE
},
4363 { js_num_parseFloat
, F_ParseFloat
, "C", "s", INFALLIBLE
},
4364 { js_num_toString
, F_NumberToString
, "TC", "", FAIL_NULL
},
4365 { js_obj_hasOwnProperty
, F_Object_p_hasOwnProperty
,
4366 "TC", "s", FAIL_VOID
},
4367 { js_obj_propertyIsEnumerable
, F_Object_p_propertyIsEnumerable
,
4368 "TC", "s", FAIL_VOID
},
4369 { js_str_charAt
, F_String_getelem
, "TC", "i", FAIL_NULL
},
4370 { js_str_charCodeAt
, F_String_p_charCodeAt
, "T", "i", FAIL_NEG
},
4371 { js_str_concat
, F_String_p_concat_1int
, "TC", "i", FAIL_NULL
},
4372 { js_str_fromCharCode
, F_String_fromCharCode
, "C", "i", FAIL_NULL
},
4373 { js_str_match
, F_String_p_match
, "PTC", "r", FAIL_VOID
},
4374 { js_str_replace
, F_String_p_replace_str
, "TC", "sr", FAIL_NULL
},
4375 { js_str_replace
, F_String_p_replace_str2
,"TC", "ss", FAIL_NULL
},
4376 { js_str_replace
, F_String_p_replace_str3
,"TC", "sss", FAIL_NULL
},
4377 { js_str_split
, F_String_p_split
, "TC", "s", FAIL_NULL
},
4378 { js_str_substring
, F_String_p_substring
, "TC", "ii", FAIL_NULL
},
4379 { js_str_substring
, F_String_p_substring_1
, "TC", "i", FAIL_NULL
},
4380 { js_str_toLowerCase
, F_toLowerCase
, "TC", "", FAIL_NULL
},
4381 { js_str_toUpperCase
, F_toUpperCase
, "TC", "", FAIL_NULL
},
4385 LIns
* arg1_ins
= NULL
;
4386 jsval arg1
= JSVAL_VOID
;
4388 if ((JSFastNative
)fun
->u
.n
.native
== js_fun_apply
) {
4390 ABORT_TRACE("can't trace Function.prototype.apply with other than 2 args");
4392 jsval
& oval
= stackval(-2);
4393 if (JSVAL_IS_PRIMITIVE(oval
))
4394 ABORT_TRACE("can't trace Function.prototype.apply with primitive 1st arg");
4396 jsval
& aval
= stackval(-1);
4397 if (JSVAL_IS_PRIMITIVE(aval
))
4398 ABORT_TRACE("can't trace Function.prototype.apply with primitive 2nd arg");
4400 LIns
* aval_ins
= get(&aval
);
4401 if (!aval_ins
->isCall() || aval_ins
->fid() != F_Array_1str
)
4402 ABORT_TRACE("can't yet trace Function.prototype.apply on other than [str] 2nd arg");
4404 JSObject
* aobj
= JSVAL_TO_OBJECT(aval
);
4405 JS_ASSERT(OBJ_IS_ARRAY(cx
, aobj
));
4406 JS_ASSERT(aobj
->fslots
[JSSLOT_ARRAY_LENGTH
] == 1);
4407 JS_ASSERT(JSVAL_IS_STRING(aobj
->dslots
[0]));
4409 if (!guardShapelessCallee(tval
))
4411 JSObject
* tfunobj
= JSVAL_TO_OBJECT(tval
);
4412 JSFunction
* tfun
= GET_FUNCTION_PRIVATE(cx
, tfunobj
);
4413 if (FUN_INTERPRETED(tfun
))
4414 ABORT_TRACE("can't yet trace Function.prototype.apply for scripted functions");
4416 JSTraceableNative
* known
;
4418 known
= &knownNatives
[i
];
4419 if (known
->native
== (JSFastNative
)tfun
->u
.n
.native
)
4421 if (++i
== JS_ARRAY_LENGTH(knownNatives
))
4422 ABORT_TRACE("unknown native being Function.prototype.apply'ed");
4424 if (strlen(known
->argtypes
) != 1)
4425 ABORT_TRACE("known native being Function.prototype.apply'ed with wrong argc");
4427 this_ins
= get(&oval
);
4428 arg1_ins
= callArgN(aval_ins
, 1);
4429 arg1
= aobj
->dslots
[0];
4434 for (; i
< JS_ARRAY_LENGTH(knownNatives
); i
++) {
4435 JSTraceableNative
* known
= &knownNatives
[i
];
4436 if (known
->native
!= (JSFastNative
)fun
->u
.n
.native
)
4439 uintN knownargc
= strlen(known
->argtypes
);
4440 if (argc
!= knownargc
)
4443 intN prefixc
= strlen(known
->prefix
);
4445 LIns
** argp
= &args
[argc
+ prefixc
- 1];
4449 memset(args
, 0xCD, sizeof(args
));
4452 #define HANDLE_PREFIX(i) \
4454 argtype = known->prefix[i]; \
4455 if (argtype == 'C') { \
4457 } else if (argtype == 'T') { \
4459 } else if (argtype == 'R') { \
4460 *argp = INS_CONSTPTR(cx->runtime); \
4461 } else if (argtype == 'P') { \
4462 *argp = INS_CONSTPTR(pc); \
4464 JS_NOT_REACHED("unknown prefix arg type"); \
4482 JS_NOT_REACHED("illegal number of prefix args");
4485 #undef HANDLE_PREFIX
4488 * NB: do not use JS_BEGIN_MACRO/JS_END_MACRO or the do-while(0) loop they hide,
4489 * because of the embedded continues below.
4491 #define HANDLE_ARG(i) \
4493 jsval& arg = (i == 0 && arg1_ins) ? arg1 : stackval(-(i + 1)); \
4494 *argp = (i == 0 && arg1_ins) ? arg1_ins : get(&arg); \
4495 argtype = known->argtypes[i]; \
4496 if (argtype == 'd' || argtype == 'i') { \
4497 if (!isNumber(arg)) \
4498 continue; /* might have another specialization for arg */ \
4499 if (argtype == 'i') \
4500 *argp = f2i(*argp); \
4501 } else if (argtype == 's') { \
4502 if (!JSVAL_IS_STRING(arg)) \
4503 continue; /* might have another specialization for arg */ \
4504 } else if (argtype == 'r') { \
4505 if (!VALUE_IS_REGEXP(cx, arg)) \
4506 continue; /* might have another specialization for arg */ \
4507 } else if (argtype == 'f') { \
4508 if (!VALUE_IS_FUNCTION(cx, arg)) \
4509 continue; /* might have another specialization for arg */ \
4511 continue; /* might have another specialization for arg */ \
4516 switch (knownargc
) {
4532 JS_NOT_REACHED("illegal number of args to traceable native");
4538 JS_ASSERT(args
[0] != (LIns
*)0xcdcdcdcd);
4541 LIns
* res_ins
= lir
->insCall(known
->builtin
, args
);
4542 switch (known
->errtype
) {
4544 guard(false, lir
->ins_eq0(res_ins
), OOM_EXIT
);
4548 res_ins
= lir
->ins1(LIR_i2f
, res_ins
);
4551 guard(false, lir
->ins2(LIR_flt
, res_ins
, lir
->insImmq(u
.u64
)), OOM_EXIT
);
4555 guard(false, lir
->ins2i(LIR_eq
, res_ins
, JSVAL_TO_BOOLEAN(JSVAL_VOID
)), OOM_EXIT
);
4559 set(&fval
, res_ins
);
4563 /* Didn't find it. */
4564 ABORT_TRACE("unknown native");
4568 TraceRecorder::name(jsval
*& vp
)
4570 JSObject
* obj
= cx
->fp
->scopeChain
;
4571 if (obj
!= globalObj
)
4572 return activeCallOrGlobalSlot(obj
, vp
);
4574 /* Can't use prop here, because we don't want unboxing from global slots. */
4575 LIns
* obj_ins
= scopeChain();
4577 if (!test_property_cache_direct_slot(obj
, obj_ins
, slot
))
4580 if (slot
== SPROP_INVALID_SLOT
)
4581 ABORT_TRACE("name op can't find named property");
4583 if (!lazilyImportGlobalSlot(slot
))
4584 ABORT_TRACE("lazy import of global slot failed");
4586 vp
= &STOBJ_GET_SLOT(obj
, slot
);
4591 TraceRecorder::prop(JSObject
* obj
, LIns
* obj_ins
, uint32
& slot
, LIns
*& v_ins
)
4594 * Can't specialize to assert obj != global, must guard to avoid aliasing
4595 * stale homes of stacked global variables.
4597 if (obj
== globalObj
)
4598 ABORT_TRACE("prop op aliases global");
4599 guard(false, lir
->ins2(LIR_eq
, obj_ins
, INS_CONSTPTR(globalObj
)), MISMATCH_EXIT
);
4602 * Property cache ensures that we are dealing with an existing property,
4603 * and guards the shape for us.
4607 if (!test_property_cache(obj
, obj_ins
, obj2
, pcval
))
4610 /* Check for non-existent property reference, which results in undefined. */
4611 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->fp
->regs
->pc
];
4612 if (PCVAL_IS_NULL(pcval
)) {
4613 v_ins
= INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
));
4614 JS_ASSERT(cs
.ndefs
== 1);
4615 stack(-cs
.nuses
, v_ins
);
4616 slot
= SPROP_INVALID_SLOT
;
4620 /* Insist if setting on obj being the directly addressed object. */
4621 uint32 setflags
= (cs
.format
& (JOF_SET
| JOF_INCDEC
));
4622 LIns
* dslots_ins
= NULL
;
4625 ABORT_TRACE("JOF_SET opcode hit prototype chain");
4628 * We're getting a proto-property. Walk up the prototype chain emitting
4629 * proto slot loads, updating obj as we go, leaving obj set to obj2 with
4630 * obj_ins the last proto-load.
4632 while (obj
!= obj2
) {
4633 obj_ins
= stobj_get_slot(obj_ins
, JSSLOT_PROTO
, dslots_ins
);
4634 obj
= STOBJ_GET_PROTO(obj
);
4638 /* Don't trace getter or setter calls, our caller wants a direct slot. */
4639 if (PCVAL_IS_SPROP(pcval
)) {
4640 JSScopeProperty
* sprop
= PCVAL_TO_SPROP(pcval
);
4642 if (setflags
&& !SPROP_HAS_STUB_SETTER(sprop
))
4643 ABORT_TRACE("non-stub setter");
4644 if (setflags
!= JOF_SET
&& !SPROP_HAS_STUB_GETTER(sprop
)) {
4645 // FIXME 450335: generalize this away from regexp built-in getters.
4646 if (setflags
== 0 &&
4647 sprop
->getter
== js_RegExpClass
.getProperty
&&
4648 sprop
->shortid
< 0) {
4649 LIns
* args
[] = { INS_CONSTPTR(sprop
), obj_ins
, cx_ins
};
4650 v_ins
= lir
->insCall(F_CallGetter
, args
);
4651 if (!unbox_jsval((sprop
->shortid
== REGEXP_SOURCE
) ? JSVAL_STRING
: JSVAL_BOOLEAN
,
4653 ABORT_TRACE("unboxing");
4655 JS_ASSERT(cs
.ndefs
== 1);
4656 stack(-cs
.nuses
, v_ins
);
4659 ABORT_TRACE("non-stub getter");
4661 if (!SPROP_HAS_VALID_SLOT(sprop
, OBJ_SCOPE(obj
)))
4662 ABORT_TRACE("no valid slot");
4665 if (!PCVAL_IS_SLOT(pcval
))
4666 ABORT_TRACE("PCE is not a slot");
4667 slot
= PCVAL_TO_SLOT(pcval
);
4670 v_ins
= stobj_get_slot(obj_ins
, slot
, dslots_ins
);
4671 if (!unbox_jsval(STOBJ_GET_SLOT(obj
, slot
), v_ins
))
4672 ABORT_TRACE("unboxing");
4677 TraceRecorder::elem(jsval
& l
, jsval
& r
, jsval
*& vp
, LIns
*& v_ins
, LIns
*& addr_ins
)
4679 /* no guards for type checks, trace specialized this already */
4680 if (!JSVAL_IS_INT(r
) || JSVAL_IS_PRIMITIVE(l
))
4684 * Can't specialize to assert obj != global, must guard to avoid aliasing
4685 * stale homes of stacked global variables.
4687 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
4688 if (obj
== globalObj
)
4689 ABORT_TRACE("elem op aliases global");
4690 LIns
* obj_ins
= get(&l
);
4691 guard(false, lir
->ins2(LIR_eq
, obj_ins
, INS_CONSTPTR(globalObj
)), MISMATCH_EXIT
);
4693 /* make sure the object is actually a dense array */
4694 if (!guardDenseArray(obj
, obj_ins
))
4697 /* check that the index is within bounds */
4698 jsint idx
= JSVAL_TO_INT(r
);
4699 LIns
* idx_ins
= f2i(get(&r
));
4701 /* we have to check that its really an integer, but this check will to go away
4702 once we peel the loop type down to integer for this slot */
4703 guard(true, lir
->ins2(LIR_feq
, get(&r
), lir
->ins1(LIR_i2f
, idx_ins
)), MISMATCH_EXIT
);
4705 LIns
* dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
4706 if (!guardDenseArrayIndex(obj
, idx
, obj_ins
, dslots_ins
, idx_ins
))
4708 vp
= &obj
->dslots
[idx
];
4710 addr_ins
= lir
->ins2(LIR_piadd
, dslots_ins
,
4711 lir
->ins2i(LIR_pilsh
, idx_ins
, (sizeof(jsval
) == 4) ? 2 : 3));
4713 /* load the value, check the type (need to check JSVAL_HOLE only for booleans) */
4714 v_ins
= lir
->insLoad(LIR_ldp
, addr_ins
, 0);
4715 return unbox_jsval(*vp
, v_ins
);
4719 TraceRecorder::getProp(JSObject
* obj
, LIns
* obj_ins
)
4723 if (!prop(obj
, obj_ins
, slot
, v_ins
))
4726 const JSCodeSpec
& cs
= js_CodeSpec
[*cx
->fp
->regs
->pc
];
4727 JS_ASSERT(cs
.ndefs
== 1);
4728 stack(-cs
.nuses
, v_ins
);
4733 TraceRecorder::getProp(jsval
& v
)
4735 if (JSVAL_IS_PRIMITIVE(v
))
4736 ABORT_TRACE("primitive lhs");
4738 return getProp(JSVAL_TO_OBJECT(v
), get(&v
));
4742 TraceRecorder::record_JSOP_NAME()
4752 TraceRecorder::record_JSOP_DOUBLE()
4754 jsval v
= jsval(atoms
[GET_INDEX(cx
->fp
->regs
->pc
)]);
4756 u
.d
= *JSVAL_TO_DOUBLE(v
);
4757 stack(0, lir
->insImmq(u
.u64
));
4762 TraceRecorder::record_JSOP_STRING()
4764 JSAtom
* atom
= atoms
[GET_INDEX(cx
->fp
->regs
->pc
)];
4765 JS_ASSERT(ATOM_IS_STRING(atom
));
4766 stack(0, INS_CONSTPTR(ATOM_TO_STRING(atom
)));
4771 TraceRecorder::record_JSOP_ZERO()
4775 stack(0, lir
->insImmq(u
.u64
));
4780 TraceRecorder::record_JSOP_ONE()
4784 stack(0, lir
->insImmq(u
.u64
));
4789 TraceRecorder::record_JSOP_NULL()
4791 stack(0, INS_CONSTPTR(NULL
));
4796 TraceRecorder::record_JSOP_THIS()
4799 if (!getThis(this_ins
))
4806 TraceRecorder::record_JSOP_FALSE()
4808 stack(0, lir
->insImm(0));
4813 TraceRecorder::record_JSOP_TRUE()
4815 stack(0, lir
->insImm(1));
4820 TraceRecorder::record_JSOP_OR()
4826 TraceRecorder::record_JSOP_AND()
4832 TraceRecorder::record_JSOP_TABLESWITCH()
4838 TraceRecorder::record_JSOP_LOOKUPSWITCH()
4844 TraceRecorder::record_JSOP_STRICTEQ()
4850 TraceRecorder::record_JSOP_STRICTNE()
4852 return equal(CMP_NEGATE
);
4856 TraceRecorder::record_JSOP_CLOSURE()
4862 TraceRecorder::record_JSOP_OBJECT()
4864 JSStackFrame
* fp
= cx
->fp
;
4865 JSScript
* script
= fp
->script
;
4866 unsigned index
= atoms
- script
->atomMap
.vector
+ GET_INDEX(fp
->regs
->pc
);
4869 JS_GET_SCRIPT_OBJECT(script
, index
, obj
);
4870 stack(0, INS_CONSTPTR(obj
));
4875 TraceRecorder::record_JSOP_POP()
4881 TraceRecorder::record_JSOP_POS()
4883 jsval
& r
= stackval(-1);
4888 TraceRecorder::record_JSOP_TRAP()
4894 TraceRecorder::record_JSOP_GETARG()
4896 stack(0, arg(GET_ARGNO(cx
->fp
->regs
->pc
)));
4901 TraceRecorder::record_JSOP_SETARG()
4903 arg(GET_ARGNO(cx
->fp
->regs
->pc
), stack(-1));
4908 TraceRecorder::record_JSOP_GETLOCAL()
4910 stack(0, var(GET_SLOTNO(cx
->fp
->regs
->pc
)));
4915 TraceRecorder::record_JSOP_SETLOCAL()
4917 var(GET_SLOTNO(cx
->fp
->regs
->pc
), stack(-1));
4922 TraceRecorder::record_JSOP_UINT16()
4925 u
.d
= (jsdouble
)GET_UINT16(cx
->fp
->regs
->pc
);
4926 stack(0, lir
->insImmq(u
.u64
));
4931 TraceRecorder::record_JSOP_NEWINIT()
4933 JSProtoKey key
= JSProtoKey(GET_INT8(cx
->fp
->regs
->pc
));
4936 if (key
== JSProto_Array
) {
4937 if (!js_GetClassPrototype(cx
, globalObj
, INT_TO_JSID(key
), &obj
))
4939 fid
= F_FastNewArray
;
4941 if (!js_GetClassObject(cx
, globalObj
, key
, &obj
))
4943 fid
= F_FastNewObject
;
4945 LIns
* args
[] = { INS_CONSTPTR(obj
), cx_ins
};
4946 LIns
* v_ins
= lir
->insCall(fid
, args
);
4947 guard(false, lir
->ins_eq0(v_ins
), OOM_EXIT
);
4953 TraceRecorder::record_JSOP_ENDINIT()
4955 jsval
& v
= stackval(-1);
4956 JS_ASSERT(!JSVAL_IS_PRIMITIVE(v
));
4957 JSObject
* obj
= JSVAL_TO_OBJECT(v
);
4958 if (OBJ_IS_DENSE_ARRAY(cx
, obj
)) {
4959 // Until we get JSOP_NEWARRAY working, we do our optimizing here...
4960 if (obj
->fslots
[JSSLOT_ARRAY_LENGTH
] == 1 &&
4961 obj
->dslots
&& JSVAL_IS_STRING(obj
->dslots
[0])) {
4962 LIns
* v_ins
= get(&v
);
4963 JS_ASSERT(v_ins
->isCall() && v_ins
->fid() == F_FastNewArray
);
4964 LIns
* args
[] = { stack(1), callArgN(v_ins
, 1), cx_ins
};
4965 v_ins
= lir
->insCall(F_Array_1str
, args
);
4973 TraceRecorder::record_JSOP_INITPROP()
4975 // The common code avoids stacking the RHS if op is not JSOP_SETPROP.
4976 return record_JSOP_SETPROP();
4980 TraceRecorder::record_JSOP_INITELEM()
4982 return record_JSOP_SETELEM();
4986 TraceRecorder::record_JSOP_DEFSHARP()
4992 TraceRecorder::record_JSOP_USESHARP()
4998 TraceRecorder::record_JSOP_INCARG()
5000 return inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), 1);
5004 TraceRecorder::record_JSOP_INCLOCAL()
5006 return inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), 1);
5010 TraceRecorder::record_JSOP_DECARG()
5012 return inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), -1);
5016 TraceRecorder::record_JSOP_DECLOCAL()
5018 return inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), -1);
5022 TraceRecorder::record_JSOP_ARGINC()
5024 return inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), 1, false);
5028 TraceRecorder::record_JSOP_LOCALINC()
5030 return inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), 1, false);
5034 TraceRecorder::record_JSOP_ARGDEC()
5036 return inc(argval(GET_ARGNO(cx
->fp
->regs
->pc
)), -1, false);
5040 TraceRecorder::record_JSOP_LOCALDEC()
5042 return inc(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)), -1, false);
5046 TraceRecorder::record_JSOP_ITER()
5048 jsval
& v
= stackval(-1);
5049 if (!JSVAL_IS_PRIMITIVE(v
)) {
5050 jsuint flags
= cx
->fp
->regs
->pc
[1];
5051 LIns
* args
[] = { get(&v
), INS_CONST(flags
), cx_ins
};
5052 LIns
* v_ins
= lir
->insCall(F_FastValueToIterator
, args
);
5053 guard(false, lir
->ins_eq0(v_ins
), MISMATCH_EXIT
);
5058 ABORT_TRACE("for-in on a primitive value");
5062 TraceRecorder::forInLoop(jsval
* vp
)
5064 if (!JSVAL_IS_STRING(*vp
))
5065 ABORT_TRACE("for-in loop variable changed type from string");
5066 jsval
& iterobj_val
= stackval(-1);
5067 if (!JSVAL_IS_PRIMITIVE(iterobj_val
)) {
5068 LIns
* args
[] = { get(&iterobj_val
), cx_ins
};
5069 LIns
* v_ins
= lir
->insCall(F_FastCallIteratorNext
, args
);
5070 guard(false, lir
->ins2(LIR_eq
, v_ins
, INS_CONST(JSVAL_ERROR_COOKIE
)), OOM_EXIT
);
5072 LIns
* flag_ins
= lir
->ins_eq0(lir
->ins2(LIR_eq
, v_ins
, INS_CONST(JSVAL_HOLE
)));
5073 LIns
* iter_ins
= get(vp
);
5074 if (!box_jsval(JSVAL_STRING
, iter_ins
))
5076 iter_ins
= lir
->ins_choose(flag_ins
, v_ins
, iter_ins
, true);
5077 if (!unbox_jsval(JSVAL_STRING
, iter_ins
))
5084 ABORT_TRACE("for-in on a primitive value");
5088 TraceRecorder::record_JSOP_ENDITER()
5090 LIns
* args
[] = { stack(-1), cx_ins
};
5091 LIns
* ok_ins
= lir
->insCall(F_CloseIterator
, args
);
5092 guard(false, lir
->ins_eq0(ok_ins
), MISMATCH_EXIT
);
5097 TraceRecorder::record_JSOP_FORNAME()
5100 return name(vp
) && forInLoop(vp
);
5104 TraceRecorder::record_JSOP_FORPROP()
5110 TraceRecorder::record_JSOP_FORELEM()
5116 TraceRecorder::record_JSOP_FORARG()
5118 return forInLoop(&argval(GET_ARGNO(cx
->fp
->regs
->pc
)));
5122 TraceRecorder::record_JSOP_FORLOCAL()
5124 return forInLoop(&varval(GET_SLOTNO(cx
->fp
->regs
->pc
)));
5128 TraceRecorder::record_JSOP_FORCONST()
5134 TraceRecorder::record_JSOP_POPN()
5140 TraceRecorder::record_JSOP_BINDNAME()
5142 JSObject
* obj
= cx
->fp
->scopeChain
;
5143 if (obj
!= globalObj
)
5144 ABORT_TRACE("JSOP_BINDNAME crosses global scopes");
5146 LIns
* obj_ins
= scopeChain();
5149 if (!test_property_cache(obj
, obj_ins
, obj2
, pcval
))
5152 ABORT_TRACE("JSOP_BINDNAME found a non-direct property on the global object");
5159 TraceRecorder::record_JSOP_SETNAME()
5161 jsval
& r
= stackval(-1);
5162 jsval
& l
= stackval(-2);
5163 JS_ASSERT(!JSVAL_IS_PRIMITIVE(l
));
5166 * Trace cases that are global code or in lightweight functions scoped by
5167 * the global object only.
5169 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
5170 if (obj
!= cx
->fp
->scopeChain
|| obj
!= globalObj
)
5176 LIns
* r_ins
= get(&r
);
5179 if (cx
->fp
->regs
->pc
[JSOP_SETNAME_LENGTH
] != JSOP_POP
)
5185 TraceRecorder::record_JSOP_THROW()
5191 TraceRecorder::record_JSOP_IN()
5193 jsval
& rval
= stackval(-1);
5194 if (JSVAL_IS_PRIMITIVE(rval
))
5195 ABORT_TRACE("JSOP_IN on non-object right operand");
5197 jsval
& lval
= stackval(-2);
5198 if (!JSVAL_IS_PRIMITIVE(lval
))
5199 ABORT_TRACE("JSOP_IN on E4X QName left operand");
5202 if (JSVAL_IS_INT(lval
)) {
5203 id
= INT_JSVAL_TO_JSID(lval
);
5205 if (!JSVAL_IS_STRING(lval
))
5206 ABORT_TRACE("non-string left operand to JSOP_IN");
5207 if (!js_ValueToStringId(cx
, lval
, &id
))
5211 // Expect what we see at trace recording time (hit or miss) to be the same
5212 // when executing the trace. Use a builtin helper for named properties, as
5213 // forInLoop does. First, handle indexes in dense arrays as a special case.
5214 JSObject
* obj
= JSVAL_TO_OBJECT(rval
);
5215 LIns
* obj_ins
= get(&rval
);
5220 if (guardDenseArray(obj
, obj_ins
)) {
5221 if (JSVAL_IS_INT(lval
)) {
5222 jsint idx
= JSVAL_TO_INT(lval
);
5223 LIns
* idx_ins
= f2i(get(&lval
));
5224 LIns
* dslots_ins
= lir
->insLoad(LIR_ldp
, obj_ins
, offsetof(JSObject
, dslots
));
5225 if (!guardDenseArrayIndex(obj
, idx
, obj_ins
, dslots_ins
, idx_ins
))
5226 ABORT_TRACE("dense array index out of bounds");
5228 cond
= obj
->dslots
[idx
] != JSVAL_HOLE
;
5229 x
= lir
->ins_eq0(lir
->ins2(LIR_eq
,
5230 lir
->insLoad(LIR_ldp
, dslots_ins
, idx
* sizeof(jsval
)),
5231 INS_CONST(JSVAL_HOLE
)));
5235 // Not an index id, but a dense array -- go up to the proto. */
5236 obj
= STOBJ_GET_PROTO(obj
);
5237 obj_ins
= stobj_get_fslot(obj_ins
, JSSLOT_PROTO
);
5239 if (JSVAL_IS_INT(id
))
5240 ABORT_TRACE("INT in OBJ where OBJ is not a dense array");
5245 if (!OBJ_LOOKUP_PROPERTY(cx
, obj
, id
, &obj2
, &prop
))
5246 ABORT_TRACE("OBJ_LOOKUP_PROPERTY failed in JSOP_IN");
5248 cond
= prop
!= NULL
;
5250 OBJ_DROP_PROPERTY(cx
, obj2
, prop
);
5252 LIns
* args
[] = { get(&lval
), obj_ins
, cx_ins
};
5253 x
= lir
->insCall(F_HasNamedProperty
, args
);
5254 guard(false, lir
->ins2i(LIR_eq
, x
, JSVAL_TO_BOOLEAN(JSVAL_VOID
)), OOM_EXIT
);
5255 x
= lir
->ins2i(LIR_eq
, x
, 1);
5258 /* The interpreter fuses comparisons and the following branch,
5259 so we have to do that here as well. */
5260 fuseIf(cx
->fp
->regs
->pc
+ 1, cond
, x
);
5262 /* We update the stack after the guard. This is safe since
5263 the guard bails out at the comparison and the interpreter
5264 will therefore re-execute the comparison. This way the
5265 value of the condition doesn't have to be calculated and
5266 saved on the stack in most cases. */
5272 TraceRecorder::record_JSOP_INSTANCEOF()
5278 TraceRecorder::record_JSOP_DEBUGGER()
5284 TraceRecorder::record_JSOP_GOSUB()
5290 TraceRecorder::record_JSOP_RETSUB()
5296 TraceRecorder::record_JSOP_EXCEPTION()
5302 TraceRecorder::record_JSOP_LINENO()
5308 TraceRecorder::record_JSOP_CONDSWITCH()
5314 TraceRecorder::record_JSOP_CASE()
5316 return equal(CMP_CASE
);
5320 TraceRecorder::record_JSOP_DEFAULT()
5326 TraceRecorder::record_JSOP_EVAL()
5332 TraceRecorder::record_JSOP_ENUMELEM()
5338 TraceRecorder::record_JSOP_GETTER()
5344 TraceRecorder::record_JSOP_SETTER()
5350 TraceRecorder::record_JSOP_DEFFUN()
5356 TraceRecorder::record_JSOP_DEFCONST()
5362 TraceRecorder::record_JSOP_DEFVAR()
5368 * XXX could hoist out to jsinterp.h and share with jsinterp.cpp, but
5369 * XXX jsopcode.cpp has different definitions of same-named macros.
5371 #define GET_FULL_INDEX(PCOFF) \
5372 (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
5374 #define LOAD_FUNCTION(PCOFF) \
5375 JS_GET_SCRIPT_FUNCTION(script, GET_FULL_INDEX(PCOFF), fun)
5378 TraceRecorder::record_JSOP_ANONFUNOBJ()
5381 JSFrameRegs
& regs
= *cx
->fp
->regs
;
5382 JSScript
* script
= cx
->fp
->script
;
5383 LOAD_FUNCTION(0); // needs script, regs, fun
5385 JSObject
* obj
= FUN_OBJECT(fun
);
5386 if (OBJ_GET_PARENT(cx
, obj
) != cx
->fp
->scopeChain
)
5387 ABORT_TRACE("can't trace with activation object on scopeChain");
5389 stack(0, INS_CONSTPTR(obj
));
5394 TraceRecorder::record_JSOP_NAMEDFUNOBJ()
5400 TraceRecorder::record_JSOP_SETLOCALPOP()
5406 TraceRecorder::record_JSOP_GROUP()
5408 return true; // no-op
5412 TraceRecorder::record_JSOP_SETCALL()
5418 TraceRecorder::record_JSOP_TRY()
5424 TraceRecorder::record_JSOP_FINALLY()
5430 TraceRecorder::record_JSOP_NOP()
5436 TraceRecorder::record_JSOP_ARGSUB()
5442 TraceRecorder::record_JSOP_ARGCNT()
5448 TraceRecorder::record_JSOP_DEFLOCALFUN()
5451 JSFrameRegs
& regs
= *cx
->fp
->regs
;
5452 JSScript
* script
= cx
->fp
->script
;
5453 LOAD_FUNCTION(SLOTNO_LEN
); // needs script, regs, fun
5455 var(GET_SLOTNO(regs
.pc
), INS_CONSTPTR(FUN_OBJECT(fun
)));
5460 TraceRecorder::record_JSOP_GOTOX()
5466 TraceRecorder::record_JSOP_IFEQX()
5468 trackCfgMerges(cx
->fp
->regs
->pc
);
5469 return record_JSOP_IFEQ();
5473 TraceRecorder::record_JSOP_IFNEX()
5475 return record_JSOP_IFNE();
5479 TraceRecorder::record_JSOP_ORX()
5481 return record_JSOP_OR();
5485 TraceRecorder::record_JSOP_ANDX()
5487 return record_JSOP_AND();
5491 TraceRecorder::record_JSOP_GOSUBX()
5493 return record_JSOP_GOSUB();
5497 TraceRecorder::record_JSOP_CASEX()
5499 return equal(CMP_CASE
);
5503 TraceRecorder::record_JSOP_DEFAULTX()
5509 TraceRecorder::record_JSOP_TABLESWITCHX()
5515 TraceRecorder::record_JSOP_LOOKUPSWITCHX()
5521 TraceRecorder::record_JSOP_BACKPATCH()
5527 TraceRecorder::record_JSOP_BACKPATCH_POP()
5533 TraceRecorder::record_JSOP_THROWING()
5539 TraceRecorder::record_JSOP_SETRVAL()
5541 // If we implement this, we need to update JSOP_STOP.
5546 TraceRecorder::record_JSOP_RETRVAL()
5552 TraceRecorder::record_JSOP_GETGVAR()
5554 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5555 if (JSVAL_IS_NULL(slotval
))
5556 return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
5558 uint32 slot
= JSVAL_TO_INT(slotval
);
5560 if (!lazilyImportGlobalSlot(slot
))
5561 ABORT_TRACE("lazy import of global slot failed");
5563 stack(0, get(&STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
)));
5568 TraceRecorder::record_JSOP_SETGVAR()
5570 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5571 if (JSVAL_IS_NULL(slotval
))
5572 return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
5574 uint32 slot
= JSVAL_TO_INT(slotval
);
5576 if (!lazilyImportGlobalSlot(slot
))
5577 ABORT_TRACE("lazy import of global slot failed");
5579 set(&STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
), stack(-1));
5584 TraceRecorder::record_JSOP_INCGVAR()
5586 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5587 if (JSVAL_IS_NULL(slotval
))
5588 return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
5590 uint32 slot
= JSVAL_TO_INT(slotval
);
5592 if (!lazilyImportGlobalSlot(slot
))
5593 ABORT_TRACE("lazy import of global slot failed");
5595 return inc(STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
), 1);
5599 TraceRecorder::record_JSOP_DECGVAR()
5601 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5602 if (JSVAL_IS_NULL(slotval
))
5603 return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
5605 uint32 slot
= JSVAL_TO_INT(slotval
);
5607 if (!lazilyImportGlobalSlot(slot
))
5608 ABORT_TRACE("lazy import of global slot failed");
5610 return inc(STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
), -1);
5614 TraceRecorder::record_JSOP_GVARINC()
5616 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5617 if (JSVAL_IS_NULL(slotval
))
5618 return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
5620 uint32 slot
= JSVAL_TO_INT(slotval
);
5622 if (!lazilyImportGlobalSlot(slot
))
5623 ABORT_TRACE("lazy import of global slot failed");
5625 return inc(STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
), 1, false);
5629 TraceRecorder::record_JSOP_GVARDEC()
5631 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
5632 if (JSVAL_IS_NULL(slotval
))
5633 return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
5635 uint32 slot
= JSVAL_TO_INT(slotval
);
5637 if (!lazilyImportGlobalSlot(slot
))
5638 ABORT_TRACE("lazy import of global slot failed");
5640 return inc(STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
), -1, false);
5644 TraceRecorder::record_JSOP_REGEXP()
5649 // begin JS_HAS_XML_SUPPORT
5652 TraceRecorder::record_JSOP_DEFXMLNS()
5658 TraceRecorder::record_JSOP_ANYNAME()
5664 TraceRecorder::record_JSOP_QNAMEPART()
5670 TraceRecorder::record_JSOP_QNAMECONST()
5676 TraceRecorder::record_JSOP_QNAME()
5682 TraceRecorder::record_JSOP_TOATTRNAME()
5688 TraceRecorder::record_JSOP_TOATTRVAL()
5694 TraceRecorder::record_JSOP_ADDATTRNAME()
5700 TraceRecorder::record_JSOP_ADDATTRVAL()
5706 TraceRecorder::record_JSOP_BINDXMLNAME()
5712 TraceRecorder::record_JSOP_SETXMLNAME()
5718 TraceRecorder::record_JSOP_XMLNAME()
5724 TraceRecorder::record_JSOP_DESCENDANTS()
5730 TraceRecorder::record_JSOP_FILTER()
5736 TraceRecorder::record_JSOP_ENDFILTER()
5742 TraceRecorder::record_JSOP_TOXML()
5748 TraceRecorder::record_JSOP_TOXMLLIST()
5754 TraceRecorder::record_JSOP_XMLTAGEXPR()
5760 TraceRecorder::record_JSOP_XMLELTEXPR()
5766 TraceRecorder::record_JSOP_XMLOBJECT()
5772 TraceRecorder::record_JSOP_XMLCDATA()
5778 TraceRecorder::record_JSOP_XMLCOMMENT()
5784 TraceRecorder::record_JSOP_XMLPI()
5790 TraceRecorder::record_JSOP_GETFUNNS()
5796 TraceRecorder::record_JSOP_STARTXML()
5802 TraceRecorder::record_JSOP_STARTXMLEXPR()
5807 // end JS_HAS_XML_SUPPORT
5810 TraceRecorder::record_JSOP_CALLPROP()
5812 jsval
& l
= stackval(-1);
5815 if (!JSVAL_IS_PRIMITIVE(l
)) {
5816 obj
= JSVAL_TO_OBJECT(l
);
5818 stack(0, obj_ins
); // |this| for subsequent call
5821 debug_only(const char* protoname
= NULL
;)
5822 if (JSVAL_IS_STRING(l
)) {
5824 debug_only(protoname
= "String.prototype";)
5825 } else if (JSVAL_IS_NUMBER(l
)) {
5827 debug_only(protoname
= "Number.prototype";)
5828 } else if (JSVAL_IS_BOOLEAN(l
)) {
5829 i
= JSProto_Boolean
;
5830 debug_only(protoname
= "Boolean.prototype";)
5832 JS_ASSERT(JSVAL_IS_NULL(l
) || JSVAL_IS_VOID(l
));
5833 ABORT_TRACE("callprop on null or void");
5836 if (!js_GetClassPrototype(cx
, NULL
, INT_TO_JSID(i
), &obj
))
5837 ABORT_TRACE("GetClassPrototype failed!");
5839 obj_ins
= INS_CONSTPTR(obj
);
5840 debug_only(obj_ins
= addName(obj_ins
, protoname
);)
5841 stack(0, get(&l
)); // use primitive as |this|
5846 if (!test_property_cache(obj
, obj_ins
, obj2
, pcval
))
5849 if (PCVAL_IS_NULL(pcval
) || !PCVAL_IS_OBJECT(pcval
))
5850 ABORT_TRACE("callee is not an object");
5851 JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval
)));
5853 stack(-1, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval
)));
5858 TraceRecorder::record_JSOP_DELDESC()
5864 TraceRecorder::record_JSOP_UINT24()
5867 u
.d
= (jsdouble
)GET_UINT24(cx
->fp
->regs
->pc
);
5868 stack(0, lir
->insImmq(u
.u64
));
5873 TraceRecorder::record_JSOP_INDEXBASE()
5875 atoms
+= GET_INDEXBASE(cx
->fp
->regs
->pc
);
5880 TraceRecorder::record_JSOP_RESETBASE()
5882 atoms
= cx
->fp
->script
->atomMap
.vector
;
5887 TraceRecorder::record_JSOP_RESETBASE0()
5889 atoms
= cx
->fp
->script
->atomMap
.vector
;
5894 TraceRecorder::record_JSOP_CALLELEM()
5900 TraceRecorder::record_JSOP_STOP()
5903 * We know falling off the end of a constructor returns the new object that
5904 * was passed in via fp->argv[-1], while falling off the end of a function
5905 * returns undefined.
5907 * NB: we do not support script rval (eval, API users who want the result
5908 * of the last expression-statement, debugger API calls).
5910 JSStackFrame
*fp
= cx
->fp
;
5911 if (fp
->flags
& JSFRAME_CONSTRUCTING
) {
5912 JS_ASSERT(OBJECT_TO_JSVAL(fp
->thisp
) == fp
->argv
[-1]);
5913 rval_ins
= get(&fp
->argv
[-1]);
5915 rval_ins
= INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID
));
5917 clearFrameSlotsFromCache();
5922 TraceRecorder::record_JSOP_GETXPROP()
5924 jsval
& l
= stackval(-1);
5925 if (JSVAL_IS_PRIMITIVE(l
))
5926 ABORT_TRACE("primitive-this for GETXPROP?");
5928 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
5929 if (obj
!= cx
->fp
->scopeChain
|| obj
!= globalObj
)
5940 TraceRecorder::record_JSOP_CALLXMLNAME()
5946 TraceRecorder::record_JSOP_TYPEOFEXPR()
5948 return record_JSOP_TYPEOF();
5952 TraceRecorder::record_JSOP_ENTERBLOCK()
5958 TraceRecorder::record_JSOP_LEAVEBLOCK()
5964 TraceRecorder::record_JSOP_GENERATOR()
5970 TraceRecorder::record_JSOP_YIELD()
5976 TraceRecorder::record_JSOP_ARRAYPUSH()
5982 TraceRecorder::record_JSOP_ENUMCONSTELEM()
5988 TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
5994 TraceRecorder::record_JSOP_GETTHISPROP()
5998 /* its safe to just use cx->fp->thisp here because getThis() returns false if thisp
6000 return getThis(this_ins
) && getProp(cx
->fp
->thisp
, this_ins
);
6004 TraceRecorder::record_JSOP_GETARGPROP()
6006 return getProp(argval(GET_ARGNO(cx
->fp
->regs
->pc
)));
6010 TraceRecorder::record_JSOP_GETLOCALPROP()
6012 return getProp(varval(GET_SLOTNO(cx
->fp
->regs
->pc
)));
6016 TraceRecorder::record_JSOP_INDEXBASE1()
6023 TraceRecorder::record_JSOP_INDEXBASE2()
6030 TraceRecorder::record_JSOP_INDEXBASE3()
6037 TraceRecorder::record_JSOP_CALLGVAR()
6039 jsval slotval
= cx
->fp
->slots
[GET_SLOTNO(cx
->fp
->regs
->pc
)];
6040 if (JSVAL_IS_NULL(slotval
))
6041 return true; // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
6043 uint32 slot
= JSVAL_TO_INT(slotval
);
6045 if (!lazilyImportGlobalSlot(slot
))
6046 ABORT_TRACE("lazy import of global slot failed");
6048 jsval
& v
= STOBJ_GET_SLOT(cx
->fp
->scopeChain
, slot
);
6050 stack(1, INS_CONSTPTR(NULL
));
6055 TraceRecorder::record_JSOP_CALLLOCAL()
6057 uintN slot
= GET_SLOTNO(cx
->fp
->regs
->pc
);
6058 stack(0, var(slot
));
6059 stack(1, INS_CONSTPTR(NULL
));
6064 TraceRecorder::record_JSOP_CALLARG()
6066 uintN slot
= GET_ARGNO(cx
->fp
->regs
->pc
);
6067 stack(0, arg(slot
));
6068 stack(1, INS_CONSTPTR(NULL
));
6073 TraceRecorder::record_JSOP_NULLTHIS()
6075 stack(0, INS_CONSTPTR(NULL
));
6080 TraceRecorder::record_JSOP_INT8()
6083 u
.d
= (jsdouble
)GET_INT8(cx
->fp
->regs
->pc
);
6084 stack(0, lir
->insImmq(u
.u64
));
6089 TraceRecorder::record_JSOP_INT32()
6092 u
.d
= (jsdouble
)GET_INT32(cx
->fp
->regs
->pc
);
6093 stack(0, lir
->insImmq(u
.u64
));
6098 TraceRecorder::record_JSOP_LENGTH()
6100 jsval
& l
= stackval(-1);
6101 if (JSVAL_IS_PRIMITIVE(l
)) {
6102 if (!JSVAL_IS_STRING(l
))
6103 ABORT_TRACE("non-string primitives unsupported");
6104 LIns
* str_ins
= get(&l
);
6105 LIns
* len_ins
= lir
->insLoad(LIR_ldp
, str_ins
, (int)offsetof(JSString
, length
));
6107 LIns
* masked_len_ins
= lir
->ins2(LIR_piand
,
6109 INS_CONSTPTR(JSSTRING_LENGTH_MASK
));
6111 LIns
*choose_len_ins
=
6112 lir
->ins_choose(lir
->ins_eq0(lir
->ins2(LIR_piand
,
6114 INS_CONSTPTR(JSSTRFLAG_DEPENDENT
))),
6116 lir
->ins_choose(lir
->ins_eq0(lir
->ins2(LIR_piand
,
6118 INS_CONSTPTR(JSSTRFLAG_PREFIX
))),
6119 lir
->ins2(LIR_piand
,
6121 INS_CONSTPTR(JSSTRDEP_LENGTH_MASK
)),
6126 set(&l
, lir
->ins1(LIR_i2f
, choose_len_ins
));
6130 JSObject
* obj
= JSVAL_TO_OBJECT(l
);
6131 if (!OBJ_IS_DENSE_ARRAY(cx
, obj
))
6132 ABORT_TRACE("only dense arrays supported");
6133 if (!guardDenseArray(obj
, get(&l
)))
6134 ABORT_TRACE("OBJ_IS_DENSE_ARRAY but not?!?");
6135 LIns
* v_ins
= lir
->ins1(LIR_i2f
, stobj_get_fslot(get(&l
), JSSLOT_ARRAY_LENGTH
));
6141 TraceRecorder::record_JSOP_NEWARRAY()
6147 TraceRecorder::record_JSOP_HOLE()
6149 stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_HOLE
)));
6153 #define UNUSED(op) bool TraceRecorder::record_##op() { return false; }
6155 UNUSED(JSOP_UNUSED76
)
6156 UNUSED(JSOP_UNUSED77
)
6157 UNUSED(JSOP_UNUSED78
)
6158 UNUSED(JSOP_UNUSED79
)
6159 UNUSED(JSOP_UNUSED201
)
6160 UNUSED(JSOP_UNUSED202
)
6161 UNUSED(JSOP_UNUSED203
)
6162 UNUSED(JSOP_UNUSED204
)
6163 UNUSED(JSOP_UNUSED205
)
6164 UNUSED(JSOP_UNUSED206
)
6165 UNUSED(JSOP_UNUSED207
)
6166 UNUSED(JSOP_UNUSED219
)
6167 UNUSED(JSOP_UNUSED226
)