On x86 compilers without fastcall, simulate it when invoking traces and un-simulate...
[wine-gecko.git] / js / src / jstracer.cpp
blob1c5c28b2089f3d1d6054dd222158821bec5b7691
1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=99:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
18 * May 28, 2008.
20 * The Initial Developer of the Original Code is
21 * Brendan Eich <brendan@mozilla.org>
23 * Contributor(s):
24 * Andreas Gal <gal@mozilla.com>
25 * Mike Shaver <shaver@mozilla.org>
26 * David Anderson <danderson@mozilla.com>
28 * Alternatively, the contents of this file may be used under the terms of
29 * either of the GNU General Public License Version 2 or later (the "GPL"),
30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
31 * in which case the provisions of the GPL or the LGPL are applicable instead
32 * of those above. If you wish to allow use of your version of this file only
33 * under the terms of either the GPL or the LGPL, and not to allow others to
34 * use your version of this file under the terms of the MPL, indicate your
35 * decision by deleting the provisions above and replace them with the notice
36 * and other provisions required by the GPL or the LGPL. If you do not delete
37 * the provisions above, a recipient may use your version of this file under
38 * the terms of any one of the MPL, the GPL or the LGPL.
40 * ***** END LICENSE BLOCK ***** */
42 #include "jsstddef.h" // always first
43 #include "jsbit.h" // low-level (NSPR-based) headers next
44 #include "jsprf.h"
45 #include <math.h> // standard headers next
46 #ifdef _MSC_VER
47 #include <malloc.h>
48 #define alloca _alloca
49 #endif
51 #include "nanojit/avmplus.h" // nanojit
52 #include "nanojit/nanojit.h"
53 #include "jsarray.h" // higher-level library and API headers
54 #include "jsbool.h"
55 #include "jscntxt.h"
56 #include "jsdbgapi.h"
57 #include "jsemit.h"
58 #include "jsfun.h"
59 #include "jsinterp.h"
60 #include "jsiter.h"
61 #include "jsobj.h"
62 #include "jsopcode.h"
63 #include "jsregexp.h"
64 #include "jsscope.h"
65 #include "jsscript.h"
66 #include "jstracer.h"
68 #include "jsautooplen.h" // generated headers last
70 /* Number of iterations of a loop before we start tracing. */
71 #define HOTLOOP 2
73 /* Number of times we wait to exit on a side exit before we try to extend the tree. */
74 #define HOTEXIT 1
76 /* Max call depths for inlining. */
77 #define MAX_CALLDEPTH 5
/* Max number of type mismatches before we trash the tree. */
80 #define MAX_MISMATCH 5
82 /* Max native stack size. */
83 #define MAX_NATIVE_STACK_SLOTS 1024
85 /* Max call stack size. */
86 #define MAX_CALL_STACK_ENTRIES 64
88 #ifdef DEBUG
89 #define ABORT_TRACE(msg) do { debug_only_v(fprintf(stdout, "abort: %d: %s\n", __LINE__, msg);) return false; } while (0)
90 #else
91 #define ABORT_TRACE(msg) return false
92 #endif
94 #ifdef DEBUG
95 static struct {
96 uint64
97 recorderStarted, recorderAborted, traceCompleted, sideExitIntoInterpreter,
98 typeMapMismatchAtEntry, returnToDifferentLoopHeader, traceTriggered,
99 globalShapeMismatchAtEntry, treesTrashed, slotPromoted,
100 unstableLoopVariable, breakLoopExits;
101 } stat = { 0LL, };
102 #define AUDIT(x) (stat.x++)
103 #else
104 #define AUDIT(x) ((void)0)
105 #endif
107 #define INS_CONST(c) addName(lir->insImm(c), #c)
108 #define INS_CONSTPTR(p) addName(lir->insImmPtr((void*) (p)), #p)
110 using namespace avmplus;
111 using namespace nanojit;
113 static GC gc = GC();
114 static avmplus::AvmCore s_core = avmplus::AvmCore();
115 static avmplus::AvmCore* core = &s_core;
117 /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
118 static bool nesting_enabled = true;
119 static bool oracle_enabled = true;
120 static bool did_we_check_sse2 = false;
122 #ifdef DEBUG
123 static bool verbose_debug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
124 #define debug_only_v(x) if (verbose_debug) { x; }
125 #else
126 #define debug_only_v(x)
127 #endif
129 /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
130 case cause performance regressions. */
131 static Oracle oracle;
133 Tracker::Tracker()
135 pagelist = 0;
138 Tracker::~Tracker()
140 clear();
143 jsuword
144 Tracker::getPageBase(const void* v) const
146 return jsuword(v) & ~jsuword(NJ_PAGE_SIZE-1);
149 struct Tracker::Page*
150 Tracker::findPage(const void* v) const
152 jsuword base = getPageBase(v);
153 struct Tracker::Page* p = pagelist;
154 while (p) {
155 if (p->base == base) {
156 return p;
158 p = p->next;
160 return 0;
163 struct Tracker::Page*
164 Tracker::addPage(const void* v) {
165 jsuword base = getPageBase(v);
166 struct Tracker::Page* p = (struct Tracker::Page*)
167 GC::Alloc(sizeof(*p) - sizeof(p->map) + (NJ_PAGE_SIZE >> 2) * sizeof(LIns*));
168 p->base = base;
169 p->next = pagelist;
170 pagelist = p;
171 return p;
174 void
175 Tracker::clear()
177 while (pagelist) {
178 Page* p = pagelist;
179 pagelist = pagelist->next;
180 GC::Free(p);
184 bool
185 Tracker::has(const void *v) const
187 return get(v) != NULL;
190 #if defined NANOJIT_64BIT
191 #define PAGEMASK 0x7ff
192 #else
193 #define PAGEMASK 0xfff
194 #endif
196 LIns*
197 Tracker::get(const void* v) const
199 struct Tracker::Page* p = findPage(v);
200 if (!p)
201 return NULL;
202 return p->map[(jsuword(v) & PAGEMASK) >> 2];
205 void
206 Tracker::set(const void* v, LIns* i)
208 struct Tracker::Page* p = findPage(v);
209 if (!p)
210 p = addPage(v);
211 p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
214 static inline bool isNumber(jsval v)
216 return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
219 static inline jsdouble asNumber(jsval v)
221 JS_ASSERT(isNumber(v));
222 if (JSVAL_IS_DOUBLE(v))
223 return *JSVAL_TO_DOUBLE(v);
224 return (jsdouble)JSVAL_TO_INT(v);
227 static inline bool isInt32(jsval v)
229 if (!isNumber(v))
230 return false;
231 jsdouble d = asNumber(v);
232 jsint i;
233 return JSDOUBLE_IS_INT(d, i);
236 static inline uint8 getCoercedType(jsval v)
238 return isInt32(v) ? JSVAL_INT : (uint8) JSVAL_TAG(v);
241 /* Tell the oracle that a certain global variable should not be demoted. */
242 void
243 Oracle::markGlobalSlotUndemotable(JSScript* script, unsigned slot)
245 _dontDemote.set(&gc, (slot % ORACLE_SIZE));
248 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
249 bool
250 Oracle::isGlobalSlotUndemotable(JSScript* script, unsigned slot) const
252 return !oracle_enabled || _dontDemote.get(slot % ORACLE_SIZE);
255 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
256 void
257 Oracle::markStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot)
259 uint32 hash = uint32(intptr_t(ip)) + (slot << 5);
260 hash %= ORACLE_SIZE;
261 _dontDemote.set(&gc, hash);
264 /* Consult with the oracle whether we shouldn't demote a certain slot. */
265 bool
266 Oracle::isStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot) const
268 uint32 hash = uint32(intptr_t(ip)) + (slot << 5);
269 hash %= ORACLE_SIZE;
270 return !oracle_enabled || _dontDemote.get(hash);
273 /* Clear the oracle. */
274 void
275 Oracle::clear()
277 _dontDemote.reset();
/* Rewrite a double-producing LIR instruction as the equivalent int-producing
   one. Callers must first establish via isPromoteInt/isPromoteUint (or by
   construction) that the demotion is value-preserving. */
static LIns* demote(LirWriter *out, LInsp i)
{
    if (i->isCall())
        return callArgN(i, 0);  // presumably the call's first arg is the pre-conversion int — confirm against callers
    if (i->isop(LIR_i2f) || i->isop(LIR_u2f))
        return i->oprnd1();     // strip the int->double conversion
    if (i->isconst())
        return i;               // already a 32-bit constant
    AvmAssert(i->isconstq());
    double cf = i->constvalf();
    /* Values above INT32_MAX must be converted through uint32 first. */
    int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
    return out->insImm(ci);
}
294 static bool isPromoteInt(LIns* i)
296 jsdouble d;
297 return i->isop(LIR_i2f) || i->isconst() ||
298 (i->isconstq() && ((d = i->constvalf()) == (jsdouble)(jsint)d) && !JSDOUBLE_IS_NEGZERO(d));
301 static bool isPromoteUint(LIns* i)
303 jsdouble d;
304 return i->isop(LIR_u2f) || i->isconst() ||
305 (i->isconstq() && ((d = i->constvalf()) == (jsdouble)(jsuint)d));
308 static bool isPromote(LIns* i)
310 return isPromoteInt(i) || isPromoteUint(i);
313 static bool isconst(LIns* i, int32_t c)
315 return i->isconst() && i->constval() == c;
318 static bool overflowSafe(LIns* i)
320 LIns* c;
321 return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
322 ((c->constval() & 0xc0000000) == 0)) ||
323 (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
324 ((c->constval() > 0)));
/*
 * LIR writer filter that strength-reduces floating-point operations whose
 * operands are provably promoted integers, inserting overflow guards where
 * the demoted integer op could wrap.  Also constant-folds and simplifies
 * several builtin calls (double<->int conversions, boxing).
 */
class FuncFilter: public LirWriter
{
    TraceRecorder& recorder;
public:
    FuncFilter(LirWriter* out, TraceRecorder& _recorder):
        LirWriter(out), recorder(_recorder)
    {
    }

    /* Unary ops: demote fneg of a promoted int to an integer neg guarded
       against overflow (negating INT32_MIN would wrap). */
    LInsp ins1(LOpcode v, LInsp s0)
    {
        switch (v) {
          case LIR_fneg:
              if (isPromoteInt(s0)) {
                  LIns* result = out->ins1(LIR_neg, demote(out, s0));
                  out->insGuard(LIR_xt, out->ins1(LIR_ov, result),
                                recorder.snapshot(OVERFLOW_EXIT));
                  return out->ins1(LIR_i2f, result);
              }
              break;
          default:;
        }
        return out->ins1(v, s0);
    }

    /* Binary ops: fold self-equality of non-NaN values, demote comparisons
       and add/sub of promoted ints, and recognize a 16-bit split-add idiom. */
    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        if (s0 == s1 && v == LIR_feq) {
            if (isPromote(s0)) {
                // double(int) and double(uint) cannot be nan
                return insImm(1);
            }
            if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    // add/sub/mul promoted ints can't be nan
                    return insImm(1);
                }
            }
        } else if (LIR_feq <= v && v <= LIR_fge) {
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fcmp to cmp
                v = LOpcode(v + (LIR_eq - LIR_feq));
                return out->ins2(v, demote(out, s0), demote(out, s1));
            } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
                // uint compare
                v = LOpcode(v + (LIR_eq - LIR_feq));
                if (v != LIR_eq)
                    v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
                return out->ins2(v, demote(out, s0), demote(out, s1));
            }
        } else if (v == LIR_fadd || v == LIR_fsub) {
            /* demoting multiplication seems to be tricky since it can quickly overflow the
               value range of int32 */
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fop to op
                v = (LOpcode)((int)v & ~LIR64);
                LIns* d0;
                LIns* d1;
                LIns* result = out->ins2(v, d0 = demote(out, s0), d1 = demote(out, s1));
                /* Guard on overflow unless both operands are provably safe. */
                if (!overflowSafe(d0) || !overflowSafe(d1)) {
                    out->insGuard(LIR_xt, out->ins1(LIR_ov, result),
                                  recorder.snapshot(OVERFLOW_EXIT));
                }
                return out->ins1(LIR_i2f, result);
            }
        } else if (v == LIR_or &&
                   s0->isop(LIR_lsh) && isconst(s0->oprnd2(), 16) &&
                   s1->isop(LIR_and) && isconst(s1->oprnd2(), 0xffff)) {
            /* Recognize (msw << 16) | (lsw & 0xffff) where the halves were
               computed as a carry-propagating 16-bit split addition of the
               same x and y, and collapse it back into a plain add. */
            LIns* msw = s0->oprnd1();
            LIns* lsw = s1->oprnd1();
            LIns* x;
            LIns* y;
            if (lsw->isop(LIR_add) &&
                lsw->oprnd1()->isop(LIR_and) &&
                lsw->oprnd2()->isop(LIR_and) &&
                isconst(lsw->oprnd1()->oprnd2(), 0xffff) &&
                isconst(lsw->oprnd2()->oprnd2(), 0xffff) &&
                msw->isop(LIR_add) &&
                msw->oprnd1()->isop(LIR_add) &&
                msw->oprnd2()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
                isconst(msw->oprnd2()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
                (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
                (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
                lsw == msw->oprnd2()->oprnd1()) {
                return out->ins2(LIR_add, x, y);
            }
        }
        return out->ins2(v, s0, s1);
    }

    /* Builtin calls: constant-fold conversions and cancel box/unbox pairs. */
    LInsp insCall(uint32_t fid, LInsp args[])
    {
        LInsp s0 = args[0];
        switch (fid) {
          case F_DoubleToUint32:
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAUint32(s0->constvalf()));
            if (s0->isop(LIR_i2f) || s0->isop(LIR_u2f)) {
                return s0->oprnd1();
            }
            break;
          case F_DoubleToInt32:
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAInt32(s0->constvalf()));
            if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub) || s0->isop(LIR_fmul)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    LOpcode op = LOpcode(s0->opcode() & ~LIR64);
                    return out->ins2(op, demote(out, lhs), demote(out, rhs));
                }
            }
            if (s0->isop(LIR_i2f) || s0->isop(LIR_u2f)) {
                return s0->oprnd1();
            }
            if (s0->isCall() && s0->fid() == F_UnboxDouble) {
                LIns* args2[] = { callArgN(s0, 0) };
                return out->insCall(F_UnboxInt32, args2);
            }
            if (s0->isCall() && s0->fid() == F_StringToNumber) {
                // callArgN's ordering is that as seen by the builtin, not as stored in args here.
                // True story!
                LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };
                return out->insCall(F_StringToInt32, args2);
            }
            break;
          case F_BoxDouble:
            JS_ASSERT(s0->isQuad());
            if (s0->isop(LIR_i2f)) {
                LIns* args2[] = { s0->oprnd1(), args[1] };
                return out->insCall(F_BoxInt32, args2);
            }
            if (s0->isCall() && s0->fid() == F_UnboxDouble)
                return callArgN(s0, 0);
            break;
        }
        return out->insCall(fid, args);
    }
};
473 /* In debug mode vpname contains a textual description of the type of the
   slot during the forall iteration over all slots. */
475 #ifdef DEBUG
476 #define DEF_VPNAME const char* vpname; unsigned vpnum
477 #define SET_VPNAME(name) do { vpname = name; vpnum = 0; } while(0)
478 #define INC_VPNUM() do { ++vpnum; } while(0)
479 #else
480 #define DEF_VPNAME do {} while (0)
481 #define vpname ""
482 #define vpnum 0
483 #define SET_VPNAME(name) ((void)0)
484 #define INC_VPNUM() ((void)0)
485 #endif
487 /* Iterate over all interned global variables. */
488 #define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code) \
489 JS_BEGIN_MACRO \
490 DEF_VPNAME; \
491 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain); \
492 unsigned n; \
493 jsval* vp; \
494 SET_VPNAME("global"); \
495 for (n = 0; n < ngslots; ++n) { \
496 vp = &STOBJ_GET_SLOT(globalObj, gslots[n]); \
497 { code; } \
498 INC_VPNUM(); \
500 JS_END_MACRO
502 /* Iterate over all slots in the frame, consisting of args, vars, and stack
   (except for the top-level frame which does not have args or vars). */
504 #define FORALL_FRAME_SLOTS(fp, depth, code) \
505 JS_BEGIN_MACRO \
506 jsval* vp; \
507 jsval* vpstop; \
508 if (fp->callee) { \
509 if (depth == 0) { \
510 SET_VPNAME("callee"); \
511 vp = &fp->argv[-2]; \
512 { code; } \
513 SET_VPNAME("this"); \
514 vp = &fp->argv[-1]; \
515 { code; } \
516 SET_VPNAME("argv"); \
517 vp = &fp->argv[0]; vpstop = &fp->argv[fp->fun->nargs]; \
518 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
520 SET_VPNAME("vars"); \
521 vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed]; \
522 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
524 SET_VPNAME("stack"); \
525 vp = StackBase(fp); vpstop = fp->regs->sp; \
526 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
527 if (fsp < fspstop - 1) { \
528 JSStackFrame* fp2 = fsp[1]; \
529 int missing = fp2->fun->nargs - fp2->argc; \
530 if (missing > 0) { \
531 SET_VPNAME("missing"); \
532 vp = fp->regs->sp; \
533 vpstop = vp + missing; \
534 while (vp < vpstop) { code; ++vp; INC_VPNUM(); } \
537 JS_END_MACRO
539 /* Iterate over all slots in each pending frame. */
540 #define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code) \
541 JS_BEGIN_MACRO \
542 DEF_VPNAME; \
543 unsigned n; \
544 JSStackFrame* currentFrame = cx->fp; \
545 JSStackFrame* entryFrame; \
546 JSStackFrame* fp = currentFrame; \
547 for (n = 0; n < callDepth; ++n) { fp = fp->down; } \
548 entryFrame = fp; \
549 unsigned frames = callDepth+1; \
550 JSStackFrame** fstack = \
551 (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*)); \
552 JSStackFrame** fspstop = &fstack[frames]; \
553 JSStackFrame** fsp = fspstop-1; \
554 fp = currentFrame; \
555 for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; } \
556 unsigned depth; \
557 for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) { \
558 fp = *fsp; \
559 FORALL_FRAME_SLOTS(fp, depth, code); \
561 JS_END_MACRO
563 #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code) \
564 JS_BEGIN_MACRO \
565 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code); \
566 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code); \
567 JS_END_MACRO
/* Calculate the total number of native frame slots we need from this frame
   all the way back to the entry frame, including the current stack usage. */
unsigned
js_NativeStackSlots(JSContext *cx, unsigned callDepth)
{
    JSStackFrame* fp = cx->fp;
    unsigned slots = 0;
#if defined _DEBUG
    unsigned int origCallDepth = callDepth;
#endif
    for (;;) {
        /* Operand-stack entries currently live in this frame. */
        unsigned operands = fp->regs->sp - StackBase(fp);
        JS_ASSERT(operands <= unsigned(fp->script->nslots - fp->script->nfixed));
        slots += operands;
        if (fp->callee)
            slots += fp->script->nfixed;  /* local vars (function frames only) */
        if (callDepth-- == 0) {
            /* Entry frame: also count callee, this, and the arguments. */
            if (fp->callee)
                slots += 2/*callee,this*/ + fp->fun->nargs;
#if defined _DEBUG
            /* Cross-check the fast count against the slot-iteration macro. */
            unsigned int m = 0;
            FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
            JS_ASSERT(m == slots);
#endif
            return slots;
        }
        /* Account for formal args the caller did not actually push. */
        JSStackFrame* fp2 = fp;
        fp = fp->down;
        int missing = fp2->fun->nargs - fp2->argc;
        if (missing > 0)
            slots += missing;
    }
    JS_NOT_REACHED("js_NativeStackSlots");
}
/* Capture the type map for the selected slots of the global object. Slots the
   oracle has flagged as undemotable are recorded as doubles, not ints. */
void
TypeMap::captureGlobalTypes(JSContext* cx, SlotList& slots)
{
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(ngslots);
    uint8* map = data();
    uint8* m = map;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx->fp->script, gslots[n]))
            type = JSVAL_DOUBLE;
        *m++ = type;
    );
}
/* Capture the type map for the currently pending stack frames. Slots the
   oracle has flagged as undemotable are recorded as doubles, not ints. */
void
TypeMap::captureStackTypes(JSContext* cx, unsigned callDepth)
{
    setLength(js_NativeStackSlots(cx, callDepth));
    uint8* map = data();
    uint8* m = map;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        uint8 type = getCoercedType(*vp);
        if ((type == JSVAL_INT) &&
            oracle.isStackSlotUndemotable(cx->fp->script, cx->fp->regs->pc, unsigned(m - map))) {
            type = JSVAL_DOUBLE;
        }
        *m++ = type;
    );
}
638 /* Compare this type map to another one and see whether they match. */
639 bool
640 TypeMap::matches(TypeMap& other) const
642 if (length() != other.length())
643 return false;
644 return !memcmp(data(), other.data(), length());
647 /* Use the provided storage area to create a new type map that contains the partial type map
648 with the rest of it filled up from the complete type map. */
649 static void
650 mergeTypeMaps(uint8** partial, unsigned* plength, uint8* complete, unsigned clength, uint8* mem)
652 unsigned l = *plength;
653 JS_ASSERT(l < clength);
654 memcpy(mem, *partial, l * sizeof(uint8));
655 memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
656 *partial = mem;
657 *plength = clength;
660 static void
661 js_TrashTree(JSContext* cx, Fragment* f);
/* Set up a recorder for the given fragment/tree: cache the per-trace state,
   build the LIR writer pipeline, and import the interpreter's current stack
   and global state onto the trace. */
TraceRecorder::TraceRecorder(JSContext* cx, GuardRecord* _anchor, Fragment* _fragment,
        TreeInfo* ti, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
        GuardRecord* innermostNestedGuard)
{
    JS_ASSERT(!_fragment->vmprivate && ti);

    this->cx = cx;
    this->traceMonitor = &JS_TRACE_MONITOR(cx);
    this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    this->anchor = _anchor;
    this->fragment = _fragment;
    this->lirbuf = _fragment->lirbuf;
    this->treeInfo = ti;
    this->callDepth = _fragment->calldepth;
    JS_ASSERT(!_anchor || _anchor->calldepth == _fragment->calldepth);
    this->atoms = cx->fp->script->atomMap.vector;
    this->trashTree = false;
    this->deepAborted = false;
    this->whichTreeToTrash = _fragment->root;

    debug_only_v(printf("recording starting from %s:%u@%u\n", cx->fp->script->filename,
                        js_PCToLineNumber(cx, cx->fp->script, cx->fp->regs->pc),
                        cx->fp->regs->pc - cx->fp->script->code););

    /* Build the writer pipeline; each filter wraps the previous one. */
    lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
#ifdef DEBUG
    if (verbose_debug)
        lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);
#endif
    lir = cse_filter = new (&gc) CseFilter(lir, &gc);
    lir = expr_filter = new (&gc) ExprFilter(lir);
    lir = func_filter = new (&gc) FuncFilter(lir, *this);
    lir->ins0(LIR_trace);

    /* Root fragments declare the trace entry parameters. */
    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment) {
        lirbuf->state = addName(lir->insParam(0), "state");
        lirbuf->param1 = addName(lir->insParam(1), "param1");
    }
    /* Load the interpreter state fields the trace works against. */
    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
    lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
    cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
    gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
    eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
    eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");

    /* read into registers all values on the stack and all globals we know so far */
    import(treeInfo, lirbuf->sp, ngslots, callDepth, globalTypeMap, stackTypeMap);

    /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
       is what we expect it to be. */
    if (_anchor && _anchor->exit->exitType == NESTED_EXIT) {
        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state,
                                                offsetof(InterpState, nestedExit)), "nestedExit");
        guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
    }
}
/* Tear down the recorder: release tree info for traces that never compiled,
   trash the tree if recording flagged it, and delete the writer pipeline. */
TraceRecorder::~TraceRecorder()
{
    JS_ASSERT(treeInfo);
    /* If this root fragment never produced code, its TreeInfo is ours to free. */
    if (fragment->root == fragment && !fragment->root->code()) {
        JS_ASSERT(!fragment->root->vmprivate);
        delete treeInfo;
    }
    if (trashTree)
        js_TrashTree(cx, whichTreeToTrash);
#ifdef DEBUG
    delete verbose_filter;
#endif
    delete cse_filter;
    delete expr_filter;
    delete func_filter;
    delete lir_buf_writer;
}
/* Add debug information to a LIR instruction as we emit it: in DEBUG builds
   the name is registered with the LIR buffer's name table; the instruction is
   returned unchanged either way. */
inline LIns*
TraceRecorder::addName(LIns* ins, const char* name)
{
#ifdef DEBUG
    lirbuf->names->addName(ins, name);
#endif
    return ins;
}
748 /* Determine the current call depth (starting with the entry frame.) */
749 unsigned
750 TraceRecorder::getCallDepth() const
752 return callDepth;
755 /* Determine whether we should unroll a loop (only do so at most once for every loop). */
756 bool
757 TraceRecorder::trackLoopEdges()
759 jsbytecode* pc = cx->fp->regs->pc;
760 if (inlinedLoopEdges.contains(pc))
761 return false;
762 inlinedLoopEdges.add(pc);
763 return true;
766 /* Determine the offset in the native global frame for a jsval we track */
767 ptrdiff_t
768 TraceRecorder::nativeGlobalOffset(jsval* p) const
770 JS_ASSERT(isGlobal(p));
771 if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
772 return size_t(p - globalObj->fslots) * sizeof(double);
773 return ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
776 /* Determine whether a value is a global stack slot */
777 bool
778 TraceRecorder::isGlobal(jsval* p) const
780 return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
781 (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
/* Determine the offset in the native stack for a jsval we track. The layout
   mirrors FORALL_SLOTS_IN_PENDING_FRAMES: for each pending frame (oldest
   first) callee/this/args (entry frame only), vars, operand stack, then any
   missing args of the next frame up; values above sp in the current frame
   come last. In DEBUG builds a slow macro-based walk cross-checks the
   result. */
ptrdiff_t
TraceRecorder::nativeStackOffset(jsval* p) const
{
#ifdef DEBUG
    size_t slow_offset = 0;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        if (vp == p) goto done;
        slow_offset += sizeof(double)
    );

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
    slow_offset += size_t(p - cx->fp->regs->sp) * sizeof(double);

  done:
#define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
#else
#define RETURN(offset) { return offset; }
#endif
    size_t offset = 0;
    JSStackFrame* currentFrame = cx->fp;
    JSStackFrame* entryFrame;
    JSStackFrame* fp = currentFrame;
    for (unsigned n = 0; n < callDepth; ++n) { fp = fp->down; }
    entryFrame = fp;
    unsigned frames = callDepth+1;
    JSStackFrame** fstack = (JSStackFrame **)alloca(frames * sizeof (JSStackFrame *));
    JSStackFrame** fspstop = &fstack[frames];
    JSStackFrame** fsp = fspstop-1;
    fp = currentFrame;
    /* Collect the pending frames so fstack[] runs oldest-to-newest. */
    for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }
    for (fsp = fstack; fsp < fspstop; ++fsp) {
        fp = *fsp;
        if (fp->callee) {
            if (fsp == fstack) {
                /* Entry frame: callee, this, and the formal args come first. */
                if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + fp->fun->nargs))
                    RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
                offset += (2/*callee,this*/ + fp->fun->nargs) * sizeof(double);
            }
            if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
                RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
            offset += fp->script->nfixed * sizeof(double);
        }
        jsval* spbase = StackBase(fp);
        if (size_t(p - spbase) < size_t(fp->regs->sp - spbase))
            RETURN(offset + size_t(p - spbase) * sizeof(double));
        offset += size_t(fp->regs->sp - spbase) * sizeof(double);
        if (fsp < fspstop - 1) {
            /* Slots reserved for args the next frame's caller did not push. */
            JSStackFrame* fp2 = fsp[1];
            int missing = fp2->fun->nargs - fp2->argc;
            if (missing > 0) {
                if (size_t(p - fp->regs->sp) < size_t(missing))
                    RETURN(offset + size_t(p - fp->regs->sp) * sizeof(double));
                offset += size_t(missing) * sizeof(double);
            }
        }
    }

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - currentFrame->slots) < currentFrame->script->nslots);
    offset += size_t(p - currentFrame->regs->sp) * sizeof(double);
    RETURN(offset);
#undef RETURN
}
856 /* Track the maximum number of native frame slots we need during
857 execution. */
858 void
859 TraceRecorder::trackNativeStackUse(unsigned slots)
861 if (slots > treeInfo->maxNativeStackSlots)
862 treeInfo->maxNativeStackSlots = slots;
/* Unbox a jsval into a slot. Slots are wide enough to hold double values
   directly (instead of storing a pointer to them). Returns false when v's
   runtime type does not match the expected type tag, which aborts entering
   the trace. JSVAL_VOID is accepted for every expected type and converted
   to a type-appropriate default. */
static bool
ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
{
    unsigned tag = JSVAL_TAG(v);
    switch (type) {
      case JSVAL_INT:
        jsint i;
        if (JSVAL_IS_INT(v))
            *(jsint*)slot = JSVAL_TO_INT(v);
        else if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
            *(jsint*)slot = i;   /* int-valued double demotes cleanly */
        else if (v == JSVAL_VOID)
            *(jsint*)slot = 0;
        else {
            debug_only_v(printf("int != tag%lu(value=%lu) ", JSVAL_TAG(v), v);)
            return false;
        }
        debug_only_v(printf("int<%d> ", *(jsint*)slot);)
        return true;
      case JSVAL_DOUBLE:
        jsdouble d;
        if (JSVAL_IS_INT(v))
            d = JSVAL_TO_INT(v);
        else if (tag == JSVAL_DOUBLE)
            d = *JSVAL_TO_DOUBLE(v);
        else if (v == JSVAL_VOID)
            d = js_NaN;
        else {
            debug_only_v(printf("double != tag%lu ", JSVAL_TAG(v));)
            return false;
        }
        *(jsdouble*)slot = d;
        debug_only_v(printf("double<%g> ", d);)
        return true;
      case JSVAL_BOOLEAN:
        if (tag != JSVAL_BOOLEAN) {
            debug_only_v(printf("bool != tag%u ", tag);)
            return false;
        }
        *(JSBool*)slot = JSVAL_TO_BOOLEAN(v);
        debug_only_v(printf("boolean<%d> ", *(bool*)slot);)
        return true;
      case JSVAL_STRING:
        if (v == JSVAL_VOID) {
            /* undefined maps to the interned "undefined" type-name string */
            *(JSString**)slot = ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_VOID]);
            return true;
        }
        if (tag != JSVAL_STRING) {
            debug_only_v(printf("string != tag%u ", tag);)
            return false;
        }
        *(JSString**)slot = JSVAL_TO_STRING(v);
        debug_only_v(printf("string<%p> ", *(JSString**)slot);)
        return true;
      default:
        JS_ASSERT(type == JSVAL_OBJECT);
        if (v == JSVAL_VOID) {
            *(JSObject**)slot = NULL;
            return true;
        }
        if (tag != JSVAL_OBJECT) {
            debug_only_v(printf("object != tag%u ", tag);)
            return false;
        }
        *(JSObject**)slot = JSVAL_TO_OBJECT(v);
        debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        return true;
    }
}
/* Box a value from the native stack back into the jsval format. Integers
   that are too large to fit into a jsval are automatically boxed into
   heap-allocated doubles. Returns false only on hard error (e.g. OOM while
   allocating a double). */
static bool
NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)
{
    jsint i;
    jsdouble d;
    switch (type) {
      case JSVAL_BOOLEAN:
        v = BOOLEAN_TO_JSVAL(*(bool*)slot);
        debug_only_v(printf("boolean<%d> ", *(bool*)slot);)
        break;
      case JSVAL_INT:
        i = *(jsint*)slot;
        debug_only_v(printf("int<%d> ", i);)
      store_int:
        if (INT_FITS_IN_JSVAL(i)) {
            v = INT_TO_JSVAL(i);
            break;
        }
        /* Doesn't fit in the tagged int representation; box as a double. */
        d = (jsdouble)i;
        goto store_double;
      case JSVAL_DOUBLE:
        d = *slot;
        debug_only_v(printf("double<%g> ", d);)
        /* Prefer the compact int representation when the value allows it. */
        if (JSDOUBLE_IS_INT(d, i))
            goto store_int;
      store_double:
        /* Its safe to trigger the GC here since we rooted all strings/objects and all the
           doubles we already processed. */
        return js_NewDoubleInRootedValue(cx, d, &v) ? true : false;
      case JSVAL_STRING:
        v = STRING_TO_JSVAL(*(JSString**)slot);
        debug_only_v(printf("string<%p> ", *(JSString**)slot);)
        break;
      default:
        JS_ASSERT(type == JSVAL_OBJECT);
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
        debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        break;
    }
    return true;
}
/* Attempt to unbox the given list of interned globals onto the native global frame, checking
   along the way that the supplied type-map holds. Returns false on the first
   slot whose actual type does not match its map entry. */
static bool
BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    debug_only_v(printf("global: ");)
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        /* Each global lands at its slot index within the native frame. */
        if (!ValueToNative(cx, *vp, *mp, np + gslots[n]))
            return false;
        ++mp;
    );
    debug_only_v(printf("\n");)
    return true;
}
/* Attempt to unbox the given JS frame onto a native frame, checking along the way that the
   supplied type-map holds. Returns false on the first slot whose actual type
   does not match its map entry. */
static bool
BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
{
    debug_only_v(printf("stack: ");)
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        if (!ValueToNative(cx, *vp, *mp, np))
            return false;
        ++mp; ++np;
    );
    debug_only_v(printf("\n");)
    return true;
}
/* Box the given native frame into a JS frame. This only fails due to a hard error
   (out of memory for example). Returns the number of slots flushed, or -1 on
   failure. Runs two passes so that GC-triggering double boxing happens only
   after every string/object reference is rooted. */
static int
FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    uint8* mp_base = mp;
    /* Root all string and object references first (we don't need to call the GC for this). */
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        if ((*mp == JSVAL_STRING || *mp == JSVAL_OBJECT) &&
            !NativeToValue(cx, *vp, *mp, np + gslots[n])) {
            return -1;
        }
        ++mp;
    );
    /* Now do this again but this time for all values (properly quicker than actually checking
       the type and excluding strings and objects). The GC might kick in when we store doubles,
       but everything is rooted now (all strings/objects and all doubles we already boxed). */
    mp = mp_base;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        if (!NativeToValue(cx, *vp, *mp, np + gslots[n]))
            return -1;
        ++mp;
    );
    debug_only_v(printf("\n");)
    return mp - mp_base;
}
1046 /* Box the given native stack frame into the virtual machine stack. This only fails due to a
1047 hard error (out of memory for example). stopAt, if non-NULL, bounds the flush: slots at
1048 and beyond that address are skipped (via the skip1/skip2 labels). Returns the number of
1048 slots flushed, or -1 on failure. */
1048 static int
1049 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np, jsval* stopAt)
1051 uint8* mp_base = mp;
1052 double* np_base = np;
1053 /* Root all string and object references first (we don't need to call the GC for this). */
1054 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1055 if (vp == stopAt) goto skip1;
1056 if ((*mp == JSVAL_STRING || *mp == JSVAL_OBJECT) && !NativeToValue(cx, *vp, *mp, np))
1057 return -1;
/* NOTE(review): "++mp; ++np" below appears to have lost its terminating ';' (and the
   macro's closing ");") in this extract — confirm against the upstream source. */
1058 ++mp; ++np
1060 skip1:
1061 // Restore thisp from the now-restored argv[-1] in each pending frame.
1062 unsigned n = callDepth;
1063 for (JSStackFrame* fp = cx->fp; n-- != 0; fp = fp->down)
1064 fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
1066 /* Now do this again but this time for all values (probably quicker than actually checking
1067 the type and excluding strings and objects). The GC might kick in when we store doubles,
1068 but everything is rooted now (all strings/objects and all doubles we already boxed). */
1069 mp = mp_base;
1070 np = np_base;
1071 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1072 if (vp == stopAt) goto skip2;
1073 debug_only_v(printf("%s%u=", vpname, vpnum);)
1074 if (!NativeToValue(cx, *vp, *mp, np))
1075 return -1;
1076 ++mp; ++np
1078 skip2:
1079 debug_only_v(printf("\n");)
1080 return mp - mp_base;
1083 /* Emit load instructions onto the trace that read the initial stack state.
1084 base/offset locate the slot in the native frame; p is the interpreter-side
1085 address of the value and t its (mutable) type-map entry. prefix/index/fp are
1086 used only to build a debug name for the load. */
1084 void
1085 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
1086 const char *prefix, uintN index, JSStackFrame *fp)
1088 LIns* ins;
1089 if (t == JSVAL_INT) { /* demoted */
1090 JS_ASSERT(isInt32(*p));
1091 /* Ok, we have a valid demotion attempt pending, so insert an integer
1092 read and promote it to double since all arithmetic operations expect
1093 to see doubles on entry. The first op to use this slot will emit a
1094 f2i cast which will cancel out the i2f we insert here. */
1095 ins = lir->insLoadi(base, offset);
1096 ins = lir->ins1(LIR_i2f, ins);
1097 } else {
1098 JS_ASSERT(isNumber(*p) == (t == JSVAL_DOUBLE));
1099 if (t == JSVAL_DOUBLE) {
1100 ins = lir->insLoad(LIR_ldq, base, offset);
1101 } else {
1102 ins = lir->insLoad(LIR_ldp, base, offset);
/* Associate the freshly emitted load with the interpreter value location. */
1105 tracker.set(p, ins);
1106 #ifdef DEBUG
/* Debug-build only: synthesize a human-readable name ("$fun.arg", "$fun.var",
   or "$prefixN") for the LIR verbose listing. */
1107 char name[64];
1108 JS_ASSERT(strlen(prefix) < 10);
1109 void* mark = NULL;
1110 jsuword* localNames = NULL;
1111 const char* funName = NULL;
1112 if (*prefix == 'a' || *prefix == 'v') {
1113 mark = JS_ARENA_MARK(&cx->tempPool);
1114 if (JS_GET_LOCAL_NAME_COUNT(fp->fun) != 0)
1115 localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
1116 funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
1118 if (!strcmp(prefix, "argv")) {
1119 if (index < fp->fun->nargs) {
1120 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
1121 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1122 } else {
1123 JS_snprintf(name, sizeof name, "$%s.<arg%d>", funName, index);
1125 } else if (!strcmp(prefix, "vars")) {
1126 JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
1127 JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1128 } else {
1129 JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
1132 if (mark)
1133 JS_ARENA_RELEASE(&cx->tempPool, mark);
1134 addName(ins, name);
1136 static const char* typestr[] = {
1137 "object", "int", "double", "3", "string", "5", "boolean", "any"
1139 debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n", p, name, typestr[t & 7], t >> 3););
1140 #endif
/* Import every interned global slot and every live stack slot for the current
   call depth, using the supplied type maps to decide each slot's load type. */
1143 void
1144 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned callDepth,
1145 uint8* globalTypeMap, uint8* stackTypeMap)
1147 /* If we get a partial list that doesn't have all the types (i.e. recording from a side
1148 exit that was recorded but we added more global slots later), merge the missing types
1149 from the entry type map. This is safe because at the loop edge we verify that we
1150 have compatible types for all globals (entry type and loop edge type match). While
1151 a different trace of the tree might have had a guard with a different type map for
1152 these slots we just filled in here (the guard we continue from didn't know about them),
1153 since we didn't take that particular guard the only way we could have ended up here
1154 is if that other trace had at its end a compatible type distribution with the entry
1155 map. Since that's exactly what we used to fill in the types our current side exit
1156 didn't provide, this is always safe to do. */
1157 unsigned length;
1158 if (ngslots < (length = traceMonitor->globalTypeMap->length()))
1159 mergeTypeMaps(&globalTypeMap, &ngslots,
1160 traceMonitor->globalTypeMap->data(), length,
1161 (uint8*)alloca(sizeof(uint8) * length));
1162 JS_ASSERT(ngslots == traceMonitor->globalTypeMap->length());
1164 /* the first time we compile a tree this will be empty as we add entries lazily */
1165 uint16* gslots = traceMonitor->globalSlots->data();
1166 uint8* m = globalTypeMap;
1167 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1168 import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
1169 m++;
/* Stack slots start at -nativeStackBase relative to sp and are laid out one
   double (8 bytes) per slot. */
1171 ptrdiff_t offset = -treeInfo->nativeStackBase;
1172 m = stackTypeMap;
1173 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1174 import(sp, offset, vp, *m, vpname, vpnum, fp);
1175 m++; offset += sizeof(double);
1179 /* Lazily import a global slot if we don't already have it in the tracker.
1180 Returns false only if the slot number overflows the 16-bit slot table. */
1180 bool
1181 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
1183 if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
1184 return false;
1185 jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
1186 if (tracker.has(vp))
1187 return true; /* we already have it */
1188 unsigned index = traceMonitor->globalSlots->length();
1189 /* If this is the first global we are adding, remember the shape of the global object. */
1190 if (index == 0)
1191 traceMonitor->globalShape = OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->shape;
1192 /* Add the slot to the list of interned global slots. */
1193 traceMonitor->globalSlots->add(slot);
/* Respect the oracle: if this slot was previously found unstable as an int,
   record it as a double up front to avoid another unstable-loop recompile. */
1194 uint8 type = getCoercedType(*vp);
1195 if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx->fp->script, slot))
1196 type = JSVAL_DOUBLE;
1197 traceMonitor->globalTypeMap->add(type);
1198 import(gp_ins, slot*sizeof(double), vp, type, "global", index, NULL);
1199 return true;
1202 /* Write back a value onto the stack or global frames. Returns the store
1203 instruction so callers can cache it (see nativeFrameTracker in set()). */
1203 LIns*
1204 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
1206 /* Sink all type casts targeting the stack into the side exit by simply storing the original
1207 (uncasted) value. Each guard generates the side exit map based on the types of the
1208 last stores to every stack location, so it's safe to not perform them on-trace. */
1209 if (isPromoteInt(i))
1210 i = ::demote(lir, i);
1211 return lir->insStorei(i, base, offset);
1214 /* Update the tracker, then issue a write back store. */
1215 void
1216 TraceRecorder::set(jsval* p, LIns* i, bool initializing)
1218 JS_ASSERT(initializing || tracker.has(p));
1219 tracker.set(p, i);
1220 /* If we are writing to this location for the first time, calculate the offset into the
1221 native frame manually, otherwise just look up the last load or store associated with
1222 the same source address (p) and use the same offset/base. */
1223 LIns* x;
1224 if ((x = nativeFrameTracker.get(p)) == NULL) {
1225 if (isGlobal(p))
1226 x = writeBack(i, gp_ins, nativeGlobalOffset(p));
1227 else
1228 x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p));
1229 nativeFrameTracker.set(p, x);
1230 } else {
/* Cache hit: re-derive base/offset from the cached store instruction. The
   assertion cross-checks the cached addressing against a fresh computation. */
1231 #define ASSERT_VALID_CACHE_HIT(base, offset) \
1232 JS_ASSERT(base == lirbuf->sp || base == gp_ins); \
1233 JS_ASSERT(offset == ((base == lirbuf->sp) \
1234 ? -treeInfo->nativeStackBase + nativeStackOffset(p) \
1235 : nativeGlobalOffset(p))); \
1237 if (x->isop(LIR_st) || x->isop(LIR_stq)) {
1238 ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->oprnd3()->constval());
1239 writeBack(i, x->oprnd2(), x->oprnd3()->constval());
1240 } else {
1241 JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
1242 ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->immdisp());
1243 writeBack(i, x->oprnd2(), x->immdisp());
1246 #undef ASSERT_VALID_CACHE_HIT
/* Return the LIR instruction currently tracked for interpreter value location p. */
1249 LIns*
1250 TraceRecorder::get(jsval* p)
1252 return tracker.get(p);
1255 /* Determine whether a bytecode location (pc) terminates a loop or is a path within the loop. */
1256 static bool
1257 js_IsLoopExit(JSContext* cx, JSScript* script, jsbytecode* header, jsbytecode* pc)
1259 switch (*pc) {
/* A comparison op is immediately followed by the conditional branch that
   consumes it; skip over it and classify the branch instead. */
1260 case JSOP_LT:
1261 case JSOP_GT:
1262 case JSOP_LE:
1263 case JSOP_GE:
1264 case JSOP_NE:
1265 case JSOP_EQ:
1266 JS_ASSERT(js_CodeSpec[*pc].length == 1);
1267 pc++;
1268 /* FALL THROUGH */
1270 case JSOP_IFEQ:
1271 case JSOP_IFEQX:
1272 case JSOP_IFNE:
1273 case JSOP_IFNEX:
1275 * Forward jumps are usually intra-branch, but for-in loops jump to the trailing enditer to
1276 * clean up, so check for that case here.
1278 if (pc[GET_JUMP_OFFSET(pc)] == JSOP_ENDITER)
1279 return true;
/* A backward branch to the loop header is the loop edge itself. */
1280 return pc + GET_JUMP_OFFSET(pc) == header;
1282 default:;
1284 return false;
/* Per-frame bookkeeping pushed on the trace's call stack (rp) for each
   inlined JS call; js_SynthesizeFrame rebuilds an interpreter frame from it. */
1287 struct FrameInfo {
1288 JSObject* callee; // callee function object
1289 jsbytecode* callpc; // pc of JSOP_CALL in caller script
1290 union {
1291 struct {
1292 uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
1293 uint16 argc; // actual argument count, may be < fun->nargs
1294 } s;
1295 uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
1299 /* Promote slots if necessary to match the called tree's type map and report error if that's
1300 impossible. On failure the inner tree is marked for trashing so it gets re-recorded
1300 with the undemoted slot types. */
1301 bool
1302 TraceRecorder::adjustCallerTypes(Fragment* f)
1304 JSTraceMonitor* tm = traceMonitor;
1305 uint8* m = tm->globalTypeMap->data();
1306 uint16* gslots = traceMonitor->globalSlots->data();
1307 unsigned ngslots = traceMonitor->globalSlots->length();
1308 JSScript* script = ((TreeInfo*)f->vmprivate)->script;
1309 uint8* map = ((TreeInfo*)f->vmprivate)->stackTypeMap.data();
1310 bool ok = true;
/* Globals: a promoted int flowing into a double slot is re-stored as double;
   a genuine double flowing into an int slot is unfixable here — tell the
   oracle and fail. */
1311 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1312 LIns* i = get(vp);
1313 bool isPromote = isPromoteInt(i);
1314 if (isPromote && *m == JSVAL_DOUBLE)
1315 lir->insStorei(get(vp), gp_ins, nativeGlobalOffset(vp));
1316 else if (!isPromote && *m == JSVAL_INT) {
1317 oracle.markGlobalSlotUndemotable(script, nativeGlobalOffset(vp)/sizeof(double));
1318 ok = false;
1320 ++m;
/* Same reconciliation for the stack slots of the (top-level) frame. */
1322 m = map;
1323 FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
1324 LIns* i = get(vp);
1325 bool isPromote = isPromoteInt(i);
1326 if (isPromote && *m == JSVAL_DOUBLE)
1327 lir->insStorei(get(vp), lirbuf->sp,
1328 -treeInfo->nativeStackBase + nativeStackOffset(vp));
1329 else if (!isPromote && *m == JSVAL_INT) {
1330 oracle.markStackSlotUndemotable(script, (jsbytecode*)f->ip, unsigned(m - map));
1331 ok = false;
1333 ++m;
1335 JS_ASSERT(f == f->root);
1336 if (!ok) {
1337 trashTree = true;
1338 whichTreeToTrash = f;
1340 return ok;
1343 /* Find a peer fragment that we can call, considering our current type distribution. */
1344 bool TraceRecorder::selectCallablePeerFragment(Fragment** first)
1346 /* Until we have multiple trees per start point this is always the first fragment. */
1347 return (*first)->code();
/* Capture a side exit at the current pc: fills in the member `exit` structure
   (ip/sp/rp adjustments, slot counts, exit type) and a freshly captured type
   map describing every global and stack slot, and returns its address. */
1350 SideExit*
1351 TraceRecorder::snapshot(ExitType exitType)
1353 JSStackFrame* fp = cx->fp;
/* A branch exit that leaves the loop is really a loop exit. */
1354 if (exitType == BRANCH_EXIT &&
1355 js_IsLoopExit(cx, fp->script, (jsbytecode*)fragment->root->ip, fp->regs->pc))
1356 exitType = LOOP_EXIT;
1357 /* Generate the entry map and stash it in the trace. */
1358 unsigned stackSlots = js_NativeStackSlots(cx, callDepth);
1359 /* It's sufficient to track the native stack use here since all stores above the
1360 stack watermark defined by guards are killed. */
1361 trackNativeStackUse(stackSlots + 1);
1362 /* reserve space for the type map */
1363 unsigned ngslots = traceMonitor->globalSlots->length();
1364 LIns* data = lir_buf_writer->skip((stackSlots + ngslots) * sizeof(uint8));
1365 /* setup side exit structure */
1366 memset(&exit, 0, sizeof(exit));
1367 exit.from = fragment;
1368 exit.calldepth = callDepth;
1369 exit.numGlobalSlots = ngslots;
1370 exit.numStackSlots = stackSlots;
1371 exit.numStackSlotsBelowCurrentFrame = cx->fp->callee
1372 ? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
1373 : 0;
1374 exit.exitType = exitType;
1375 /* If we take a snapshot on a goto, advance to the target address. This avoids inner
1376 trees returning on a break goto, which the outer recorder then would confuse with
1377 a break in the outer tree. */
1378 jsbytecode* pc = fp->regs->pc;
1379 JS_ASSERT(!(((*pc == JSOP_GOTO) || (*pc == JSOP_GOTOX)) && (exitType != LOOP_EXIT)));
1380 if (*pc == JSOP_GOTO)
1381 pc += GET_JUMP_OFFSET(pc);
1382 else if (*pc == JSOP_GOTOX)
1383 pc += GET_JUMPX_OFFSET(pc);
1384 exit.ip_adj = pc - (jsbytecode*)fragment->root->ip;
1385 exit.sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
1386 exit.rp_adj = exit.calldepth * sizeof(FrameInfo);
1387 uint8* m = exit.typeMap = (uint8 *)data->payload();
1388 /* Determine the type of a store by looking at the current type of the actual value the
1389 interpreter is using. For numbers we have to check what kind of store we used last
1390 (integer or double) to figure out what the side exit should reflect in its typemap. */
1391 FORALL_SLOTS(cx, ngslots, traceMonitor->globalSlots->data(), callDepth,
1392 LIns* i = get(vp);
1393 *m = isNumber(*vp)
1394 ? (isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE)
1395 : JSVAL_TAG(*vp);
1396 JS_ASSERT((*m != JSVAL_INT) || isInt32(*vp));
1397 ++m;
1399 JS_ASSERT(unsigned(m - exit.typeMap) == ngslots + stackSlots);
1400 return &exit;
1403 /* Emit a guard for condition (cond), expecting it to evaluate to boolean result (expected);
1404 LIR_xf exits if the condition is false, LIR_xt if it is true. */
1404 LIns*
1405 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
1407 return lir->insGuard(expected ? LIR_xf : LIR_xt,
1408 cond,
1409 snapshot(exitType));
1412 /* Try to match the type of a slot to type t. checkType is used to verify that the type of
1413 values flowing into the loop edge is compatible with the type we expect in the loop header.
1413 Returns false on a hard mismatch; sets `unstable` (and still returns true) when the slot
1413 must be recompiled undemoted. */
1414 bool
1415 TraceRecorder::checkType(jsval& v, uint8 t, bool& unstable)
1417 if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
1418 if (!isNumber(v))
1419 return false; /* not a number? type mismatch */
1420 LIns* i = get(&v);
1421 if (!i->isop(LIR_i2f)) {
1422 debug_only_v(printf("int slot is !isInt32, slot #%d, triggering re-compilation\n",
1423 !isGlobal(&v)
1424 ? nativeStackOffset(&v)
1425 : nativeGlobalOffset(&v)););
1426 AUDIT(slotPromoted);
1427 unstable = true;
1428 return true; /* keep checking types, but request re-compilation */
1430 /* Looks good, slot is an int32, the last instruction should be i2f. */
1431 JS_ASSERT(isInt32(v) && i->isop(LIR_i2f));
1432 /* We got the final LIR_i2f as we expected. Overwrite the value in that
1433 slot with the argument of i2f since we want the integer store to flow along
1434 the loop edge, not the casted value. */
1435 set(&v, i->oprnd1());
1436 return true;
1438 if (t == JSVAL_DOUBLE) {
1439 if (!isNumber(v))
1440 return false; /* not a number? type mismatch */
1441 LIns* i = get(&v);
1442 /* We sink i2f conversions into the side exit, but at the loop edge we have to make
1443 sure we promote back to double if at loop entry we want a double. */
1444 if (isPromoteInt(i))
1445 set(&v, lir->ins1(LIR_i2f, i));
1446 return true;
1448 /* for non-number types we expect a precise match of the type */
1449 #ifdef DEBUG
1450 if (JSVAL_TAG(v) != t) {
1451 debug_only_v(printf("Type mismatch: val %c, map %c ", "OID?S?B"[JSVAL_TAG(v)],
1452 "OID?S?B"[t]););
1454 #endif
1455 return JSVAL_TAG(v) == t;
1458 /* Make sure that the current values in the given stack frame and all stack frames
1459 up to and including entryFrame are type-compatible with the entry map. Returns
1459 false on a hard mismatch OR when any slot needs undemoting (in the latter case
1459 the tree is also flagged for trashing so it is re-recorded). */
1460 bool
1461 TraceRecorder::verifyTypeStability()
1463 unsigned ngslots = traceMonitor->globalSlots->length();
1464 uint16* gslots = traceMonitor->globalSlots->data();
1465 uint8* typemap = traceMonitor->globalTypeMap->data();
1466 JS_ASSERT(traceMonitor->globalTypeMap->length() == ngslots);
1467 bool recompile = false;
1468 uint8* m = typemap;
1469 FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
1470 bool demote = false;
1471 if (!checkType(*vp, *m, demote))
1472 return false;
1473 if (demote) {
1474 oracle.markGlobalSlotUndemotable(cx->fp->script, gslots[n]);
1475 recompile = true;
1479 typemap = treeInfo->stackTypeMap.data();
1480 m = typemap;
1481 FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1482 bool demote = false;
1483 if (!checkType(*vp, *m, demote))
1484 return false;
1485 if (demote) {
1486 oracle.markStackSlotUndemotable(cx->fp->script, (jsbytecode*)fragment->ip,
1487 unsigned(m - typemap));
1488 recompile = true;
1492 if (recompile)
1493 trashTree = true;
1494 return !recompile;
1497 /* Check whether the current pc location is the loop header of the loop this recorder records. */
1498 bool
1499 TraceRecorder::isLoopHeader(JSContext* cx) const
1501 return cx->fp->regs->pc == fragment->root->ip;
1504 /* Compile the current fragment: rejects (blacklists) traces that use too much
1505 native stack, assembles the LIR, patches the anchor guard if this is a branch
1505 trace, and publishes the TreeInfo on root fragments. */
1505 void
1506 TraceRecorder::compile(Fragmento* fragmento)
1508 if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
1509 debug_only_v(printf("Trace rejected: excessive stack use.\n"));
1510 fragment->blacklist();
1511 return;
1513 ::compile(fragmento->assm(), fragment);
1514 if (anchor) {
1515 fragment->addLink(anchor);
1516 fragmento->assm()->patch(anchor);
1518 JS_ASSERT(fragment->code());
1519 JS_ASSERT(!fragment->vmprivate);
1520 if (fragment == fragment->root)
1521 fragment->vmprivate = treeInfo;
1522 /* :TODO: windows support */
1523 #if defined DEBUG && !defined WIN32
/* Debug builds label the fragment "<filename>:<line>" for verbose output. */
1524 char* label = (char*)malloc(strlen(cx->fp->script->filename) + 64);
1525 sprintf(label, "%s:%u", cx->fp->script->filename,
1526 js_PCToLineNumber(cx, cx->fp->script, cx->fp->regs->pc));
1527 fragmento->labels->add(fragment, sizeof(Fragment), 0, label);
1528 free(label);
1529 #endif
1530 AUDIT(traceCompleted);
1533 /* Complete and compile a trace and link it to the existing tree if appropriate. */
1534 void
1535 TraceRecorder::closeLoop(Fragmento* fragmento)
1537 if (!verifyTypeStability()) {
1538 AUDIT(unstableLoopVariable);
1539 debug_only_v(printf("Trace rejected: unstable loop variables.\n");)
1540 return;
1542 SideExit *exit = snapshot(LOOP_EXIT);
1543 exit->target = fragment->root;
/* Root fragments loop back to themselves (LIR_loop); branch fragments end in
   an unconditional exit (LIR_x) back to the root. */
1544 if (fragment == fragment->root) {
1545 fragment->lastIns = lir->insGuard(LIR_loop, lir->insImm(1), exit);
1546 } else {
1547 fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exit);
1549 compile(fragmento);
1552 /* Emit an always-exit guard and compile the tree (used for break statements). */
1553 void
1554 TraceRecorder::endLoop(Fragmento* fragmento)
1556 SideExit *exit = snapshot(LOOP_EXIT);
1557 fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exit);
1558 compile(fragmento);
1561 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
1562 void
1563 TraceRecorder::prepareTreeCall(Fragment* inner)
1565 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
1566 inner_sp_ins = lirbuf->sp;
1567 /* The inner tree expects to be called from the current frame. If the outer tree (this
1568 trace) is currently inside a function inlining code (calldepth > 0), we have to advance
1569 the native stack pointer such that we match what the inner trace expects to see. We
1570 move it back when we come out of the inner tree call. */
1571 if (callDepth > 0) {
1572 /* Calculate the amount we have to lift the native stack pointer by to compensate for
1573 any outer frames that the inner tree doesn't expect but the outer tree has. */
1574 ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
1575 /* Calculate the amount we have to lift the call stack by. */
1576 ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo);
1577 /* Guard that we have enough stack space for the tree we are trying to call on top
1578 of the new value for sp. */
1579 debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
1580 sp_adj, treeInfo->nativeStackBase, ti->nativeStackBase));
1581 LIns* sp_top = lir->ins2i(LIR_piadd, lirbuf->sp,
1582 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
1583 + sp_adj /* adjust for stack in outer frame inner tree can't see */
1584 + ti->maxNativeStackSlots * sizeof(double)); /* plus the inner tree's stack */
1585 guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
1586 /* Guard that we have enough call stack space. */
1587 LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
1588 ti->maxCallDepth * sizeof(FrameInfo));
1589 guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
1590 /* We have enough space, so adjust sp and rp to their new level. */
1591 lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
1592 - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
1593 + sp_adj /* adjust for stack in outer frame inner tree can't see */
1594 + ti->nativeStackBase), /* plus the inner tree's stack base */
1595 lirbuf->state, offsetof(InterpState, sp));
1596 lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
1597 lirbuf->state, offsetof(InterpState, rp));
1601 /* Record a call to an inner tree: emits the F_CallTree invocation, re-imports
1602 every slot the inner tree may have changed, restores sp/rp, and guards that
1602 execution actually left the inner tree through the expected side exit. */
1602 void
1603 TraceRecorder::emitTreeCall(Fragment* inner, GuardRecord* lr)
1605 JS_ASSERT(lr->exit->exitType == LOOP_EXIT && !lr->calldepth);
1606 TreeInfo* ti = (TreeInfo*)inner->vmprivate;
1607 /* Invoke the inner tree. */
1608 LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
1609 LIns* ret = lir->insCall(F_CallTree, args);
1610 /* Read back all registers, in case the called tree changed any of them. */
1611 SideExit* exit = lr->exit;
1612 import(ti, inner_sp_ins, exit->numGlobalSlots, exit->calldepth,
1613 exit->typeMap, exit->typeMap + exit->numGlobalSlots);
1614 /* Store the guard pointer in case we exit on an unexpected guard. */
1615 lir->insStorei(ret, lirbuf->state, offsetof(InterpState, nestedExit));
1616 /* Restore sp and rp to their original values (we still have them in a register). */
1617 if (callDepth > 0) {
1618 lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
1619 lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
1621 /* Guard that we come out of the inner tree along the same side exit we came out when
1622 we called the inner tree at recording time. */
1623 guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(lr)), NESTED_EXIT);
1624 /* Register us as a dependent tree of the inner tree. */
1625 ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
1628 /* Add an if/if-else control-flow merge point to the list of known merge points. */
1629 void
1630 TraceRecorder::trackCfgMerges(jsbytecode* pc)
1632 /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
1633 JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
1634 jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
1635 if (sn != NULL) {
/* SRC_IF: merge point is the branch target; SRC_IF_ELSE: it is the note's
   recorded offset past the else arm. */
1636 if (SN_TYPE(sn) == SRC_IF) {
1637 cfgMerges.add((*pc == JSOP_IFEQ)
1638 ? pc + GET_JUMP_OFFSET(pc)
1639 : pc + GET_JUMPX_OFFSET(pc));
1640 } else if (SN_TYPE(sn) == SRC_IF_ELSE)
1641 cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
1645 /* Emit code for a fused IFEQ/IFNE: guard on the branch condition, and for IFEQ
1646 also record the if/if-else merge point. */
1646 void
1647 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
1649 if (*pc == JSOP_IFEQ) {
1650 guard(cond, x, BRANCH_EXIT);
1651 trackCfgMerges(pc);
1652 } else if (*pc == JSOP_IFNE) {
1653 guard(cond, x, BRANCH_EXIT);
/* Report the live top-of-stack watermark for the stack filter: sp-relative
   traces use the exit's sp adjustment, rp-relative ones the rp adjustment.
   NOTE(review): the function's return-type line is missing from this extract
   (embedded lines 1654-1657 were dropped) — confirm against upstream. */
1658 nanojit::StackFilter::getTop(LInsp guard)
1660 if (sp == frag->lirbuf->sp)
1661 return guard->exit()->sp_adj + sizeof(double);
1662 JS_ASSERT(sp == frag->lirbuf->rp);
1663 return guard->exit()->rp_adj + sizeof(FrameInfo);
1666 #if defined NJ_VERBOSE
/* Verbose-build pretty-printer for a guard instruction: shows the guard's
   condition, target ip label, and the exit's sp/rp adjustments. */
1667 void
1668 nanojit::LirNameMap::formatGuard(LIns *i, char *out)
1670 uint32_t ip;
1671 SideExit *x;
1673 x = (SideExit *)i->exit();
1674 ip = intptr_t(x->from->ip) + x->ip_adj;
1675 sprintf(out,
1676 "%s: %s %s -> %s sp%+ld rp%+ld",
1677 formatRef(i),
1678 lirNames[i->opcode()],
1679 i->oprnd1()->isCond() ? formatRef(i->oprnd1()) : "",
1680 labels->format((void *)ip),
1681 (long int)x->sp_adj,
1682 (long int)x->rp_adj
1685 #endif
/* Populate a GuardRecord from the guard instruction's side exit. */
1687 void
1688 nanojit::Assembler::initGuardRecord(LIns *guard, GuardRecord *rec)
1690 SideExit *exit;
1692 exit = guard->exit();
1693 rec->guard = guard;
1694 rec->calldepth = exit->calldepth;
1695 rec->exit = exit;
1696 verbose_only(rec->sid = exit->sid);
/* Intentionally empty hook: exit bookkeeping happens in the recovery code. */
1699 void
1700 nanojit::Assembler::asm_bailout(LIns *guard, Register state)
1702 /* we adjust ip/sp/rp when exiting from the tree in the recovery code */
/* Fragment teardown hook: root fragments own their merge counts and LIR buffer;
   every fragment owns its TreeInfo (only roots set vmprivate — see compile()). */
1705 void
1706 nanojit::Fragment::onDestroy()
1708 if (root == this) {
1709 delete mergeCounts;
1710 delete lirbuf;
1712 delete (TreeInfo *)vmprivate;
/* Tear down the active recorder and re-enable last-ditch GC (see
   js_StartRecorder, which sets cx->executingTrace). */
1715 void
1716 js_DeleteRecorder(JSContext* cx)
1718 /* Aborting and completing a trace end up here. */
1719 JS_ASSERT(cx->executingTrace);
1720 cx->executingTrace = false;
1722 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
1723 delete tm->recorder;
1724 tm->recorder = NULL;
/* Create and install a TraceRecorder on the trace monitor; returns false (after
   aborting) if recorder construction raised an exception. */
1727 static bool
1728 js_StartRecorder(JSContext* cx, GuardRecord* anchor, Fragment* f, TreeInfo* ti,
1729 unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
1730 GuardRecord* expectedInnerExit)
1733 * Emulate on-trace semantics and avoid rooting headaches while recording,
1734 * by suppressing last-ditch GC attempts while recording a trace. This does
1735 * mean that trace recording must not nest or the following assertion will
1736 * botch.
1738 JS_ASSERT(!cx->executingTrace);
1739 cx->executingTrace = true;
1741 /* start recording if no exception during construction */
1742 JS_TRACE_MONITOR(cx).recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
1743 ngslots, globalTypeMap, stackTypeMap,
1744 expectedInnerExit);
1745 if (cx->throwing) {
1746 js_AbortRecording(cx, NULL, "setting up recorder failed");
1747 return false;
1749 /* clear any leftover error state */
1750 JS_TRACE_MONITOR(cx).fragmento->assm()->setError(None);
1751 return true;
/* Release a compiled root tree's code and TreeInfo, recursively trashing every
   tree that registered itself as a dependent (see emitTreeCall). No-op if the
   fragment has no compiled code. */
1754 static void
1755 js_TrashTree(JSContext* cx, Fragment* f)
1757 JS_ASSERT((!f->code()) == (!f->vmprivate));
1758 JS_ASSERT(f == f->root);
1759 if (!f->code())
1760 return;
1761 AUDIT(treesTrashed);
1762 debug_only_v(printf("Trashing tree info.\n");)
1763 Fragmento* fragmento = JS_TRACE_MONITOR(cx).fragmento;
1764 TreeInfo* ti = (TreeInfo*)f->vmprivate;
/* Clear vmprivate before recursing so the leading assertion holds for
   re-entrant visits. */
1765 f->vmprivate = NULL;
1766 f->releaseCode(fragmento);
1767 Fragment** data = ti->dependentTrees.data();
1768 unsigned length = ti->dependentTrees.length();
1769 for (unsigned n = 0; n < length; ++n)
1770 js_TrashTree(cx, data[n]);
1771 delete ti;
1772 JS_ASSERT(!f->code() && !f->vmprivate);
/* Reconstruct an interpreter stack frame for one inlined call described by fi,
   pushing it onto cx's frame chain and stack arena. Returns the number of
   native stack slots the new frame occupies, or 0 on out-of-quota. */
1775 static unsigned
1776 js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
1778 JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));
1780 JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);
1781 JS_ASSERT(FUN_INTERPRETED(fun));
1783 JSArena* a = cx->stackPool.current;
1784 void* newmark = (void*) a->avail;
1785 JSScript* script = fun->u.i.script;
1787 // Assert that we have a correct sp distance from cx->fp->slots in fi.
1788 JS_ASSERT(js_ReconstructStackDepth(cx, cx->fp->script, fi.callpc) ==
1789 uintN(fi.s.spdist - cx->fp->script->nfixed));
1791 uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
1792 size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);
1794 /* Allocate the inline frame with its vars and operands. */
1795 jsval* newsp;
/* Fast path: carve the frame out of the current arena if it fits; otherwise
   fall back to the (possibly allocating) arena macro. */
1796 if (a->avail + nbytes <= a->limit) {
1797 newsp = (jsval *) a->avail;
1798 a->avail += nbytes;
1799 } else {
1800 JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
1801 if (!newsp) {
1802 js_ReportOutOfScriptQuota(cx);
1803 return 0;
1807 /* Claim space for the stack frame and initialize it. */
1808 JSInlineFrame* newifp = (JSInlineFrame *) newsp;
1809 newsp += nframeslots;
1811 newifp->frame.callobj = NULL;
1812 newifp->frame.argsobj = NULL;
1813 newifp->frame.varobj = NULL;
1814 newifp->frame.script = script;
1815 newifp->frame.callee = fi.callee;
1816 newifp->frame.fun = fun;
1818 newifp->frame.argc = fi.s.argc;
1819 newifp->callerRegs.pc = fi.callpc;
1820 newifp->callerRegs.sp = cx->fp->slots + fi.s.spdist;
1821 newifp->frame.argv = newifp->callerRegs.sp - JS_MAX(fun->nargs, fi.s.argc);
1822 JS_ASSERT(newifp->frame.argv >= StackBase(cx->fp));
1824 newifp->frame.rval = JSVAL_VOID;
1825 newifp->frame.down = cx->fp;
1826 newifp->frame.annotation = NULL;
1827 newifp->frame.scopeChain = OBJ_GET_PARENT(cx, fi.callee);
1828 newifp->frame.sharpDepth = 0;
1829 newifp->frame.sharpArray = NULL;
1830 newifp->frame.flags = 0;
1831 newifp->frame.dormantNext = NULL;
1832 newifp->frame.xmlNamespace = NULL;
1833 newifp->frame.blockChain = NULL;
1834 newifp->mark = newmark;
1835 newifp->frame.thisp = NULL; // will be set by js_ExecuteTree -> FlushNativeStackFrame
1837 newifp->frame.regs = cx->fp->regs;
1838 newifp->frame.regs->pc = script->code;
1839 newifp->frame.regs->sp = newsp + script->nfixed;
1840 newifp->frame.slots = newsp;
1841 if (script->staticDepth < JS_DISPLAY_SIZE) {
1842 JSStackFrame **disp = &cx->display[script->staticDepth];
1843 newifp->frame.displaySave = *disp;
1844 *disp = &newifp->frame;
1846 #ifdef DEBUG
1847 newifp->frame.pcDisabledSave = 0;
1848 #endif
1850 cx->fp->regs = &newifp->callerRegs;
1851 cx->fp = &newifp->frame;
1853 // FIXME: we must count stack slots from caller's operand stack up to (but not including)
1854 // callee's, including missing arguments. Could we shift everything down to the caller's
1855 // fp->slots (where vars start) and avoid some of the complexity?
1856 return (fi.s.spdist - cx->fp->down->script->nfixed) +
1857 ((fun->nargs > cx->fp->argc) ? fun->nargs - cx->fp->argc : 0) +
1858 script->nfixed;
/* Begin recording a brand-new root tree at fragment f: validates the global
   shape and type map (flushing the whole JIT cache on mismatch), finds or
   allocates an unused peer fragment, builds its TreeInfo, and kicks off the
   recorder. Returns false if recording could not start. */
1861 bool
1862 js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f)
1864 /* Make sure the global type map didn't change on us. */
1865 uint32 globalShape = OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->shape;
1866 if (tm->globalShape != globalShape) {
1867 debug_only_v(printf("Global shape mismatch (%u vs. %u) in RecordTree, flushing cache.\n",
1868 globalShape, tm->globalShape);)
1869 js_FlushJITCache(cx);
1870 return false;
1872 TypeMap current;
1873 current.captureGlobalTypes(cx, *tm->globalSlots);
1874 if (!current.matches(*tm->globalTypeMap)) {
1875 js_FlushJITCache(cx);
1876 debug_only_v(printf("Global type map mismatch in RecordTree, flushing cache.\n");)
1877 return false;
1880 AUDIT(recorderStarted);
1882 /* Try to find an unused peer fragment, or allocate a new one. */
1883 while (f->code() && f->peer)
1884 f = f->peer;
1885 if (f->code())
1886 f = JS_TRACE_MONITOR(cx).fragmento->newLoop(f->ip);
1888 f->calldepth = 0;
1889 f->root = f;
1890 /* allocate space to store the LIR for this tree */
1891 if (!f->lirbuf) {
1892 f->lirbuf = new (&gc) LirBuffer(tm->fragmento, builtins);
1893 #ifdef DEBUG
1894 f->lirbuf->names = new (&gc) LirNameMap(&gc, builtins, tm->fragmento->labels);
1895 #endif
1898 JS_ASSERT(!f->code() && !f->vmprivate);
1900 /* setup the VM-private treeInfo structure for this fragment */
1901 TreeInfo* ti = new (&gc) TreeInfo(f);
1903 /* capture the coerced type of each active slot in the stack type map */
1904 ti->stackTypeMap.captureStackTypes(cx, 0/*callDepth*/);
1906 /* determine the native frame layout at the entry point */
1907 unsigned entryNativeStackSlots = ti->stackTypeMap.length();
1908 JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/));
1909 ti->nativeStackBase = (entryNativeStackSlots -
1910 (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
1911 ti->maxNativeStackSlots = entryNativeStackSlots;
1912 ti->maxCallDepth = 0;
1913 ti->script = cx->fp->script;
1915 /* recording primary trace */
1916 return js_StartRecorder(cx, NULL, f, ti,
1917 tm->globalSlots->length(), tm->globalTypeMap->data(),
1918 ti->stackTypeMap.data(), NULL);
/*
 * Try to grow an existing tree by attaching a branch trace at the side exit
 * described by |anchor|.  If the exit is hot enough (>= HOTEXIT hits) a new
 * recorder is started for the branch; otherwise returns false.  |exitedFrom|,
 * when non-NULL, is the innermost guard actually hit when we bailed out of a
 * nested tree, and its type map is merged with the anchor's (see below).
 */
1921 static bool
1922 js_AttemptToExtendTree(JSContext* cx, GuardRecord* anchor, GuardRecord* exitedFrom)
1924 Fragment* f = anchor->from->root;
1925 JS_ASSERT(f->vmprivate);
1927 debug_only_v(printf("trying to attach another branch to the tree\n");)
1929 Fragment* c;
/* Lazily create the branch fragment hanging off this anchor the first time. */
1930 if (!(c = anchor->target)) {
1931 c = JS_TRACE_MONITOR(cx).fragmento->createBranch(anchor, anchor->exit);
1932 c->spawnedFrom = anchor->guard;
1933 c->parent = f;
1934 anchor->exit->target = c;
1935 anchor->target = c;
1936 c->root = f;
1939 if (++c->hits() >= HOTEXIT) {
1940 /* start tracing secondary trace from this point */
1941 c->lirbuf = f->lirbuf;
1942 unsigned ngslots;
1943 uint8* globalTypeMap;
1944 uint8* stackTypeMap;
1945 TypeMap fullMap;
1946 if (exitedFrom == NULL) {
1947 /* If we are coming straight from a simple side exit, just use that exit's type map
1948 as starting point. */
1949 SideExit* e = anchor->exit;
1950 ngslots = e->numGlobalSlots;
1951 globalTypeMap = e->typeMap;
1952 stackTypeMap = globalTypeMap + ngslots;
1953 } else {
1954 /* If we side-exited on a loop exit and continue on a nesting guard, the nesting
1955 guard (anchor) has the type information for everything below the current scope,
1956 and the actual guard we exited from has the types for everything in the current
1957 scope (and whatever it inlined). We have to merge those maps here. */
1958 SideExit* e1 = anchor->exit;
1959 SideExit* e2 = exitedFrom->exit;
1960 fullMap.add(e1->typeMap + e1->numGlobalSlots, e1->numStackSlotsBelowCurrentFrame);
1961 fullMap.add(e2->typeMap + e2->numGlobalSlots, e2->numStackSlots);
1962 ngslots = e2->numGlobalSlots;
1963 globalTypeMap = e2->typeMap;
1964 stackTypeMap = fullMap.data();
1966 return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate,
1967 ngslots, globalTypeMap, stackTypeMap, exitedFrom);
1969 return false;
1972 static GuardRecord*
1973 js_ExecuteTree(JSContext* cx, Fragment** treep, uintN& inlineCallCount,
1974 GuardRecord** innermostNestedGuardp);
/*
 * Called on every loop edge while a recorder |r| is active.  Closes the loop
 * if we returned to our own header; otherwise tries to call an already
 * compiled inner tree (nesting), grow that inner tree, or aborts recording.
 * Returns true iff recording should continue.
 */
1976 bool
1977 js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, jsbytecode* oldpc, uintN& inlineCallCount)
1979 #ifdef JS_THREADSAFE
1980 if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
1981 debug_only_v(printf("Global object not owned by this context.\n"););
1982 return false; /* we stay away from shared global objects */
1984 #endif
1985 Fragmento* fragmento = JS_TRACE_MONITOR(cx).fragmento;
1986 /* If we hit our own loop header, close the loop and compile the trace. */
1987 if (r->isLoopHeader(cx)) {
1988 if (fragmento->assm()->error()) {
1989 js_AbortRecording(cx, oldpc, "Error during recording");
1990 /* If we ran out of memory, flush the code cache and abort. */
1991 if (fragmento->assm()->error() == OutOMem)
1992 js_FlushJITCache(cx);
1993 return false; /* done recording */
1995 r->closeLoop(fragmento);
1996 js_DeleteRecorder(cx);
1997 return false; /* done recording */
1999 /* does this branch go to an inner loop? */
2000 Fragment* f = fragmento->getLoop(cx->fp->regs->pc);
2001 if (nesting_enabled &&
2002 f && /* must have a fragment at that location */
2003 r->selectCallablePeerFragment(&f) && /* is there a potentially matching peer fragment? */
2004 r->adjustCallerTypes(f)) { /* make sure we can make our arguments fit */
2005 r->prepareTreeCall(f);
2006 GuardRecord* innermostNestedGuard = NULL;
2007 GuardRecord* lr = js_ExecuteTree(cx, &f, inlineCallCount, &innermostNestedGuard);
2008 if (!lr) {
2009 /* js_ExecuteTree might have flushed the cache and aborted us already. */
2010 if (JS_TRACE_MONITOR(cx).recorder)
2011 js_AbortRecording(cx, oldpc, "Couldn't call inner tree");
2012 return false;
2014 switch (lr->exit->exitType) {
2015 case LOOP_EXIT:
2016 /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
2017 if (innermostNestedGuard) {
2018 js_AbortRecording(cx, oldpc,
2019 "Inner tree took different side exit, abort recording");
2020 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr);
2022 /* emit a call to the inner tree and continue recording the outer tree trace */
2023 r->emitTreeCall(f, lr);
2024 return true;
2025 case BRANCH_EXIT:
2026 /* abort recording the outer tree, extend the inner tree */
2027 js_AbortRecording(cx, oldpc, "Inner tree is trying to grow, abort outer recording");
2028 return js_AttemptToExtendTree(cx, lr, NULL);
2029 default:
2030 debug_only_v(printf("exit_type=%d\n", lr->exit->exitType);)
2031 js_AbortRecording(cx, oldpc, "Inner tree not suitable for calling");
2032 return false;
2035 /* try to unroll the inner loop a bit, maybe it connects back to our loop header eventually */
2036 if ((!f || !f->code()) && r->trackLoopEdges())
2037 return true;
2038 /* not returning to our own loop header, not an inner loop we can call, abort trace */
2039 AUDIT(returnToDifferentLoopHeader);
2040 debug_only_v(printf("loop edge %d -> %d, header %d\n",
2041 oldpc - cx->fp->script->code,
2042 cx->fp->regs->pc - cx->fp->script->code,
2043 (jsbytecode*)r->getFragment()->root->ip - cx->fp->script->code));
2044 js_AbortRecording(cx, oldpc, "Loop edge does not return to header");
2045 return false;
/*
 * Execute the compiled trace for *treep, marshalling the interpreter state
 * into native (unboxed double) stack/global frames before entry and flushing
 * it back after the trace side-exits.  Returns the guard record of the exit
 * taken, or NULL if the tree could not be entered (no code, shape/type-map
 * mismatch).  On a nested exit, *innermostNestedGuardp receives the outermost
 * nesting guard that was hit while unwinding.
 */
2048 static inline GuardRecord*
2049 js_ExecuteTree(JSContext* cx, Fragment** treep, uintN& inlineCallCount,
2050 GuardRecord** innermostNestedGuardp)
2052 Fragment* f = *treep;
2054 /* if we don't have a compiled tree available for this location, bail out */
2055 if (!f->code()) {
2056 JS_ASSERT(!f->vmprivate);
2057 return NULL;
2059 JS_ASSERT(f->vmprivate);
2061 AUDIT(traceTriggered);
2063 /* execute previously recorded trace */
2064 TreeInfo* ti = (TreeInfo*)f->vmprivate;
2066 debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u\n",
2067 cx->fp->script->filename,
2068 js_PCToLineNumber(cx, cx->fp->script, cx->fp->regs->pc),
2069 cx->fp->regs->pc - cx->fp->script->code, ti->maxNativeStackSlots););
2071 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2072 unsigned ngslots = tm->globalSlots->length();
2073 uint16* gslots = tm->globalSlots->data();
2074 JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
2075 unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
/* One extra slot is allocated and seeded with a sentinel in DEBUG builds so
   we can assert below that the trace didn't overrun the global frame. */
2076 double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
2077 debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
2078 double* stack = (double*)alloca(MAX_NATIVE_STACK_SLOTS * sizeof(double));
2080 /* If any of our trees uses globals, the shape of the global object must not change and
2081 the global type map must remain applicable at all times (we expect absolute type
2082 stability for globals). */
2083 if (ngslots &&
2084 (OBJ_SCOPE(globalObj)->shape != tm->globalShape ||
2085 !BuildNativeGlobalFrame(cx, ngslots, gslots, tm->globalTypeMap->data(), global))) {
2086 AUDIT(globalShapeMismatchAtEntry);
2087 debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
2088 OBJ_SCOPE(globalObj)->shape, tm->globalShape);)
/* Save f->ip before flushing: the flush deletes all fragments including f. */
2089 const void* ip = f->ip;
2090 js_FlushJITCache(cx);
2091 *treep = tm->fragmento->newLoop(ip);
2092 return NULL;
2095 if (!BuildNativeStackFrame(cx, 0/*callDepth*/, ti->stackTypeMap.data(), stack)) {
2096 AUDIT(typeMapMismatchAtEntry);
2097 debug_only_v(printf("type-map mismatch.\n");)
/* Repeated entry-type mismatches mean the tree was specialized for types we
   no longer see; trash it after MAX_MISMATCH strikes and blacklist the ip. */
2098 if (++ti->mismatchCount > MAX_MISMATCH) {
2099 debug_only_v(printf("excessive mismatches, flushing tree.\n"));
2100 js_TrashTree(cx, f);
2101 f->blacklist();
2103 return NULL;
2106 ti->mismatchCount = 0;
2108 double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
2109 FrameInfo* callstack = (FrameInfo*) alloca(MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo));
2111 InterpState state;
2112 state.sp = (void*)entry_sp;
2113 state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
2114 state.rp = callstack;
2115 state.eor = callstack + MAX_CALL_STACK_ENTRIES;
2116 state.gp = global;
2117 state.cx = cx;
2118 #ifdef DEBUG
2119 state.nestedExit = NULL;
2120 #endif
/* Reinterpret the fragment's code pointer as the trace entry function. */
2121 union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
2122 u.code = f->code();
2124 #if defined(DEBUG) && defined(NANOJIT_IA32)
2125 uint64 start = rdtsc();
2126 #endif
2129 * We may be called from js_MonitorLoopEdge while not recording, or while
2130 * recording. Rather than over-generalize by using a counter instead of a
2131 * flag, we simply sample and update cx->executingTrace if necessary.
2133 bool executingTrace = cx->executingTrace;
2134 if (!executingTrace)
2135 cx->executingTrace = true;
2136 GuardRecord* lr;
/* On x86 compilers without fastcall support, simulate the fastcall
   convention manually when invoking the trace. */
2138 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
2139 SIMULATE_FASTCALL(lr, &state, NULL, u.func);
2140 #else
2141 lr = u.func(&state, NULL);
2142 #endif
2144 if (!executingTrace)
2145 cx->executingTrace = false;
2147 /* If we bail out on a nested exit, the compiled code returns the outermost nesting
2148 guard but what we are really interested in is the innermost guard that we hit
2149 instead of the guard we were expecting there. */
2150 int slots;
2151 if (lr->exit->exitType == NESTED_EXIT) {
2152 /* Unwind all frames held by nested outer trees (since the innermost tree's frame which
2153 we restore below doesn't contain such frames. */
2154 do {
2155 if (innermostNestedGuardp)
2156 *innermostNestedGuardp = lr;
2157 debug_only_v(printf("processing tree call guard %p, calldepth=%d\n",
2158 lr, lr->calldepth);)
2159 unsigned calldepth = lr->calldepth;
2160 if (calldepth > 0) {
2161 /* We found a nesting guard that holds a frame, write it back. */
2162 for (unsigned i = 0; i < calldepth; ++i)
2163 js_SynthesizeFrame(cx, callstack[i]);
2164 /* Restore the native stack excluding the current frame, which the next tree
2165 call guard or the innermost tree exit guard will restore. */
2166 slots = FlushNativeStackFrame(cx, calldepth,
2167 lr->exit->typeMap + lr->exit->numGlobalSlots,
2168 stack, &cx->fp->argv[-2]);
2169 if (slots < 0)
2170 return NULL;
2171 callstack += calldepth;
2172 inlineCallCount += calldepth;
2173 stack += slots;
/* Follow the chain to the next (inner) nesting guard; the guard record
   pointer is stashed as a constant operand on the guard instruction. */
2175 JS_ASSERT(lr->guard->oprnd1()->oprnd2()->isconstp());
2176 lr = (GuardRecord*)lr->guard->oprnd1()->oprnd2()->constvalp();
2177 } while (lr->exit->exitType == NESTED_EXIT);
2179 /* We restored the nested frames, now we just need to deal with the innermost guard. */
2180 lr = state.nestedExit;
2181 JS_ASSERT(lr);
2184 /* sp_adj and ip_adj are relative to the tree we exit out of, not the tree we
2185 entered into (which might be different in the presence of nested trees). */
2186 ti = (TreeInfo*)lr->from->root->vmprivate;
2188 /* We already synthesized the frames around the innermost guard. Here we just deal
2189 with additional frames inside the tree we are bailing out from. */
2190 unsigned calldepth = lr->calldepth;
2191 unsigned calldepth_slots = 0;
2192 for (unsigned n = 0; n < calldepth; ++n)
2193 calldepth_slots += js_SynthesizeFrame(cx, callstack[n]);
2195 /* Adjust sp and pc relative to the tree we exited from (not the tree we entered
2196 into). These are our final values for sp and pc since js_SynthesizeFrame has
2197 already taken care of all frames in between. */
2198 SideExit* e = lr->exit;
2199 JSStackFrame* fp = cx->fp;
2201 /* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
2202 is whatever slots frames around us consume. */
2203 fp->regs->pc = (jsbytecode*)lr->from->root->ip + e->ip_adj;
2204 fp->regs->sp = StackBase(fp) + (e->sp_adj / sizeof(double)) - calldepth_slots;
2205 JS_ASSERT(fp->slots + fp->script->nfixed +
2206 js_ReconstructStackDepth(cx, cx->fp->script, fp->regs->pc) == fp->regs->sp);
2208 #if defined(DEBUG) && defined(NANOJIT_IA32)
2209 if (verbose_debug) {
2210 printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, ip=%p, "
2211 "cycles=%llu\n",
2212 fp->script->filename, js_PCToLineNumber(cx, fp->script, fp->regs->pc),
2213 fp->regs->pc - fp->script->code,
2214 js_CodeName[*fp->regs->pc],
2216 lr->exit->exitType,
2217 fp->regs->sp - StackBase(fp), lr->jmp,
2218 (rdtsc() - start));
2220 #endif
2222 /* If this trace is part of a tree, later branches might have added additional globals for
2223 with we don't have any type information available in the side exit. We merge in this
2224 information from the entry type-map. See also comment in the constructor of TraceRecorder
2225 why this is always safe to do. */
2226 unsigned exit_gslots = e->numGlobalSlots;
2227 JS_ASSERT(ngslots == tm->globalTypeMap->length());
2228 JS_ASSERT(ngslots >= exit_gslots);
2229 uint8* globalTypeMap = e->typeMap;
2230 if (exit_gslots < ngslots)
2231 mergeTypeMaps(&globalTypeMap, &exit_gslots, tm->globalTypeMap->data(), ngslots,
2232 (uint8*)alloca(sizeof(uint8) * ngslots));
2233 JS_ASSERT(exit_gslots == tm->globalTypeMap->length());
2235 /* write back interned globals */
2236 slots = FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global);
2237 if (slots < 0)
2238 return NULL;
2239 JS_ASSERT(globalFrameSize == STOBJ_NSLOTS(globalObj));
2240 JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
2242 /* write back native stack frame */
2243 slots = FlushNativeStackFrame(cx, e->calldepth, e->typeMap + e->numGlobalSlots, stack, NULL);
2244 if (slots < 0)
2245 return NULL;
2246 JS_ASSERT(unsigned(slots) == e->numStackSlots);
2248 AUDIT(sideExitIntoInterpreter);
2250 if (!lr) /* did the tree actually execute? */
2251 return NULL;
2253 /* Adjust inlineCallCount (we already compensated for any outer nested frames). */
2254 inlineCallCount += lr->calldepth;
2256 return lr;
/*
 * Main loop-edge entry point from the interpreter.  Feeds the edge to the
 * active recorder if any, otherwise executes an existing tree for this pc or
 * counts hits toward starting a new recording (HOTLOOP threshold).  Returns
 * true iff the caller should continue monitoring (a recorder is active).
 */
2259 bool
2260 js_MonitorLoopEdge(JSContext* cx, jsbytecode* oldpc, uintN& inlineCallCount)
2262 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2264 /* Is the recorder currently active? */
2265 if (tm->recorder) {
2266 if (js_RecordLoopEdge(cx, tm->recorder, oldpc, inlineCallCount))
2267 return true;
2268 /* recording was aborted, treat like a regular loop edge hit */
2270 JS_ASSERT(!tm->recorder);
2272 /* check if our quick cache has an entry for this ip, otherwise ask fragmento. */
2273 jsbytecode* pc = cx->fp->regs->pc;
2274 Fragment* f;
2275 JSFragmentCacheEntry* cacheEntry = &tm->fcache[jsuword(pc) & JS_FRAGMENT_CACHE_MASK];
2276 if (cacheEntry->pc == pc) {
2277 f = cacheEntry->fragment;
2278 } else {
2279 f = tm->fragmento->getLoop(pc);
2280 if (!f)
2281 f = tm->fragmento->newLoop(pc);
2282 cacheEntry->pc = pc;
2283 cacheEntry->fragment = f;
2286 /* If there is a chance that js_ExecuteTree will actually succeed, invoke it (either the
2287 first fragment must contain some code, or at least it must have a peer fragment). */
2288 GuardRecord* lr = NULL;
2289 GuardRecord* innermostNestedGuard = NULL;
2290 if (f->code() || f->peer)
2291 lr = js_ExecuteTree(cx, &f, inlineCallCount, &innermostNestedGuard);
2292 if (!lr) {
2293 JS_ASSERT(!tm->recorder);
2294 /* If we don't have compiled code for this entry point (none recorded or we trashed it),
2295 count the number of hits and trigger the recorder if appropriate. */
2296 if (!f->code() && (++f->hits() >= HOTLOOP))
2297 return js_RecordTree(cx, tm, f);
2298 return false;
2300 /* If we exit on a branch, or on a tree call guard, try to grow the inner tree (in case
2301 of a branch exit), or the tree nested around the tree we exited from (in case of the
2302 tree call guard). */
2303 SideExit* exit = lr->exit;
2304 switch (exit->exitType) {
2305 case BRANCH_EXIT:
2306 return js_AttemptToExtendTree(cx, lr, NULL);
2307 case LOOP_EXIT:
2308 if (innermostNestedGuard)
2309 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr);
2310 return false;
2311 default:
2312 /* No, this was an unusual exit (i.e. out of memory/GC), so just resume interpretation. */
2313 return false;
/*
 * Per-bytecode hook while recording.  Stops recording on a deep abort request
 * or when the current op is a GOTO/GOTOX that the source notes mark as a
 * |break| (the loop is ended with an always-taken loop-exit guard).  Returns
 * true iff recording continues.
 */
2317 bool
2318 js_MonitorRecording(JSContext* cx)
2320 TraceRecorder *tr = JS_TRACE_MONITOR(cx).recorder;
2321 if (tr->wasDeepAborted()) {
2322 js_AbortRecording(cx, NULL, "deep abort requested");
2323 return false;
2326 jsbytecode* pc = cx->fp->regs->pc;
2327 /* If we hit a break, end the loop and generate an always taken loop exit guard. For other
2328 downward gotos (like if/else) continue recording. */
2329 if ((*pc == JSOP_GOTO) || (*pc == JSOP_GOTOX)) {
2330 jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
2331 if ((sn != NULL) && (SN_TYPE(sn) == SRC_BREAK)) {
2332 AUDIT(breakLoopExits);
2333 tr->endLoop(JS_TRACE_MONITOR(cx).fragmento);
2334 js_DeleteRecorder(cx);
2335 return false; /* done recording */
2338 /* If its not a break, continue recording and follow the trace. */
2339 return true;
/*
 * Abort the active recording for |reason|, blacklist the trace's starting
 * point, and tear down the recorder.  If this was the primary (root) trace
 * and it never produced code, its TreeInfo is trashed as well.
 */
2342 void
2343 js_AbortRecording(JSContext* cx, jsbytecode* abortpc, const char* reason)
2345 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2346 JS_ASSERT(tm->recorder != NULL);
2347 Fragment* f = tm->recorder->getFragment();
2348 JS_ASSERT(!f->vmprivate);
2349 /* Abort the trace and blacklist its starting point. */
2350 AUDIT(recorderAborted);
2351 if (cx->fp) {
2352 debug_only_v(if (!abortpc) abortpc = cx->fp->regs->pc;
2353 printf("Abort recording (line %d, pc %d): %s.\n",
2354 js_PCToLineNumber(cx, cx->fp->script, abortpc),
2355 abortpc - cx->fp->script->code, reason);)
2357 f->blacklist();
2358 js_DeleteRecorder(cx);
2359 /* If this is the primary trace and we didn't succeed compiling, trash the TreeInfo object. */
2360 if (!f->code() && (f->root == f))
2361 js_TrashTree(cx, f);
2364 #if defined NANOJIT_IA32
/*
 * Probe CPUID leaf 1 and return true iff the CPU advertises SSE2
 * (EDX bit 26).  Uses MSVC or GCC inline assembly as appropriate.
 */
2365 static bool
2366 js_CheckForSSE2()
2368 int features = 0;
2369 #if defined _MSC_VER
2370 __asm
2372 pushad
2373 mov eax, 1
2374 cpuid
2375 mov features, edx
2376 popad
2378 #elif defined __GNUC__
/* NOTE(review): pusha/popa preserve the registers cpuid clobbers, so the
   empty clobber list below relies on that save/restore. */
2379 asm("pusha\n"
2380 "mov $0x01, %%eax\n"
2381 "cpuid\n"
2382 "mov %%edx, %0\n"
2383 "popa\n"
2384 : "=m" (features)
2385 : /* We have no inputs */
2386 : /* We don't clobber anything */
2388 #endif
2389 return (features & (1<<26)) != 0;
2391 #endif
/*
 * One-time initialization of a trace monitor: detect SSE2 on IA32 (cached in
 * a process-wide flag), then lazily create the Fragmento, global slot list
 * and global type map.  Safe to call more than once per monitor.
 */
2393 extern void
2394 js_InitJIT(JSTraceMonitor *tm)
2396 #if defined NANOJIT_IA32
2397 if (!did_we_check_sse2) {
2398 avmplus::AvmCore::sse2_available = js_CheckForSSE2();
2399 did_we_check_sse2 = true;
2401 #endif
2402 if (!tm->fragmento) {
2403 JS_ASSERT(!tm->globalSlots && !tm->globalTypeMap);
2404 Fragmento* fragmento = new (&gc) Fragmento(core, 24);
2405 verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
2406 fragmento->assm()->setCallTable(builtins);
2407 tm->fragmento = fragmento;
2408 tm->globalSlots = new (&gc) SlotList();
2409 tm->globalTypeMap = new (&gc) TypeMap();
2411 #if !defined XP_WIN
2412 debug_only(memset(&stat, 0, sizeof(stat)));
2413 #endif
/*
 * Tear down a trace monitor: dump accumulated recorder/monitor statistics in
 * DEBUG builds, then delete the Fragmento, global slot list and global type
 * map (nulling the pointers so a later js_InitJIT could re-create them).
 */
2416 extern void
2417 js_FinishJIT(JSTraceMonitor *tm)
2419 #ifdef DEBUG
2420 printf("recorder: started(%llu), aborted(%llu), completed(%llu), different header(%llu), "
2421 "trees trashed(%llu), slot promoted(%llu), unstable loop variable(%llu), "
2422 "breaks: (%llu)\n",
2423 stat.recorderStarted, stat.recorderAborted,
2424 stat.traceCompleted, stat.returnToDifferentLoopHeader, stat.treesTrashed,
2425 stat.slotPromoted, stat.unstableLoopVariable, stat.breakLoopExits);
2426 printf("monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
2427 "global mismatch(%llu)\n", stat.traceTriggered, stat.sideExitIntoInterpreter,
2428 stat.typeMapMismatchAtEntry, stat.globalShapeMismatchAtEntry);
2429 #endif
2430 if (tm->fragmento != NULL) {
2431 JS_ASSERT(tm->globalSlots && tm->globalTypeMap);
2432 verbose_only(delete tm->fragmento->labels;)
2433 delete tm->fragmento;
2434 tm->fragmento = NULL;
2435 delete tm->globalSlots;
2436 tm->globalSlots = NULL;
2437 delete tm->globalTypeMap;
2438 tm->globalTypeMap = NULL;
/* Reset the global type-speculation oracle (forgets recorded demotion hints). */
2442 extern void
2443 js_FlushJITOracle(JSContext* cx)
2445 oracle.clear();
/*
 * Throw away all compiled traces: aborts any active recording, clears every
 * fragment, empties the pc->fragment quick cache, and re-interns the current
 * global shape / global slot bookkeeping (when a frame is available).
 */
2448 extern void
2449 js_FlushJITCache(JSContext* cx)
2451 debug_only_v(printf("Flushing cache.\n"););
2452 JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
2453 if (tm->recorder)
2454 js_AbortRecording(cx, NULL, "flush cache");
2455 Fragmento* fragmento = tm->fragmento;
2456 if (fragmento) {
2457 fragmento->clearFrags();
2458 #ifdef DEBUG
2459 JS_ASSERT(fragmento->labels);
2460 delete fragmento->labels;
2461 fragmento->labels = new (&gc) LabelMap(core, NULL);
2462 #endif
2464 memset(&tm->fcache, 0, sizeof(tm->fcache));
2465 if (cx->fp) {
2466 tm->globalShape = OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->shape;
2467 tm->globalSlots->clear();
2468 tm->globalTypeMap->clear();
2472 jsval&
2473 TraceRecorder::argval(unsigned n) const
2475 JS_ASSERT(n < cx->fp->fun->nargs);
2476 return cx->fp->argv[n];
2479 jsval&
2480 TraceRecorder::varval(unsigned n) const
2482 JS_ASSERT(n < cx->fp->script->nslots);
2483 return cx->fp->slots[n];
2486 jsval&
2487 TraceRecorder::stackval(int n) const
2489 jsval* sp = cx->fp->regs->sp;
2490 JS_ASSERT(size_t((sp + n) - StackBase(cx->fp)) < StackDepth(cx->fp->script));
2491 return sp[n];
2494 LIns*
2495 TraceRecorder::scopeChain() const
2497 return lir->insLoad(LIR_ldp,
2498 lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
2499 offsetof(JSStackFrame, scopeChain));
2502 static inline bool
2503 FrameInRange(JSStackFrame* fp, JSStackFrame *target, unsigned callDepth)
2505 while (fp != target) {
2506 if (callDepth-- == 0)
2507 return false;
2508 if (!(fp = fp->down))
2509 return false;
2511 return true;
/*
 * Resolve the name at the current pc against the scope chain and set |vp| to
 * the value's location if it lives either in an interned global slot or in an
 * active (in-range) Call object's args/vars.  Aborts the trace otherwise.
 * Note: every js_FindProperty success path must pair with OBJ_DROP_PROPERTY.
 */
2514 bool
2515 TraceRecorder::activeCallOrGlobalSlot(JSObject* obj, jsval*& vp)
2517 JS_ASSERT(obj != globalObj);
2519 JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
2520 JSObject* obj2;
2521 JSProperty* prop;
2522 if (js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop) < 0 || !prop)
2523 ABORT_TRACE("failed to find name in non-global scope chain");
2525 if (obj == globalObj) {
2526 JSScopeProperty* sprop = (JSScopeProperty*) prop;
2527 if (obj2 != obj || !SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj))) {
2528 OBJ_DROP_PROPERTY(cx, obj2, prop);
2529 ABORT_TRACE("prototype or slotless globalObj property");
2532 if (!lazilyImportGlobalSlot(sprop->slot))
2533 ABORT_TRACE("lazy import of global slot failed");
2534 vp = &STOBJ_GET_SLOT(obj, sprop->slot);
2535 OBJ_DROP_PROPERTY(cx, obj2, prop);
2536 return true;
/* Not a global: accept only args/vars of a Call object whose frame is within
   callDepth of the current frame (so the slot is on our native stack). */
2539 if (obj == obj2 && OBJ_GET_CLASS(cx, obj) == &js_CallClass) {
2540 JSStackFrame* cfp = (JSStackFrame*) JS_GetPrivate(cx, obj);
2541 if (cfp && FrameInRange(cx->fp, cfp, callDepth)) {
2542 JSScopeProperty* sprop = (JSScopeProperty*) prop;
2543 uintN slot = sprop->shortid;
2545 vp = NULL;
2546 if (sprop->getter == js_GetCallArg) {
2547 JS_ASSERT(slot < cfp->fun->nargs);
2548 vp = &cfp->argv[slot];
2549 } else if (sprop->getter == js_GetCallVar) {
2550 JS_ASSERT(slot < cfp->script->nslots);
2551 vp = &cfp->slots[slot];
2553 OBJ_DROP_PROPERTY(cx, obj2, prop);
2554 if (!vp)
2555 ABORT_TRACE("dynamic property of Call object");
2556 return true;
2560 OBJ_DROP_PROPERTY(cx, obj2, prop);
2561 ABORT_TRACE("fp->scopeChain is not global or active call object");
2564 LIns*
2565 TraceRecorder::arg(unsigned n)
2567 return get(&argval(n));
2570 void
2571 TraceRecorder::arg(unsigned n, LIns* i)
2573 set(&argval(n), i);
2576 LIns*
2577 TraceRecorder::var(unsigned n)
2579 return get(&varval(n));
2582 void
2583 TraceRecorder::var(unsigned n, LIns* i)
2585 set(&varval(n), i);
2588 LIns*
2589 TraceRecorder::stack(int n)
2591 return get(&stackval(n));
2594 void
2595 TraceRecorder::stack(int n, LIns* i)
2597 set(&stackval(n), i, n >= 0);
2600 LIns* TraceRecorder::f2i(LIns* f)
2602 return lir->insCall(F_DoubleToInt32, &f);
/*
 * Record a conditional branch on the value at the top of the stack: emit a
 * BRANCH_EXIT guard asserting the truthiness outcome observed right now, so
 * the trace side-exits if the condition ever evaluates the other way.
 * Handles booleans, objects (null test), numbers (0/NaN test) and strings
 * (empty test); any other tag is unreachable here.
 */
2605 bool
2606 TraceRecorder::ifop()
2608 jsval& v = stackval(-1);
2609 if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
/* Guard on "is not true" so both false and pseudo-boolean values exit. */
2610 guard(JSVAL_TO_BOOLEAN(v) != 1,
2611 lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)),
2612 BRANCH_EXIT);
2613 } else if (JSVAL_IS_OBJECT(v)) {
2614 guard(JSVAL_IS_NULL(v), lir->ins_eq0(get(&v)), BRANCH_EXIT);
2615 } else if (isNumber(v)) {
2616 jsdouble d = asNumber(v);
2617 jsdpun u;
2618 u.d = 0;
/* Falsy numbers are 0, -0 and NaN; feq against 0.0 covers all three
   (NaN compares unequal, which the recorded condition accounts for). */
2619 guard(d == 0 || JSDOUBLE_IS_NaN(d),
2620 lir->ins2(LIR_feq, get(&v), lir->insImmq(u.u64)),
2621 BRANCH_EXIT);
2622 } else if (JSVAL_IS_STRING(v)) {
2623 guard(JSSTRING_LENGTH(JSVAL_TO_STRING(v)) == 0,
2624 lir->ins_eq0(lir->ins2(LIR_piand,
2625 lir->insLoad(LIR_ldp,
2626 get(&v),
2627 (int)offsetof(JSString, length)),
2628 INS_CONSTPTR(JSSTRING_LENGTH_MASK))),
2629 BRANCH_EXIT);
2630 } else {
2631 JS_NOT_REACHED("ifop");
2633 return true;
/*
 * Record a switch dispatch on the top-of-stack value by guarding that it
 * still equals the value observed during recording (number, string or
 * boolean).  Switching on object/null/undefined aborts the trace.
 */
2636 bool
2637 TraceRecorder::switchop()
2639 jsval& v = stackval(-1);
2640 if (isNumber(v)) {
2641 jsdouble d = asNumber(v);
2642 jsdpun u;
2643 u.d = d;
2644 guard(true,
2645 addName(lir->ins2(LIR_feq, get(&v), lir->insImmq(u.u64)),
2646 "guard(switch on numeric)"),
2647 BRANCH_EXIT);
2648 } else if (JSVAL_IS_STRING(v)) {
2649 LIns* args[] = { get(&v), INS_CONSTPTR(JSVAL_TO_STRING(v)) };
2650 guard(true,
2651 addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(F_EqualStrings, args))),
2652 "guard(switch on string)"),
2653 BRANCH_EXIT);
2654 } else if (JSVAL_IS_BOOLEAN(v)) {
2655 guard(true,
2656 addName(lir->ins2(LIR_eq, get(&v), lir->insImm(JSVAL_TO_BOOLEAN(v))),
2657 "guard(switch on boolean)"),
2658 BRANCH_EXIT);
2659 } else {
2660 ABORT_TRACE("switch on object, null, or undefined");
2662 return true;
/*
 * Record ++/-- on value |v| (incr is +1 or -1, pre selects pre/post form):
 * delegates to the LIns-taking overload below, then updates the tracker so
 * |v|'s slot holds the incremented value.
 */
2665 bool
2666 TraceRecorder::inc(jsval& v, jsint incr, bool pre)
2668 LIns* v_ins = get(&v);
2669 if (!inc(v, v_ins, incr, pre))
2670 return false;
2671 set(&v, v_ins);
2672 return true;
/*
 * Core increment recorder.  Emits v + incr as a double add, pushes the
 * pre- or post-increment result onto the tracked stack slot, and leaves
 * v_ins pointing at the incremented value.  Aborts on non-numbers.
 */
2676 * On exit, v_ins is the incremented unboxed value, and the appropriate
2677 * value (pre- or post-increment as described by pre) is stacked.
2679 bool
2680 TraceRecorder::inc(jsval& v, LIns*& v_ins, jsint incr, bool pre)
2682 if (!isNumber(v))
2683 ABORT_TRACE("can only inc numbers");
2685 jsdpun u;
2686 u.d = jsdouble(incr);
2688 LIns* v_after = lir->ins2(LIR_fadd, v_ins, lir->insImmq(u.u64));
2690 const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
2691 JS_ASSERT(cs.ndefs == 1);
/* Pre-inc pushes the new value; post-inc pushes the original. */
2692 stack(-cs.nuses, pre ? v_after : v_ins);
2693 v_ins = v_after;
2694 return true;
/*
 * Record ++/-- on an object property (obj.prop): looks up the property's
 * slot, increments the unboxed value, re-boxes it and stores it back into
 * the object's slot.  Aborts on primitives or slotless properties.
 */
2697 bool
2698 TraceRecorder::incProp(jsint incr, bool pre)
2700 jsval& l = stackval(-1);
2701 if (JSVAL_IS_PRIMITIVE(l))
2702 ABORT_TRACE("incProp on primitive");
2704 JSObject* obj = JSVAL_TO_OBJECT(l);
2705 LIns* obj_ins = get(&l);
2707 uint32 slot;
2708 LIns* v_ins;
2709 if (!prop(obj, obj_ins, slot, v_ins))
2710 return false;
2712 if (slot == SPROP_INVALID_SLOT)
2713 ABORT_TRACE("incProp on invalid slot");
2715 jsval& v = STOBJ_GET_SLOT(obj, slot);
2716 if (!inc(v, v_ins, incr, pre))
2717 return false;
2719 if (!box_jsval(v, v_ins))
2720 return false;
2722 LIns* dslots_ins = NULL;
2723 stobj_set_slot(obj_ins, slot, dslots_ins, v_ins);
2724 return true;
/*
 * Record ++/-- on an indexed element (obj[idx]): resolves the element's
 * address via elem(), increments the unboxed value, re-boxes it and stores
 * it back through the element's address instruction.
 */
2727 bool
2728 TraceRecorder::incElem(jsint incr, bool pre)
2730 jsval& r = stackval(-1);
2731 jsval& l = stackval(-2);
2732 jsval* vp;
2733 LIns* v_ins;
2734 LIns* addr_ins;
2735 if (!elem(l, r, vp, v_ins, addr_ins))
2736 return false;
2737 if (!inc(*vp, v_ins, incr, pre))
2738 return false;
2739 if (!box_jsval(*vp, v_ins))
2740 return false;
2741 lir->insStorei(v_ins, addr_ins, 0);
2742 return true;
/*
 * Record a relational/equality comparison of the top two stack values with
 * LIR opcode |op|.  |flags| may include CMP_NEGATE (logically invert the
 * result), CMP_CASE (emit a BRANCH_EXIT guard for a switch case instead of
 * pushing the result), and CMP_TRY_BRANCH_AFTER_COND (fuse with a following
 * branch op, mirroring the interpreter's fused compare+branch).
 * The concrete |cond| computed here is the branch direction observed during
 * recording, used to orient the fused guard.
 */
2745 bool
2746 TraceRecorder::cmp(LOpcode op, int flags)
2748 jsval& r = stackval(-1);
2749 jsval& l = stackval(-2);
2750 LIns* x;
2751 bool negate = !!(flags & CMP_NEGATE);
2752 bool cond;
2753 if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
2754 JS_ASSERT(!negate);
2755 LIns* args[] = { get(&r), get(&l) };
/* Compare via the C helper and test its sign against 0 with |op|. */
2756 x = lir->ins1(LIR_i2f, lir->insCall(F_CompareStrings, args));
2757 x = lir->ins2i(op, x, 0);
2758 jsint result = js_CompareStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r));
2759 switch (op) {
2760 case LIR_flt:
2761 cond = result < 0;
2762 break;
2763 case LIR_fgt:
2764 cond = result > 0;
2765 break;
2766 case LIR_fle:
2767 cond = result <= 0;
2768 break;
2769 case LIR_fge:
2770 cond = result >= 0;
2771 break;
2772 default:
2773 JS_NOT_REACHED("unexpected comparison op for strings");
2774 return false;
2776 } else if (isNumber(l) || isNumber(r)) {
2777 jsval tmp[2] = {l, r};
2778 JSAutoTempValueRooter tvr(cx, 2, tmp);
2780 // TODO: coerce non-numbers to numbers if it's not string-on-string above
2781 LIns* l_ins = get(&l);
2782 LIns* r_ins = get(&r);
2783 jsdouble lnum;
2784 jsdouble rnum;
2785 LIns* args[] = { l_ins, cx_ins };
2786 if (JSVAL_IS_STRING(l)) {
2787 l_ins = lir->insCall(F_StringToNumber, args);
2788 } else if (JSVAL_TAG(l) == JSVAL_BOOLEAN) {
2790 * What I really want here is for undefined to be type-specialized
2791 * differently from real booleans. Failing that, I want to be able
2792 * to cmov on quads. Failing that, I want to have small forward
2793 * branched. Failing that, I want to be able to ins_choose on quads
2794 * without cmov. Failing that, eat flaming builtin!
2796 l_ins = lir->insCall(F_BooleanToNumber, args);
2797 } else if (!isNumber(l)) {
2798 ABORT_TRACE("unsupported LHS type for cmp vs number");
2800 lnum = js_ValueToNumber(cx, &tmp[0]);
2802 args[0] = get(&r);
2803 args[1] = cx_ins;
2804 if (JSVAL_IS_STRING(r)) {
2805 r_ins = lir->insCall(F_StringToNumber, args);
2806 } else if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
2807 // See above for the sob story.
2808 r_ins = lir->insCall(F_BooleanToNumber, args);
2809 } else if (!isNumber(r)) {
2810 ABORT_TRACE("unsupported RHS type for cmp vs number");
2812 rnum = js_ValueToNumber(cx, &tmp[1]);
2814 x = lir->ins2(op, l_ins, r_ins);
2816 if (negate)
2817 x = lir->ins_eq0(x);
2818 switch (op) {
2819 case LIR_flt:
2820 cond = lnum < rnum;
2821 break;
2822 case LIR_fgt:
2823 cond = lnum > rnum;
2824 break;
2825 case LIR_fle:
2826 cond = lnum <= rnum;
2827 break;
2828 case LIR_fge:
2829 cond = lnum >= rnum;
2830 break;
2831 default:
2832 JS_ASSERT(op == LIR_feq);
2833 cond = (lnum == rnum) ^ negate;
2834 break;
2836 } else if (JSVAL_IS_BOOLEAN(l) && JSVAL_IS_BOOLEAN(r)) {
2837 x = lir->ins2(op, lir->ins1(LIR_i2f, get(&l)), lir->ins1(LIR_i2f, get(&r)));
2838 if (negate)
2839 x = lir->ins_eq0(x);
2841 // The well-known values of JSVAL_TRUE and JSVAL_FALSE make this very easy.
2842 // In particular: JSVAL_TO_BOOLEAN(0) < JSVAL_TO_BOOLEAN(1) so all of these comparisons do
2843 // the right thing.
/* NOTE(review): these compare raw jsval tag words (l, r), relying on the
   boolean jsval encodings ordering as documented above. */
2844 switch (op) {
2845 case LIR_flt:
2846 cond = l < r;
2847 break;
2848 case LIR_fgt:
2849 cond = l > r;
2850 break;
2851 case LIR_fle:
2852 cond = l <= r;
2853 break;
2854 case LIR_fge:
2855 cond = l >= r;
2856 break;
2857 default:
2858 JS_ASSERT(op == LIR_feq);
2859 cond = (l == r) ^ negate;
2860 break;
2862 } else {
2863 ABORT_TRACE("unsupported operand types for cmp");
2866 if (flags & CMP_CASE) {
2867 guard(cond, x, BRANCH_EXIT);
2868 return true;
2871 /* The interpreter fuses comparisons and the following branch,
2872 so we have to do that here as well. */
2873 if (flags & CMP_TRY_BRANCH_AFTER_COND)
2874 fuseIf(cx->fp->regs->pc + 1, cond, x);
2876 /* We update the stack after the guard. This is safe since
2877 the guard bails out at the comparison and the interpreter
2878 will therefore re-execute the comparison. This way the
2879 value of the condition doesn't have to be calculated and
2880 saved on the stack in most cases. */
2881 set(&l, x);
2882 return true;
2885 // FIXME: we currently compare only like operand types; if for JSOP_EQ and
2886 // JSOP_NE we ever evolve to handle conversions then we must insist on like
2887 // "types" here (care required for 0 == -1, e.g.).
/*
 * Record JSOP_EQ/JSOP_NE (and case variants).  Handles string==string and
 * object==object specially (by identity / helper call); everything else
 * falls through to cmp(LIR_feq, flags).  Same flag semantics as cmp().
 */
2888 bool
2889 TraceRecorder::equal(int flags)
2891 jsval& r = stackval(-1);
2892 jsval& l = stackval(-2);
2893 bool negate = !!(flags & CMP_NEGATE);
2894 if (JSVAL_IS_STRING(l) && JSVAL_IS_STRING(r)) {
2895 LIns* args[] = { get(&r), get(&l) };
2896 bool cond = js_EqualStrings(JSVAL_TO_STRING(l), JSVAL_TO_STRING(r)) ^ negate;
/* F_EqualStrings returns nonzero on equality; double-negate normalizes
   it to 0/1, single negation implements !=. */
2897 LIns* x = lir->ins_eq0(lir->insCall(F_EqualStrings, args));
2898 if (!negate)
2899 x = lir->ins_eq0(x);
2901 if (flags & CMP_CASE) {
2902 guard(cond, x, BRANCH_EXIT);
2903 return true;
2906 /* The interpreter fuses comparisons and the following branch,
2907 so we have to do that here as well. */
2908 if (flags & CMP_TRY_BRANCH_AFTER_COND)
2909 fuseIf(cx->fp->regs->pc + 1, cond, x);
2911 /* We update the stack after the guard. This is safe since
2912 the guard bails out at the comparison and the interpreter
2913 will therefore re-execute the comparison. This way the
2914 value of the condition doesn't have to be calculated and
2915 saved on the stack in most cases. */
2916 set(&l, x);
2917 return true;
2919 if (JSVAL_IS_OBJECT(l) && JSVAL_IS_OBJECT(r)) {
/* Objects compare by identity (pointer equality of the jsvals). */
2920 bool cond = (l == r) ^ negate;
2921 LIns* x = lir->ins2(LIR_eq, get(&l), get(&r));
2922 if (negate)
2923 x = lir->ins_eq0(x);
2925 if (flags & CMP_CASE) {
2926 guard(cond, x, BRANCH_EXIT);
2927 return true;
2930 /* The interpreter fuses comparisons and the following branch,
2931 so we have to do that here as well. */
2932 if (flags & CMP_TRY_BRANCH_AFTER_COND)
2933 fuseIf(cx->fp->regs->pc + 1, cond, x);
2935 /* We update the stack after the guard. This is safe since
2936 the guard bails out at the comparison and the interpreter
2937 will therefore re-execute the comparison. This way the
2938 value of the condition doesn't have to be calculated and
2939 saved on the stack in most cases. */
2940 set(&l, x);
2941 return true;
2943 return cmp(LIR_feq, flags);
2946 bool
2947 TraceRecorder::unary(LOpcode op)
2949 jsval& v = stackval(-1);
2950 bool intop = !(op & LIR64);
2951 if (isNumber(v)) {
2952 LIns* a = get(&v);
2953 if (intop)
2954 a = f2i(a);
2955 a = lir->ins1(op, a);
2956 if (intop)
2957 a = lir->ins1(LIR_i2f, a);
2958 set(&v, a);
2959 return true;
2961 return false;
/*
 * Record a binary operator on the two topmost stack values. String operands
 * are coerced to numbers for arithmetic/bitwise opcodes; integer opcodes
 * (no LIR64 bit) demote operands to int32 (unsigned for >>>) and promote
 * the result back to a double.
 */
bool
TraceRecorder::binary(LOpcode op)
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    bool intop = !(op & LIR64);
    LIns* a = get(&l);
    LIns* b = get(&r);
    bool leftNumber = isNumber(l), rightNumber = isNumber(r);
    // Coerce string operands to numbers only for the opcode ranges the
    // interpreter would coerce for.
    if ((op >= LIR_sub && op <= LIR_ush) ||   // sub, mul, (callh), or, xor, (not,) lsh, rsh, ush
        (op >= LIR_fsub && op <= LIR_fdiv)) { // fsub, fmul, fdiv
        LIns* args[2];
        if (JSVAL_IS_STRING(l)) {
            args[0] = a;
            args[1] = cx_ins;
            a = lir->insCall(F_StringToNumber, args);
            leftNumber = true;
        }
        if (JSVAL_IS_STRING(r)) {
            args[0] = b;
            args[1] = cx_ins;
            b = lir->insCall(F_StringToNumber, args);
            rightNumber = true;
        }
    }
    if (leftNumber && rightNumber) {
        if (intop) {
            // >>> needs an unsigned left operand; everything else is signed.
            LIns *args[] = { a };
            a = lir->insCall(op == LIR_ush ? F_DoubleToUint32 : F_DoubleToInt32, args);
            b = f2i(b);
        }
        a = lir->ins2(op, a, b);
        if (intop)
            a = lir->ins1(op == LIR_ush ? LIR_u2f : LIR_i2f, a);
        set(&l, a);
        return true;
    }
    return false;
}
// The hook comparison below addresses ops by byte offset; newObjectMap being
// at offset 0 keeps offset 0 a valid default for op_offset.
JS_STATIC_ASSERT(offsetof(JSObjectOps, newObjectMap) == 0);

/*
 * Load map->ops and the hook at op_offset, and emit a guard that the hook
 * matches the corresponding js_ObjectOps hook, i.e. the object is native
 * (at least for the operation the current bytecode will perform). Aborts
 * recording for non-native maps.
 */
bool
TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, size_t op_offset)
{
    ops_ins = addName(lir->insLoad(LIR_ldp, map_ins, offsetof(JSObjectMap, ops)), "ops");
    LIns* n = lir->insLoad(LIR_ldp, ops_ins, op_offset);

#define OP(ops) (*(JSObjectOp*) ((char*)(ops) + op_offset))

    if (OP(map->ops) == OP(&js_ObjectOps)) {
        guard(true, addName(lir->ins2(LIR_eq, n, INS_CONSTPTR(OP(&js_ObjectOps))),
                            "guard(native-map)"),
              MISMATCH_EXIT);
        return true;
    }

#undef OP
    ABORT_TRACE("non-native map");
}
/*
 * Consult (and if necessary pre-fill) the interpreter's property cache for
 * the property named by the current bytecode, emitting shape/identity guards
 * so the trace only stays valid while the cache entry does. On success,
 * obj2 receives the object holding the property and pcval the cached
 * property-cache word (sprop, slot, or PCVAL_NULL for "no such property").
 */
bool
TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
{
    // Mimic the interpreter's special case for dense arrays by skipping up one
    // hop along the proto chain when accessing a named (not indexed) property,
    // typically to find Array.prototype methods.
    JSObject* aobj = obj;
    if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
        aobj = OBJ_GET_PROTO(cx, obj);
        obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
    }

    LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
    LIns* ops_ins;

    // Interpreter calls to PROPERTY_CACHE_TEST guard on native object ops
    // (newObjectMap == js_ObjectOps.newObjectMap) which is required to use
    // native objects (those whose maps are scopes), or even more narrow
    // conditions required because the cache miss case will call a particular
    // object-op (js_GetProperty, js_SetProperty).
    //
    // We parameterize using offsetof and guard on match against the hook at
    // the given offset in js_ObjectOps. TraceRecorder::record_JSOP_SETPROP
    // guards the js_SetProperty case.
    uint32 format = js_CodeSpec[*cx->fp->regs->pc].format;
    uint32 mode = JOF_MODE(format);
    size_t op_offset = 0;
    if (mode == JOF_PROP || mode == JOF_VARPROP) {
        JS_ASSERT(!(format & JOF_SET));
        op_offset = offsetof(JSObjectOps, getProperty);
    } else {
        JS_ASSERT(mode == JOF_NAME);
    }

    if (!map_is_native(aobj->map, map_ins, ops_ins, op_offset))
        return false;

    JSAtom* atom;
    JSPropCacheEntry* entry;
    PROPERTY_CACHE_TEST(cx, cx->fp->regs->pc, aobj, obj2, entry, atom);
    if (atom) {
        // Miss: pre-fill the cache for the interpreter, as well as for our needs.
        // FIXME: 452357 - correctly propagate exceptions into the interpreter from
        // js_FindPropertyHelper, js_LookupPropertyWithFlags, and elsewhere.
        jsid id = ATOM_TO_JSID(atom);
        JSProperty* prop;
        if (JOF_OPMODE(*cx->fp->regs->pc) == JOF_NAME) {
            JS_ASSERT(aobj == obj);
            if (js_FindPropertyHelper(cx, id, &obj, &obj2, &prop, &entry) < 0)
                ABORT_TRACE("failed to find name");
        } else {
            int protoIndex = js_LookupPropertyWithFlags(cx, aobj, id, 0, &obj2, &prop);
            if (protoIndex < 0)
                ABORT_TRACE("failed to lookup property");

            if (prop) {
                js_FillPropertyCache(cx, aobj, OBJ_SCOPE(aobj)->shape, 0, protoIndex, obj2,
                                     (JSScopeProperty*) prop, &entry);
            }
        }

        if (!prop) {
            // Propagate obj from js_FindPropertyHelper to record_JSOP_BINDNAME
            // via our obj2 out-parameter. If we are recording JSOP_SETNAME and
            // the global it's assigning does not yet exist, create it.
            obj2 = obj;
            if (JSOp(*cx->fp->regs->pc) == JSOP_SETNAME) {
                jsval v = JSVAL_VOID;
                if (!js_SetPropertyHelper(cx, obj, id, &v, &entry))
                    return false;
                if (!entry || !PCVAL_IS_SPROP(entry->vword))
                    ABORT_TRACE("can't create cacheable global for JSOP_SETNAME");
                JSScopeProperty* sprop = PCVAL_TO_SPROP(entry->vword);
                if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)))
                    ABORT_TRACE("can't create slot-ful global for JSOP_SETNAME");
                pcval = SLOT_TO_PCVAL(sprop->slot);
            } else {
                // Use PCVAL_NULL to return "no such property" to our caller.
                pcval = PCVAL_NULL;
            }
            return true;
        }

        OBJ_DROP_PROPERTY(cx, obj2, prop);
        if (!entry)
            ABORT_TRACE("failed to fill property cache");
    }

#ifdef JS_THREADSAFE
    // There's a potential race in any JS_THREADSAFE embedding that's nuts
    // enough to share mutable objects on the scope or proto chain, but we
    // don't care about such insane embeddings. Anyway, the (scope, proto)
    // entry->vcap coordinates must reach obj2 from aobj at this point.
    JS_ASSERT(cx->requestDepth);
#endif

    // Emit guard(s), common code for both hit and miss cases.
    // Check for first-level cache hit and guard on kshape if possible.
    // Otherwise guard on key object exact match.
    if (PCVCAP_TAG(entry->vcap) <= 1) {
        if (aobj != globalObj) {
            LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
                                      "shape");
            guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
                  MISMATCH_EXIT);
        }
    } else {
#ifdef DEBUG
        JSOp op = JSOp(*cx->fp->regs->pc);
        ptrdiff_t pcoff = (op == JSOP_GETARGPROP) ? ARGNO_LEN :
                          (op == JSOP_GETLOCALPROP) ? SLOTNO_LEN : 0;
        jsatomid index = js_GetIndexFromBytecode(cx, cx->fp->script, cx->fp->regs->pc, pcoff);
        JS_ASSERT(entry->kpc == (jsbytecode*) atoms[index]);
        JS_ASSERT(entry->kshape == jsuword(aobj));
#endif
        if (aobj != globalObj) {
            guard(true, addName(lir->ins2i(LIR_eq, obj_ins, entry->kshape), "guard(kobj)"),
                  MISMATCH_EXIT);
        }
    }

    // For any hit that goes up the scope and or proto chains, we will need to
    // guard on the shape of the object containing the property.
    if (PCVCAP_TAG(entry->vcap) >= 1) {
        jsuword vcap = entry->vcap;
        uint32 vshape = PCVCAP_SHAPE(vcap);
        JS_ASSERT(OBJ_SCOPE(obj2)->shape == vshape);

        LIns* obj2_ins = INS_CONSTPTR(obj2);
        map_ins = lir->insLoad(LIR_ldp, obj2_ins, (int)offsetof(JSObject, map));
        if (!map_is_native(obj2->map, map_ins, ops_ins))
            return false;

        LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
                                  "shape");
        guard(true,
              addName(lir->ins2i(LIR_eq, shape_ins, vshape), "guard(vshape)"),
              MISMATCH_EXIT);
    }

    pcval = entry->vword;
    return true;
}
/*
 * Like test_property_cache, but insist on a direct slot access: no proto
 * chain hits for setting opcodes, no scripted/native getters or setters.
 * On success, slot is the direct slot number, or SPROP_INVALID_SLOT if the
 * property does not exist (callers must check that case first).
 */
bool
TraceRecorder::test_property_cache_direct_slot(JSObject* obj, LIns* obj_ins, uint32& slot)
{
    JSObject* obj2;
    jsuword pcval;

    /*
     * Property cache ensures that we are dealing with an existing property,
     * and guards the shape for us.
     */
    if (!test_property_cache(obj, obj_ins, obj2, pcval))
        return false;

    /* No such property means invalid slot, which callers must check for first. */
    if (PCVAL_IS_NULL(pcval)) {
        slot = SPROP_INVALID_SLOT;
        return true;
    }

    /* Insist if setting on obj being the directly addressed object. */
    uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC));
    if (setflags && obj2 != obj)
        ABORT_TRACE("JOF_SET opcode hit prototype chain");

    /* Don't trace getter or setter calls, our caller wants a direct slot. */
    if (PCVAL_IS_SPROP(pcval)) {
        JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);

        if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
            ABORT_TRACE("non-stub setter");
        if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop))
            ABORT_TRACE("non-stub getter");
        if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)))
            ABORT_TRACE("no valid slot");
        slot = sprop->slot;
    } else {
        if (!PCVAL_IS_SLOT(pcval))
            ABORT_TRACE("PCE is not a slot");
        slot = PCVAL_TO_SLOT(pcval);
    }
    return true;
}
3212 void
3213 TraceRecorder::stobj_set_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins, LIns* v_ins)
3215 if (slot < JS_INITIAL_NSLOTS) {
3216 addName(lir->insStorei(v_ins, obj_ins,
3217 offsetof(JSObject, fslots) + slot * sizeof(jsval)),
3218 "set_slot(fslots)");
3219 } else {
3220 if (!dslots_ins)
3221 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
3222 addName(lir->insStorei(v_ins, dslots_ins,
3223 (slot - JS_INITIAL_NSLOTS) * sizeof(jsval)),
3224 "set_slot(dslots");
3228 LIns*
3229 TraceRecorder::stobj_get_fslot(LIns* obj_ins, unsigned slot)
3231 JS_ASSERT(slot < JS_INITIAL_NSLOTS);
3232 return lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, fslots) + slot * sizeof(jsval));
3235 LIns*
3236 TraceRecorder::stobj_get_slot(LIns* obj_ins, unsigned slot, LIns*& dslots_ins)
3238 if (slot < JS_INITIAL_NSLOTS)
3239 return stobj_get_fslot(obj_ins, slot);
3241 if (!dslots_ins)
3242 dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
3243 return lir->insLoad(LIR_ldp, dslots_ins, (slot - JS_INITIAL_NSLOTS) * sizeof(jsval));
3246 bool
3247 TraceRecorder::native_set(LIns* obj_ins, JSScopeProperty* sprop, LIns*& dslots_ins, LIns* v_ins)
3249 if (SPROP_HAS_STUB_SETTER(sprop) && sprop->slot != SPROP_INVALID_SLOT) {
3250 stobj_set_slot(obj_ins, sprop->slot, dslots_ins, v_ins);
3251 return true;
3253 ABORT_TRACE("unallocated or non-stub sprop");
3256 bool
3257 TraceRecorder::native_get(LIns* obj_ins, LIns* pobj_ins, JSScopeProperty* sprop,
3258 LIns*& dslots_ins, LIns*& v_ins)
3260 if (!SPROP_HAS_STUB_GETTER(sprop))
3261 return false;
3263 if (sprop->slot != SPROP_INVALID_SLOT)
3264 v_ins = stobj_get_slot(pobj_ins, sprop->slot, dslots_ins);
3265 else
3266 v_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
3267 return true;
// So box_jsval can emit no LIR_or at all to tag an object jsval.
JS_STATIC_ASSERT(JSVAL_OBJECT == 0);

/*
 * Emit LIR that turns the raw trace value v_ins into a tagged jsval,
 * according to the recorder-time type of v. Returns false for types we
 * cannot box on trace.
 */
bool
TraceRecorder::box_jsval(jsval v, LIns*& v_ins)
{
    if (isNumber(v)) {
        // BoxDouble may fail (OOM); it signals this with JSVAL_ERROR_COOKIE.
        LIns* args[] = { v_ins, cx_ins };
        v_ins = lir->insCall(F_BoxDouble, args);
        guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
              OOM_EXIT);
        return true;
    }
    switch (JSVAL_TAG(v)) {
      case JSVAL_BOOLEAN:
        // Shift the raw boolean into place and or in the boolean tag.
        v_ins = lir->ins2i(LIR_pior, lir->ins2i(LIR_pilsh, v_ins, JSVAL_TAGBITS), JSVAL_BOOLEAN);
        return true;
      case JSVAL_OBJECT:
        // JSVAL_OBJECT == 0 (see static assert above): already boxed.
        return true;
      case JSVAL_STRING:
        v_ins = lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
        return true;
    }
    return false;
}
/*
 * Emit LIR that strips the jsval tag from v_ins, guarding that the runtime
 * tag matches the recorder-time type of v so the trace exits on a type
 * mismatch. Returns false for types we cannot unbox on trace.
 */
bool
TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins)
{
    if (isNumber(v)) {
        // JSVAL_IS_NUMBER(v): accept either an int-tagged or double-tagged
        // value, then let UnboxDouble produce the raw double.
        guard(false,
              lir->ins_eq0(lir->ins2(LIR_pior,
                                     lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
                                     lir->ins2i(LIR_eq,
                                                lir->ins2(LIR_piand, v_ins,
                                                          INS_CONST(JSVAL_TAGMASK)),
                                                JSVAL_DOUBLE))),
              MISMATCH_EXIT);
        LIns* args[] = { v_ins };
        v_ins = lir->insCall(F_UnboxDouble, args);
        return true;
    }
    switch (JSVAL_TAG(v)) {
      case JSVAL_BOOLEAN:
        guard(true,
              lir->ins2i(LIR_eq,
                         lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
                         JSVAL_BOOLEAN),
              MISMATCH_EXIT);
        // Booleans are stored shifted; shift the tag bits back out.
        v_ins = lir->ins2i(LIR_ush, v_ins, JSVAL_TAGBITS);
        return true;
      case JSVAL_OBJECT:
        // JSVAL_OBJECT tag is 0, so no untagging is needed after the guard.
        guard(true,
              lir->ins2i(LIR_eq,
                         lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
                         JSVAL_OBJECT),
              MISMATCH_EXIT);
        return true;
      case JSVAL_STRING:
        guard(true,
              lir->ins2i(LIR_eq,
                         lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
                         JSVAL_STRING),
              MISMATCH_EXIT);
        // Mask off the tag bits to recover the JSString pointer.
        v_ins = lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
        return true;
    }
    return false;
}
/*
 * Produce the LIR value for |this|: the frame's argv[-1] inside a function
 * (guarded non-null on trace), or the scope chain in global code.
 */
bool
TraceRecorder::getThis(LIns*& this_ins)
{
    if (cx->fp->callee) { /* in a function */
        if (JSVAL_IS_NULL(cx->fp->argv[-1]))
            return false;
        this_ins = get(&cx->fp->argv[-1]);
        guard(false, lir->ins_eq0(this_ins), MISMATCH_EXIT);
    } else { /* in global code */
        JS_ASSERT(!JSVAL_IS_NULL(cx->fp->argv[-1]));
        this_ins = scopeChain();
    }
    return true;
}
3356 bool
3357 TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp)
3359 if (STOBJ_GET_CLASS(obj) != clasp)
3360 return false;
3362 LIns* class_ins = stobj_get_fslot(obj_ins, JSSLOT_CLASS);
3363 class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~3));
3365 char namebuf[32];
3366 JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
3367 guard(true, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf),
3368 MISMATCH_EXIT);
3369 return true;
/* Guard that obj is a dense array (class js_ArrayClass). */
bool
TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins)
{
    return guardClass(obj, obj_ins, &js_ArrayClass);
}
/*
 * Guard that idx is a valid in-bounds index into the dense array obj:
 * non-negative, below the array length, and below the dslots capacity.
 * Returns false (without emitting guards) if idx is already out of bounds
 * at record time.
 */
bool
TraceRecorder::guardDenseArrayIndex(JSObject* obj, jsint idx, LIns* obj_ins,
                                    LIns* dslots_ins, LIns* idx_ins)
{
    jsuint length = ARRAY_DENSE_LENGTH(obj);
    if (!((jsuint)idx < length && idx < obj->fslots[JSSLOT_ARRAY_LENGTH]))
        return false;

    LIns* length_ins = stobj_get_fslot(obj_ins, JSSLOT_ARRAY_LENGTH);

    // guard(index >= 0)
    guard(true, lir->ins2i(LIR_ge, idx_ins, 0), MISMATCH_EXIT);

    // guard(index < length)
    guard(true, lir->ins2(LIR_lt, idx_ins, length_ins), MISMATCH_EXIT);

    // guard(index < capacity): dslots must exist, and the capacity word is
    // stored in the jsval immediately before dslots[0].
    guard(false, lir->ins_eq0(dslots_ins), MISMATCH_EXIT);
    guard(true,
          lir->ins2(LIR_lt, idx_ins, lir->insLoad(LIR_ldp, dslots_ins, 0 - sizeof(jsval))),
          MISMATCH_EXIT);
    return true;
}
3402 void
3403 TraceRecorder::clearFrameSlotsFromCache()
3405 /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
3406 VM stack might map to different locations on the native stack depending on the
3407 number of arguments (i.e.) of the next call, so we have to make sure we map
3408 those in to the cache with the right offsets. */
3409 JSStackFrame* fp = cx->fp;
3410 jsval* vp;
3411 jsval* vpstop;
3412 if (fp->callee) {
3413 vp = &fp->argv[-2];
3414 vpstop = &fp->argv[fp->fun->nargs];
3415 while (vp < vpstop)
3416 nativeFrameTracker.set(vp++, (LIns*)0);
3418 vp = &fp->slots[0];
3419 vpstop = &fp->slots[fp->script->nslots];
3420 while (vp < vpstop)
3421 nativeFrameTracker.set(vp++, (LIns*)0);
/*
 * Called when the interpreter pushes a new (interpreted) frame while
 * recording: bump callDepth and initialize missing arguments and fixed
 * locals to undefined, mirroring what js_Interpret does.
 */
bool
TraceRecorder::record_EnterFrame()
{
    if (++callDepth >= MAX_CALLDEPTH)
        ABORT_TRACE("exceeded maximum call depth");
    debug_only_v(printf("EnterFrame %s, callDepth=%d\n",
                        js_AtomToPrintableString(cx, cx->fp->fun->atom),
                        callDepth););
    JSStackFrame* fp = cx->fp;
    LIns* void_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));

    // Arguments not supplied by the caller default to undefined. Slots that
    // overlap the caller's operand stack must also be purged from the
    // native-frame cache before being re-set.
    jsval* vp = &fp->argv[fp->argc];
    jsval* vpstop = vp + (fp->fun->nargs - fp->argc);
    while (vp < vpstop) {
        if (vp >= fp->down->regs->sp)
            nativeFrameTracker.set(vp, (LIns*)0);
        set(vp++, void_ins, true);
    }

    // Fixed locals start out undefined as well.
    vp = &fp->slots[0];
    vpstop = vp + fp->script->nfixed;
    while (vp < vpstop)
        set(vp++, void_ins, true);
    return true;
}
/*
 * Called when the interpreter pops a frame while recording: drop callDepth,
 * restore the caller's atom map, and store the callee's return value
 * (rval_ins, set by record_JSOP_RETURN) into the caller's stack.
 */
bool
TraceRecorder::record_LeaveFrame()
{
    debug_only_v(
        if (cx->fp->fun)
            printf("LeaveFrame (back to %s), callDepth=%d\n",
                   js_AtomToPrintableString(cx, cx->fp->fun->atom),
                   callDepth);
        );
    if (callDepth-- <= 0)
        ABORT_TRACE("returned out of a loop we started tracing");

    // LeaveFrame gets called after the interpreter popped the frame and
    // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
    atoms = cx->fp->script->atomMap.vector;
    set(&stackval(-1), rval_ins, true);
    return true;
}
// Interrupt checks (debugger/operation callbacks) cannot run on trace.
bool TraceRecorder::record_JSOP_INTERRUPT()
{
    return false;
}

bool
TraceRecorder::record_JSOP_PUSH()
{
    // Push undefined (represented on trace as a boolean-boxed JSVAL_VOID).
    stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)));
    return true;
}

bool
TraceRecorder::record_JSOP_POPV()
{
    // We should not have to implement JSOP_POPV or JSOP_STOP's rval setting.
    return false;
}

// 'with' introduces dynamic scope; not traceable.
bool TraceRecorder::record_JSOP_ENTERWITH()
{
    return false;
}

bool TraceRecorder::record_JSOP_LEAVEWITH()
{
    return false;
}
/*
 * Capture the frame's return value into rval_ins for record_LeaveFrame.
 * When returning a primitive from a constructor, the interpreter substitutes
 * |this| (argv[-1]) as the result, so mirror that here.
 */
bool
TraceRecorder::record_JSOP_RETURN()
{
    jsval& rval = stackval(-1);
    JSStackFrame *fp = cx->fp;
    if (cx->fp->flags & JSFRAME_CONSTRUCTING) {
        if (JSVAL_IS_PRIMITIVE(rval)) {
            JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
            rval_ins = get(&fp->argv[-1]);
        }
        // NOTE(review): a constructor returning an object appears to leave
        // rval_ins untouched here — confirm that case cannot reach this path
        // or that stale rval_ins is benign.
    } else {
        rval_ins = get(&rval);
    }
    debug_only_v(printf("returning from %s\n", js_AtomToPrintableString(cx, cx->fp->fun->atom)););
    clearFrameSlotsFromCache();
    return true;
}
bool
TraceRecorder::record_JSOP_GOTO()
{
    // Unconditional jumps need no LIR; the trace follows the taken path.
    return true;
}

bool
TraceRecorder::record_JSOP_IFEQ()
{
    // Record a CFG merge point before the conditional branch.
    trackCfgMerges(cx->fp->regs->pc);
    return ifop();
}

bool
TraceRecorder::record_JSOP_IFNE()
{
    return ifop();
}

// The arguments object is not supported on trace.
bool
TraceRecorder::record_JSOP_ARGUMENTS()
{
    return false;
}

bool
TraceRecorder::record_JSOP_DUP()
{
    // Duplicate the top-of-stack LIR value.
    stack(0, get(&stackval(-1)));
    return true;
}

bool
TraceRecorder::record_JSOP_DUP2()
{
    // Duplicate the top two stack values, preserving order.
    stack(0, get(&stackval(-2)));
    stack(1, get(&stackval(-1)));
    return true;
}

// const declarations are not traced.
bool
TraceRecorder::record_JSOP_SETCONST()
{
    return false;
}
// Bitwise and comparison opcodes delegate to the generic binary()/cmp()/
// equal() recorders; the CMP_* flags request interpreter-style fusion of
// the comparison with a following branch.

bool
TraceRecorder::record_JSOP_BITOR()
{
    return binary(LIR_or);
}

bool
TraceRecorder::record_JSOP_BITXOR()
{
    return binary(LIR_xor);
}

bool
TraceRecorder::record_JSOP_BITAND()
{
    return binary(LIR_and);
}

bool
TraceRecorder::record_JSOP_EQ()
{
    return equal(CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_NE()
{
    return equal(CMP_NEGATE | CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_LT()
{
    return cmp(LIR_flt, CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_LE()
{
    return cmp(LIR_fle, CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_GT()
{
    return cmp(LIR_fgt, CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_GE()
{
    return cmp(LIR_fge, CMP_TRY_BRANCH_AFTER_COND);
}

bool
TraceRecorder::record_JSOP_LSH()
{
    return binary(LIR_lsh);
}

bool
TraceRecorder::record_JSOP_RSH()
{
    return binary(LIR_rsh);
}

bool
TraceRecorder::record_JSOP_URSH()
{
    return binary(LIR_ush);
}
/*
 * JSOP_ADD: string concatenation when the left operand is a string (the
 * right operand is converted via NumberToString/ObjectToString builtins as
 * needed), otherwise numeric addition through binary().
 */
bool
TraceRecorder::record_JSOP_ADD()
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    if (JSVAL_IS_STRING(l)) {
        LIns* args[] = { NULL, get(&l), cx_ins };
        if (JSVAL_IS_STRING(r)) {
            args[0] = get(&r);
        } else {
            LIns* args2[] = { get(&r), cx_ins };
            if (JSVAL_IS_NUMBER(r)) {
                args[0] = lir->insCall(F_NumberToString, args2);
            } else if (JSVAL_IS_OBJECT(r)) {
                args[0] = lir->insCall(F_ObjectToString, args2);
            } else {
                ABORT_TRACE("untraceable right operand to string-JSOP_ADD");
            }
            // Conversion builtins return NULL on OOM.
            guard(false, lir->ins_eq0(args[0]), OOM_EXIT);
        }
        LIns* concat = lir->insCall(F_ConcatStrings, args);
        guard(false, lir->ins_eq0(concat), OOM_EXIT);
        set(&l, concat);
        return true;
    }
    return binary(LIR_fadd);
}
bool
TraceRecorder::record_JSOP_SUB()
{
    return binary(LIR_fsub);
}

bool
TraceRecorder::record_JSOP_MUL()
{
    return binary(LIR_fmul);
}

bool
TraceRecorder::record_JSOP_DIV()
{
    return binary(LIR_fdiv);
}

bool
TraceRecorder::record_JSOP_MOD()
{
    // Modulo has no LIR opcode; call the dmod builtin for numeric operands.
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);
    if (isNumber(l) && isNumber(r)) {
        LIns* args[] = { get(&r), get(&l) };
        set(&l, lir->insCall(F_dmod, args));
        return true;
    }
    return false;
}
bool
TraceRecorder::record_JSOP_NOT()
{
    // Booleans and objects can be logically negated with a single eq0;
    // other types would need full ToBoolean semantics, so bail.
    jsval& v = stackval(-1);
    if (JSVAL_IS_BOOLEAN(v) || JSVAL_IS_OBJECT(v)) {
        set(&v, lir->ins_eq0(get(&v)));
        return true;
    }
    return false;
}

bool
TraceRecorder::record_JSOP_BITNOT()
{
    return unary(LIR_not);
}

bool
TraceRecorder::record_JSOP_NEG()
{
    return unary(LIR_fneg);
}
// How a traceable native's builtin reports failure: never, NULL return,
// negative return, or the undefined value.
enum JSTNErrType { INFALLIBLE, FAIL_NULL, FAIL_NEG, FAIL_VOID };

// Entry describing a native we can call directly from trace. 'prefix'
// encodes the implicit leading args and 'argtypes' the script-visible args;
// see HANDLE_PREFIX ('C' cx, 'T' this, 'f' function object, 'p' constructor
// prototype) and HANDLE_ARG ('d' double, 'i' int, 'o' object) in
// record_JSOP_NEW.
struct JSTraceableNative {
    JSFastNative native;
    int builtin;
    const char *prefix;
    const char *argtypes;
    JSTNErrType errtype;
};

JSBool
js_Array(JSContext* cx, JSObject* obj, uintN argc, jsval* argv, jsval* rval);

JSBool
js_Object(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
/*
 * Record JSOP_NEW. Interpreted constructors get a fresh object from the
 * FastNewObject builtin and then an ordinary interpreted call; a small table
 * of known native constructors (Array, Object) is matched by native pointer
 * and argument types and called via specialized builtins. Anything else
 * aborts recording.
 */
bool
TraceRecorder::record_JSOP_NEW()
{
    /* Get immediate argc and find the constructor function. */
    jsbytecode *pc = cx->fp->regs->pc;
    unsigned argc = GET_ARGC(pc);
    jsval& fval = stackval(0 - (2 + argc));
    JS_ASSERT(&fval >= StackBase(cx->fp));

    // A null |this| means a shapeless callee; guard its identity.
    jsval& tval = stackval(0 - (argc + 1));
    LIns* this_ins = get(&tval);
    if (this_ins->isconstp() && !this_ins->constvalp() && !guardShapelessCallee(fval))
        return false;

    /*
     * Require that the callee be a function object, to avoid guarding on its
     * class here. We know if the callee and this were pushed by JSOP_CALLNAME
     * or JSOP_CALLPROP that callee is a *particular* function, since these hit
     * the property cache and guard on the object (this) in which the callee
     * was found. So it's sufficient to test here that the particular function
     * is interpreted, not guard on that condition.
     *
     * Bytecode sequences that push shapeless callees must guard on the callee
     * class being Function and the function being interpreted.
     */
    JS_ASSERT(VALUE_IS_FUNCTION(cx, fval));
    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));

    if (FUN_INTERPRETED(fun)) {
        // Allocate the new object now and record an interpreted call.
        LIns* args[] = { get(&fval), cx_ins };
        LIns* tv_ins = lir->insCall(F_FastNewObject, args);
        guard(false, lir->ins_eq0(tv_ins), OOM_EXIT);
        jsval& tv = stackval(0 - (1 + argc));
        set(&tv, tv_ins);
        return interpretedFunctionCall(fval, fun, argc);
    }

    static JSTraceableNative knownNatives[] = {
        { (JSFastNative)js_Array,  F_FastNewArray,  "pC", "",    FAIL_NULL },
        { (JSFastNative)js_Array,  F_Array_1int,    "pC", "i",   FAIL_NULL },
        { (JSFastNative)js_Array,  F_Array_2obj,    "pC", "oo",  FAIL_NULL },
        { (JSFastNative)js_Array,  F_Array_3num,    "pC", "ddd", FAIL_NULL },
        { (JSFastNative)js_Object, F_FastNewObject, "fC", "",    FAIL_NULL },
    };

    for (uintN i = 0; i < JS_ARRAY_LENGTH(knownNatives); i++) {
        JSTraceableNative* known = &knownNatives[i];
        if ((JSFastNative)fun->u.n.native != known->native)
            continue;

        uintN knownargc = strlen(known->argtypes);
        if (argc != knownargc)
            continue;

        // Builtin args are filled right-to-left: script args first, then the
        // prefix (implicit) args.
        intN prefixc = strlen(known->prefix);
        LIns* args[5];
        LIns** argp = &args[argc + prefixc - 1];
        char argtype;

#if defined _DEBUG
        memset(args, 0xCD, sizeof(args));
#endif

#define HANDLE_PREFIX(i)                                                      \
    JS_BEGIN_MACRO                                                            \
        argtype = known->prefix[i];                                           \
        if (argtype == 'C') {                                                 \
            *argp = cx_ins;                                                   \
        } else if (argtype == 'T') {                                          \
            *argp = this_ins;                                                 \
        } else if (argtype == 'f') {                                          \
            *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval));                      \
        } else if (argtype == 'p') {                                          \
            JSObject* ctor = JSVAL_TO_OBJECT(fval);                           \
            jsval pval;                                                       \
            if (!OBJ_GET_PROPERTY(cx, ctor,                                   \
                                  ATOM_TO_JSID(cx->runtime->atomState         \
                                               .classPrototypeAtom),          \
                                  &pval)) {                                   \
                ABORT_TRACE("error getting prototype from constructor");      \
            }                                                                 \
            if (!JSVAL_IS_OBJECT(pval))                                       \
                ABORT_TRACE("got primitive prototype from constructor");      \
            *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(pval));                      \
        } else {                                                              \
            JS_NOT_REACHED("unknown prefix arg type");                        \
        }                                                                     \
        argp--;                                                               \
    JS_END_MACRO

        switch (prefixc) {
          case 3:
            HANDLE_PREFIX(2);
            /* FALL THROUGH */
          case 2:
            HANDLE_PREFIX(1);
            /* FALL THROUGH */
          case 1:
            HANDLE_PREFIX(0);
            /* FALL THROUGH */
          case 0:
            break;
          default:
            JS_NOT_REACHED("illegal number of prefix args");
        }

#undef HANDLE_PREFIX

#define HANDLE_ARG(i)                                                         \
    JS_BEGIN_MACRO                                                            \
        jsval& arg = stackval(-(i + 1));                                      \
        argtype = known->argtypes[i];                                         \
        if (argtype == 'd' || argtype == 'i') {                               \
            if (!isNumber(arg))                                               \
                continue; /* might have another specialization for arg */     \
            *argp = get(&arg);                                                \
            if (argtype == 'i')                                               \
                *argp = f2i(*argp);                                           \
        } else if (argtype == 'o') {                                          \
            if (!JSVAL_IS_OBJECT(arg))                                        \
                continue; /* might have another specialization for arg */     \
            *argp = get(&arg);                                                \
        } else {                                                              \
            continue; /* might have another specialization for arg */         \
        }                                                                     \
        argp--;                                                               \
    JS_END_MACRO

        switch (knownargc) {
          case 4:
            HANDLE_ARG(3);
            /* FALL THROUGH */
          case 3:
            HANDLE_ARG(2);
            /* FALL THROUGH */
          case 2:
            HANDLE_ARG(1);
            /* FALL THROUGH */
          case 1:
            HANDLE_ARG(0);
            /* FALL THROUGH */
          case 0:
            break;
          default:
            JS_NOT_REACHED("illegal number of args to traceable native");
        }

#undef HANDLE_ARG

#if defined _DEBUG
        JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
#endif

        // Call the builtin and guard on its failure convention.
        LIns* res_ins = lir->insCall(known->builtin, args);
        switch (known->errtype) {
          case FAIL_NULL:
            guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
            break;
          case FAIL_NEG:
          {
            res_ins = lir->ins1(LIR_i2f, res_ins);
            jsdpun u;
            u.d = 0.0;
            guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmq(u.u64)), OOM_EXIT);
            break;
          }
          case FAIL_VOID:
            guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
            break;
          default:;
        }
        set(&fval, res_ins);
        return true;
    }

    if (fun->u.n.clasp)
        ABORT_TRACE("can't trace native constructor");

    ABORT_TRACE("can't trace unknown constructor");
}
// Property deletion is not traced; abort to the interpreter.
bool
TraceRecorder::record_JSOP_DELNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_DELPROP()
{
    return false;
}

bool
TraceRecorder::record_JSOP_DELELEM()
{
    return false;
}
/*
 * JSOP_TYPEOF: strings and numbers have compile-time-known typeof results;
 * booleans/undefined and objects need a runtime builtin call.
 */
bool
TraceRecorder::record_JSOP_TYPEOF()
{
    jsval& r = stackval(-1);
    LIns* type;
    if (JSVAL_IS_STRING(r)) {
        type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]));
    } else if (isNumber(r)) {
        type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]));
    } else {
        LIns* args[] = { get(&r), cx_ins };
        if (JSVAL_TAG(r) == JSVAL_BOOLEAN) {
            // We specialize identically for boolean and undefined. We must not have a hole here.
            // Pass the unboxed type here, since TypeOfBoolean knows how to handle it.
            JS_ASSERT(JSVAL_TO_BOOLEAN(r) <= 2);
            type = lir->insCall(F_TypeOfBoolean, args);
        } else {
            JS_ASSERT(JSVAL_IS_OBJECT(r));
            type = lir->insCall(F_TypeOfObject, args);
        }
    }
    set(&r, type);
    return true;
}

bool
TraceRecorder::record_JSOP_VOID()
{
    // Replace the top of stack with undefined.
    stack(-1, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)));
    return true;
}
JSBool
js_num_parseFloat(JSContext* cx, uintN argc, jsval* vp);

JSBool
js_num_parseInt(JSContext* cx, uintN argc, jsval* vp);

// Pre-increment/decrement forms (++x / --x); the helpers default to pre
// semantics here.

bool
TraceRecorder::record_JSOP_INCNAME()
{
    return incName(1);
}

bool
TraceRecorder::record_JSOP_INCPROP()
{
    return incProp(1);
}

bool
TraceRecorder::record_JSOP_INCELEM()
{
    return incElem(1);
}

bool
TraceRecorder::record_JSOP_DECNAME()
{
    return incName(-1);
}

bool
TraceRecorder::record_JSOP_DECPROP()
{
    return incProp(-1);
}

bool
TraceRecorder::record_JSOP_DECELEM()
{
    return incElem(-1);
}
/*
 * Shared helper for the name increment/decrement opcodes: resolve the name
 * to a slot, apply incr via inc(), and write the updated value back.
 * 'pre' presumably selects pre- vs post-operation result — see inc().
 */
bool
TraceRecorder::incName(jsint incr, bool pre)
{
    jsval* vp;
    if (!name(vp))
        return false;
    LIns* v_ins = get(vp);
    if (!inc(*vp, v_ins, incr, pre))
        return false;
    set(vp, v_ins);
    return true;
}
// Post-increment forms (x++): pre == false, the old value is the result.

bool
TraceRecorder::record_JSOP_NAMEINC()
{
    return incName(1, false);
}

bool
TraceRecorder::record_JSOP_PROPINC()
{
    return incProp(1, false);
}

// XXX consolidate with record_JSOP_GETELEM code...
bool
TraceRecorder::record_JSOP_ELEMINC()
{
    return incElem(1, false);
}
4035 bool
4036 TraceRecorder::record_JSOP_NAMEDEC()
4038 return incName(-1, true);
// Post-decrement forms (x--): pre == false, the old value is the result.

bool
TraceRecorder::record_JSOP_PROPDEC()
{
    return incProp(-1, false);
}

bool
TraceRecorder::record_JSOP_ELEMDEC()
{
    return incElem(-1, false);
}

bool
TraceRecorder::record_JSOP_GETPROP()
{
    // Generic property get on the top-of-stack object.
    return getProp(stackval(-1));
}
/*
 * Record JSOP_SETPROP via the property cache: require a native setProperty,
 * a cache hit for this pc/shape, and a stub-settable sprop; emit shape
 * guards (and an AddProperty call when the property is not yet in the
 * object's own scope), then store the boxed value into its slot.
 */
bool
TraceRecorder::record_JSOP_SETPROP()
{
    jsval& r = stackval(-1);
    jsval& l = stackval(-2);

    if (JSVAL_IS_PRIMITIVE(l))
        ABORT_TRACE("primitive this for SETPROP");

    JSObject* obj = JSVAL_TO_OBJECT(l);

    if (obj->map->ops->setProperty != js_SetProperty)
        ABORT_TRACE("non-native JSObjectOps::setProperty");

    LIns* obj_ins = get(&l);

    JSPropertyCache* cache = &JS_PROPERTY_CACHE(cx);
    uint32 kshape = OBJ_SCOPE(obj)->shape;
    jsbytecode* pc = cx->fp->regs->pc;

    // Recorder-time property cache probe: only a first-level hit is traced.
    JSPropCacheEntry* entry = &cache->table[PROPERTY_CACHE_HASH_PC(pc, kshape)];
    if (entry->kpc != pc || entry->kshape != kshape)
        ABORT_TRACE("cache miss");
    if (!PCVAL_IS_SPROP(entry->vword))
        ABORT_TRACE("hit non-sprop cache value");

    LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
    LIns* ops_ins;
    if (!map_is_native(obj->map, map_ins, ops_ins, offsetof(JSObjectOps, setProperty)))
        return false;

    // The global object's shape is guarded at trace entry.
    if (obj != globalObj) {
        LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
        guard(true, addName(lir->ins2i(LIR_eq, shape_ins, kshape), "guard(shape)"),
              MISMATCH_EXIT);
    }

    // If the property is not in obj's own scope yet, add it at runtime via
    // the AddProperty builtin (guarded on success).
    JSScope* scope = OBJ_SCOPE(obj);
    JSScopeProperty* sprop = PCVAL_TO_SPROP(entry->vword);
    if (scope->object != obj || !SCOPE_HAS_PROPERTY(scope, sprop)) {
        LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
        LIns* ok_ins = lir->insCall(F_AddProperty, args);
        guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
    }

    LIns* dslots_ins = NULL;
    LIns* v_ins = get(&r);
    LIns* boxed_ins = v_ins;
    if (!box_jsval(r, boxed_ins))
        return false;
    if (!native_set(obj_ins, sprop, dslots_ins, boxed_ins))
        return false;
    // SETPROP leaves the value on the stack unless immediately popped.
    if (*pc == JSOP_SETPROP && pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
        stack(-2, v_ins);
    return true;
}
// Record JSOP_GETELEM: obj[index]. Three specializations:
//   1. string[int]     -> F_String_getelem builtin
//   2. obj["name"]     -> F_Any_getelem builtin (generic named lookup)
//   3. denseArray[int] -> inline slot access via the elem() helper
4117 bool
4118 TraceRecorder::record_JSOP_GETELEM()
4120     jsval& r = stackval(-1);
4121     jsval& l = stackval(-2);
4123     if (JSVAL_IS_STRING(l) && JSVAL_IS_INT(r)) {
4124         int i;
4126         i = JSVAL_TO_INT(r);
// Bounds-check at record time; on trace the builtin's NULL return is guarded.
4127         if ((size_t)i >= JSSTRING_LENGTH(JSVAL_TO_STRING(l)))
4128             ABORT_TRACE("Invalid string index in JSOP_GETELEM");
4130         LIns* args[] = { f2i(get(&r)), get(&l), cx_ins };
4131         LIns* unitstr_ins = lir->insCall(F_String_getelem, args);
4132         guard(false, lir->ins_eq0(unitstr_ins), MISMATCH_EXIT);
4133         set(&l, unitstr_ins);
4134         return true;
4137     if (!JSVAL_IS_PRIMITIVE(l) && JSVAL_IS_STRING(r)) {
4138         jsval v;
4139         jsid id;
// Perform the lookup now to learn the result's type for unboxing; the
// trace-time builtin signals failure with JSVAL_ERROR_COOKIE.
4141         if (!js_ValueToStringId(cx, r, &id))
4142             return false;
4143         r = ID_TO_VALUE(id);
4144         if (!OBJ_GET_PROPERTY(cx, JSVAL_TO_OBJECT(l), id, &v))
4145             return false;
4147         LIns* args[] = { get(&r), get(&l), cx_ins };
4148         LIns* v_ins = lir->insCall(F_Any_getelem, args);
4149         guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
4150               MISMATCH_EXIT);
4151         if (!unbox_jsval(v, v_ins))
4152             ABORT_TRACE("JSOP_GETELEM");
4153         set(&l, v_ins);
4154         return true;
// Dense-array path: elem() emits the guards and the unboxed slot load.
4157     jsval* vp;
4158     LIns* v_ins;
4159     LIns* addr_ins;
4160     if (!elem(l, r, vp, v_ins, addr_ins))
4161         return false;
4162     set(&l, v_ins);
4163     return true;
// Record JSOP_SETELEM: obj[index] = value. String indexes go through the
// generic F_Any_setelem builtin; int indexes are traced only on dense
// arrays, via F_Array_dense_setelem with an in-bounds/int-index guard.
4166 bool
4167 TraceRecorder::record_JSOP_SETELEM()
4169     jsval& v = stackval(-1);
4170     jsval& r = stackval(-2);
4171     jsval& l = stackval(-3);
4173     /* no guards for type checks, trace specialized this already */
4174     if (JSVAL_IS_PRIMITIVE(l))
4175         ABORT_TRACE("left JSOP_SETELEM operand is not an object");
4176     JSObject* obj = JSVAL_TO_OBJECT(l);
4177     LIns* obj_ins = get(&l);
4179     if (JSVAL_IS_STRING(r)) {
4180         LIns* v_ins = get(&v);
4181         LIns* unboxed_v_ins = v_ins;
// The builtin takes a boxed jsval; keep the unboxed value for the stack.
4182         if (!box_jsval(v, v_ins))
4183             ABORT_TRACE("boxing string-indexed JSOP_SETELEM value");
4184         LIns* args[] = { v_ins, get(&r), get(&l), cx_ins };
4185         LIns* ok_ins = lir->insCall(F_Any_setelem, args);
4186         guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
4187         set(&l, unboxed_v_ins);
4188         return true;
4190     if (!JSVAL_IS_INT(r))
4191         ABORT_TRACE("non-string, non-int JSOP_SETELEM index");
4193     /* make sure the object is actually a dense array */
4194     if (!guardDenseArray(obj, obj_ins))
4195         ABORT_TRACE("not a dense array");
4197     /* check that the index is within bounds */
4198     LIns* idx_ins = f2i(get(&r));
4200     /* we have to check that its really an integer, but this check will to go away
4201        once we peel the loop type down to integer for this slot */
4202     guard(true, lir->ins2(LIR_feq, get(&r), lir->ins1(LIR_i2f, idx_ins)), MISMATCH_EXIT);
4203     /* ok, box the value we are storing, store it and we are done */
4204     LIns* v_ins = get(&v);
4205     LIns* boxed_ins = v_ins;
4206     if (!box_jsval(v, boxed_ins))
4207         ABORT_TRACE("boxing failed");
4208     LIns* args[] = { boxed_ins, idx_ins, obj_ins, cx_ins };
4209     LIns* res_ins = lir->insCall(F_Array_dense_setelem, args);
4210     guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
// Like SETPROP: keep the value on the stack unless the next op pops it
// (INITELEM reuses this recorder, hence the *pc check).
4212     jsbytecode* pc = cx->fp->regs->pc;
4213     if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
4214         set(&l, v_ins);
4215     return true;
// Record JSOP_CALLNAME: push callee and |this| for a by-name call. Inside
// a non-global scope the slot is resolved via activeCallOrGlobalSlot and
// |this| is NULL; on the global object the property cache must yield a
// known function object, which is pushed as a constant.
4218 bool
4219 TraceRecorder::record_JSOP_CALLNAME()
4221     JSObject* obj = cx->fp->scopeChain;
4222     if (obj != globalObj) {
4223         jsval* vp;
4224         if (!activeCallOrGlobalSlot(obj, vp))
4225             return false;
4226         stack(0, get(vp));
4227         stack(1, INS_CONSTPTR(NULL));
4228         return true;
4231     LIns* obj_ins = scopeChain();
4232     JSObject* obj2;
4233     jsuword pcval;
4234     if (!test_property_cache(obj, obj_ins, obj2, pcval))
4235         return false;
4237     if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
4238         ABORT_TRACE("callee is not an object");
4239     JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
4241     stack(0, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
4242     stack(1, obj_ins);
4243     return true;
// Upvar access is not yet traceable; both opcodes abort recording.
4246 bool
4247 TraceRecorder::record_JSOP_GETUPVAR()
4249     ABORT_TRACE("GETUPVAR");
4252 bool
4253 TraceRecorder::record_JSOP_CALLUPVAR()
4255     ABORT_TRACE("CALLUPVAR");
// Guard that a callee reached without shape information is (a) a function
// value and (b) the exact same object seen at record time, by emitting an
// identity guard against the recorded object pointer.
4258 bool
4259 TraceRecorder::guardShapelessCallee(jsval& callee)
4261     if (!VALUE_IS_FUNCTION(cx, callee))
4262         ABORT_TRACE("shapeless callee is not a function");
4264     guard(true,
4265           addName(lir->ins2(LIR_eq, get(&callee), INS_CONSTPTR(JSVAL_TO_OBJECT(callee))),
4266                   "guard(shapeless callee)"),
4267           MISMATCH_EXIT);
4268     return true;
// Record a call into an interpreted (scripted) function: fill a FrameInfo
// (callee, call pc, sp offset + argc) and store it into the call-depth
// slot of the rp (frame-info) buffer, then switch the recorder's atom map
// to the callee script's.
4271 bool
4272 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc)
4274     JSStackFrame* fp = cx->fp;
4276     // TODO: track the copying via the tracker...
// If fewer actual args than formals and the padded argv would overflow the
// current stack-pool arena, the interpreter would move argv -- untraceable.
4277     if (argc < fun->nargs &&
4278         jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
4279         ABORT_TRACE("can't trace calls with too few args requiring argv move");
4282     FrameInfo fi = {
4283         JSVAL_TO_OBJECT(fval),
4284         fp->regs->pc,
4285         { { fp->regs->sp - fp->slots, argc } }
// Track the deepest inlined call so the tree knows its frame budget.
4288     unsigned callDepth = getCallDepth();
4289     if (callDepth >= treeInfo->maxCallDepth)
4290         treeInfo->maxCallDepth = callDepth + 1;
4292     lir->insStorei(INS_CONSTPTR(fi.callee), lirbuf->rp,
4293                    callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, callee));
4294     lir->insStorei(INS_CONSTPTR(fi.callpc), lirbuf->rp,
4295                    callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, callpc));
4296     lir->insStorei(INS_CONST(fi.word), lirbuf->rp,
4297                    callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, word));
// Subsequent bytecode is recorded against the callee script's atoms.
4299     atoms = fun->u.i.script->atomMap.vector;
4300     return true;
4303 #define KNOWN_NATIVE_DECL(name) JSBool name(JSContext* cx, uintN argc, jsval* vp);
4305 KNOWN_NATIVE_DECL(js_fun_apply)
4306 KNOWN_NATIVE_DECL(js_math_ceil)
4307 KNOWN_NATIVE_DECL(js_math_cos)
4308 KNOWN_NATIVE_DECL(js_math_floor)
4309 KNOWN_NATIVE_DECL(js_math_pow)
4310 KNOWN_NATIVE_DECL(js_math_random)
4311 KNOWN_NATIVE_DECL(js_math_sin)
4312 KNOWN_NATIVE_DECL(js_math_sqrt)
4313 KNOWN_NATIVE_DECL(js_num_toString)
4314 KNOWN_NATIVE_DECL(js_str_charAt)
4315 KNOWN_NATIVE_DECL(js_str_charCodeAt)
4316 KNOWN_NATIVE_DECL(js_str_concat)
4317 KNOWN_NATIVE_DECL(js_str_fromCharCode)
4318 KNOWN_NATIVE_DECL(js_str_substring)
// Record JSOP_CALL. Interpreted callees are inlined via
// interpretedFunctionCall; fast natives are matched against the
// knownNatives table below and emitted as direct builtin calls with
// per-signature argument checks and an error guard. Slow natives and
// unknown natives abort the trace. Function.prototype.apply with a
// single-string-array argument is special-cased by rewriting fun/argc
// to the applied callee.
4320 bool
4321 TraceRecorder::record_JSOP_CALL()
4323     jsbytecode *pc = cx->fp->regs->pc;
4324     uintN argc = GET_ARGC(pc);
4325     jsval& fval = stackval(0 - (argc + 2));
4326     JS_ASSERT(&fval >= StackBase(cx->fp));
// A constant-NULL |this| means the callee was pushed shapeless; guard its
// identity before specializing on it.
4328     jsval& tval = stackval(0 - (argc + 1));
4329     LIns* this_ins = get(&tval);
4330     if (this_ins->isconstp() && !this_ins->constvalp() && !guardShapelessCallee(fval))
4331         return false;
4334      * Require that the callee be a function object, to avoid guarding on its
4335      * class here. We know if the callee and this were pushed by JSOP_CALLNAME
4336      * or JSOP_CALLPROP that callee is a *particular* function, since these hit
4337      * the property cache and guard on the object (this) in which the callee
4338      * was found. So it's sufficient to test here that the particular function
4339      * is interpreted, not guard on that condition.
4341      * Bytecode sequences that push shapeless callees must guard on the callee
4342      * class being Function and the function being interpreted.
4344     JS_ASSERT(VALUE_IS_FUNCTION(cx, fval));
4345     JSFunction* fun = GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(fval));
4347     if (FUN_INTERPRETED(fun))
4348         return interpretedFunctionCall(fval, fun, argc);
4350     if (FUN_SLOW_NATIVE(fun))
4351         ABORT_TRACE("slow native");
// Table of traceable natives: native entry point, LIR builtin id, prefix
// args ('C' = cx, 'T' = this, 'R' = runtime, 'P' = pc), stack-arg types
// ('d' double, 'i' int, 's' string, 'r' regexp, 'f' function), and how
// the builtin reports failure.
4353     static JSTraceableNative knownNatives[] = {
4354         { js_array_join,               F_Array_p_join,         "TC",  "s",    FAIL_NULL },
4355         { js_math_sin,                 F_Math_sin,             "",    "d",    INFALLIBLE },
4356         { js_math_cos,                 F_Math_cos,             "",    "d",    INFALLIBLE },
4357         { js_math_pow,                 F_Math_pow,             "",    "dd",   INFALLIBLE },
4358         { js_math_sqrt,                F_Math_sqrt,            "",    "d",    INFALLIBLE },
4359         { js_math_floor,               F_Math_floor,           "",    "d",    INFALLIBLE },
4360         { js_math_ceil,                F_Math_ceil,            "",    "d",    INFALLIBLE },
4361         { js_math_random,              F_Math_random,          "R",   "",     INFALLIBLE },
4362         { js_num_parseInt,             F_ParseInt,             "C",   "s",    INFALLIBLE },
4363         { js_num_parseFloat,           F_ParseFloat,           "C",   "s",    INFALLIBLE },
4364         { js_num_toString,             F_NumberToString,       "TC",  "",     FAIL_NULL },
4365         { js_obj_hasOwnProperty,       F_Object_p_hasOwnProperty,
4366                                                                "TC",  "s",    FAIL_VOID },
4367         { js_obj_propertyIsEnumerable, F_Object_p_propertyIsEnumerable,
4368                                                                "TC",  "s",    FAIL_VOID },
4369         { js_str_charAt,               F_String_getelem,       "TC",  "i",    FAIL_NULL },
4370         { js_str_charCodeAt,           F_String_p_charCodeAt,  "T",   "i",    FAIL_NEG },
4371         { js_str_concat,               F_String_p_concat_1int, "TC",  "i",    FAIL_NULL },
4372         { js_str_fromCharCode,         F_String_fromCharCode,  "C",   "i",    FAIL_NULL },
4373         { js_str_match,                F_String_p_match,       "PTC", "r",    FAIL_VOID },
4374         { js_str_replace,              F_String_p_replace_str, "TC",  "sr",   FAIL_NULL },
4375         { js_str_replace,              F_String_p_replace_str2,"TC",  "ss",   FAIL_NULL },
4376         { js_str_replace,              F_String_p_replace_str3,"TC",  "sss",  FAIL_NULL },
4377         { js_str_split,                F_String_p_split,       "TC",  "s",    FAIL_NULL },
4378         { js_str_substring,            F_String_p_substring,   "TC",  "ii",   FAIL_NULL },
4379         { js_str_substring,            F_String_p_substring_1, "TC",  "i",    FAIL_NULL },
4380         { js_str_toLowerCase,          F_toLowerCase,          "TC",  "",     FAIL_NULL },
4381         { js_str_toUpperCase,          F_toUpperCase,          "TC",  "",     FAIL_NULL },
4384     uintN i = 0;
4385     LIns* arg1_ins = NULL;
4386     jsval arg1 = JSVAL_VOID;
// Special case: f.apply(obj, [str]) -- only traced when the argument
// array is a freshly-created one-string array (F_Array_1str), in which
// case we retarget fun/this/argc to the applied callee.
4388     if ((JSFastNative)fun->u.n.native == js_fun_apply) {
4389         if (argc != 2)
4390             ABORT_TRACE("can't trace Function.prototype.apply with other than 2 args");
4392         jsval& oval = stackval(-2);
4393         if (JSVAL_IS_PRIMITIVE(oval))
4394             ABORT_TRACE("can't trace Function.prototype.apply with primitive 1st arg");
4396         jsval& aval = stackval(-1);
4397         if (JSVAL_IS_PRIMITIVE(aval))
4398             ABORT_TRACE("can't trace Function.prototype.apply with primitive 2nd arg");
4400         LIns* aval_ins = get(&aval);
4401         if (!aval_ins->isCall() || aval_ins->fid() != F_Array_1str)
4402             ABORT_TRACE("can't yet trace Function.prototype.apply on other than [str] 2nd arg");
4404         JSObject* aobj = JSVAL_TO_OBJECT(aval);
4405         JS_ASSERT(OBJ_IS_ARRAY(cx, aobj));
4406         JS_ASSERT(aobj->fslots[JSSLOT_ARRAY_LENGTH] == 1);
4407         JS_ASSERT(JSVAL_IS_STRING(aobj->dslots[0]));
4409         if (!guardShapelessCallee(tval))
4410             return false;
4411         JSObject* tfunobj = JSVAL_TO_OBJECT(tval);
4412         JSFunction* tfun = GET_FUNCTION_PRIVATE(cx, tfunobj);
4413         if (FUN_INTERPRETED(tfun))
4414             ABORT_TRACE("can't yet trace Function.prototype.apply for scripted functions");
4416         JSTraceableNative* known;
4417         for (;;) {
4418             known = &knownNatives[i];
4419             if (known->native == (JSFastNative)tfun->u.n.native)
4420                 break;
4421             if (++i == JS_ARRAY_LENGTH(knownNatives))
4422                 ABORT_TRACE("unknown native being Function.prototype.apply'ed");
4424         if (strlen(known->argtypes) != 1)
4425             ABORT_TRACE("known native being Function.prototype.apply'ed with wrong argc");
// The single argument comes from the array's constructor call, not the
// operand stack.
4427         this_ins = get(&oval);
4428         arg1_ins = callArgN(aval_ins, 1);
4429         arg1 = aobj->dslots[0];
4430         fun = tfun;
4431         argc = 1;
// Scan the table for an entry matching the native, argc, and the runtime
// types of the actual arguments; a mismatch just tries the next entry
// (some natives have several specializations).
4434     for (; i < JS_ARRAY_LENGTH(knownNatives); i++) {
4435         JSTraceableNative* known = &knownNatives[i];
4436         if (known->native != (JSFastNative)fun->u.n.native)
4437             continue;
4439         uintN knownargc = strlen(known->argtypes);
4440         if (argc != knownargc)
4441             continue;
4443         intN prefixc = strlen(known->prefix);
4444         LIns* args[5];
// args[] is filled back-to-front: stack args first (rightmost deepest),
// then prefix args.
4445         LIns** argp = &args[argc + prefixc - 1];
4446         char argtype;
4448 #if defined _DEBUG
4449         memset(args, 0xCD, sizeof(args));
4450 #endif
4452 #define HANDLE_PREFIX(i)                                                       \
4453     JS_BEGIN_MACRO                                                             \
4454         argtype = known->prefix[i];                                            \
4455         if (argtype == 'C') {                                                  \
4456             *argp = cx_ins;                                                    \
4457         } else if (argtype == 'T') {                                           \
4458             *argp = this_ins;                                                  \
4459         } else if (argtype == 'R') {                                           \
4460             *argp = INS_CONSTPTR(cx->runtime);                                 \
4461         } else if (argtype == 'P') {                                           \
4462             *argp = INS_CONSTPTR(pc);                                          \
4463         } else {                                                               \
4464             JS_NOT_REACHED("unknown prefix arg type");                         \
4466         argp--;                                                                \
4467     JS_END_MACRO
4469         switch (prefixc) {
4470           case 3:
4471             HANDLE_PREFIX(2);
4472             /* FALL THROUGH */
4473           case 2:
4474             HANDLE_PREFIX(1);
4475             /* FALL THROUGH */
4476           case 1:
4477             HANDLE_PREFIX(0);
4478             /* FALL THROUGH */
4479           case 0:
4480             break;
4481           default:
4482             JS_NOT_REACHED("illegal number of prefix args");
4485 #undef HANDLE_PREFIX
4488  * NB: do not use JS_BEGIN_MACRO/JS_END_MACRO or the do-while(0) loop they hide,
4489  * because of the embedded continues below.
4491 #define HANDLE_ARG(i)                                                          \
4493         jsval& arg = (i == 0 && arg1_ins) ? arg1 : stackval(-(i + 1));         \
4494         *argp = (i == 0 && arg1_ins) ? arg1_ins : get(&arg);                   \
4495         argtype = known->argtypes[i];                                          \
4496         if (argtype == 'd' || argtype == 'i') {                                \
4497             if (!isNumber(arg))                                                \
4498                 continue; /* might have another specialization for arg */      \
4499             if (argtype == 'i')                                                \
4500                 *argp = f2i(*argp);                                            \
4501         } else if (argtype == 's') {                                           \
4502             if (!JSVAL_IS_STRING(arg))                                         \
4503                 continue; /* might have another specialization for arg */      \
4504         } else if (argtype == 'r') {                                           \
4505             if (!VALUE_IS_REGEXP(cx, arg))                                     \
4506                 continue; /* might have another specialization for arg */      \
4507         } else if (argtype == 'f') {                                           \
4508             if (!VALUE_IS_FUNCTION(cx, arg))                                   \
4509                 continue; /* might have another specialization for arg */      \
4510         } else {                                                               \
4511             continue; /* might have another specialization for arg */          \
4513         argp--;                                                                \
4516         switch (knownargc) {
4517           case 4:
4518             HANDLE_ARG(3);
4519             /* FALL THROUGH */
4520           case 3:
4521             HANDLE_ARG(2);
4522             /* FALL THROUGH */
4523           case 2:
4524             HANDLE_ARG(1);
4525             /* FALL THROUGH */
4526           case 1:
4527             HANDLE_ARG(0);
4528             /* FALL THROUGH */
4529           case 0:
4530             break;
4531           default:
4532             JS_NOT_REACHED("illegal number of args to traceable native");
4535 #undef HANDLE_ARG
4537 #if defined _DEBUG
4538         JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
4539 #endif
// Emit the builtin call and a guard matching its failure convention.
4541         LIns* res_ins = lir->insCall(known->builtin, args);
4542         switch (known->errtype) {
4543           case FAIL_NULL:
4544             guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
4545             break;
4546           case FAIL_NEG:
4548             res_ins = lir->ins1(LIR_i2f, res_ins);
4549             jsdpun u;
4550             u.d = 0.0;
4551             guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmq(u.u64)), OOM_EXIT);
4552             break;
4554           case FAIL_VOID:
4555             guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
4556             break;
4557           default:;
4559         set(&fval, res_ins);
4560         return true;
4563     /* Didn't find it. */
4564     ABORT_TRACE("unknown native");
// Resolve a name reference to the address of its jsval slot. Non-global
// scopes go through activeCallOrGlobalSlot; global names go through the
// property cache and are lazily imported into the trace's globals.
4567 bool
4568 TraceRecorder::name(jsval*& vp)
4570     JSObject* obj = cx->fp->scopeChain;
4571     if (obj != globalObj)
4572         return activeCallOrGlobalSlot(obj, vp);
4574     /* Can't use prop here, because we don't want unboxing from global slots. */
4575     LIns* obj_ins = scopeChain();
4576     uint32 slot;
4577     if (!test_property_cache_direct_slot(obj, obj_ins, slot))
4578         return false;
4580     if (slot == SPROP_INVALID_SLOT)
4581         ABORT_TRACE("name op can't find named property");
4583     if (!lazilyImportGlobalSlot(slot))
4584         ABORT_TRACE("lazy import of global slot failed");
4586     vp = &STOBJ_GET_SLOT(obj, slot);
4587     return true;
// Record a property access on obj, producing the property's slot number
// and an unboxed value instruction. Handles missing properties (pushes
// undefined), prototype-chain walks for gets, and a narrow regexp
// built-in getter special case; all other getters/setters abort.
4590 bool
4591 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins)
4594      * Can't specialize to assert obj != global, must guard to avoid aliasing
4595      * stale homes of stacked global variables.
4597     if (obj == globalObj)
4598         ABORT_TRACE("prop op aliases global");
4599     guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT);
4602      * Property cache ensures that we are dealing with an existing property,
4603      * and guards the shape for us.
4605     JSObject* obj2;
4606     jsuword pcval;
4607     if (!test_property_cache(obj, obj_ins, obj2, pcval))
4608         return false;
4610     /* Check for non-existent property reference, which results in undefined. */
4611     const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
4612     if (PCVAL_IS_NULL(pcval)) {
4613         v_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
4614         JS_ASSERT(cs.ndefs == 1);
4615         stack(-cs.nuses, v_ins);
4616         slot = SPROP_INVALID_SLOT;
4617         return true;
4620     /* Insist if setting on obj being the directly addressed object. */
4621     uint32 setflags = (cs.format & (JOF_SET | JOF_INCDEC));
4622     LIns* dslots_ins = NULL;
4623     if (obj2 != obj) {
4624         if (setflags)
4625             ABORT_TRACE("JOF_SET opcode hit prototype chain");
4628          * We're getting a proto-property. Walk up the prototype chain emitting
4629          * proto slot loads, updating obj as we go, leaving obj set to obj2 with
4630          * obj_ins the last proto-load.
4632         while (obj != obj2) {
4633             obj_ins = stobj_get_slot(obj_ins, JSSLOT_PROTO, dslots_ins);
4634             obj = STOBJ_GET_PROTO(obj);
4638     /* Don't trace getter or setter calls, our caller wants a direct slot. */
4639     if (PCVAL_IS_SPROP(pcval)) {
4640         JSScopeProperty* sprop = PCVAL_TO_SPROP(pcval);
4642         if (setflags && !SPROP_HAS_STUB_SETTER(sprop))
4643             ABORT_TRACE("non-stub setter");
4644         if (setflags != JOF_SET && !SPROP_HAS_STUB_GETTER(sprop)) {
4645             // FIXME 450335: generalize this away from regexp built-in getters.
4646             if (setflags == 0 &&
4647                 sprop->getter == js_RegExpClass.getProperty &&
4648                 sprop->shortid < 0) {
// Call the getter on trace and unbox using the statically-known result
// type (source is a string, the other regexp props are booleans).
4649                 LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
4650                 v_ins = lir->insCall(F_CallGetter, args);
4651                 if (!unbox_jsval((sprop->shortid == REGEXP_SOURCE) ? JSVAL_STRING : JSVAL_BOOLEAN,
4652                                  v_ins)) {
4653                     ABORT_TRACE("unboxing");
4655                 JS_ASSERT(cs.ndefs == 1);
4656                 stack(-cs.nuses, v_ins);
4657                 return true;
4659             ABORT_TRACE("non-stub getter");
4661         if (!SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)))
4662             ABORT_TRACE("no valid slot");
4663         slot = sprop->slot;
4664     } else {
4665         if (!PCVAL_IS_SLOT(pcval))
4666             ABORT_TRACE("PCE is not a slot");
4667         slot = PCVAL_TO_SLOT(pcval);
// Load and unbox the slot's current value.
4670     v_ins = stobj_get_slot(obj_ins, slot, dslots_ins);
4671     if (!unbox_jsval(STOBJ_GET_SLOT(obj, slot), v_ins))
4672         ABORT_TRACE("unboxing");
4673     return true;
// Record a dense-array element read: guards the object is a dense array
// and the (integer) index is in bounds, then loads and unboxes the slot.
// Outputs the slot address (vp), the unboxed value ins, and the slot's
// address ins for callers that store back.
4676 bool
4677 TraceRecorder::elem(jsval& l, jsval& r, jsval*& vp, LIns*& v_ins, LIns*& addr_ins)
4679     /* no guards for type checks, trace specialized this already */
4680     if (!JSVAL_IS_INT(r) || JSVAL_IS_PRIMITIVE(l))
4681         return false;
4684      * Can't specialize to assert obj != global, must guard to avoid aliasing
4685      * stale homes of stacked global variables.
4687     JSObject* obj = JSVAL_TO_OBJECT(l);
4688     if (obj == globalObj)
4689         ABORT_TRACE("elem op aliases global");
4690     LIns* obj_ins = get(&l);
4691     guard(false, lir->ins2(LIR_eq, obj_ins, INS_CONSTPTR(globalObj)), MISMATCH_EXIT);
4693     /* make sure the object is actually a dense array */
4694     if (!guardDenseArray(obj, obj_ins))
4695         return false;
4697     /* check that the index is within bounds */
4698     jsint idx = JSVAL_TO_INT(r);
4699     LIns* idx_ins = f2i(get(&r));
4701     /* we have to check that its really an integer, but this check will to go away
4702        once we peel the loop type down to integer for this slot */
4703     guard(true, lir->ins2(LIR_feq, get(&r), lir->ins1(LIR_i2f, idx_ins)), MISMATCH_EXIT);
4705     LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
4706     if (!guardDenseArrayIndex(obj, idx, obj_ins, dslots_ins, idx_ins))
4707         return false;
4708     vp = &obj->dslots[idx];
// addr = dslots + idx * sizeof(jsval) (shift by 2 or 3 per word size).
4710     addr_ins = lir->ins2(LIR_piadd, dslots_ins,
4711                          lir->ins2i(LIR_pilsh, idx_ins, (sizeof(jsval) == 4) ? 2 : 3));
4713     /* load the value, check the type (need to check JSVAL_HOLE only for booleans) */
4714     v_ins = lir->insLoad(LIR_ldp, addr_ins, 0);
4715     return unbox_jsval(*vp, v_ins);
// getProp(obj, obj_ins): record the property access via prop() and push
// the unboxed result onto the tracked stack at the opcode's use position.
4718 bool
4719 TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
4721     uint32 slot;
4722     LIns* v_ins;
4723     if (!prop(obj, obj_ins, slot, v_ins))
4724         return false;
4726     const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
4727     JS_ASSERT(cs.ndefs == 1);
4728     stack(-cs.nuses, v_ins);
4729     return true;
// getProp(jsval&): convenience overload; aborts on primitive base values.
4732 bool
4733 TraceRecorder::getProp(jsval& v)
4735     if (JSVAL_IS_PRIMITIVE(v))
4736         ABORT_TRACE("primitive lhs");
4738     return getProp(JSVAL_TO_OBJECT(v), get(&v));
// Record JSOP_NAME: resolve the name's slot and push its tracked value.
4741 bool
4742 TraceRecorder::record_JSOP_NAME()
4744     jsval* vp;
4745     if (!name(vp))
4746         return false;
4747     stack(0, get(vp));
4748     return true;
// Constant-pushing opcodes: each pushes an immediate (double, string
// pointer, NULL, |this|, or boolean int) onto the tracked stack.
4751 bool
4752 TraceRecorder::record_JSOP_DOUBLE()
4754     jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
4755     jsdpun u;
4756     u.d = *JSVAL_TO_DOUBLE(v);
4757     stack(0, lir->insImmq(u.u64));
4758     return true;
4761 bool
4762 TraceRecorder::record_JSOP_STRING()
4764     JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
4765     JS_ASSERT(ATOM_IS_STRING(atom));
4766     stack(0, INS_CONSTPTR(ATOM_TO_STRING(atom)));
4767     return true;
4770 bool
4771 TraceRecorder::record_JSOP_ZERO()
4773     jsdpun u;
4774     u.d = 0.0;
4775     stack(0, lir->insImmq(u.u64));
4776     return true;
4779 bool
4780 TraceRecorder::record_JSOP_ONE()
4782     jsdpun u;
4783     u.d = 1.0;
4784     stack(0, lir->insImmq(u.u64));
4785     return true;
4788 bool
4789 TraceRecorder::record_JSOP_NULL()
4791     stack(0, INS_CONSTPTR(NULL));
4792     return true;
4795 bool
4796 TraceRecorder::record_JSOP_THIS()
4798     LIns* this_ins;
4799     if (!getThis(this_ins))
4800         return false;
4801     stack(0, this_ins);
4802     return true;
4805 bool
4806 TraceRecorder::record_JSOP_FALSE()
4808     stack(0, lir->insImm(0));
4809     return true;
4812 bool
4813 TraceRecorder::record_JSOP_TRUE()
4815     stack(0, lir->insImm(1));
4816     return true;
// Branching/comparison opcodes that delegate to shared recorders:
// ifop() for short-circuit branches, switchop() for switches, equal()
// for strict (in)equality. CLOSURE is untraceable (returns false).
4819 bool
4820 TraceRecorder::record_JSOP_OR()
4822     return ifop();
4825 bool
4826 TraceRecorder::record_JSOP_AND()
4828     return ifop();
4831 bool
4832 TraceRecorder::record_JSOP_TABLESWITCH()
4834     return switchop();
4837 bool
4838 TraceRecorder::record_JSOP_LOOKUPSWITCH()
4840     return switchop();
4843 bool
4844 TraceRecorder::record_JSOP_STRICTEQ()
4846     return equal();
4849 bool
4850 TraceRecorder::record_JSOP_STRICTNE()
4852     return equal(CMP_NEGATE);
4855 bool
4856 TraceRecorder::record_JSOP_CLOSURE()
4858     return false;
// Record JSOP_OBJECT: push the script's compile-time object literal as a
// constant pointer (the atom index is adjusted by the recorder's current
// atom-map base).
4861 bool
4862 TraceRecorder::record_JSOP_OBJECT()
4864     JSStackFrame* fp = cx->fp;
4865     JSScript* script = fp->script;
4866     unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);
4868     JSObject* obj;
4869     JS_GET_SCRIPT_OBJECT(script, index, obj);
4870     stack(0, INS_CONSTPTR(obj));
4871     return true;
// Stack/arg/local access opcodes. POP needs no LIR (the tracked stack
// pointer is managed elsewhere); POS only checks its operand is a number;
// TRAP is untraceable; the GET/SET ARG/LOCAL ops shuttle tracked values
// between the stack and arg/var slots; UINT16 pushes an immediate double.
4874 bool
4875 TraceRecorder::record_JSOP_POP()
4877     return true;
4880 bool
4881 TraceRecorder::record_JSOP_POS()
4883     jsval& r = stackval(-1);
4884     return isNumber(r);
4887 bool
4888 TraceRecorder::record_JSOP_TRAP()
4890     return false;
4893 bool
4894 TraceRecorder::record_JSOP_GETARG()
4896     stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
4897     return true;
4900 bool
4901 TraceRecorder::record_JSOP_SETARG()
4903     arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
4904     return true;
4907 bool
4908 TraceRecorder::record_JSOP_GETLOCAL()
4910     stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
4911     return true;
4914 bool
4915 TraceRecorder::record_JSOP_SETLOCAL()
4917     var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
4918     return true;
4921 bool
4922 TraceRecorder::record_JSOP_UINT16()
4924     jsdpun u;
4925     u.d = (jsdouble)GET_UINT16(cx->fp->regs->pc);
4926     stack(0, lir->insImmq(u.u64));
4927     return true;
// Record JSOP_NEWINIT: allocate a fresh array or object on trace by
// calling the FastNewArray/FastNewObject builtin with the class's
// prototype/constructor object, guarding against allocation failure.
4930 bool
4931 TraceRecorder::record_JSOP_NEWINIT()
4933     JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
4934     JSObject* obj;
4935     uint32 fid;
4936     if (key == JSProto_Array) {
4937         if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &obj))
4938             return false;
4939         fid = F_FastNewArray;
4940     } else {
4941         if (!js_GetClassObject(cx, globalObj, key, &obj))
4942             return false;
4943         fid = F_FastNewObject;
4945     LIns* args[] = { INS_CONSTPTR(obj), cx_ins };
4946     LIns* v_ins = lir->insCall(fid, args);
4947     guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
4948     stack(0, v_ins);
4949     return true;
// Record JSOP_ENDINIT: peephole-optimize a just-built one-string dense
// array ([str]) by replacing the FastNewArray call with Array_1str, so
// record_JSOP_CALL can recognize it in the Function.prototype.apply case.
4952 bool
4953 TraceRecorder::record_JSOP_ENDINIT()
4955     jsval& v = stackval(-1);
4956     JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
4957     JSObject* obj = JSVAL_TO_OBJECT(v);
4958     if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
4959         // Until we get JSOP_NEWARRAY working, we do our optimizing here...
4960         if (obj->fslots[JSSLOT_ARRAY_LENGTH] == 1 &&
4961             obj->dslots && JSVAL_IS_STRING(obj->dslots[0])) {
4962             LIns* v_ins = get(&v);
4963             JS_ASSERT(v_ins->isCall() && v_ins->fid() == F_FastNewArray);
4964             LIns* args[] = { stack(1), callArgN(v_ins, 1), cx_ins };
4965             v_ins = lir->insCall(F_Array_1str, args);
4966             set(&v, v_ins);
4969     return true;
// Initializer and increment/decrement opcodes. INITPROP/INITELEM reuse
// the SETPROP/SETELEM recorders (which check *pc to skip restacking).
// DEFSHARP/USESHARP are untraceable. The INC/DEC family delegates to
// inc() with +/-1 and, for the post-forms, pre=false.
4972 bool
4973 TraceRecorder::record_JSOP_INITPROP()
4975     // The common code avoids stacking the RHS if op is not JSOP_SETPROP.
4976     return record_JSOP_SETPROP();
4979 bool
4980 TraceRecorder::record_JSOP_INITELEM()
4982     return record_JSOP_SETELEM();
4985 bool
4986 TraceRecorder::record_JSOP_DEFSHARP()
4988     return false;
4991 bool
4992 TraceRecorder::record_JSOP_USESHARP()
4994     return false;
4997 bool
4998 TraceRecorder::record_JSOP_INCARG()
5000     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1);
5003 bool
5004 TraceRecorder::record_JSOP_INCLOCAL()
5006     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1);
5009 bool
5010 TraceRecorder::record_JSOP_DECARG()
5012     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1);
5015 bool
5016 TraceRecorder::record_JSOP_DECLOCAL()
5018     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1);
5021 bool
5022 TraceRecorder::record_JSOP_ARGINC()
5024     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false);
5027 bool
5028 TraceRecorder::record_JSOP_LOCALINC()
5030     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false);
5033 bool
5034 TraceRecorder::record_JSOP_ARGDEC()
5036     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false);
5039 bool
5040 TraceRecorder::record_JSOP_LOCALDEC()
5042     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
// Record JSOP_ITER: create an iterator for the (non-primitive) value on
// top of the stack via the FastValueToIterator builtin, guarding against
// a NULL result; the flags byte is taken from the bytecode's immediate.
5045 bool
5046 TraceRecorder::record_JSOP_ITER()
5048     jsval& v = stackval(-1);
5049     if (!JSVAL_IS_PRIMITIVE(v)) {
5050         jsuint flags = cx->fp->regs->pc[1];
5051         LIns* args[] = { get(&v), INS_CONST(flags), cx_ins };
5052         LIns* v_ins = lir->insCall(F_FastValueToIterator, args);
5053         guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT);
5054         set(&v, v_ins);
5055         return true;
5058     ABORT_TRACE("for-in on a primitive value");
// Record one for-in step: call the iterator-next builtin, guard against
// an error cookie, and select between the new value and the previous one
// (boxed/unboxed as strings) depending on whether JSVAL_HOLE signaled
// iteration end. Pushes the continue-flag for the loop branch.
5061 bool
5062 TraceRecorder::forInLoop(jsval* vp)
// The loop variable must stay a string across iterations on this trace.
5064     if (!JSVAL_IS_STRING(*vp))
5065         ABORT_TRACE("for-in loop variable changed type from string");
5066     jsval& iterobj_val = stackval(-1);
5067     if (!JSVAL_IS_PRIMITIVE(iterobj_val)) {
5068         LIns* args[] = { get(&iterobj_val), cx_ins };
5069         LIns* v_ins = lir->insCall(F_FastCallIteratorNext, args);
5070         guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
// flag = (result != JSVAL_HOLE): true while the iterator produces values.
5072         LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_HOLE)));
5073         LIns* iter_ins = get(vp);
5074         if (!box_jsval(JSVAL_STRING, iter_ins))
5075             return false;
5076         iter_ins = lir->ins_choose(flag_ins, v_ins, iter_ins, true);
5077         if (!unbox_jsval(JSVAL_STRING, iter_ins))
5078             return false;
5079         set(vp, iter_ins);
5080         stack(0, flag_ins);
5081         return true;
5084     ABORT_TRACE("for-in on a primitive value");
// Record JSOP_ENDITER: close the iterator on top of the stack via the
// CloseIterator builtin and guard that it succeeded.
5087 bool
5088 TraceRecorder::record_JSOP_ENDITER()
5090     LIns* args[] = { stack(-1), cx_ins };
5091     LIns* ok_ins = lir->insCall(F_CloseIterator, args);
5092     guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
5093     return true;
// for-in loop-variable opcodes: FORNAME/FORARG/FORLOCAL resolve the loop
// variable's slot and delegate to forInLoop; FORPROP/FORELEM/FORCONST are
// untraceable. POPN needs no LIR.
5096 bool
5097 TraceRecorder::record_JSOP_FORNAME()
5099     jsval* vp;
5100     return name(vp) && forInLoop(vp);
5103 bool
5104 TraceRecorder::record_JSOP_FORPROP()
5106     return false;
5109 bool
5110 TraceRecorder::record_JSOP_FORELEM()
5112     return false;
5115 bool
5116 TraceRecorder::record_JSOP_FORARG()
5118     return forInLoop(&argval(GET_ARGNO(cx->fp->regs->pc)));
5121 bool
5122 TraceRecorder::record_JSOP_FORLOCAL()
5124     return forInLoop(&varval(GET_SLOTNO(cx->fp->regs->pc)));
5127 bool
5128 TraceRecorder::record_JSOP_FORCONST()
5130     return false;
5133 bool
5134 TraceRecorder::record_JSOP_POPN()
5136     return true;
// Record JSOP_BINDNAME: only traced when the binding resolves directly on
// the global object (same scope, property cache hit on obj itself); push
// the scope-chain object as the binding base.
5139 bool
5140 TraceRecorder::record_JSOP_BINDNAME()
5142     JSObject* obj = cx->fp->scopeChain;
5143     if (obj != globalObj)
5144         ABORT_TRACE("JSOP_BINDNAME crosses global scopes");
5146     LIns* obj_ins = scopeChain();
5147     JSObject* obj2;
5148     jsuword pcval;
5149     if (!test_property_cache(obj, obj_ins, obj2, pcval))
5150         return false;
5151     if (obj2 != obj)
5152         ABORT_TRACE("JSOP_BINDNAME found a non-direct property on the global object");
5154     stack(0, obj_ins);
5155     return true;
// Record JSOP_SETNAME: store into a named binding, traced only when the
// binding object is the global object acting as the scope chain. Resolves
// the slot with name() and writes the tracked RHS into it, keeping the
// value on the stack unless the next op pops it.
5158 bool
5159 TraceRecorder::record_JSOP_SETNAME()
5161     jsval& r = stackval(-1);
5162     jsval& l = stackval(-2);
5163     JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
5166      * Trace cases that are global code or in lightweight functions scoped by
5167      * the global object only.
5169     JSObject* obj = JSVAL_TO_OBJECT(l);
5170     if (obj != cx->fp->scopeChain || obj != globalObj)
5171         return false;
5173     jsval* vp;
5174     if (!name(vp))
5175         return false;
5176     LIns* r_ins = get(&r);
5177     set(vp, r_ins);
5179     if (cx->fp->regs->pc[JSOP_SETNAME_LENGTH] != JSOP_POP)
5180         stack(-2, r_ins);
5181     return true;
// JSOP_THROW is untraceable; abort recording.
5184 bool
5185 TraceRecorder::record_JSOP_THROW()
5187     return false;
// Record JSOP_IN (key in obj). Integer keys on dense arrays are checked
// inline against JSVAL_HOLE; named keys (walking past a dense array to
// its proto if needed) use the HasNamedProperty builtin. The result is
// fused with the following branch, like the interpreter does.
5190 bool
5191 TraceRecorder::record_JSOP_IN()
5193     jsval& rval = stackval(-1);
5194     if (JSVAL_IS_PRIMITIVE(rval))
5195         ABORT_TRACE("JSOP_IN on non-object right operand");
5197     jsval& lval = stackval(-2);
5198     if (!JSVAL_IS_PRIMITIVE(lval))
5199         ABORT_TRACE("JSOP_IN on E4X QName left operand");
5201     jsid id;
5202     if (JSVAL_IS_INT(lval)) {
5203         id = INT_JSVAL_TO_JSID(lval);
5204     } else {
5205         if (!JSVAL_IS_STRING(lval))
5206             ABORT_TRACE("non-string left operand to JSOP_IN");
5207         if (!js_ValueToStringId(cx, lval, &id))
5208             return false;
5211     // Expect what we see at trace recording time (hit or miss) to be the same
5212     // when executing the trace. Use a builtin helper for named properties, as
5213     // forInLoop does. First, handle indexes in dense arrays as a special case.
5214     JSObject* obj = JSVAL_TO_OBJECT(rval);
5215     LIns* obj_ins = get(&rval);
5217     bool cond;
5218     LIns* x;
5219     do {
5220         if (guardDenseArray(obj, obj_ins)) {
5221             if (JSVAL_IS_INT(lval)) {
5222                 jsint idx = JSVAL_TO_INT(lval);
5223                 LIns* idx_ins = f2i(get(&lval));
5224                 LIns* dslots_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, dslots));
5225                 if (!guardDenseArrayIndex(obj, idx, obj_ins, dslots_ins, idx_ins))
5226                     ABORT_TRACE("dense array index out of bounds");
// x = (dslots[idx] != JSVAL_HOLE): element present.
5228                 cond = obj->dslots[idx] != JSVAL_HOLE;
5229                 x = lir->ins_eq0(lir->ins2(LIR_eq,
5230                                            lir->insLoad(LIR_ldp, dslots_ins, idx * sizeof(jsval)),
5231                                            INS_CONST(JSVAL_HOLE)));
5232                 break;
5235             // Not an index id, but a dense array -- go up to the proto. */
5236             obj = STOBJ_GET_PROTO(obj);
5237             obj_ins = stobj_get_fslot(obj_ins, JSSLOT_PROTO);
5238         } else {
5239             if (JSVAL_IS_INT(id))
5240                 ABORT_TRACE("INT in OBJ where OBJ is not a dense array");
// Record-time lookup gives the expected truth value; on trace the builtin
// is guarded against the void (error) return.
5243         JSObject* obj2;
5244         JSProperty* prop;
5245         if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop))
5246             ABORT_TRACE("OBJ_LOOKUP_PROPERTY failed in JSOP_IN");
5248         cond = prop != NULL;
5249         if (prop)
5250             OBJ_DROP_PROPERTY(cx, obj2, prop);
5252         LIns* args[] = { get(&lval), obj_ins, cx_ins };
5253         x = lir->insCall(F_HasNamedProperty, args);
5254         guard(false, lir->ins2i(LIR_eq, x, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
5255         x = lir->ins2i(LIR_eq, x, 1);
5256     } while (0);
5258     /* The interpreter fuses comparisons and the following branch,
5259        so we have to do that here as well. */
5260     fuseIf(cx->fp->regs->pc + 1, cond, x);
5262     /* We update the stack after the guard. This is safe since
5263        the guard bails out at the comparison and the interpreter
5264        will therefore re-execute the comparison. This way the
5265        value of the condition doesn't have to be calculated and
5266        saved on the stack in most cases. */
5267     set(&lval, x);
5268     return true;
// Mostly untraceable opcodes (returning false aborts recording); LINENO,
// CONDSWITCH and DEFAULT are no-ops on trace (return true), and CASE
// records a fused case comparison via equal(CMP_CASE).
5271 bool
5272 TraceRecorder::record_JSOP_INSTANCEOF()
5274     return false;
5277 bool
5278 TraceRecorder::record_JSOP_DEBUGGER()
5280     return false;
5283 bool
5284 TraceRecorder::record_JSOP_GOSUB()
5286     return false;
5289 bool
5290 TraceRecorder::record_JSOP_RETSUB()
5292     return false;
5295 bool
5296 TraceRecorder::record_JSOP_EXCEPTION()
5298     return false;
5301 bool
5302 TraceRecorder::record_JSOP_LINENO()
5304     return true;
5307 bool
5308 TraceRecorder::record_JSOP_CONDSWITCH()
5310     return true;
5313 bool
5314 TraceRecorder::record_JSOP_CASE()
5316     return equal(CMP_CASE);
5319 bool
5320 TraceRecorder::record_JSOP_DEFAULT()
5322     return true;
5325 bool
5326 TraceRecorder::record_JSOP_EVAL()
5328     return false;
5331 bool
5332 TraceRecorder::record_JSOP_ENUMELEM()
5334     return false;
5337 bool
5338 TraceRecorder::record_JSOP_GETTER()
5340     return false;
5343 bool
5344 TraceRecorder::record_JSOP_SETTER()
5346     return false;
5349 bool
5350 TraceRecorder::record_JSOP_DEFFUN()
5352     return false;
5355 bool
5356 TraceRecorder::record_JSOP_DEFCONST()
5358     return false;
5361 bool
5362 TraceRecorder::record_JSOP_DEFVAR()
5364     return false;
/*
 * XXX could hoist out to jsinterp.h and share with jsinterp.cpp, but
 * XXX jsopcode.cpp has different definitions of same-named macros.
 *
 * NB: both macros capture surrounding locals by name — they require
 * `atoms`, `script`, `regs`, and (for LOAD_FUNCTION) `fun` to be in
 * scope at the expansion site with exactly those names.
 */
#define GET_FULL_INDEX(PCOFF)                                                 \
    (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))

#define LOAD_FUNCTION(PCOFF)                                                  \
    JS_GET_SCRIPT_FUNCTION(script, GET_FULL_INDEX(PCOFF), fun)
/*
 * Push the (statically known) function object for an anonymous function
 * expression. The locals below must keep their exact names — LOAD_FUNCTION
 * expands to code that references `script`, `regs`, and `fun` by name.
 */
bool
TraceRecorder::record_JSOP_ANONFUNOBJ()
{
    JSFunction* fun;
    JSFrameRegs& regs = *cx->fp->regs;
    JSScript* script = cx->fp->script;
    LOAD_FUNCTION(0); // needs script, regs, fun

    JSObject* obj = FUN_OBJECT(fun);
    /* Bail if the function's parent is not the frame's scope chain head. */
    if (OBJ_GET_PARENT(cx, obj) != cx->fp->scopeChain)
        ABORT_TRACE("can't trace with activation object on scopeChain");

    stack(0, INS_CONSTPTR(obj));
    return true;
}
bool
TraceRecorder::record_JSOP_NAMEDFUNOBJ()
{
    return false;
}

bool
TraceRecorder::record_JSOP_SETLOCALPOP()
{
    return false;
}

bool
TraceRecorder::record_JSOP_GROUP()
{
    return true; // no-op
}

bool
TraceRecorder::record_JSOP_SETCALL()
{
    return false;
}

/* Entering a try block requires no trace code. */
bool
TraceRecorder::record_JSOP_TRY()
{
    return true;
}

/* Likewise for the start of a finally block. */
bool
TraceRecorder::record_JSOP_FINALLY()
{
    return true;
}

bool
TraceRecorder::record_JSOP_NOP()
{
    return true;
}

bool
TraceRecorder::record_JSOP_ARGSUB()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ARGCNT()
{
    return false;
}
/*
 * Bind a compiler-created local function to its local slot. As with
 * JSOP_ANONFUNOBJ, the locals `script`, `regs`, and `fun` are referenced
 * by name inside the LOAD_FUNCTION macro expansion — do not rename them.
 */
bool
TraceRecorder::record_JSOP_DEFLOCALFUN()
{
    JSFunction* fun;
    JSFrameRegs& regs = *cx->fp->regs;
    JSScript* script = cx->fp->script;
    LOAD_FUNCTION(SLOTNO_LEN); // needs script, regs, fun

    var(GET_SLOTNO(regs.pc), INS_CONSTPTR(FUN_OBJECT(fun)));
    return true;
}
/*
 * The extended (-X) jump opcodes differ from their short forms only in
 * jump-operand width, which recording does not care about: delegate to
 * the short-form recorder or no-op.
 */

bool
TraceRecorder::record_JSOP_GOTOX()
{
    return true;
}

bool
TraceRecorder::record_JSOP_IFEQX()
{
    trackCfgMerges(cx->fp->regs->pc);
    return record_JSOP_IFEQ();
}

bool
TraceRecorder::record_JSOP_IFNEX()
{
    return record_JSOP_IFNE();
}

bool
TraceRecorder::record_JSOP_ORX()
{
    return record_JSOP_OR();
}

bool
TraceRecorder::record_JSOP_ANDX()
{
    return record_JSOP_AND();
}

bool
TraceRecorder::record_JSOP_GOSUBX()
{
    return record_JSOP_GOSUB();
}

bool
TraceRecorder::record_JSOP_CASEX()
{
    return equal(CMP_CASE);
}

bool
TraceRecorder::record_JSOP_DEFAULTX()
{
    return true;
}

bool
TraceRecorder::record_JSOP_TABLESWITCHX()
{
    return switchop();
}

bool
TraceRecorder::record_JSOP_LOOKUPSWITCHX()
{
    return switchop();
}

/* Backpatch placeholders are resolved by the emitter; nothing to record. */
bool
TraceRecorder::record_JSOP_BACKPATCH()
{
    return true;
}

bool
TraceRecorder::record_JSOP_BACKPATCH_POP()
{
    return true;
}

bool
TraceRecorder::record_JSOP_THROWING()
{
    return false;
}

bool
TraceRecorder::record_JSOP_SETRVAL()
{
    // If we implement this, we need to update JSOP_STOP.
    return false;
}

bool
TraceRecorder::record_JSOP_RETRVAL()
{
    return false;
}
5551 bool
5552 TraceRecorder::record_JSOP_GETGVAR()
5554 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
5555 if (JSVAL_IS_NULL(slotval))
5556 return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
5558 uint32 slot = JSVAL_TO_INT(slotval);
5560 if (!lazilyImportGlobalSlot(slot))
5561 ABORT_TRACE("lazy import of global slot failed");
5563 stack(0, get(&STOBJ_GET_SLOT(cx->fp->scopeChain, slot)));
5564 return true;
5567 bool
5568 TraceRecorder::record_JSOP_SETGVAR()
5570 jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
5571 if (JSVAL_IS_NULL(slotval))
5572 return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
5574 uint32 slot = JSVAL_TO_INT(slotval);
5576 if (!lazilyImportGlobalSlot(slot))
5577 ABORT_TRACE("lazy import of global slot failed");
5579 set(&STOBJ_GET_SLOT(cx->fp->scopeChain, slot), stack(-1));
5580 return true;
/*
 * Global-variable increment/decrement family. Each resolves the cached
 * global slot (bailing to the name-op path when the cache is cold) and
 * delegates the arithmetic to inc(). NOTE(review): the trailing `false`
 * on the GVARINC/GVARDEC calls appears to select the post-increment form
 * (original value pushed before the update) — confirm against inc().
 */

bool
TraceRecorder::record_JSOP_INCGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(cx->fp->scopeChain, slot), 1);
}

bool
TraceRecorder::record_JSOP_DECGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return true; // We will see JSOP_DECNAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(cx->fp->scopeChain, slot), -1);
}

bool
TraceRecorder::record_JSOP_GVARINC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return true; // We will see JSOP_NAMEINC from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(cx->fp->scopeChain, slot), 1, false);
}

bool
TraceRecorder::record_JSOP_GVARDEC()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return true; // We will see JSOP_NAMEDEC from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    return inc(STOBJ_GET_SLOT(cx->fp->scopeChain, slot), -1, false);
}
bool
TraceRecorder::record_JSOP_REGEXP()
{
    return false;
}

// begin JS_HAS_XML_SUPPORT
// None of the E4X (ECMAScript for XML) opcodes are traceable; each one
// aborts recording and falls back to the interpreter.

bool
TraceRecorder::record_JSOP_DEFXMLNS()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ANYNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_QNAMEPART()
{
    return false;
}

bool
TraceRecorder::record_JSOP_QNAMECONST()
{
    return false;
}

bool
TraceRecorder::record_JSOP_QNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_TOATTRNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_TOATTRVAL()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ADDATTRNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ADDATTRVAL()
{
    return false;
}

bool
TraceRecorder::record_JSOP_BINDXMLNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_SETXMLNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLNAME()
{
    return false;
}

bool
TraceRecorder::record_JSOP_DESCENDANTS()
{
    return false;
}

bool
TraceRecorder::record_JSOP_FILTER()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ENDFILTER()
{
    return false;
}

bool
TraceRecorder::record_JSOP_TOXML()
{
    return false;
}

bool
TraceRecorder::record_JSOP_TOXMLLIST()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLTAGEXPR()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLELTEXPR()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLOBJECT()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLCDATA()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLCOMMENT()
{
    return false;
}

bool
TraceRecorder::record_JSOP_XMLPI()
{
    return false;
}

bool
TraceRecorder::record_JSOP_GETFUNNS()
{
    return false;
}

bool
TraceRecorder::record_JSOP_STARTXML()
{
    return false;
}

bool
TraceRecorder::record_JSOP_STARTXMLEXPR()
{
    return false;
}

// end JS_HAS_XML_SUPPORT
/*
 * Record obj.method-style property access that feeds a call: push |this|
 * (the base object, or the primitive itself with its class prototype as
 * the lookup object), then replace the base value on the stack with the
 * callee resolved through the property cache.
 */
bool
TraceRecorder::record_JSOP_CALLPROP()
{
    jsval& l = stackval(-1);
    JSObject* obj;
    LIns* obj_ins;
    if (!JSVAL_IS_PRIMITIVE(l)) {
        obj = JSVAL_TO_OBJECT(l);
        obj_ins = get(&l);
        stack(0, obj_ins); // |this| for subsequent call
    } else {
        /* Primitive base: look the method up on the class prototype. */
        jsint i;
        debug_only(const char* protoname = NULL;)
        if (JSVAL_IS_STRING(l)) {
            i = JSProto_String;
            debug_only(protoname = "String.prototype";)
        } else if (JSVAL_IS_NUMBER(l)) {
            i = JSProto_Number;
            debug_only(protoname = "Number.prototype";)
        } else if (JSVAL_IS_BOOLEAN(l)) {
            i = JSProto_Boolean;
            debug_only(protoname = "Boolean.prototype";)
        } else {
            JS_ASSERT(JSVAL_IS_NULL(l) || JSVAL_IS_VOID(l));
            ABORT_TRACE("callprop on null or void");
        }

        if (!js_GetClassPrototype(cx, NULL, INT_TO_JSID(i), &obj))
            ABORT_TRACE("GetClassPrototype failed!");

        obj_ins = INS_CONSTPTR(obj);
        debug_only(obj_ins = addName(obj_ins, protoname);)
        stack(0, get(&l)); // use primitive as |this|
    }

    JSObject* obj2;
    jsuword pcval;
    if (!test_property_cache(obj, obj_ins, obj2, pcval))
        return false;

    /* Only a directly cached function object can be the callee on trace. */
    if (PCVAL_IS_NULL(pcval) || !PCVAL_IS_OBJECT(pcval))
        ABORT_TRACE("callee is not an object");
    JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));

    stack(-1, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
    return true;
}
bool
TraceRecorder::record_JSOP_DELDESC()
{
    return false;
}

/* Push a 24-bit unsigned immediate as an unboxed double constant. */
bool
TraceRecorder::record_JSOP_UINT24()
{
    jsdpun u;
    u.d = (jsdouble)GET_UINT24(cx->fp->regs->pc);
    stack(0, lir->insImmq(u.u64));
    return true;
}

/* Shift the atom-map cursor; later index operands are relative to it. */
bool
TraceRecorder::record_JSOP_INDEXBASE()
{
    atoms += GET_INDEXBASE(cx->fp->regs->pc);
    return true;
}

/* Restore the atom-map cursor to the script's base. */
bool
TraceRecorder::record_JSOP_RESETBASE()
{
    atoms = cx->fp->script->atomMap.vector;
    return true;
}

bool
TraceRecorder::record_JSOP_RESETBASE0()
{
    atoms = cx->fp->script->atomMap.vector;
    return true;
}

bool
TraceRecorder::record_JSOP_CALLELEM()
{
    return false;
}
bool
TraceRecorder::record_JSOP_STOP()
{
    /*
     * We know falling off the end of a constructor returns the new object that
     * was passed in via fp->argv[-1], while falling off the end of a function
     * returns undefined.
     *
     * NB: we do not support script rval (eval, API users who want the result
     * of the last expression-statement, debugger API calls).
     */
    JSStackFrame *fp = cx->fp;
    if (fp->flags & JSFRAME_CONSTRUCTING) {
        JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
        rval_ins = get(&fp->argv[-1]);
    } else {
        /* undefined is encoded via the boolean tag (JSVAL_TO_BOOLEAN(JSVAL_VOID)). */
        rval_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
    }
    /* Locals of the returning frame are dead; drop them from the tracker. */
    clearFrameSlotsFromCache();
    return true;
}
bool
TraceRecorder::record_JSOP_GETXPROP()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l))
        ABORT_TRACE("primitive-this for GETXPROP?");

    /*
     * Proceed only when the object is simultaneously the frame's scope
     * chain head and the global object, i.e. name lookup resolves on the
     * global; anything else (with/Call objects) aborts recording.
     */
    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (obj != cx->fp->scopeChain || obj != globalObj)
        return false;

    jsval* vp;
    if (!name(vp))
        return false;
    stack(-1, get(vp));
    return true;
}
bool
TraceRecorder::record_JSOP_CALLXMLNAME()
{
    return false;
}

/* TYPEOFEXPR differs from TYPEOF only in compile-time bookkeeping. */
bool
TraceRecorder::record_JSOP_TYPEOFEXPR()
{
    return record_JSOP_TYPEOF();
}

bool
TraceRecorder::record_JSOP_ENTERBLOCK()
{
    return false;
}

bool
TraceRecorder::record_JSOP_LEAVEBLOCK()
{
    return false;
}

bool
TraceRecorder::record_JSOP_GENERATOR()
{
    return false;
}

bool
TraceRecorder::record_JSOP_YIELD()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ARRAYPUSH()
{
    return false;
}

bool
TraceRecorder::record_JSOP_ENUMCONSTELEM()
{
    return false;
}

bool
TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
{
    return false;
}
bool
TraceRecorder::record_JSOP_GETTHISPROP()
{
    LIns* this_ins;

    /* It's safe to just use cx->fp->thisp here because getThis() returns
       false if thisp is not available. */
    return getThis(this_ins) && getProp(cx->fp->thisp, this_ins);
}

/* Fused arg-load + property-get. */
bool
TraceRecorder::record_JSOP_GETARGPROP()
{
    return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
}

/* Fused local-load + property-get. */
bool
TraceRecorder::record_JSOP_GETLOCALPROP()
{
    return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
}

/* INDEXBASE1/2/3 advance the atom cursor by whole 64K segments. */
bool
TraceRecorder::record_JSOP_INDEXBASE1()
{
    atoms += 1 << 16;
    return true;
}

bool
TraceRecorder::record_JSOP_INDEXBASE2()
{
    atoms += 2 << 16;
    return true;
}

bool
TraceRecorder::record_JSOP_INDEXBASE3()
{
    atoms += 3 << 16;
    return true;
}
/*
 * The JSOP_CALL* variants push the callee followed by a null |this|
 * (the callee will supply its own |this| at call time).
 */

bool
TraceRecorder::record_JSOP_CALLGVAR()
{
    jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
    if (JSVAL_IS_NULL(slotval))
        return true; // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.

    uint32 slot = JSVAL_TO_INT(slotval);

    if (!lazilyImportGlobalSlot(slot))
        ABORT_TRACE("lazy import of global slot failed");

    jsval& v = STOBJ_GET_SLOT(cx->fp->scopeChain, slot);
    stack(0, get(&v));
    stack(1, INS_CONSTPTR(NULL));
    return true;
}

bool
TraceRecorder::record_JSOP_CALLLOCAL()
{
    uintN slot = GET_SLOTNO(cx->fp->regs->pc);
    stack(0, var(slot));
    stack(1, INS_CONSTPTR(NULL));
    return true;
}

bool
TraceRecorder::record_JSOP_CALLARG()
{
    uintN slot = GET_ARGNO(cx->fp->regs->pc);
    stack(0, arg(slot));
    stack(1, INS_CONSTPTR(NULL));
    return true;
}

bool
TraceRecorder::record_JSOP_NULLTHIS()
{
    stack(0, INS_CONSTPTR(NULL));
    return true;
}

/* Push an 8-bit signed immediate as an unboxed double constant. */
bool
TraceRecorder::record_JSOP_INT8()
{
    jsdpun u;
    u.d = (jsdouble)GET_INT8(cx->fp->regs->pc);
    stack(0, lir->insImmq(u.u64));
    return true;
}

/* Push a 32-bit signed immediate as an unboxed double constant. */
bool
TraceRecorder::record_JSOP_INT32()
{
    jsdpun u;
    u.d = (jsdouble)GET_INT32(cx->fp->regs->pc);
    stack(0, lir->insImmq(u.u64));
    return true;
}
/*
 * Record x.length for strings and dense arrays. For strings the length
 * field packs flag bits with the size, so the emitted LIR decodes it
 * branchlessly; other primitives and non-dense-array objects abort.
 */
bool
TraceRecorder::record_JSOP_LENGTH()
{
    jsval& l = stackval(-1);
    if (JSVAL_IS_PRIMITIVE(l)) {
        if (!JSVAL_IS_STRING(l))
            ABORT_TRACE("non-string primitives unsupported");
        LIns* str_ins = get(&l);
        LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, length));

        /* Plain string: length is the field with the flag bits masked off. */
        LIns* masked_len_ins = lir->ins2(LIR_piand,
                                         len_ins,
                                         INS_CONSTPTR(JSSTRING_LENGTH_MASK));

        /*
         * Select at run time between the three encodings:
         * not dependent -> masked length; dependent non-prefix -> narrower
         * JSSTRDEP mask; dependent prefix -> masked length again.
         */
        LIns *choose_len_ins =
            lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
                                                   len_ins,
                                                   INS_CONSTPTR(JSSTRFLAG_DEPENDENT))),
                            masked_len_ins,
                            lir->ins_choose(lir->ins_eq0(lir->ins2(LIR_piand,
                                                                   len_ins,
                                                                   INS_CONSTPTR(JSSTRFLAG_PREFIX))),
                                            lir->ins2(LIR_piand,
                                                      len_ins,
                                                      INS_CONSTPTR(JSSTRDEP_LENGTH_MASK)),
                                            masked_len_ins,
                                            true),
                            true);

        set(&l, lir->ins1(LIR_i2f, choose_len_ins));
        return true;
    }

    JSObject* obj = JSVAL_TO_OBJECT(l);
    if (!OBJ_IS_DENSE_ARRAY(cx, obj))
        ABORT_TRACE("only dense arrays supported");
    /* Guard on trace that the object is still a dense array. */
    if (!guardDenseArray(obj, get(&l)))
        ABORT_TRACE("OBJ_IS_DENSE_ARRAY but not?!?");
    LIns* v_ins = lir->ins1(LIR_i2f, stobj_get_fslot(get(&l), JSSLOT_ARRAY_LENGTH));
    set(&l, v_ins);
    return true;
}
bool
TraceRecorder::record_JSOP_NEWARRAY()
{
    return false;
}

/* Push the array-hole sentinel, encoded via the boolean tag like JSVAL_VOID. */
bool
TraceRecorder::record_JSOP_HOLE()
{
    stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_HOLE)));
    return true;
}
/* Stub recorders for unassigned opcode slots: recording any of them aborts. */
#define UNUSED(op) bool TraceRecorder::record_##op() { return false; }

UNUSED(JSOP_UNUSED76)
UNUSED(JSOP_UNUSED77)
UNUSED(JSOP_UNUSED78)
UNUSED(JSOP_UNUSED79)
UNUSED(JSOP_UNUSED201)
UNUSED(JSOP_UNUSED202)
UNUSED(JSOP_UNUSED203)
UNUSED(JSOP_UNUSED204)
UNUSED(JSOP_UNUSED205)
UNUSED(JSOP_UNUSED206)
UNUSED(JSOP_UNUSED207)
UNUSED(JSOP_UNUSED219)
UNUSED(JSOP_UNUSED226)