/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=4 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 * May 28, 2008.
 *
 * The Initial Developer of the Original Code is
 *   Brendan Eich <brendan@mozilla.org>
 *
 * Contributor(s):
 *   David Anderson <danderson@mozilla.com>
 *   David Mandelin <dmandelin@mozilla.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

43 #include "StubCalls.h"
44 #include "StubCalls-inl.h"
45 #include "assembler/assembler/LinkBuffer.h"
46 #include "assembler/assembler/MacroAssembler.h"
47 #include "assembler/assembler/CodeLocation.h"
48 #include "methodjit/CodeGenIncludes.h"
49 #include "methodjit/Compiler.h"
50 #include "methodjit/ICRepatcher.h"
51 #include "methodjit/PolyIC.h"
52 #include "InlineFrameAssembler.h"
55 #include "jsinterpinlines.h"
56 #include "jsobjinlines.h"
57 #include "jsscopeinlines.h"
58 #include "jsscriptinlines.h"
using namespace js;
using namespace js::mjit;
using namespace js::mjit::ic;

#if defined JS_MONOIC

typedef JSC::MacroAssembler::RegisterID RegisterID;
typedef JSC::MacroAssembler::Address Address;
typedef JSC::MacroAssembler::Jump Jump;
typedef JSC::MacroAssembler::Imm32 Imm32;
typedef JSC::MacroAssembler::ImmPtr ImmPtr;
typedef JSC::MacroAssembler::Call Call;
typedef JSC::MacroAssembler::Label Label;
typedef JSC::MacroAssembler::DataLabel32 DataLabel32;

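/*
 * A rough sketch (not the actual emitted instructions) of the inline fast
 * path that the global-name ICs below patch. Assuming the usual
 * shape-guard-plus-slot-load layout:
 *
 *     if (globalObj->shape != <imm32 at shapeOffset>)   // starts invalid
 *         goto slow path (stubs::GetGlobalName);
 *     result = globalObj->slots[<offset at loadStoreOffset>];
 *
 * Both immediates begin in a state that always fails, so the first
 * execution takes the slow path, which then repatches them.
 */
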
static void
PatchGetFallback(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    Repatcher repatch(f.jit());
    JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::GetGlobalName));
    repatch.relink(ic->slowPathCall, fptr);
}

void JS_FASTCALL
ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
{
    JSObject *obj = f.fp()->scopeChain().getGlobal();
    JSAtom *atom = f.fp()->script()->getAtom(GET_INDEX(f.regs.pc));
    jsid id = ATOM_TO_JSID(atom);

    const Shape *shape = obj->nativeLookup(id);
    if (!shape ||
        !shape->hasDefaultGetterOrIsMethod() ||
        !shape->hasSlot())
    {
        if (shape)
            PatchGetFallback(f, ic);
        stubs::GetGlobalName(f);
        return;
    }
    uint32 slot = shape->slot;

    /* Patch shape guard. */
    Repatcher repatcher(f.jit());
    repatcher.repatch(ic->fastPathStart.dataLabel32AtOffset(ic->shapeOffset), obj->shape());

    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueLoad(label, slot * sizeof(Value));

    /* Do load anyway... this time. */
    stubs::GetGlobalName(f);
}

template <JSBool strict>
static void JS_FASTCALL
DisabledSetGlobal(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSScript *script = f.fp()->script();
    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
    stubs::SetGlobalName<strict>(f, atom);
}

template void JS_FASTCALL DisabledSetGlobal<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
template void JS_FASTCALL DisabledSetGlobal<false>(VMFrame &f, ic::SetGlobalNameIC *ic);

template <JSBool strict>
static void JS_FASTCALL
DisabledSetGlobalNoCache(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSScript *script = f.fp()->script();
    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
    stubs::SetGlobalNameNoCache<strict>(f, atom);
}

template void JS_FASTCALL DisabledSetGlobalNoCache<true>(VMFrame &f, ic::SetGlobalNameIC *ic);
template void JS_FASTCALL DisabledSetGlobalNoCache<false>(VMFrame &f, ic::SetGlobalNameIC *ic);

static void
PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSScript *script = f.fp()->script();

    Repatcher repatch(f.jit());
    VoidStubSetGlobal stub = ic->usePropertyCache
                             ? STRICT_VARIANT(DisabledSetGlobal)
                             : STRICT_VARIANT(DisabledSetGlobalNoCache);
    JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
    repatch.relink(ic->slowPathCall, fptr);
}

void
SetGlobalNameIC::patchExtraShapeGuard(Repatcher &repatcher, int32 shape)
{
    JS_ASSERT(hasExtraStub);

    JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(extraStub.start()));
    repatcher.repatch(label.dataLabel32AtOffset(extraShapeGuard), shape);
}

void
SetGlobalNameIC::patchInlineShapeGuard(Repatcher &repatcher, int32 shape)
{
    JSC::CodeLocationDataLabel32 label = fastPathStart.dataLabel32AtOffset(shapeOffset);
    repatcher.repatch(label, shape);
}

static LookupStatus
UpdateSetGlobalNameStub(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape)
{
    Repatcher repatcher(ic->extraStub);

    ic->patchExtraShapeGuard(repatcher, obj->shape());

    JSC::CodeLocationLabel label(JSC::MacroAssemblerCodePtr(ic->extraStub.start()));
    label = label.labelAtOffset(ic->extraStoreOffset);
    repatcher.patchAddressOffsetForValueStore(label, shape->slot * sizeof(Value),
                                              ic->vr.isTypeKnown());

    return Lookup_Cacheable;
}

static LookupStatus
AttachSetGlobalNameStub(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape)
{
    Assembler masm;

    Label start = masm.label();

    DataLabel32 shapeLabel;
    Jump guard = masm.branch32WithPatch(Assembler::NotEqual, ic->shapeReg, Imm32(obj->shape()),
                                        shapeLabel);

    /* A constant object needs rematerialization. */
    if (ic->objConst)
        masm.move(ImmPtr(obj), ic->objReg);

    JS_ASSERT(obj->branded());

    /*
     * Load obj->slots. If ic->objConst, then this clobbers objReg, because
     * ic->objReg == ic->shapeReg.
     */
    masm.loadPtr(Address(ic->objReg, offsetof(JSObject, slots)), ic->shapeReg);

    /* Test if overwriting a function-tagged slot. */
    Address slot(ic->shapeReg, sizeof(Value) * shape->slot);
    Jump isNotObject = masm.testObject(Assembler::NotEqual, slot);

    /* Now, test if the object is a function object. */
    masm.loadPayload(slot, ic->shapeReg);
    Jump isFun = masm.testFunction(Assembler::Equal, ic->shapeReg);

    /* Restore shapeReg to obj->slots, since we clobbered it. */
    if (ic->objConst)
        masm.move(ImmPtr(obj), ic->objReg);
    masm.loadPtr(Address(ic->objReg, offsetof(JSObject, slots)), ic->shapeReg);

    /* If the object test fails, shapeReg is still obj->slots. */
    isNotObject.linkTo(masm.label(), &masm);
    DataLabel32 store = masm.storeValueWithAddressOffsetPatch(ic->vr, slot);

    Jump done = masm.jump();

    JITScript *jit = f.jit();
    LinkerHelper linker(masm);
    JSC::ExecutablePool *ep = linker.init(f.cx);
    if (!ep)
        return Lookup_Error;
    if (!jit->execPools.append(ep)) {
        ep->release();
        js_ReportOutOfMemory(f.cx);
        return Lookup_Error;
    }

    if (!linker.verifyRange(jit))
        return Lookup_Uncacheable;

    linker.link(done, ic->fastPathStart.labelAtOffset(ic->fastRejoinOffset));
    linker.link(guard, ic->slowPathStart);
    linker.link(isFun, ic->slowPathStart);

    JSC::CodeLocationLabel cs = linker.finalize();
    JaegerSpew(JSpew_PICs, "generated setgname stub at %p\n", cs.executableAddress());

    Repatcher repatcher(f.jit());
    repatcher.relink(ic->fastPathStart.jumpAtOffset(ic->inlineShapeJump), cs);

    int offset = linker.locationOf(shapeLabel) - linker.locationOf(start);
    ic->extraShapeGuard = offset;
    JS_ASSERT(ic->extraShapeGuard == offset);

    ic->extraStub = JSC::JITCode(cs.executableAddress(), linker.size());
    offset = linker.locationOf(store) - linker.locationOf(start);
    ic->extraStoreOffset = offset;
    JS_ASSERT(ic->extraStoreOffset == offset);

    ic->hasExtraStub = true;

    return Lookup_Cacheable;
}

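/*
 * Schematically, the stub attached above behaves roughly like the following
 * pseudocode (a sketch, not the emitted code):
 *
 *     if (obj->shape != <patchable imm32>)           // extraShapeGuard
 *         goto slowPathStart;
 *     if (isObject(slots[slot]) && isFunction(slots[slot]))
 *         goto slowPathStart;                        // branded-method check
 *     slots[<patchable offset>] = value;             // extraStoreOffset
 *     goto fastRejoin;
 */
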
static LookupStatus
UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Shape *shape)
{
    /* Give globals a chance to appear. */
    if (!shape)
        return Lookup_Uncacheable;

    if (shape->isMethod() ||
        !shape->hasDefaultSetter() ||
        !shape->writable() ||
        !shape->hasSlot())
    {
        /* Disable the IC for weird shape attributes. */
        PatchSetFallback(f, ic);
        return Lookup_Uncacheable;
    }

    /* Branded sets must guard that they don't overwrite method-valued properties. */
    if (obj->branded()) {
        /*
         * If this slot has a function valued property, the tail of this opcode
         * could change the shape. Even if it doesn't, the IC is probably
         * pointless, because it will always hit the function-test path and
         * bail out. In these cases, don't bother building or updating the IC.
         */
        const Value &v = obj->getSlot(shape->slot);
        if (v.isObject() && v.toObject().isFunction()) {
            /*
             * If we're going to rebrand, the object may unbrand, allowing this
             * IC to come back to life. In that case, we don't disable the IC.
             */
            if (!ChangesMethodValue(v, f.regs.sp[-1]))
                PatchSetFallback(f, ic);
            return Lookup_Uncacheable;
        }

        if (ic->hasExtraStub)
            return UpdateSetGlobalNameStub(f, ic, obj, shape);

        return AttachSetGlobalNameStub(f, ic, obj, shape);
    }

    /* Object is not branded, so we can use the inline path. */
    Repatcher repatcher(f.jit());
    ic->patchInlineShapeGuard(repatcher, obj->shape());

    JSC::CodeLocationLabel label = ic->fastPathStart.labelAtOffset(ic->loadStoreOffset);
    repatcher.patchAddressOffsetForValueStore(label, shape->slot * sizeof(Value),
                                              ic->vr.isTypeKnown());

    return Lookup_Cacheable;
}

void JS_FASTCALL
ic::SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic)
{
    JSObject *obj = f.fp()->scopeChain().getGlobal();
    JSScript *script = f.fp()->script();
    JSAtom *atom = script->getAtom(GET_INDEX(f.regs.pc));
    const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));

    LookupStatus status = UpdateSetGlobalName(f, ic, obj, shape);
    if (status == Lookup_Error)
        THROW();

    if (ic->usePropertyCache)
        STRICT_VARIANT(stubs::SetGlobalName)(f, atom);
    else
        STRICT_VARIANT(stubs::SetGlobalNameNoCache)(f, atom);
}

class EqualityICLinker : public LinkerHelper
{
    VMFrame &f;

  public:
    EqualityICLinker(Assembler &masm, VMFrame &f)
      : LinkerHelper(masm), f(f)
    { }

    bool init(JSContext *cx) {
        JSC::ExecutablePool *pool = LinkerHelper::init(cx);
        if (!pool)
            return false;
        JSScript *script = f.fp()->script();
        JITScript *jit = script->getJIT(f.fp()->isConstructing());
        if (!jit->execPools.append(pool)) {
            pool->release();
            js_ReportOutOfMemory(cx);
            return false;
        }
        return true;
    }
};

/* Rough over-estimate of how much memory we need to unprotect. */
static const uint32 INLINE_PATH_LENGTH = 64;

class EqualityCompiler : public BaseCompiler
{
    VMFrame &f;
    EqualityICInfo &ic;

    Vector<Jump, 4, SystemAllocPolicy> jumpList;
    Jump trueJump;
    Jump falseJump;

  public:
    EqualityCompiler(VMFrame &f, EqualityICInfo &ic)
      : BaseCompiler(f.cx), f(f), ic(ic), jumpList(SystemAllocPolicy())
    {
    }

    void linkToStub(Jump j)
    {
        jumpList.append(j);
    }

    void linkTrue(Jump j)
    {
        trueJump = j;
    }

    void linkFalse(Jump j)
    {
        falseJump = j;
    }

    void generateStringPath(Assembler &masm)
    {
        const ValueRemat &lvr = ic.lvr;
        const ValueRemat &rvr = ic.rvr;

        if (!lvr.isConstant() && !lvr.isType(JSVAL_TYPE_STRING)) {
            Jump lhsFail = masm.testString(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isConstant() && !rvr.isType(JSVAL_TYPE_STRING)) {
            Jump rhsFail = masm.testString(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        RegisterID tmp = ic.tempReg;

        /* Test if lhs/rhs are atomized. */
        Imm32 atomizedFlags(JSString::FLAT | JSString::ATOMIZED);

        masm.load32(Address(lvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
        masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), tmp);
        Jump lhsNotAtomized = masm.branch32(Assembler::NotEqual, tmp, atomizedFlags);
        linkToStub(lhsNotAtomized);

        if (!rvr.isConstant()) {
            masm.load32(Address(rvr.dataReg(), JSString::offsetOfLengthAndFlags()), tmp);
            masm.and32(Imm32(JSString::TYPE_FLAGS_MASK), tmp);
            Jump rhsNotAtomized = masm.branch32(Assembler::NotEqual, tmp, atomizedFlags);
            linkToStub(rhsNotAtomized);
        }

        if (rvr.isConstant()) {
            JSString *str = rvr.value().toString();
            JS_ASSERT(str->isAtomized());
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(str));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

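    /*
     * Note on the guards above: atomized strings are interned uniquely per
     * character sequence, so two atomized strings are equal exactly when
     * their pointers are equal. That is what lets the generated path decide
     * == / != with a single branchPtr; any operand that is not provably an
     * atomized string bails to the stub call instead.
     */
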
    void generateObjectPath(Assembler &masm)
    {
        ValueRemat &lvr = ic.lvr;
        ValueRemat &rvr = ic.rvr;

        if (!lvr.isConstant() && !lvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump lhsFail = masm.testObject(Assembler::NotEqual, lvr.typeReg());
            linkToStub(lhsFail);
        }

        if (!rvr.isConstant() && !rvr.isType(JSVAL_TYPE_OBJECT)) {
            Jump rhsFail = masm.testObject(Assembler::NotEqual, rvr.typeReg());
            linkToStub(rhsFail);
        }

        Jump lhsHasEq = masm.branchTest32(Assembler::NonZero,
                                          Address(lvr.dataReg(),
                                                  offsetof(JSObject, flags)),
                                          Imm32(JSObject::HAS_EQUALITY));
        linkToStub(lhsHasEq);

        if (rvr.isConstant()) {
            JSObject *obj = &rvr.value().toObject();
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), ImmPtr(obj));
            linkTrue(test);
        } else {
            Jump test = masm.branchPtr(ic.cond, lvr.dataReg(), rvr.dataReg());
            linkTrue(test);
        }

        Jump fallthrough = masm.jump();
        linkFalse(fallthrough);
    }

    bool linkForIC(Assembler &masm)
    {
        EqualityICLinker buffer(masm, f);
        if (!buffer.init(cx))
            return false;

        Repatcher repatcher(f.jit());

        /* Overwrite the call to the IC with a call to the stub. */
        JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
        repatcher.relink(ic.stubCall, fptr);

        // Silently fail, the IC is disabled now.
        if (!buffer.verifyRange(f.jit()))
            return true;

        /* Set the targets of all type test failures to go to the stub. */
        for (size_t i = 0; i < jumpList.length(); i++)
            buffer.link(jumpList[i], ic.stubEntry);

        /* Set the targets for the success and failure of the actual equality test. */
        buffer.link(trueJump, ic.target);
        buffer.link(falseJump, ic.fallThrough);

        CodeLocationLabel cs = buffer.finalize();

        /* Jump to the newly generated code instead of to the IC. */
        repatcher.relink(ic.jumpToStub, cs);

        return true;
    }

    bool update()
    {
        if (ic.generated)
            return true;

        Assembler masm;
        Value rval = f.regs.sp[-1];
        Value lval = f.regs.sp[-2];

        if (rval.isObject() && lval.isObject()) {
            generateObjectPath(masm);
            ic.generated = true;
        } else if (rval.isString() && lval.isString()) {
            generateStringPath(masm);
            ic.generated = true;
        } else {
            return true;
        }

        return linkForIC(masm);
    }
};

JSBool JS_FASTCALL
ic::Equality(VMFrame &f, ic::EqualityICInfo *ic)
{
    EqualityCompiler cc(f, *ic);
    if (!cc.update())
        THROWV(JS_FALSE);

    return ic->stub(f);
}

static void * JS_FASTCALL
SlowCallFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowCall(f, ic->frameSize.getArgc(f));
    return NULL;
}

static void * JS_FASTCALL
SlowNewFromIC(VMFrame &f, ic::CallICInfo *ic)
{
    stubs::SlowNew(f, ic->frameSize.staticArgc());
    return NULL;
}

/*
 * Calls have an inline path and an out-of-line path. The inline path is used
 * in the fastest case: the method has JIT'd code, and |argc == nargs|.
 *
 * The inline path and OOL path are separated by a guard on the identity of
 * the callee object. This guard starts as NULL and always fails on the first
 * hit. On the OOL path, the callee is verified to be both a function and a
 * scripted function. If these conditions hold, |ic::Call| is invoked.
 *
 * |ic::Call| first ensures that the callee has JIT code. If it doesn't, the
 * call to |ic::Call| is patched to a slow path. If it does have JIT'd code,
 * the following cases can occur:
 *
 *   1) args != nargs: The call to |ic::Call| is patched with a dynamically
 *      generated stub. This stub inlines a path that looks like:
 *      ----
 *      push frame
 *      if (callee is not compiled) {
 *          Compile(callee);
 *      }
 *      call callee->arityLabel
 *      ----
 *      The arity label is a special entry point for correcting frames for
 *      arity mismatches.
 *
 *   2) args == nargs, and the inline call site was not patched yet.
 *      The guard dividing the two paths is patched to guard on the given
 *      function object identity, and the proceeding call is patched to
 *      directly call the JIT code.
 *
 *   3) args == nargs, and the inline call site was patched already.
 *      A small stub is created which extends the original guard to also
 *      guard on the JSFunction lying underneath the function object.
 *
 * If the OOL path does not have a scripted function, but does have a
 * scripted native, then a small stub is generated which inlines the native
 * invocation.
 */

class CallCompiler : public BaseCompiler
{
    VMFrame &f;
    CallICInfo &ic;
    bool callingNew;

  public:
    CallCompiler(VMFrame &f, CallICInfo &ic, bool callingNew)
      : BaseCompiler(f.cx), f(f), ic(ic), callingNew(callingNew)
    {
    }

    JSC::ExecutablePool *poolForSize(LinkerHelper &linker, CallICInfo::PoolIndex index)
    {
        JSC::ExecutablePool *ep = linker.init(f.cx);
        if (!ep)
            return NULL;
        JS_ASSERT(!ic.pools[index]);
        ic.pools[index] = ep;
        return ep;
    }

    void disable(JITScript *jit)
    {
        JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
        Repatcher repatch(jit);
        JSC::FunctionPtr fptr = callingNew
                                ? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
                                : JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
        repatch.relink(oolCall, fptr);
    }

    bool generateFullCallStub(JITScript *from, JSScript *script, uint32 flags)
    {
        /*
         * Create a stub that works with arity mismatches. Like the fast-path,
         * this allocates a frame on the caller side, but also performs extra
         * checks for compilability. Perhaps this should be a separate, shared
         * trampoline, but for now we generate it dynamically.
         */
        Assembler masm;
        InlineFrameAssembler inlFrame(masm, ic, flags);
        RegisterID t0 = inlFrame.tempRegs.takeAnyReg();

        /* Generate the inline frame creation. */
        inlFrame.assemble(ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress());

        /* funPtrReg is still valid. Check if a compilation is needed. */
        Address scriptAddr(ic.funPtrReg, offsetof(JSFunction, u) +
                           offsetof(JSFunction::U::Scripted, script));
        masm.loadPtr(scriptAddr, t0);

        /*
         * Test if script->nmap is NULL - same as checking ncode, but faster
         * here since ncode has two failure modes and we need to load out of
         * nmap anyway.
         */
        size_t offset = callingNew
                        ? offsetof(JSScript, jitArityCheckCtor)
                        : offsetof(JSScript, jitArityCheckNormal);
        masm.loadPtr(Address(t0, offset), t0);
        Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));

        /* Try and compile. On success we get back the nmap pointer. */
        masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
        void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
        if (ic.frameSize.isStatic()) {
            masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
            masm.fallibleVMCall(compilePtr, script->code, ic.frameSize.staticLocalSlots());
        } else {
            masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
            masm.fallibleVMCall(compilePtr, script->code, -1);
        }
        masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);

        Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                              Registers::ReturnReg);

        masm.jump(Registers::ReturnReg);

        hasCode.linkTo(masm.label(), &masm);

        /* Get nmap[ARITY], set argc, call. */
        if (ic.frameSize.isStatic())
            masm.move(Imm32(ic.frameSize.staticArgc()), JSParamReg_Argc);
        else
            masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
        masm.jump(t0);

        LinkerHelper linker(masm);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ScriptStub);
        if (!ep)
            return false;

        if (!linker.verifyRange(from)) {
            disable(from);
            return true;
        }

        linker.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
        JSC::CodeLocationLabel cs = linker.finalize();

        JaegerSpew(JSpew_PICs, "generated CALL stub %p (%d bytes)\n", cs.executableAddress(),
                   masm.size());

        Repatcher repatch(from);
        JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
        repatch.relink(oolJump, cs);

        return true;
    }

    bool patchInlinePath(JITScript *from, JSScript *script, JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());
        JITScript *jit = script->getJIT(callingNew);

        /* Very fast path. */
        Repatcher repatch(from);

        if (!repatch.canRelink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                               JSC::CodeLocationLabel(jit->fastEntry))) {
            return false;
        }

        ic.fastGuardedObject = obj;

        repatch.repatch(ic.funGuard, obj);
        repatch.relink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
                       JSC::CodeLocationLabel(jit->fastEntry));

        JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n",
                   ic.funGuard.executableAddress(), ic.fastGuardedObject);

        return true;
    }

    bool generateStubForClosures(JITScript *from, JSObject *obj)
    {
        JS_ASSERT(ic.frameSize.isStatic());

        /* Slightly less fast path - guard on fun->getFunctionPrivate() instead. */
        Assembler masm;

        Registers tempRegs;
        tempRegs.takeReg(ic.funObjReg);

        RegisterID t0 = tempRegs.takeAnyReg();

        /* Guard that it's actually a function object. */
        Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, &js_FunctionClass);

        /* Guard that it's the same function. */
        JSFunction *fun = obj->getFunctionPrivate();
        masm.loadObjPrivate(ic.funObjReg, t0);
        Jump funGuard = masm.branchPtr(Assembler::NotEqual, t0, ImmPtr(fun));
        Jump done = masm.jump();

        LinkerHelper linker(masm);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
        if (!ep)
            return false;

        ic.hasJsFunCheck = true;

        if (!linker.verifyRange(from)) {
            disable(from);
            return true;
        }

        linker.link(claspGuard, ic.slowPathStart);
        linker.link(funGuard, ic.slowPathStart);
        linker.link(done, ic.funGuard.labelAtOffset(ic.hotPathOffset));
        JSC::CodeLocationLabel cs = linker.finalize();

        JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%d bytes)\n",
                   cs.executableAddress(), masm.size());

        Repatcher repatch(from);
        repatch.relink(ic.funJump, cs);

        return true;
    }

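    /*
     * Design note: the closure stub trades one guard for another. Guarding
     * on object identity fails for every fresh closure cloned from the same
     * script, so this path guards on the underlying JSFunction instead,
     * letting all clones of one lambda share the fast path at the cost of
     * an extra private-slot load per call.
     */
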
    bool generateNativeStub()
    {
        JITScript *jit = f.jit();

        /* Snapshot the frameDepth before SplatApplyArgs modifies it. */
        uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots();

        /*
         * SplatApplyArgs has not been called, so we call it here before
         * potentially touching f.u.call.dynamicArgc.
         */
        Value *vp;
        if (ic.frameSize.isStatic()) {
            JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticLocalSlots());
            vp = f.regs.sp - (2 + ic.frameSize.staticArgc());
        } else {
            JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
            if (!ic::SplatApplyArgs(f))       /* updates regs.sp */
                THROWV(true);
            vp = f.regs.sp - (2 + f.u.call.dynamicArgc);
        }

        JSObject *obj;
        if (!IsFunctionObject(*vp, &obj))
            return false;

        JSFunction *fun = obj->getFunctionPrivate();
        if ((!callingNew && !fun->isNative()) || (callingNew && !fun->isConstructor()))
            return false;

        if (callingNew)
            vp[1].setMagicWithObjectOrNullPayload(NULL);

        if (!CallJSNative(cx, fun->u.n.native, ic.frameSize.getArgc(f), vp))
            THROWV(true);

        /* Right now, take slow-path for IC misses or multiple stubs. */
        if (ic.fastGuardedNative || ic.hasJsFunCheck)
            return true;

        /* Native MIC needs to warm up first. */
        if (!ic.hit) {
            ic.hit = true;
            return true;
        }

        /* Generate fast-path for calling this native. */
        Assembler masm;

        /* Guard on the function object identity, for now. */
        Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj));

        /* N.B. After this call, the frame will have a dynamic frame size. */
        if (ic.frameSize.isDynamic()) {
            masm.fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
                                f.regs.pc, initialFrameDepth);
        }

        Registers tempRegs;
#ifndef JS_CPU_X86
        tempRegs.takeReg(Registers::ArgReg0);
        tempRegs.takeReg(Registers::ArgReg1);
        tempRegs.takeReg(Registers::ArgReg2);
#endif
        RegisterID t0 = tempRegs.takeAnyReg();

        /* Store pc. */
        masm.storePtr(ImmPtr(cx->regs->pc),
                      FrameAddress(offsetof(VMFrame, regs.pc)));

        /* Store sp (if not already set by ic::SplatApplyArgs). */
        if (ic.frameSize.isStatic()) {
            uint32 spOffset = sizeof(JSStackFrame) + initialFrameDepth * sizeof(Value);
            masm.addPtr(Imm32(spOffset), JSFrameReg, t0);
            masm.storePtr(t0, FrameAddress(offsetof(VMFrame, regs.sp)));
        }

        /* Store fp. */
        masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));

        /* Grab cx. */
#ifdef JS_CPU_X86
        RegisterID cxReg = tempRegs.takeAnyReg();
#else
        RegisterID cxReg = Registers::ArgReg0;
#endif
        masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);

        /* Compute vp. */
#ifdef JS_CPU_X86
        RegisterID vpReg = t0;
#else
        RegisterID vpReg = Registers::ArgReg2;
#endif
        MaybeRegisterID argcReg;
        if (ic.frameSize.isStatic()) {
            uint32 vpOffset = sizeof(JSStackFrame) + (vp - f.regs.fp->slots()) * sizeof(Value);
            masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
        } else {
            argcReg = tempRegs.takeAnyReg();
            masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), argcReg.reg());
            masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), vpReg);

            /* vpOff = (argc + 2) * sizeof(Value) */
            RegisterID vpOff = tempRegs.takeAnyReg();
            masm.move(argcReg.reg(), vpOff);
            masm.add32(Imm32(2), vpOff);  /* callee, this */
            JS_STATIC_ASSERT(sizeof(Value) == 8);
            masm.lshift32(Imm32(3), vpOff);
            masm.subPtr(vpOff, vpReg);

            tempRegs.putReg(vpOff);
        }

        /* Mark vp[1] as magic for |new|. */
        if (callingNew) {
            Value v;
            v.setMagicWithObjectOrNullPayload(NULL);
            masm.storeValue(v, Address(vpReg, sizeof(Value)));
        }

        masm.setupABICall(Registers::NormalCall, 3);
        masm.storeArg(2, vpReg);
        if (ic.frameSize.isStatic())
            masm.storeArg(1, Imm32(ic.frameSize.staticArgc()));
        else
            masm.storeArg(1, argcReg.reg());
        masm.storeArg(0, cxReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, fun->u.n.native), false);

        Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg,
                                              Registers::ReturnReg);

        Jump done = masm.jump();

        /* Move JaegerThrowpoline into register for very far jump on x64. */
        hasException.linkTo(masm.label(), &masm);
        masm.move(ImmPtr(JS_FUNC_TO_DATA_PTR(void *, JaegerThrowpoline)), Registers::ReturnReg);
        masm.jump(Registers::ReturnReg);

        LinkerHelper linker(masm);
        JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_NativeStub);
        if (!ep)
            THROWV(true);

        ic.fastGuardedNative = obj;

        if (!linker.verifyRange(jit)) {
            disable(jit);
            return true;
        }

        linker.link(done, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
        linker.link(funGuard, ic.slowPathStart);
        JSC::CodeLocationLabel cs = linker.finalize();

        JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%d bytes)\n",
                   cs.executableAddress(), masm.size());

        Repatcher repatch(jit);
        repatch.relink(ic.funJump, cs);

        return true;
    }

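    /*
     * In effect, the stub assembled above mirrors the CallJSNative call made
     * earlier in this function: it synchronizes regs.pc, regs.sp and regs.fp
     * into the VMFrame, materializes cx and vp (statically for a fixed-argc
     * site, dynamically after SplatApplyArgs), then roughly:
     *
     *     if (!native(cx, argc, vp))
     *         goto throwpoline;    // propagate the pending exception
     *     goto slowJoin;           // rejoin the method JIT
     */
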
    void *update()
    {
        JITScript *jit = f.jit();

        stubs::UncachedCallResult ucr;
        if (callingNew)
            stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
        else
            stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), &ucr);

        // If the function cannot be jitted (generally unjittable or empty script),
        // patch this site to go to a slow path always.
        if (!ucr.codeAddr) {
            if (ucr.unjittable)
                disable(jit);
            return NULL;
        }

        JSFunction *fun = ucr.fun;
        JS_ASSERT(fun);
        JSScript *script = fun->script();
        JS_ASSERT(script);
        JSObject *callee = ucr.callee;
        JS_ASSERT(callee);

        uint32 flags = callingNew ? JSFRAME_CONSTRUCTING : 0;

        if (!ic.hit) {
            ic.hit = true;
            return ucr.codeAddr;
        }

        if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
            if (!generateFullCallStub(jit, script, flags))
                THROWV(NULL);
        } else {
            if (!ic.fastGuardedObject && patchInlinePath(jit, script, callee)) {
                /* Nothing to do; the inline path is patched. */
            } else if (ic.fastGuardedObject &&
                       !ic.hasJsFunCheck &&
                       !ic.fastGuardedNative &&
                       ic.fastGuardedObject->getFunctionPrivate() == fun) {
                /*
                 * Note: Multiple "function guard" stubs are not yet
                 * supported, thus the fastGuardedNative check.
                 */
                if (!generateStubForClosures(jit, callee))
                    THROWV(NULL);
            } else {
                if (!generateFullCallStub(jit, script, flags))
                    THROWV(NULL);
            }
        }

        return ucr.codeAddr;
    }
};

void * JS_FASTCALL
ic::Call(VMFrame &f, CallICInfo *ic)
{
    CallCompiler cc(f, *ic, false);
    return cc.update();
}

void * JS_FASTCALL
ic::New(VMFrame &f, CallICInfo *ic)
{
    CallCompiler cc(f, *ic, true);
    return cc.update();
}

void JS_FASTCALL
ic::NativeCall(VMFrame &f, CallICInfo *ic)
{
    CallCompiler cc(f, *ic, false);
    if (!cc.generateNativeStub())
        stubs::SlowCall(f, ic->frameSize.getArgc(f));
}

void JS_FASTCALL
ic::NativeNew(VMFrame &f, CallICInfo *ic)
{
    CallCompiler cc(f, *ic, true);
    if (!cc.generateNativeStub())
        stubs::SlowNew(f, ic->frameSize.staticArgc());
}

static bool
BumpStack(VMFrame &f, uintN inc)
{
    static const unsigned MANY_ARGS = 1024;
    static const unsigned MIN_SPACE = 500;

    /* If we are not passing many args, treat this as a normal call. */
    if (inc < MANY_ARGS) {
        if (f.regs.sp + inc < f.stackLimit)
            return true;
        StackSpace &stack = f.cx->stack();
        if (!stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, inc, &f.stackLimit)) {
            js_ReportOverRecursed(f.cx);
            return false;
        }
        return true;
    }

    /*
     * The purpose of f.stackLimit is to catch over-recursion based on
     * assumptions about the average frame size. 'apply' with a large number of
     * arguments breaks these assumptions and can result in premature "out of
     * script quota" errors. Normally, apply will go through js::Invoke, which
     * effectively starts a fresh stackLimit. Here, we bump f.stackLimit,
     * if necessary, to allow for this 'apply' call, and a reasonable number of
     * subsequent calls, to succeed without hitting the stackLimit. In theory,
     * this allows a recursive chain containing apply to circumvent the
     * stackLimit. However, since each apply call must consume at least
     * MANY_ARGS slots, this sequence will quickly reach the end of the stack
     * and OOM.
     */

    uintN incWithSpace = inc + MIN_SPACE;
    Value *bumpedWithSpace = f.regs.sp + incWithSpace;
    if (bumpedWithSpace < f.stackLimit)
        return true;

    StackSpace &stack = f.cx->stack();
    if (stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, incWithSpace, &f.stackLimit))
        return true;

    if (!stack.ensureSpace(f.cx, f.regs.sp, incWithSpace))
        return false;
    f.stackLimit = bumpedWithSpace;

    return true;
}

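/*
 * Worked example with the constants above: an apply pushing inc == 5000
 * arguments takes the MANY_ARGS path and reserves incWithSpace == 5500
 * slots, leaving MIN_SPACE == 500 slots of headroom for a reasonable number
 * of subsequent frames before the limit is hit again. An apply pushing
 * inc == 100 arguments is treated as an ordinary call against the existing
 * f.stackLimit.
 */
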
/*
 * SplatApplyArgs is only called for expressions of the form |f.apply(x, y)|.
 * Additionally, the callee has already been checked to be the native apply.
 * All successful paths through SplatApplyArgs must set f.u.call.dynamicArgc
 * and f.regs.sp.
 */
JSBool JS_FASTCALL
ic::SplatApplyArgs(VMFrame &f)
{
    JSContext *cx = f.cx;
    JS_ASSERT(GET_ARGC(f.regs.pc) == 2);

    /*
     * The lazyArgsObj flag indicates an optimized call |f.apply(x, arguments)|
     * where the args obj has not been created or pushed on the stack. Thus,
     * if lazyArgsObj is set, the stack for |f.apply(x, arguments)| is:
     *
     *  | Function.prototype.apply | f | x |
     *
     * Otherwise, if !lazyArgsObj, the stack is a normal 2-argument apply:
     *
     *  | Function.prototype.apply | f | x | arguments |
     */
    if (f.u.call.lazyArgsObj) {
        Value *vp = f.regs.sp - 3;
        JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply);

        JSStackFrame *fp = f.regs.fp;
        if (!fp->hasOverriddenArgs() &&
            (!fp->hasArgsObj() ||
             (fp->hasArgsObj() && !fp->argsObj().isArgsLengthOverridden() &&
              !js_PrototypeHasIndexedProperties(cx, &fp->argsObj())))) {

            uintN n = fp->numActualArgs();
            if (!BumpStack(f, n))
                THROWV(false);
            f.regs.sp += n;

            Value *argv = JS_ARGV(cx, vp + 1 /* vp[1]'s argv */);
            if (fp->hasArgsObj())
                fp->forEachCanonicalActualArg(CopyNonHoleArgsTo(&fp->argsObj(), argv));
            else
                fp->forEachCanonicalActualArg(CopyTo(argv));

            f.u.call.dynamicArgc = n;
            return true;
        }

        /*
         * Can't optimize; push the arguments object so that the stack matches
         * the !lazyArgsObj stack state described above.
         */
        f.regs.sp++;
        if (!js_GetArgsValue(cx, fp, &vp[3]))
            THROWV(false);
    }

    Value *vp = f.regs.sp - 4;
    JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply);

    /*
     * This stub should mimic the steps taken by js_fun_apply. Step 1 and part
     * of Step 2 have already been taken care of by calling jit code.
     */

    /* Step 2 (part 2). */
    if (vp[3].isNullOrUndefined()) {
        f.regs.sp--;
        f.u.call.dynamicArgc = 0;
        return true;
    }

    /* Step 3. */
    if (!vp[3].isObject()) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_APPLY_ARGS, js_apply_str);
        THROWV(false);
    }

    JSObject *aobj = &vp[3].toObject();
    jsuint length;
    if (!js_GetLengthProperty(cx, aobj, &length))
        THROWV(false);

    JS_ASSERT(!JS_ON_TRACE(cx));

    /* Step 6. */
    uintN n = uintN(JS_MIN(length, JS_ARGS_LENGTH_MAX));

    intN delta = n - 1;
    if (delta > 0 && !BumpStack(f, delta))
        THROWV(false);
    f.regs.sp += delta;

    if (!GetElements(cx, aobj, n, f.regs.sp - n))
        THROWV(false);

    f.u.call.dynamicArgc = n;
    return true;
}

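/*
 * Concretely, for |f.apply(x, [a, b])| the non-lazy path rewrites the stack
 *
 *     | apply | f | x | aobj |      sp -> just past aobj
 *
 * into the flattened call layout
 *
 *     | apply | f | x | a | b |     sp -> just past b, dynamicArgc == 2
 *
 * reusing the args-object slot for the first element (hence delta == n - 1),
 * so the subsequent call proceeds as if |f(a, b)| had been written, modulo
 * the JS_ARGS_LENGTH_MAX clamp above.
 */
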
void
JITScript::purgeMICs()
{
    if (!nGetGlobalNames || !nSetGlobalNames)
        return;

    Repatcher repatch(this);

    ic::GetGlobalNameIC *getGlobalNames_ = getGlobalNames();
    for (uint32 i = 0; i < nGetGlobalNames; i++) {
        ic::GetGlobalNameIC &ic = getGlobalNames_[i];
        JSC::CodeLocationDataLabel32 label = ic.fastPathStart.dataLabel32AtOffset(ic.shapeOffset);
        repatch.repatch(label, int(JSObjectMap::INVALID_SHAPE));
    }

    ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
    for (uint32 i = 0; i < nSetGlobalNames; i++) {
        ic::SetGlobalNameIC &ic = setGlobalNames_[i];
        ic.patchInlineShapeGuard(repatch, int32(JSObjectMap::INVALID_SHAPE));

        if (ic.hasExtraStub) {
            Repatcher repatcher(ic.extraStub);
            ic.patchExtraShapeGuard(repatcher, int32(JSObjectMap::INVALID_SHAPE));
        }
    }
}

void
ic::PurgeMICs(JSContext *cx, JSScript *script)
{
    /* MICs are purged during GC to handle changing shapes. */
    JS_ASSERT(cx->runtime->gcRegenShapes);

    if (script->jitNormal)
        script->jitNormal->purgeMICs();
    if (script->jitCtor)
        script->jitCtor->purgeMICs();
}

void
JITScript::nukeScriptDependentICs()
{
    if (!nCallICs)
        return;

    Repatcher repatcher(this);

    ic::CallICInfo *callICs_ = callICs();
    for (uint32 i = 0; i < nCallICs; i++) {
        ic::CallICInfo &ic = callICs_[i];
        if (!ic.fastGuardedObject)
            continue;
        repatcher.repatch(ic.funGuard, NULL);
        repatcher.relink(ic.funJump, ic.slowPathStart);
        ic.releasePool(CallICInfo::Pool_ClosureStub);
        ic.fastGuardedObject = NULL;
        ic.hasJsFunCheck = false;
    }
}

void
JITScript::sweepCallICs(JSContext *cx, bool purgeAll)
{
    Repatcher repatcher(this);

    /*
     * If purgeAll is set, purge stubs in the script except those covered by PurgePICs
     * (which is always called during GC). We want to remove references which can keep
     * alive pools that we are trying to destroy (see JSCompartment::sweep).
     */

    ic::CallICInfo *callICs_ = callICs();
    for (uint32 i = 0; i < nCallICs; i++) {
        ic::CallICInfo &ic = callICs_[i];

        /*
         * If the object is unreachable, we're guaranteed not to be currently
         * executing a stub generated by a guard on that object. This lets us
         * precisely GC call ICs while keeping the identity guard safe.
         */
        bool fastFunDead = ic.fastGuardedObject &&
            (purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedObject));
        bool nativeDead = ic.fastGuardedNative &&
            (purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedNative));

        if (fastFunDead) {
            repatcher.repatch(ic.funGuard, NULL);
            ic.releasePool(CallICInfo::Pool_ClosureStub);
            ic.hasJsFunCheck = false;
            ic.fastGuardedObject = NULL;
        }

        if (nativeDead) {
            ic.releasePool(CallICInfo::Pool_NativeStub);
            ic.fastGuardedNative = NULL;
        }

        if (purgeAll) {
            ic.releasePool(CallICInfo::Pool_ScriptStub);
            JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
            JSC::CodeLocationLabel icCall = ic.slowPathStart.labelAtOffset(ic.icCallOffset);
            repatcher.relink(oolJump, icCall);
        }

        /*
         * Only relink the fast-path if there are no connected stubs, or we're
         * trying to disconnect all stubs. Otherwise, we're just disabling an
         * optimization that must take up space anyway (see bug 632729).
         */
        if (purgeAll || !(ic.fastGuardedObject || ic.fastGuardedNative))
            repatcher.relink(ic.funJump, ic.slowPathStart);
    }

    if (purgeAll) {
        /* Purge ICs generating stubs into execPools. */
        uint32 released = 0;

        ic::EqualityICInfo *equalityICs_ = equalityICs();
        for (uint32 i = 0; i < nEqualityICs; i++) {
            ic::EqualityICInfo &ic = equalityICs_[i];
            if (!ic.generated)
                continue;

            JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic::Equality));
            repatcher.relink(ic.stubCall, fptr);
            repatcher.relink(ic.jumpToStub, ic.stubEntry);

            ic.generated = false;
            released++;
        }

        ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
        for (uint32 i = 0; i < nSetGlobalNames; i++) {
            ic::SetGlobalNameIC &ic = setGlobalNames_[i];
            if (!ic.hasExtraStub)
                continue;
            repatcher.relink(ic.fastPathStart.jumpAtOffset(ic.inlineShapeJump), ic.slowPathStart);
            ic.hasExtraStub = false;
            released++;
        }

        JS_ASSERT(released == execPools.length());
        for (uint32 i = 0; i < released; i++)
            execPools[i]->release();
        execPools.clear();
    }
}

void
ic::SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll)
{
    if (script->jitNormal)
        script->jitNormal->sweepCallICs(cx, purgeAll);
    if (script->jitCtor)
        script->jitCtor->sweepCallICs(cx, purgeAll);
}

#endif /* JS_MONOIC */