/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 4 -*- */
/* vi: set ts=4 sw=4 expandtab: (add to ~/.vimrc: set modeline modelines=5) */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is [Open Source Virtual Machine.].
 *
 * The Initial Developer of the Original Code is
 * Adobe System Incorporated.
 * Portions created by the Initial Developer are Copyright (C) 2010
 * the Initial Developer. All Rights Reserved.
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */
#ifndef __avmplus_exec__
#define __avmplus_exec__

namespace avmplus
{
/**
 * Execution manager pure virtual interface. An execution manager implementation
 * is responsible for all aspects of AS3 execution, including invocation,
 * policy decisions for how to execute, when to verify, and when to translate.
 *
 * Although some configurations may only have one concrete implementation,
 * we use ordinary polymorphism instead of build-time polymorphism, to keep
 * the code simple.
 */
class ExecMgr
{
public:
    /** Called when a MethodInfo is allocated. */
    virtual void init(MethodInfo*, const NativeMethodInfo*) = 0;

    /** Called when a MethodEnv is allocated. */
    virtual void init(MethodEnv*) = 0;

    /** Called after MethodInfo::resolveSignatures() completes. */
    virtual void notifyMethodResolved(MethodInfo*, MethodSignaturep) = 0;

    /** Called after VTable::resolveSignatures() completes. */
    virtual void notifyVTableResolved(VTable*) = 0;

    /** Called after prepareActionPool completes. */
    virtual void notifyAbcPrepared(Toplevel*, AbcEnv*) = 0;

    /** Invoke a function apply-style, by unpacking arguments from an array. */
    virtual Atom apply(MethodEnv*, Atom thisArg, ArrayObject* a) = 0;

    /** Invoke a function call-style, with thisArg passed explicitly. */
    virtual Atom call(MethodEnv*, Atom thisArg, int32_t argc, Atom* argv) = 0;
};
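
// Illustrative usage sketch (not part of the interface): a host holding an ExecMgr* can
// invoke AS3 code either way; 'exec', 'env', 'receiver', 'args', 'argc', and 'argv' are
// hypothetical caller-side values.
//
//     Atom r1 = exec->apply(env, receiver, args);       // args is an ArrayObject* to unpack
//     Atom r2 = exec->call(env, receiver, argc, argv);  // argv is an explicit Atom* array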

enum Runmode { RM_mixed, RM_jit_all, RM_interp_all };

// Signature for method invocation when caller coerces arguments
// and boxes results. Typically the caller is JIT code.
typedef uintptr_t (*GprMethodProc)(MethodEnv*, int32_t, uint32_t*);
typedef double (*FprMethodProc)(MethodEnv*, int32_t, uint32_t*);

// Signature for invocation when callee coerces & boxes;
// the caller is calling an unknown function with an unknown signature.
typedef Atom (*AtomMethodProc)(MethodEnv*, int, Atom*);
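
// Sketch of how the two conventions differ at a call site (illustrative only; 'proc',
// 'invoker', 'env', 'argc', 'ap', and 'atomArgs' are hypothetical):
//
//     // Caller-coerces path: JIT code has already unboxed the args into 'ap'.
//     uintptr_t rawResult = proc(env, argc, ap);        // GprMethodProc
//
//     // Callee-coerces path: the caller only has boxed Atoms and no signature.
//     Atom boxedResult = invoker(env, argc, atomArgs);  // AtomMethodProc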

// Signature for invoking a method early bound via an interface
// type reference. JIT code passes in the IID of the interface method
// to enable searching for the correct concrete method.
typedef uintptr_t GprImtThunkProcRetType;
typedef GprImtThunkProcRetType (*GprImtThunkProc)(class ImtThunkEnv*,
        int argc, uint32_t* args, uintptr_t iid);
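
// Sketch of a JIT call site early bound to an interface method (illustrative; 'thunk',
// 'ite', 'argc', 'args', and 'ifaceMethod' are hypothetical):
//
//     uintptr_t iid = (uintptr_t)ifaceMethod;           // the IID is the interface MethodInfo*
//     GprImtThunkProcRetType r = thunk(ite, argc, args, iid);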

/**
 * Size of a variable in a JIT stack frame, in bytes. VARSIZE is large
 * enough to hold double, int, pointers, or Atom on 32-bit or 64-bit cpus.
 * This is an aspect of the JIT implementation, but is defined here because
 * the debugger boxing/unboxing code in MethodInfo needs to know it.
 */
static const size_t VARSIZE = 8;

/**
 * Compute number of bytes needed for the unboxed representation
 * of this argument value when passed on the stack.
 */
int32_t argSize(Traits*);
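
// Example of how the two definitions above are meant to be used (illustrative only;
// 'frameBase', 'index', and 'argTraits' are hypothetical):
//
//     uint8_t* slot = frameBase + index * VARSIZE;      // every JIT frame variable is 8 bytes
//     int32_t bytes = argSize(argTraits);               // per-argument size on the call stack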

/**
 * BaseExecMgr implements ExecMgr for all policies, and encapsulates the
 * jit+abc, abc-only, and wordcode-only mechanisms. This could be improved
 * by factoring into multiple implementations.
 *
 * Extends GCFinalizedObject because instances contain GC object references
 * and have a destructor that needs to run.
 */
class BaseExecMgr : public MMgc::GCFinalizedObject, /* implements */ public ExecMgr
{
public:
    BaseExecMgr(AvmCore*);
    virtual ~BaseExecMgr();

    void init(MethodInfo*, const NativeMethodInfo*);
    void init(MethodEnv*);
    void notifyMethodResolved(MethodInfo*, MethodSignaturep);
    void notifyVTableResolved(VTable*);
    void notifyAbcPrepared(Toplevel*, AbcEnv*);
    Atom apply(MethodEnv*, Atom thisArg, ArrayObject* a);
    Atom call(MethodEnv*, Atom thisArg, int32_t argc, Atom* argv);

    // Helpers to simply return the current implementation:
    static BaseExecMgr* exec(VTable*);
    static BaseExecMgr* exec(MethodEnv*);

    /** True if method's _isInterpImpl flag is set. */
    static bool isInterpreted(MethodEnv*);

    // Trampolines that verify on first call:
    static uintptr_t verifyEnterGPR(MethodEnv*, int32_t argc, uint32_t* args);
    static double verifyEnterFPR(MethodEnv*, int32_t argc, uint32_t* args);
    static Atom verifyInvoke(MethodEnv*, int32_t argc, Atom* args);
    static void verifyOnCall(MethodEnv*); // Helper called by verify trampolines.
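
    // Rough shape of the verify-on-first-call pattern (sketch only; the real bodies are
    // in exec.cpp):
    //
    //     uintptr_t BaseExecMgr::verifyEnterGPR(MethodEnv* env, int32_t argc, uint32_t* args)
    //     {
    //         verifyOnCall(env);                       // verify and install the real entry points
    //         return env->implGPR()(env, argc, args);  // re-dispatch through the updated pointer
    //     }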

    // Trampolines to call debugEnter/Exit around native methods:
    static uintptr_t debugEnterExitWrapper32(MethodEnv* env, int32_t argc, uint32_t* argv);
    static double debugEnterExitWrapperN(MethodEnv* env, int32_t argc, uint32_t* argv);

    // Trampoline to set MethodEnv->impl to MethodInfo->impl on first call.
    static uintptr_t delegateInvoke(MethodEnv* env, int32_t argc, uint32_t* ap);
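
    // Sketch of the delegating trampoline described above (illustrative; the real body is
    // in exec.cpp):
    //
    //     uintptr_t BaseExecMgr::delegateInvoke(MethodEnv* env, int32_t argc, uint32_t* ap)
    //     {
    //         env->_implGPR = env->method->_implGPR;   // copy the now-final entry point...
    //         return env->_implGPR(env, argc, ap);     // ...and call through it
    //     }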

    // Interpreter invocation when called by JIT code. C++ and interpreter
    // calls to the interpreter go through one of the invokeInterp variants.
    static uintptr_t interpGPR(MethodEnv* method, int argc, uint32_t* ap);
    static double interpFPR(MethodEnv* method, int argc, uint32_t* ap);

    /** General purpose interpreter invocation. */
    static Atom invokeInterp(MethodEnv* env, int32_t argc, Atom* argv);

    /**
     * Invoke the interpreter for a method that does not need to coerce
     * any arguments, either because there are none, or they're all type *.
     */
    static Atom invokeInterpNoCoerce(MethodEnv* env, int32_t argc, Atom* argv);

    // Stubs used for invoking interpreted constructor methods; these
    // initialize default values of the new object before invoking the
    // interpreter. See initObj() in exec.cpp.
    static uintptr_t initInterpGPR(MethodEnv*, int, uint32_t*);
    static double initInterpFPR(MethodEnv*, int, uint32_t*);
    static Atom initInvokeInterp(MethodEnv*, int, Atom*);
    static Atom initInvokeInterpNoCoerce(MethodEnv*, int, Atom*);

    /** Set an object's fields to default values. Called by init stubs. */
    static void initObj(MethodEnv* env, ScriptObject* obj);
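
    // Rough shape of an init stub (sketch only; see initObj() in exec.cpp for the real logic):
    //
    //     Atom BaseExecMgr::initInvokeInterp(MethodEnv* env, int argc, Atom* argv)
    //     {
    //         ScriptObject* obj = /* the freshly constructed instance carried in argv[0] */;
    //         initObj(env, obj);                       // set fields to their default values
    //         return invokeInterp(env, argc, argv);    // then run the constructor body
    //     }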

    /**
     * Generic interpretive invoker for JIT and native methods that
     * iterates over argument types and coerces each one.
     */
    static Atom invokeGeneric(MethodEnv* env, int32_t argc, Atom* argv);

    /** Invoke a native or jit-compiled method and then box the return value. */
    static Atom endCoerce(MethodEnv*, int32_t argc, uint32_t* ap, MethodSignaturep ms);

    /** Check argc and compute the space required for rest args. */
    static size_t startCoerce(MethodEnv*, int32_t argc, MethodSignaturep ms);

    /** Unbox and coerce arguments for ordinary invocation. */
    static void unboxCoerceArgs(MethodEnv*, int32_t argc, Atom* in,
            uint32_t* ap, MethodSignaturep ms);

    /** Unbox and coerce arguments for invocation via Function.apply(). */
    static void unboxCoerceArgs(MethodEnv*, Atom thisArg, ArrayObject* a,
            uint32_t* argv, MethodSignaturep ms);

    /** Unbox and coerce arguments for invocation via Function.call(). */
    static void unboxCoerceArgs(MethodEnv*, Atom thisArg, int32_t argc,
            Atom* in, uint32_t* argv, MethodSignaturep ms);

    /** Coerce and unbox a single argument. */
    static Atom* FASTCALL coerceUnbox1(MethodEnv*, Atom atom, Traits* t, Atom* args);

    /** Just unbox a single argument that is known to be the correct type already. */
    static Atom* FASTCALL unbox1(MethodEnv*, Atom atom, Traits* t, Atom* args);
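
    // How the pieces above fit together when boxed Atoms are used to call a native or
    // jit-compiled method (sketch only; invokeGeneric() in exec.cpp is authoritative):
    //
    //     MethodSignaturep ms = env->method->getMethodSignature();
    //     size_t extra = startCoerce(env, argc, ms);    // argc check + rest-arg space
    //     uint32_t* ap = /* frame sized from 'extra', typically stack-allocated */;
    //     unboxCoerceArgs(env, argc, argv, ap, ms);     // coerceUnbox1()/unbox1() per argument
    //     Atom result = endCoerce(env, argc, ap, ms);   // call, then box the return value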

    /**
     * Set trampolines and flags for the interpreter, possibly including an
     * initializer trampoline.
     */
    void setInterp(MethodInfo*, MethodSignaturep);

    /** Set trampolines and flags for a native method. */
    void setNative(MethodInfo*, GprMethodProc p);

    /**
     * Verify any kind of method, by sniffing what kind it is and dispatching
     * to the appropriate case. Called on or before the first invocation of the
     * target method. Each subcase is responsible for setting up CodeWriters and
     * then ultimately running the verifier by calling verifyCommon().
     */
    void verifyMethod(MethodInfo*, Toplevel*, AbcEnv*);

    /** "Verify" a native method by installing trampolines and flags. */
    void verifyNative(MethodInfo*, MethodSignaturep);

    /** Verify a method and install interpreter trampolines. */
    void verifyInterp(MethodInfo*, MethodSignaturep, Toplevel*, AbcEnv*);

    /**
     * Actually run the verifier with the given CodeWriter chain, and
     * clean up if it throws a VerifyError.
     */
    void verifyCommon(MethodInfo*, MethodSignaturep, Toplevel*, AbcEnv*, CodeWriter*);

    // Support for verifyall mode mainly consists of hooking into the
    // code loading mechanism and verifying early instead of on the first
    // call. This includes installing trampolines and doing jit-compilation,
    // if applicable. verifyall mode supports a "verifyonly" sub-mode whereby
    // no code is actually executed.
    //
    // Verifying early is done in a semi-breadth-first order by maintaining
    // two queues: verifyFunctionQueue, containing MethodInfo's that are
    // resolved and ready to verify, and verifyTraitsQueue, containing types
    // that contain ready-to-verify functions.
    //
    // Traits are queued when their corresponding OP_newclass or OP_newactivation
    // is encountered while verifying another method. Functions are added when
    // we encounter OP_newfunction, or when the declaring traits reaches the head
    // of the traits queue.
    //
    // Methods have their _isVerifyPending flag set when queued, then changed
    // to _isVerified once verified.
    //
    // In verifyall mode we never install the verify-on-first-call trampolines.
    // In verifyonly mode, all execution is stubbed out by a single stub that
    // just returns undefined.

    friend class VerifyallWriter;
    void enqFunction(MethodInfo* f);
    void enqTraits(Traits* t);
    void verifyEarly(Toplevel* toplevel, AbcEnv* abc_env);
    bool isVerified(const MethodInfo*) const;
    bool isVerifyPending(const MethodInfo*) const;
    void setVerified(MethodInfo*) const;
    void setVerifyPending(MethodInfo*) const;
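
    // Shape of the early-verification drain (sketch only; verifyEarly() in exec.cpp is
    // authoritative, and the queue operations shown are hypothetical GCList calls):
    //
    //     while (!verifyFunctionQueue.isEmpty() || !verifyTraitsQueue.isEmpty()) {
    //         while (!verifyFunctionQueue.isEmpty()) {
    //             MethodInfo* f = verifyFunctionQueue.removeFirst();
    //             if (isVerifyPending(f))
    //                 verifyMethod(f, toplevel, abc_env);  // _isVerifyPending -> _isVerified
    //         }
    //         if (!verifyTraitsQueue.isEmpty()) {
    //             Traits* t = verifyTraitsQueue.removeFirst();
    //             /* enqFunction() each ready-to-verify method declared by t */
    //         }
    //     }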

    // Support for JIT Compilation:

    /** Return true if we should eagerly JIT. False means use interpreter. */
    bool shouldJit(const MethodInfo*, const MethodSignaturep) const;

    /** True if the JIT is enabled. */
    bool isJitEnabled() const;

    /** Run the verifier with the JIT attached. */
    void verifyJit(MethodInfo*, MethodSignaturep, Toplevel*, AbcEnv*);

    /** Install JIT code pointers and set MethodInfo::_isJitImpl. */
    void setJit(MethodInfo*, GprMethodProc p);

    /**
     * Invoker installed for the first invocation: it calls invokeGeneric(),
     * then installs jitInvokerNow, yielding a one-call delay before we try to
     * compile the invoker itself.
     */
    static Atom jitInvokerNext(MethodEnv*, int argc, Atom* args);

    /** Compile now, then invoke the compiled invoker. */
    static Atom jitInvokerNow(MethodEnv*, int argc, Atom* args);
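
    // The delayed-compilation handoff described above, in sketch form (illustrative; the
    // real bodies are in exec.cpp):
    //
    //     Atom BaseExecMgr::jitInvokerNext(MethodEnv* env, int argc, Atom* args)
    //     {
    //         env->method->_invoker = jitInvokerNow;   // compile on the *next* call
    //         return invokeGeneric(env, argc, args);   // this call still takes the generic path
    //     }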

    // Support for interface method tables (IMTs). These enable fast
    // dispatching of an interface method when invoked via an interface-
    // typed reference, when we know the method signature but not the
    // vtable index. Only applies to call sites in JIT code.
    //
    // Each VTable contains a fixed-size array called the interface
    // method table; each entry points to a concrete MethodEnv, resolveImt(),
    // or dispatchImt(). Initially all entries point to resolveImt().
    //
    // Resolution: All interface methods that the concrete type implements
    // are hashed into the IMT by a unique interface method id (IID). If an IMT slot
    // has no collisions, then the concrete MethodEnv* for the method implementing
    // that interface method is installed. Otherwise we install dispatchImt().
    //
    // Dispatching: JIT'd call sites pass the IID of the method they early bound
    // to. If the called slot contains just a concrete MethodEnv, the IID parameter
    // is ignored. Otherwise we search a table for the correct concrete
    // method and then invoke it.
    //
    // IIDs are simply the pointer to the interface method's MethodInfo.
    // The IMT itself is declared in class ImtHolder, below. It is private
    // to the execution mechanism but must be allocated in each VTable instance.

    /**
     * Analyze the implemented types, then build the ImtEntry table and
     * install dispatchImt() or a concrete MethodEnv*.
     */
    static class ImtThunkEnv* resolveImtSlot(class ImtThunkEnv*, uintptr_t iid);

    // Helpers for resolveImtSlot():
    void resolveImtSlotFromBase(VTable*, uint32_t slot);
    bool resolveImtSlotSelf(VTable*, uint32_t slot);
    void resolveImtSlotFull(VTable*, uint32_t slot);
    static class ImtEntry* buildImtEntries(VTable* vtable, uint32_t slot, uint32_t& count);

    /** Trampoline to resolve this IMT slot, then invoke the proper handler. */
    static GprImtThunkProcRetType resolveImt(class ImtThunkEnv* ite,
            int argc, uint32_t* ap, uintptr_t iid);

    /** Trampoline which searches for the method with a matching IID. */
    static GprImtThunkProcRetType dispatchImt(class ImtThunkEnv* ite,
            int argc, uint32_t* ap, uintptr_t iid);
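
    // Sketch of the dispatch path described above (illustrative only; the ImtEntry fields and
    // helpers shown are hypothetical stand-ins for the real conflict-table layout in exec.cpp):
    //
    //     GprImtThunkProcRetType BaseExecMgr::dispatchImt(ImtThunkEnv* ite, int argc,
    //                                                     uint32_t* ap, uintptr_t iid)
    //     {
    //         for (ImtEntry* e = conflictTableFor(ite); e != NULL; e = e->next)
    //             if (e->iid == iid)                   // IID == interface method's MethodInfo*
    //                 return callConcrete(e->env, argc, ap);
    //         /* unreachable if the receiver really implements the interface */
    //     }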

    const struct Config& config;

#ifdef VMCFG_VERIFYALL
    GCList<MethodInfo> verifyFunctionQueue;
    GCList<Traits> verifyTraitsQueue;
#endif
};

/**
 * CodeWriter instance to hook into opcodes OP_newfunction, OP_newclass, and
 * OP_newactivation so we can populate verifyFunctionQueue and
 * verifyTraitsQueue in verifyall mode.
 */
class VerifyallWriter : public NullWriter
{
public:
    VerifyallWriter(MethodInfo*, BaseExecMgr*, CodeWriter*);
    void write(const FrameState*, const uint8_t* pc, AbcOpcode, Traits*);
    void writeOp1(const FrameState*, const uint8_t* pc, AbcOpcode, uint32_t opd1, Traits*);
};

/**
 * Base class for MethodInfo which contains invocation pointers. These
 * pointers are private to the ExecMgr instance and hence declared here.
 */
class GC_CPP_EXACT(MethodInfoProcHolder, MMgc::GCTraceableObject)
{
    friend class ImtThunkEnv;
    friend class InvokerCompiler;
    friend class BaseExecMgr;
    friend class MethodEnv;

protected:
    MethodInfoProcHolder();

    GC_DATA_BEGIN(MethodInfoProcHolder)

    GprMethodProc _implGPR;
    FprMethodProc _implFPR;

    /** Pointer to the invoker used when the callee must coerce args. */
    AtomMethodProc _invoker;

    GC_DATA_END(MethodInfoProcHolder)
};

/**
 * Base class for MethodEnv and ImtThunkEnv, containing copies of the
 * trampoline from MethodInfo. In JIT configurations this saves one
 * load along the call fast path. Calls from C++ or the interpreter
 * always go through MethodInfo._invoker.
 */
class GC_CPP_EXACT(MethodEnvProcHolder, MMgc::GCTraceableObject)
{
    friend class CodegenLIR;
    friend class BaseExecMgr;

protected:
    MethodEnvProcHolder();

    GC_NO_DATA(MethodEnvProcHolder)

#ifdef VMCFG_METHODENV_IMPL32
protected:
    GprMethodProc _implGPR;
    FprMethodProc _implFPR;
    GprImtThunkProc _implImtGPR;

public:
    inline GprMethodProc implGPR() const { return _implGPR; }
    inline FprMethodProc implFPR() const { return _implFPR; }
#endif
};

/**
 * Holder for the interface method dispatch table within class VTable,
 * declared here because it is private to the ExecMgr implementation.
 */
class ImtHolder
{
    friend class BaseExecMgr;
    friend class CodegenLIR;

    // IMT_SIZE should be a number that is relatively prime to sizeof(MethodInfo)/8
    // since we use the MethodInfo pointer as the interface method id (IID).
    //
    //   smaller = dense table, few large conflict tables
    //   larger  = sparse table, many small conflict tables
#ifdef _DEBUG
    static const uint32_t IMT_SIZE = 3; // Good for testing all code paths.
#else
    static const uint32_t IMT_SIZE = 7; // Marginally better for speed/size tradeoff.
#endif

    static uintptr_t getIID(MethodInfo*);   // Return a full IID for the given interface method.
    static uint32_t hashIID(uintptr_t iid); // Hash the IID into an IMT slot number.
    static uint32_t hashIID(MethodInfo*);   // Hash the method's IID into an IMT slot number.
    class ImtThunkEnv* entries[IMT_SIZE];

public:
    void gcTrace(MMgc::GC* gc)
    {
        gc->TraceLocations((void**)entries, IMT_SIZE);
    }
};
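
// Example of the IID/hash relationship (sketch only; 'vtable', 'ifaceMethodInfo', and the
// 'imt' member name are hypothetical):
//
//     uintptr_t iid  = ImtHolder::getIID(ifaceMethodInfo);  // derived from the MethodInfo pointer
//     uint32_t  slot = ImtHolder::hashIID(iid);             // reduce the IID modulo IMT_SIZE
//     ImtThunkEnv* handler = vtable->imt.entries[slot];     // concrete env, resolveImt, or dispatchImt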

#if defined(_MSC_VER) && defined(AVMPLUS_IA32)
// These macros are used for dynamically aligning our
// stack before calling into our JITed code. By aligning our
// stack to an 8 byte boundary before calling into our JITed
// code, we will keep all double accesses aligned and improve performance.
#define STACKADJUST() \
    /* x86 inline-asm body elided */
#define STACKRESTORE() \
    /* x86 inline-asm body elided */
#else
#define STACKADJUST()
#define STACKRESTORE()
#endif

} // namespace avmplus

#endif // __avmplus_exec__