//===-- JITEmitter.cpp - Write machine code to executable memory ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a MachineCodeEmitter object that is used by the JIT to
// write machine code to memory and remember where relocatable values are.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "jit"
#include "JIT.h"
#include "JITDwarfEmitter.h"
#include "llvm/Constants.h"
#include "llvm/Module.h"
#include "llvm/DerivedTypes.h"
#include "llvm/CodeGen/JITCodeEmitter.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRelocation.h"
#include "llvm/ExecutionEngine/GenericValue.h"
#include "llvm/ExecutionEngine/JITEventListener.h"
#include "llvm/ExecutionEngine/JITMemoryManager.h"
#include "llvm/CodeGen/MachineCodeInfo.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Target/TargetJITInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MutexGuard.h"
#include "llvm/Support/ValueHandle.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/System/Disassembler.h"
#include "llvm/System/Memory.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include <algorithm>
#ifndef NDEBUG
#include <iomanip>
#endif
using namespace llvm;
STATISTIC(NumBytes, "Number of bytes of machine code compiled");
STATISTIC(NumRelos, "Number of relocations applied");
STATISTIC(NumRetries, "Number of retries with more memory");
static JIT *TheJIT = 0;
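
// TheJIT - The active JIT instance.  It is set when the JITResolver is
// constructed and is used by the static helpers and resolver callbacks below.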
//===----------------------------------------------------------------------===//
// JIT lazy compilation code.
//

class JITResolverState {
public:
  typedef std::map<AssertingVH<Function>, void*> FunctionToStubMapTy;
  typedef std::map<void*, Function*> StubToFunctionMapTy;
  typedef std::map<AssertingVH<GlobalValue>, void*> GlobalToIndirectSymMapTy;

private:
  /// FunctionToStubMap - Keep track of the stub created for a particular
  /// function so that we can reuse them if necessary.
  FunctionToStubMapTy FunctionToStubMap;

  /// StubToFunctionMap - Keep track of the function that each stub
  /// corresponds to.
  StubToFunctionMapTy StubToFunctionMap;

  /// GlobalToIndirectSymMap - Keep track of the indirect symbol created for a
  /// particular GlobalVariable so that we can reuse them if necessary.
  GlobalToIndirectSymMapTy GlobalToIndirectSymMap;

public:
  FunctionToStubMapTy& getFunctionToStubMap(const MutexGuard& locked) {
    assert(locked.holds(TheJIT->lock));
    return FunctionToStubMap;
  }

  StubToFunctionMapTy& getStubToFunctionMap(const MutexGuard& locked) {
    assert(locked.holds(TheJIT->lock));
    return StubToFunctionMap;
  }

  GlobalToIndirectSymMapTy& getGlobalToIndirectSymMap(const MutexGuard& locked) {
    assert(locked.holds(TheJIT->lock));
    return GlobalToIndirectSymMap;
  }
};
/// JITResolver - Keep track of, and resolve, call sites for functions that
/// have not yet been compiled.
class JITResolver {
  typedef JITResolverState::FunctionToStubMapTy FunctionToStubMapTy;
  typedef JITResolverState::StubToFunctionMapTy StubToFunctionMapTy;
  typedef JITResolverState::GlobalToIndirectSymMapTy GlobalToIndirectSymMapTy;

  /// LazyResolverFn - The target lazy resolver function that we actually
  /// rewrite instructions to use.
  TargetJITInfo::LazyResolverFn LazyResolverFn;

  JITResolverState state;

  /// ExternalFnToStubMap - This is the equivalent of FunctionToStubMap for
  /// external functions.
  std::map<void*, void*> ExternalFnToStubMap;

  /// revGOTMap - map addresses to indexes in the GOT
  std::map<void*, unsigned> revGOTMap;
  unsigned nextGOTIndex;

  static JITResolver *TheJITResolver;
public:
  explicit JITResolver(JIT &jit) : nextGOTIndex(0) {
    TheJIT = &jit;

    LazyResolverFn = jit.getJITInfo().getLazyResolverFunction(JITCompilerFn);
    assert(TheJITResolver == 0 && "Multiple JIT resolvers?");
    TheJITResolver = this;
  }

  /// getFunctionStubIfAvailable - This returns a pointer to a function stub
  /// if it has already been created.
  void *getFunctionStubIfAvailable(Function *F);

  /// getFunctionStub - This returns a pointer to a function stub, creating
  /// one on demand as needed.
  void *getFunctionStub(Function *F);

  /// getExternalFunctionStub - Return a stub for the function at the
  /// specified address, created lazily on demand.
  void *getExternalFunctionStub(void *FnAddr);

  /// getGlobalValueIndirectSym - Return an indirect symbol containing the
  /// specified GV address.
  void *getGlobalValueIndirectSym(GlobalValue *V, void *GVAddress);

  /// AddCallbackAtLocation - If the target is capable of rewriting an
  /// instruction without the use of a stub, record the location of the use so
  /// we know which function is being used at the location.
  void *AddCallbackAtLocation(Function *F, void *Location) {
    MutexGuard locked(TheJIT->lock);
    /// Get the target-specific JIT resolver function.
    state.getStubToFunctionMap(locked)[Location] = F;
    return (void*)(intptr_t)LazyResolverFn;
  }

  void getRelocatableGVs(SmallVectorImpl<GlobalValue*> &GVs,
                         SmallVectorImpl<void*> &Ptrs);

  GlobalValue *invalidateStub(void *Stub);

  /// getGOTIndexForAddr - Return a new or existing index in the GOT for
  /// an address.  This function only manages slots, it does not manage the
  /// contents of the slots or the memory associated with the GOT.
  unsigned getGOTIndexForAddr(void *addr);

  /// JITCompilerFn - This function is called to resolve a stub to a compiled
  /// address.  If the LLVM Function corresponding to the stub has not yet
  /// been compiled, this function compiles it first.
  static void *JITCompilerFn(void *Stub);
};

JITResolver *JITResolver::TheJITResolver = 0;
/// getFunctionStubIfAvailable - This returns a pointer to a function stub
/// if it has already been created.
void *JITResolver::getFunctionStubIfAvailable(Function *F) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a stub for this function, recycle it.
  void *&Stub = state.getFunctionToStubMap(locked)[F];
  return Stub;
}
/// getFunctionStub - This returns a pointer to a function stub, creating
/// one on demand as needed.
void *JITResolver::getFunctionStub(Function *F) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a stub for this function, recycle it.
  void *&Stub = state.getFunctionToStubMap(locked)[F];
  if (Stub) return Stub;

  // Call the lazy resolver function unless we are JIT'ing non-lazily, in which
  // case we must resolve the symbol now.
  void *Actual = TheJIT->isLazyCompilationDisabled()
    ? (void *)0 : (void *)(intptr_t)LazyResolverFn;

  // If this is an external declaration, attempt to resolve the address now
  // to place in the stub.
  if (F->isDeclaration() && !F->hasNotBeenReadFromBitcode()) {
    Actual = TheJIT->getPointerToFunction(F);

    // If we resolved the symbol to a null address (eg. a weak external)
    // don't emit a stub. Return a null pointer to the application.  If dlsym
    // stubs are enabled, not being able to resolve the address is not
    // meaningful.
    if (!Actual && !TheJIT->areDlsymStubsEnabled()) return 0;
  }

  // Codegen a new stub, calling the lazy resolver or the actual address of the
  // external function, if it was resolved.
  Stub = TheJIT->getJITInfo().emitFunctionStub(F, Actual,
                                               *TheJIT->getCodeEmitter());

  if (Actual != (void*)(intptr_t)LazyResolverFn) {
    // If we are getting the stub for an external function, we really want the
    // address of the stub in the GlobalAddressMap for the JIT, not the address
    // of the external function.
    TheJIT->updateGlobalMapping(F, Stub);
  }

  DEBUG(errs() << "JIT: Stub emitted at [" << Stub << "] for function '"
        << F->getName() << "'\n");

  // Finally, keep track of the stub-to-Function mapping so that the
  // JITCompilerFn knows which function to compile!
  state.getStubToFunctionMap(locked)[Stub] = F;

  // If we are JIT'ing non-lazily but need to call a function that does not
  // exist yet, add it to the JIT's work list so that we can fill in the stub
  // address later.
  if (!Actual && TheJIT->isLazyCompilationDisabled())
    if (!F->isDeclaration() || F->hasNotBeenReadFromBitcode())
      TheJIT->addPendingFunction(F);

  return Stub;
}
/// getGlobalValueIndirectSym - Return a lazy pointer containing the specified
/// GV address.
void *JITResolver::getGlobalValueIndirectSym(GlobalValue *GV, void *GVAddress) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a stub for this global variable, recycle it.
  void *&IndirectSym = state.getGlobalToIndirectSymMap(locked)[GV];
  if (IndirectSym) return IndirectSym;

  // Otherwise, codegen a new indirect symbol.
  IndirectSym = TheJIT->getJITInfo().emitGlobalValueIndirectSym(GV, GVAddress,
                                                     *TheJIT->getCodeEmitter());

  DEBUG(errs() << "JIT: Indirect symbol emitted at [" << IndirectSym
        << "] for GV '" << GV->getName() << "'\n");

  return IndirectSym;
}
/// getExternalFunctionStub - Return a stub for the function at the
/// specified address, created lazily on demand.
void *JITResolver::getExternalFunctionStub(void *FnAddr) {
  // If we already have a stub for this function, recycle it.
  void *&Stub = ExternalFnToStubMap[FnAddr];
  if (Stub) return Stub;

  Stub = TheJIT->getJITInfo().emitFunctionStub(0, FnAddr,
                                               *TheJIT->getCodeEmitter());

  DOUT << "JIT: Stub emitted at [" << Stub
       << "] for external function at '" << FnAddr << "'\n";
  return Stub;
}
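
/// getGOTIndexForAddr - Return the GOT slot assigned to addr, allocating a
/// fresh one the first time the address is seen.  Index 0 is never handed
/// out; a zero lookup in revGOTMap means "no entry yet".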
unsigned JITResolver::getGOTIndexForAddr(void* addr) {
  unsigned idx = revGOTMap[addr];
  if (!idx) {
    idx = ++nextGOTIndex;
    revGOTMap[addr] = idx;
    DOUT << "JIT: Adding GOT entry " << idx << " for addr [" << addr << "]\n";
  }
  return idx;
}
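
/// getRelocatableGVs - Collect every stub whose target may still change: the
/// stubs for external function declarations and the indirect symbols for
/// global values, together with their addresses.  Used when building the
/// dlsym stub table.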
void JITResolver::getRelocatableGVs(SmallVectorImpl<GlobalValue*> &GVs,
                                    SmallVectorImpl<void*> &Ptrs) {
  MutexGuard locked(TheJIT->lock);

  FunctionToStubMapTy &FM = state.getFunctionToStubMap(locked);
  GlobalToIndirectSymMapTy &GM = state.getGlobalToIndirectSymMap(locked);

  for (FunctionToStubMapTy::iterator i = FM.begin(), e = FM.end(); i != e; ++i){
    Function *F = i->first;
    if (F->isDeclaration() && F->hasExternalLinkage()) {
      GVs.push_back(i->first);
      Ptrs.push_back(i->second);
    }
  }
  for (GlobalToIndirectSymMapTy::iterator i = GM.begin(), e = GM.end();
       i != e; ++i) {
    GVs.push_back(i->first);
    Ptrs.push_back(i->second);
  }
}
GlobalValue *JITResolver::invalidateStub(void *Stub) {
  MutexGuard locked(TheJIT->lock);

  FunctionToStubMapTy &FM = state.getFunctionToStubMap(locked);
  StubToFunctionMapTy &SM = state.getStubToFunctionMap(locked);
  GlobalToIndirectSymMapTy &GM = state.getGlobalToIndirectSymMap(locked);

  // Look up the cheap way first, to see if it's a function stub we are
  // invalidating.  If so, remove it from both the forward and reverse maps.
  if (SM.find(Stub) != SM.end()) {
    Function *F = SM[Stub];
    SM.erase(Stub);
    FM.erase(F);
    return F;
  }

  // Otherwise, it might be an indirect symbol stub.  Find it and remove it.
  for (GlobalToIndirectSymMapTy::iterator i = GM.begin(), e = GM.end();
       i != e; ++i) {
    if (i->second != Stub)
      continue;
    GlobalValue *GV = i->first;
    GM.erase(i);
    return GV;
  }

  // Lastly, check to see if it's in the ExternalFnToStubMap.
  for (std::map<void *, void *>::iterator i = ExternalFnToStubMap.begin(),
       e = ExternalFnToStubMap.end(); i != e; ++i) {
    if (i->second != Stub)
      continue;
    ExternalFnToStubMap.erase(i);
    break;
  }

  return 0;
}
/// JITCompilerFn - This function is called when a lazy compilation stub has
/// been entered.  It looks up which function this stub corresponds to, compiles
/// it if necessary, then returns the resultant function pointer.
void *JITResolver::JITCompilerFn(void *Stub) {
  JITResolver &JR = *TheJITResolver;

  Function *F = 0;
  void *ActualPtr = 0;

  {
    // Only lock for getting the Function. The call getPointerToFunction made
    // in this function might trigger function materializing, which requires
    // JIT lock to be unlocked.
    MutexGuard locked(TheJIT->lock);

    // The address given to us for the stub may not be exactly right, it might be
    // a little bit after the stub.  As such, use upper_bound to find it.
    StubToFunctionMapTy::iterator I =
      JR.state.getStubToFunctionMap(locked).upper_bound(Stub);
    assert(I != JR.state.getStubToFunctionMap(locked).begin() &&
           "This is not a known stub!");
    --I;
    ActualPtr = I->first;
    F = I->second;
  }

  // If we have already code generated the function, just return the address.
  void *Result = TheJIT->getPointerToGlobalIfAvailable(F);

  if (!Result) {
    // Otherwise we don't have it, do lazy compilation now.

    // If lazy compilation is disabled, emit a useful error message and abort.
    if (TheJIT->isLazyCompilationDisabled()) {
      llvm_report_error("LLVM JIT requested to do lazy compilation of function '"
                        + F->getName() + "' when lazy compiles are disabled!");
    }

    // We might like to remove the stub from the StubToFunction map.
    // We can't do that! Multiple threads could be stuck, waiting to acquire the
    // lock above. As soon as the 1st function finishes compiling the function,
    // the next one will be released, and needs to be able to find the function
    // it needs to call.
    //JR.state.getStubToFunctionMap(locked).erase(I);

    DEBUG(errs() << "JIT: Lazily resolving function '" << F->getName()
          << "' In stub ptr = " << Stub << " actual ptr = "
          << ActualPtr << "\n");

    Result = TheJIT->getPointerToFunction(F);
  }

  // Reacquire the lock to erase the stub in the map.
  MutexGuard locked(TheJIT->lock);

  // We don't need to reuse this stub in the future, as F is now compiled.
  JR.state.getFunctionToStubMap(locked).erase(F);

  // FIXME: We could rewrite all references to this stub if we knew them.

  // What we will do is set the compiled function address to map to the
  // same GOT entry as the stub so that later clients may update the GOT
  // if they see it still using the stub address.
  // Note: this is done so the Resolver doesn't have to manage GOT memory
  // Do this without allocating map space if the target isn't using a GOT
  if(JR.revGOTMap.find(Stub) != JR.revGOTMap.end())
    JR.revGOTMap[Result] = JR.revGOTMap[Stub];

  return Result;
}
//===----------------------------------------------------------------------===//
// JITEmitter code.
//

/// JITEmitter - The JIT implementation of the MachineCodeEmitter, which is
/// used to output functions to memory for execution.
class JITEmitter : public JITCodeEmitter {
  JITMemoryManager *MemMgr;

  // When outputting a function stub in the context of some other function, we
  // save BufferBegin/BufferEnd/CurBufferPtr here.
  uint8_t *SavedBufferBegin, *SavedBufferEnd, *SavedCurBufferPtr;

  // When reattempting to JIT a function after running out of space, we store
  // the estimated size of the function we're trying to JIT here, so we can
  // ask the memory manager for at least this much space.  When we
  // successfully emit the function, we reset this back to zero.
  uintptr_t SizeEstimate;

  /// Relocations - These are the relocations that the function needs, as
  /// emitted.
  std::vector<MachineRelocation> Relocations;

  /// MBBLocations - This vector is a mapping from MBB ID's to their address.
  /// It is filled in by the StartMachineBasicBlock callback and queried by
  /// the getMachineBasicBlockAddress callback.
  std::vector<uintptr_t> MBBLocations;

  /// ConstantPool - The constant pool for the current function.
  MachineConstantPool *ConstantPool;

  /// ConstantPoolBase - A pointer to the first entry in the constant pool.
  void *ConstantPoolBase;

  /// ConstPoolAddresses - Addresses of individual constant pool entries.
  SmallVector<uintptr_t, 8> ConstPoolAddresses;

  /// JumpTable - The jump tables for the current function.
  MachineJumpTableInfo *JumpTable;

  /// JumpTableBase - A pointer to the first entry in the jump table.
  void *JumpTableBase;

  /// Resolver - This contains info about the currently resolved functions.
  JITResolver Resolver;

  /// DE - The dwarf emitter for the jit.
  JITDwarfEmitter *DE;

  /// LabelLocations - This vector is a mapping from Label ID's to their
  /// address.
  std::vector<uintptr_t> LabelLocations;

  /// MMI - Machine module info for exception informations
  MachineModuleInfo* MMI;

  // GVSet - a set to keep track of which globals have been seen
  SmallPtrSet<const GlobalVariable*, 8> GVSet;

  // CurFn - The llvm function being emitted.  Only valid during
  // finishFunction().
  const Function *CurFn;

  /// Information about emitted code, which is passed to the
  /// JITEventListeners.  This is reset in startFunction and used in
  /// finishFunction.
  JITEvent_EmittedFunctionDetails EmissionDetails;

  // CurFnStubUses - For a given Function, a vector of stubs that it
  // references.  This facilitates the JIT detecting that a stub is no
  // longer used, so that it may be deallocated.
  DenseMap<const Function *, SmallVector<void*, 1> > CurFnStubUses;

  // StubFnRefs - For a given pointer to a stub, a set of Functions which
  // reference the stub.  When the count of a stub's references drops to zero,
  // the stub is unused.
  DenseMap<void *, SmallPtrSet<const Function*, 1> > StubFnRefs;

  // ExtFnStubs - A map of external function names to stubs which have entries
  // in the JITResolver's ExternalFnToStubMap.
  StringMap<void *> ExtFnStubs;

  DebugLocTuple PrevDLT;
public:
  JITEmitter(JIT &jit, JITMemoryManager *JMM)
    : SizeEstimate(0), Resolver(jit), CurFn(0) {
    MemMgr = JMM ? JMM : JITMemoryManager::CreateDefaultMemManager();
    if (jit.getJITInfo().needsGOT()) {
      MemMgr->AllocateGOT();
      DOUT << "JIT is managing a GOT\n";
    }

    if (DwarfExceptionHandling) DE = new JITDwarfEmitter(jit);
  }
  ~JITEmitter() {
    delete MemMgr;
    if (DwarfExceptionHandling) delete DE;
  }

  /// classof - Methods for support type inquiry through isa, cast, and
  /// dyn_cast:
  static inline bool classof(const JITEmitter*) { return true; }
  static inline bool classof(const MachineCodeEmitter*) { return true; }

  JITResolver &getJITResolver() { return Resolver; }
  virtual void startFunction(MachineFunction &F);
  virtual bool finishFunction(MachineFunction &F);

  void emitConstantPool(MachineConstantPool *MCP);
  void initJumpTableInfo(MachineJumpTableInfo *MJTI);
  void emitJumpTableInfo(MachineJumpTableInfo *MJTI);

  virtual void startGVStub(const GlobalValue* GV, unsigned StubSize,
                           unsigned Alignment = 1);
  virtual void startGVStub(const GlobalValue* GV, void *Buffer,
                           unsigned StubSize);
  virtual void* finishGVStub(const GlobalValue *GV);

  /// allocateSpace - Reserves space in the current block if any, or
  /// allocate a new one of the given size.
  virtual void *allocateSpace(uintptr_t Size, unsigned Alignment);

  /// allocateGlobal - Allocate memory for a global.  Unlike allocateSpace,
  /// this method does not allocate memory in the current output buffer,
  /// because a global may live longer than the current function.
  virtual void *allocateGlobal(uintptr_t Size, unsigned Alignment);

  virtual void addRelocation(const MachineRelocation &MR) {
    Relocations.push_back(MR);
  }

  virtual void StartMachineBasicBlock(MachineBasicBlock *MBB) {
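    // Over-allocate (twice the needed size) so that later blocks rarely force
    // another resize of the table.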
    if (MBBLocations.size() <= (unsigned)MBB->getNumber())
      MBBLocations.resize((MBB->getNumber()+1)*2);
    MBBLocations[MBB->getNumber()] = getCurrentPCValue();
    DOUT << "JIT: Emitting BB" << MBB->getNumber() << " at ["
         << (void*) getCurrentPCValue() << "]\n";
  }
  virtual uintptr_t getConstantPoolEntryAddress(unsigned Entry) const;
  virtual uintptr_t getJumpTableEntryAddress(unsigned Entry) const;

  virtual uintptr_t getMachineBasicBlockAddress(MachineBasicBlock *MBB) const {
    assert(MBBLocations.size() > (unsigned)MBB->getNumber() &&
           MBBLocations[MBB->getNumber()] && "MBB not emitted!");
    return MBBLocations[MBB->getNumber()];
  }

  /// retryWithMoreMemory - Log a retry and deallocate all memory for the
  /// given function.  Increase the minimum allocation size so that we get
  /// more memory next time.
  void retryWithMoreMemory(MachineFunction &F);

  /// deallocateMemForFunction - Deallocate all memory for the specified
  /// function body.
  void deallocateMemForFunction(const Function *F);

  /// AddStubToCurrentFunction - Mark the current function being JIT'd as
  /// using the stub at the specified address. Allows
  /// deallocateMemForFunction to also remove stubs no longer referenced.
  void AddStubToCurrentFunction(void *Stub);

  /// getExternalFnStubs - Accessor for the JIT to find stubs emitted for
  /// MachineRelocations that reference external functions by name.
  const StringMap<void*> &getExternalFnStubs() const { return ExtFnStubs; }

  virtual void processDebugLoc(DebugLoc DL);

  virtual void emitLabel(uint64_t LabelID) {
    if (LabelLocations.size() <= LabelID)
      LabelLocations.resize((LabelID+1)*2);
    LabelLocations[LabelID] = getCurrentPCValue();
  }

  virtual uintptr_t getLabelAddress(uint64_t LabelID) const {
    assert(LabelLocations.size() > (unsigned)LabelID &&
           LabelLocations[LabelID] && "Label not emitted!");
    return LabelLocations[LabelID];
  }

  virtual void setModuleInfo(MachineModuleInfo* Info) {
    MMI = Info;
    if (DwarfExceptionHandling) DE->setModuleInfo(Info);
  }

  void setMemoryExecutable() {
    MemMgr->setMemoryExecutable();
  }

  JITMemoryManager *getMemMgr() const { return MemMgr; }

private:
  void *getPointerToGlobal(GlobalValue *GV, void *Reference, bool NoNeedStub);
  void *getPointerToGVIndirectSym(GlobalValue *V, void *Reference,
                                  bool NoNeedStub);
  unsigned addSizeOfGlobal(const GlobalVariable *GV, unsigned Size);
  unsigned addSizeOfGlobalsInConstantVal(const Constant *C, unsigned Size);
  unsigned addSizeOfGlobalsInInitializer(const Constant *Init, unsigned Size);
  unsigned GetSizeOfGlobalsInBytes(MachineFunction &MF);
};
void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
                                     bool DoesntNeedStub) {
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return TheJIT->getOrEmitGlobalVariable(GV);

  if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
    return TheJIT->getPointerToGlobal(GA->resolveAliasedGlobal(false));

  // If we have already compiled the function, return a pointer to its body.
  Function *F = cast<Function>(V);
  void *ResultPtr;
  if (!DoesntNeedStub && !TheJIT->isLazyCompilationDisabled()) {
    // Return the function stub if it's already created.
    ResultPtr = Resolver.getFunctionStubIfAvailable(F);
    if (ResultPtr)
      AddStubToCurrentFunction(ResultPtr);
  } else {
    ResultPtr = TheJIT->getPointerToGlobalIfAvailable(F);
  }
  if (ResultPtr) return ResultPtr;

  // If this is an external function pointer, we can force the JIT to
  // 'compile' it, which really just adds it to the map.  In dlsym mode,
  // external functions are forced through a stub, regardless of reloc type.
  if (F->isDeclaration() && !F->hasNotBeenReadFromBitcode() &&
      DoesntNeedStub && !TheJIT->areDlsymStubsEnabled())
    return TheJIT->getPointerToFunction(F);

  // Okay, the function has not been compiled yet, if the target callback
  // mechanism is capable of rewriting the instruction directly, prefer to do
  // that instead of emitting a stub.  This uses the lazy resolver, so is not
  // legal if lazy compilation is disabled.
  if (DoesntNeedStub && !TheJIT->isLazyCompilationDisabled())
    return Resolver.AddCallbackAtLocation(F, Reference);

  // Otherwise, we have to emit a stub.
  void *StubAddr = Resolver.getFunctionStub(F);

  // Add the stub to the current function's list of referenced stubs, so we can
  // deallocate them if the current function is ever freed.  It's possible to
  // return null from getFunctionStub in the case of a weak extern that fails
  // to resolve.
  if (StubAddr)
    AddStubToCurrentFunction(StubAddr);

  return StubAddr;
}
void *JITEmitter::getPointerToGVIndirectSym(GlobalValue *V, void *Reference,
                                            bool NoNeedStub) {
  // Make sure GV is emitted first, and create a stub containing the fully
  // resolved address.
  void *GVAddress = getPointerToGlobal(V, Reference, true);
  void *StubAddr = Resolver.getGlobalValueIndirectSym(V, GVAddress);

  // Add the stub to the current function's list of referenced stubs, so we can
  // deallocate them if the current function is ever freed.
  AddStubToCurrentFunction(StubAddr);

  return StubAddr;
}
void JITEmitter::AddStubToCurrentFunction(void *StubAddr) {
  if (!TheJIT->areDlsymStubsEnabled())
    return;

  assert(CurFn && "Stub added to current function, but current function is 0!");

  SmallVectorImpl<void*> &StubsUsed = CurFnStubUses[CurFn];
  StubsUsed.push_back(StubAddr);

  SmallPtrSet<const Function *, 1> &FnRefs = StubFnRefs[StubAddr];
  FnRefs.insert(CurFn);
}
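
// processDebugLoc - Record the address at which each new source location
// starts so that JITEventListeners can be told where line numbers begin in
// the emitted machine code.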
void JITEmitter::processDebugLoc(DebugLoc DL) {
  if (!DL.isUnknown()) {
    DebugLocTuple CurDLT = EmissionDetails.MF->getDebugLocTuple(DL);

    if (CurDLT.CompileUnit != 0 && PrevDLT != CurDLT) {
      JITEvent_EmittedFunctionDetails::LineStart NextLine;
      NextLine.Address = getCurrentPCValue();
      NextLine.Loc = DL;
      EmissionDetails.LineStarts.push_back(NextLine);
    }

    PrevDLT = CurDLT;
  }
}
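
// GetConstantPoolSizeInBytes - Compute the total number of bytes the constant
// pool occupies, taking each entry's alignment into account.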
static unsigned GetConstantPoolSizeInBytes(MachineConstantPool *MCP,
                                           const TargetData *TD) {
  const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
  if (Constants.empty()) return 0;

  unsigned Size = 0;
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    MachineConstantPoolEntry CPE = Constants[i];
    unsigned AlignMask = CPE.getAlignment() - 1;
    Size = (Size + AlignMask) & ~AlignMask;
    const Type *Ty = CPE.getType();
    Size += TD->getTypeAllocSize(Ty);
  }
  return Size;
}
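
// GetJumpTableSizeInBytes - Total size of every jump table in the function:
// the number of entries across all tables times the per-entry size.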
static unsigned GetJumpTableSizeInBytes(MachineJumpTableInfo *MJTI) {
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty()) return 0;

  unsigned NumEntries = 0;
  for (unsigned i = 0, e = JT.size(); i != e; ++i)
    NumEntries += JT[i].MBBs.size();

  unsigned EntrySize = MJTI->getEntrySize();

  return NumEntries * EntrySize;
}
static uintptr_t RoundUpToAlign(uintptr_t Size, unsigned Alignment) {
  if (Alignment == 0) Alignment = 1;
  // Since we do not know where the buffer will be allocated, be pessimistic.
  return Size + Alignment;
}
/// addSizeOfGlobal - add the size of the global (plus any alignment padding)
/// into the running total Size.
unsigned JITEmitter::addSizeOfGlobal(const GlobalVariable *GV, unsigned Size) {
  const Type *ElTy = GV->getType()->getElementType();
  size_t GVSize = (size_t)TheJIT->getTargetData()->getTypeAllocSize(ElTy);
  size_t GVAlign =
      (size_t)TheJIT->getTargetData()->getPreferredAlignment(GV);
  DOUT << "JIT: Adding in size " << GVSize << " alignment " << GVAlign;
  // Assume code section ends with worst possible alignment, so first
  // variable needs maximal padding.
  Size = ((Size+GVAlign-1)/GVAlign)*GVAlign;
  Size += GVSize;
  return Size;
}
/// addSizeOfGlobalsInConstantVal - find any globals that we haven't seen yet
/// but are referenced from the constant; put them in GVSet and add their
/// size into the running total Size.
unsigned JITEmitter::addSizeOfGlobalsInConstantVal(const Constant *C,
                                                   unsigned Size) {
  // If its undefined, return the garbage.
  if (isa<UndefValue>(C))
    return Size;

  // If the value is a ConstantExpr
  if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(C)) {
    Constant *Op0 = CE->getOperand(0);
    switch (CE->getOpcode()) {
    case Instruction::GetElementPtr:
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast: {
      Size = addSizeOfGlobalsInConstantVal(Op0, Size);
      break;
    }
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::And:
    case Instruction::Or:
    case Instruction::Xor: {
      Size = addSizeOfGlobalsInConstantVal(Op0, Size);
      Size = addSizeOfGlobalsInConstantVal(CE->getOperand(1), Size);
      break;
    }
    default: {
      std::string msg;
      raw_string_ostream Msg(msg);
      Msg << "ConstantExpr not handled: " << *CE;
      llvm_report_error(Msg.str());
    }
    }
  }

  if (C->getType()->getTypeID() == Type::PointerTyID)
    if (const GlobalVariable* GV = dyn_cast<GlobalVariable>(C))
      if (GVSet.insert(GV))
        Size = addSizeOfGlobal(GV, Size);

  return Size;
}
/// addSizeOfGlobalsInInitializer - handle any globals that we haven't seen yet
/// but are referenced from the given initializer.
unsigned JITEmitter::addSizeOfGlobalsInInitializer(const Constant *Init,
                                                   unsigned Size) {
  if (!isa<UndefValue>(Init) &&
      !isa<ConstantVector>(Init) &&
      !isa<ConstantAggregateZero>(Init) &&
      !isa<ConstantArray>(Init) &&
      !isa<ConstantStruct>(Init) &&
      Init->getType()->isFirstClassType())
    Size = addSizeOfGlobalsInConstantVal(Init, Size);

  return Size;
}
/// GetSizeOfGlobalsInBytes - walk the code for the function, looking for
/// globals; then walk the initializers of those globals looking for more.
/// If their size has not been considered yet, add it into the running total
/// Size.
unsigned JITEmitter::GetSizeOfGlobalsInBytes(MachineFunction &MF) {
  unsigned Size = 0;
  GVSet.clear();

  for (MachineFunction::iterator MBB = MF.begin(), E = MF.end();
       MBB != E; ++MBB) {
    for (MachineBasicBlock::const_iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      const TargetInstrDesc &Desc = I->getDesc();
      const MachineInstr &MI = *I;
      unsigned NumOps = Desc.getNumOperands();
      for (unsigned CurOp = 0; CurOp < NumOps; CurOp++) {
        const MachineOperand &MO = MI.getOperand(CurOp);
        if (MO.isGlobal()) {
          GlobalValue* V = MO.getGlobal();
          const GlobalVariable *GV = dyn_cast<const GlobalVariable>(V);
          if (!GV)
            continue;
          // If seen in previous function, it will have an entry here.
          if (TheJIT->getPointerToGlobalIfAvailable(GV))
            continue;
          // If seen earlier in this function, it will have an entry here.
          // FIXME: it should be possible to combine these tables, by
          // assuming the addresses of the new globals in this module
          // start at 0 (or something) and adjusting them after codegen
          // complete. Another possibility is to grab a marker bit in GV.
          if (GVSet.insert(GV))
            // A variable as yet unseen. Add in its size.
            Size = addSizeOfGlobal(GV, Size);
        }
      }
    }
  }
  DOUT << "JIT: About to look through initializers\n";
  // Look for more globals that are referenced only from initializers.
  // GVSet.end is computed each time because the set can grow as we go.
  for (SmallPtrSet<const GlobalVariable *, 8>::iterator I = GVSet.begin();
       I != GVSet.end(); I++) {
    const GlobalVariable* GV = *I;
    if (GV->hasInitializer())
      Size = addSizeOfGlobalsInInitializer(GV->getInitializer(), Size);
  }

  return Size;
}
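
// startFunction - Set up the output buffer for the function about to be
// emitted.  If the memory manager needs an exact size, conservatively
// pre-compute space for the constant pool, jump tables, code and any globals
// that will be allocated after the function; otherwise fall back on the
// SizeEstimate carried over from a previous failed attempt, if any.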
void JITEmitter::startFunction(MachineFunction &F) {
  DEBUG(errs() << "JIT: Starting CodeGen of Function "
        << F.getFunction()->getName() << "\n");

  uintptr_t ActualSize = 0;
  // Set the memory writable, if it's not already
  MemMgr->setMemoryWritable();
  if (MemMgr->NeedsExactSize()) {
    DOUT << "JIT: ExactSize\n";
    const TargetInstrInfo* TII = F.getTarget().getInstrInfo();
    MachineJumpTableInfo *MJTI = F.getJumpTableInfo();
    MachineConstantPool *MCP = F.getConstantPool();

    // Ensure the constant pool/jump table info is at least 4-byte aligned.
    ActualSize = RoundUpToAlign(ActualSize, 16);

    // Add the alignment of the constant pool
    ActualSize = RoundUpToAlign(ActualSize, MCP->getConstantPoolAlignment());

    // Add the constant pool size
    ActualSize += GetConstantPoolSizeInBytes(MCP, TheJIT->getTargetData());

    // Add the aligment of the jump table info
    ActualSize = RoundUpToAlign(ActualSize, MJTI->getAlignment());

    // Add the jump table size
    ActualSize += GetJumpTableSizeInBytes(MJTI);

    // Add the alignment for the function
    ActualSize = RoundUpToAlign(ActualSize,
                                std::max(F.getFunction()->getAlignment(), 8U));

    // Add the function size
    ActualSize += TII->GetFunctionSizeInBytes(F);

    DOUT << "JIT: ActualSize before globals " << ActualSize << "\n";
    // Add the size of the globals that will be allocated after this function.
    // These are all the ones referenced from this function that were not
    // previously allocated.
    ActualSize += GetSizeOfGlobalsInBytes(F);
    DOUT << "JIT: ActualSize after globals " << ActualSize << "\n";
  } else if (SizeEstimate > 0) {
    // SizeEstimate will be non-zero on reallocation attempts.
    ActualSize = SizeEstimate;
  }

  BufferBegin = CurBufferPtr = MemMgr->startFunctionBody(F.getFunction(),
                                                         ActualSize);
  BufferEnd = BufferBegin+ActualSize;

  // Ensure the constant pool/jump table info is at least 4-byte aligned.
  emitAlignment(16);

  emitConstantPool(F.getConstantPool());
  initJumpTableInfo(F.getJumpTableInfo());

  // About to start emitting the machine code for the function.
  emitAlignment(std::max(F.getFunction()->getAlignment(), 8U));
  TheJIT->updateGlobalMapping(F.getFunction(), CurBufferPtr);

  MBBLocations.clear();

  EmissionDetails.MF = &F;
  EmissionDetails.LineStarts.clear();
}
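
// finishFunction - Resolve relocations, emit the jump tables and (when dwarf
// exception handling is enabled) the exception table, then release the
// buffer.  Returns true when emission ran out of space and must be retried.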
bool JITEmitter::finishFunction(MachineFunction &F) {
  if (CurBufferPtr == BufferEnd) {
    // We must call endFunctionBody before retrying, because
    // deallocateMemForFunction requires it.
    MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);
    retryWithMoreMemory(F);
    return true;
  }

  emitJumpTableInfo(F.getJumpTableInfo());

  // FnStart is the start of the text, not the start of the constant pool and
  // other per-function data.
  uint8_t *FnStart =
    (uint8_t *)TheJIT->getPointerToGlobalIfAvailable(F.getFunction());

  // FnEnd is the end of the function's machine code.
  uint8_t *FnEnd = CurBufferPtr;

  if (!Relocations.empty()) {
    CurFn = F.getFunction();
    NumRelos += Relocations.size();

    // Resolve the relocations to concrete pointers.
    for (unsigned i = 0, e = Relocations.size(); i != e; ++i) {
      MachineRelocation &MR = Relocations[i];
      void *ResultPtr = 0;
      if (!MR.letTargetResolve()) {
        if (MR.isExternalSymbol()) {
          ResultPtr = TheJIT->getPointerToNamedFunction(MR.getExternalSymbol(),
                                                        false);
          DOUT << "JIT: Map \'" << MR.getExternalSymbol() << "\' to ["
               << ResultPtr << "]\n";

          // If the target REALLY wants a stub for this function, emit it now.
          if (!MR.doesntNeedStub()) {
            if (!TheJIT->areDlsymStubsEnabled()) {
              ResultPtr = Resolver.getExternalFunctionStub(ResultPtr);
            } else {
              void *&Stub = ExtFnStubs[MR.getExternalSymbol()];
              if (!Stub) {
                Stub = Resolver.getExternalFunctionStub((void *)&Stub);
                AddStubToCurrentFunction(Stub);
              }
              ResultPtr = Stub;
            }
          }
        } else if (MR.isGlobalValue()) {
          ResultPtr = getPointerToGlobal(MR.getGlobalValue(),
                                         BufferBegin+MR.getMachineCodeOffset(),
                                         MR.doesntNeedStub());
        } else if (MR.isIndirectSymbol()) {
          ResultPtr = getPointerToGVIndirectSym(MR.getGlobalValue(),
                                         BufferBegin+MR.getMachineCodeOffset(),
                                         MR.doesntNeedStub());
        } else if (MR.isBasicBlock()) {
          ResultPtr = (void*)getMachineBasicBlockAddress(MR.getBasicBlock());
        } else if (MR.isConstantPoolIndex()) {
          ResultPtr = (void*)getConstantPoolEntryAddress(MR.getConstantPoolIndex());
        } else {
          assert(MR.isJumpTableIndex());
          ResultPtr = (void*)getJumpTableEntryAddress(MR.getJumpTableIndex());
        }

        MR.setResultPointer(ResultPtr);
      }

      // if we are managing the GOT and the relocation wants an index,
      // give it one
      if (MR.isGOTRelative() && MemMgr->isManagingGOT()) {
        unsigned idx = Resolver.getGOTIndexForAddr(ResultPtr);
        MR.setGOTIndex(idx);
        if (((void**)MemMgr->getGOTBase())[idx] != ResultPtr) {
          DOUT << "JIT: GOT was out of date for " << ResultPtr
               << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
               << "\n";
          ((void**)MemMgr->getGOTBase())[idx] = ResultPtr;
        }
      }
    }

    CurFn = 0;
    TheJIT->getJITInfo().relocate(BufferBegin, &Relocations[0],
                                  Relocations.size(), MemMgr->getGOTBase());
  }

  // Update the GOT entry for F to point to the new code.
  if (MemMgr->isManagingGOT()) {
    unsigned idx = Resolver.getGOTIndexForAddr((void*)BufferBegin);
    if (((void**)MemMgr->getGOTBase())[idx] != (void*)BufferBegin) {
      DOUT << "JIT: GOT was out of date for " << (void*)BufferBegin
           << " pointing at " << ((void**)MemMgr->getGOTBase())[idx] << "\n";
      ((void**)MemMgr->getGOTBase())[idx] = (void*)BufferBegin;
    }
  }

  // CurBufferPtr may have moved beyond FnEnd, due to memory allocation for
  // global variables that were referenced in the relocations.
  MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);

  if (CurBufferPtr == BufferEnd) {
    retryWithMoreMemory(F);
    return true;
  } else {
    // Now that we've succeeded in emitting the function, reset the
    // SizeEstimate back down to zero.
    SizeEstimate = 0;
  }

  BufferBegin = CurBufferPtr = 0;
  NumBytes += FnEnd-FnStart;

  // Invalidate the icache if necessary.
  sys::Memory::InvalidateInstructionCache(FnStart, FnEnd-FnStart);

  TheJIT->NotifyFunctionEmitted(*F.getFunction(), FnStart, FnEnd-FnStart,
                                EmissionDetails);

  DEBUG(errs() << "JIT: Finished CodeGen of [" << (void*)FnStart
        << "] Function: " << F.getFunction()->getName()
        << ": " << (FnEnd-FnStart) << " bytes of text, "
        << Relocations.size() << " relocations\n");

  Relocations.clear();
  ConstPoolAddresses.clear();

  // Mark code region readable and executable if it's not so already.
  MemMgr->setMemoryExecutable();

  if (sys::hasDisassembler()) {
    DOUT << "JIT: Disassembled code:\n";
    DOUT << sys::disassembleBuffer(FnStart, FnEnd-FnStart, (uintptr_t)FnStart);
  } else {
    DOUT << "JIT: Binary code:\n";
    uint8_t* q = FnStart;
    for (int i = 0; q < FnEnd; q += 4, ++i) {
      DOUT << "JIT: " << std::setw(8) << std::setfill('0')
           << (long)(q - FnStart) << ": ";
      for (int j = 3; j >= 0; --j) {
        DOUT << std::setw(2) << std::setfill('0') << (unsigned short)q[j];
      }
      DOUT << "\n";
    }
  }

  if (DwarfExceptionHandling) {
    uintptr_t ActualSize = 0;
    SavedBufferBegin = BufferBegin;
    SavedBufferEnd = BufferEnd;
    SavedCurBufferPtr = CurBufferPtr;

    if (MemMgr->NeedsExactSize()) {
      ActualSize = DE->GetDwarfTableSizeInBytes(F, *this, FnStart, FnEnd);
    }

    BufferBegin = CurBufferPtr = MemMgr->startExceptionTable(F.getFunction(),
                                                             ActualSize);
    BufferEnd = BufferBegin+ActualSize;
    uint8_t* FrameRegister = DE->EmitDwarfTable(F, *this, FnStart, FnEnd);
    MemMgr->endExceptionTable(F.getFunction(), BufferBegin, CurBufferPtr,
                              FrameRegister);
    BufferBegin = SavedBufferBegin;
    BufferEnd = SavedBufferEnd;
    CurBufferPtr = SavedCurBufferPtr;

    TheJIT->RegisterTable(FrameRegister);
  }

  if (MMI)
    MMI->EndFunction();

  return false;
}
void JITEmitter::retryWithMoreMemory(MachineFunction &F) {
  DOUT << "JIT: Ran out of space for native code.  Reattempting.\n";
  Relocations.clear();  // Clear the old relocations or we'll reapply them.
  ConstPoolAddresses.clear();
  ++NumRetries;
  deallocateMemForFunction(F.getFunction());
  // Try again with at least twice as much free space.
  SizeEstimate = (uintptr_t)(2 * (BufferEnd - BufferBegin));
}
/// deallocateMemForFunction - Deallocate all memory for the specified
/// function body.  Also drop any references the function has to stubs.
void JITEmitter::deallocateMemForFunction(const Function *F) {
  MemMgr->deallocateMemForFunction(F);

  // If the function did not reference any stubs, return.
  if (CurFnStubUses.find(F) == CurFnStubUses.end())
    return;

  // For each referenced stub, erase the reference to this function, and then
  // erase the list of referenced stubs.
  SmallVectorImpl<void *> &StubList = CurFnStubUses[F];
  for (unsigned i = 0, e = StubList.size(); i != e; ++i) {
    void *Stub = StubList[i];

    // If we already invalidated this stub for this function, continue.
    if (StubFnRefs.count(Stub) == 0)
      continue;

    SmallPtrSet<const Function *, 1> &FnRefs = StubFnRefs[Stub];
    FnRefs.erase(F);

    // If this function was the last reference to the stub, invalidate the stub
    // in the JITResolver.  Were there a memory manager deallocateStub routine,
    // we could call that at this point too.
    if (FnRefs.empty()) {
      DOUT << "\nJIT: Invalidated Stub at [" << Stub << "]\n";
      StubFnRefs.erase(Stub);

      // Invalidate the stub.  If it is a GV stub, update the JIT's global
      // mapping for that GV to zero, otherwise, search the string map of
      // external function names to stubs and remove the entry for this stub.
      GlobalValue *GV = Resolver.invalidateStub(Stub);
      if (GV) {
        TheJIT->updateGlobalMapping(GV, 0);
      } else {
        for (StringMapIterator<void*> i = ExtFnStubs.begin(),
             e = ExtFnStubs.end(); i != e; ++i) {
          if (i->second == Stub) {
            ExtFnStubs.erase(i);
            break;
          }
        }
      }
    }
  }
  CurFnStubUses.erase(F);
}
void* JITEmitter::allocateSpace(uintptr_t Size, unsigned Alignment) {
  if (BufferBegin)
    return JITCodeEmitter::allocateSpace(Size, Alignment);

  // create a new memory block if there is no active one.
  // care must be taken so that BufferBegin is invalidated when a
  // block is trimmed
  BufferBegin = CurBufferPtr = MemMgr->allocateSpace(Size, Alignment);
  BufferEnd = BufferBegin+Size;
  return CurBufferPtr;
}
void* JITEmitter::allocateGlobal(uintptr_t Size, unsigned Alignment) {
  // Delegate this call through the memory manager.
  return MemMgr->allocateGlobal(Size, Alignment);
}
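
// emitConstantPool - Allocate memory for the function's constant pool,
// initialize each entry, and remember the entry addresses so relocations
// against the pool can be resolved later.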
void JITEmitter::emitConstantPool(MachineConstantPool *MCP) {
  if (TheJIT->getJITInfo().hasCustomConstantPool())
    return;

  const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
  if (Constants.empty()) return;

  unsigned Size = GetConstantPoolSizeInBytes(MCP, TheJIT->getTargetData());
  unsigned Align = MCP->getConstantPoolAlignment();
  ConstantPoolBase = allocateSpace(Size, Align);
  ConstantPool = MCP;

  if (ConstantPoolBase == 0) return;  // Buffer overflow.

  DOUT << "JIT: Emitted constant pool at [" << ConstantPoolBase
       << "] (size: " << Size << ", alignment: " << Align << ")\n";

  // Initialize the memory for all of the constant pool entries.
  unsigned Offset = 0;
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    MachineConstantPoolEntry CPE = Constants[i];
    unsigned AlignMask = CPE.getAlignment() - 1;
    Offset = (Offset + AlignMask) & ~AlignMask;

    uintptr_t CAddr = (uintptr_t)ConstantPoolBase + Offset;
    ConstPoolAddresses.push_back(CAddr);
    if (CPE.isMachineConstantPoolEntry()) {
      // FIXME: add support to lower machine constant pool values into bytes!
      llvm_report_error("Initialize memory with machine specific constant pool "
                        "entry has not been implemented!");
    }
    TheJIT->InitializeMemory(CPE.Val.ConstVal, (void*)CAddr);
    DOUT << "JIT: CP" << i << " at [0x"
         << std::hex << CAddr << std::dec << "]\n";

    const Type *Ty = CPE.Val.ConstVal->getType();
    Offset += TheJIT->getTargetData()->getTypeAllocSize(Ty);
  }
}
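
// initJumpTableInfo - Reserve space for all of the function's jump tables.
// The actual entries are written by emitJumpTableInfo once the final basic
// block addresses are known.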
void JITEmitter::initJumpTableInfo(MachineJumpTableInfo *MJTI) {
  if (TheJIT->getJITInfo().hasCustomJumpTables())
    return;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty()) return;

  unsigned NumEntries = 0;
  for (unsigned i = 0, e = JT.size(); i != e; ++i)
    NumEntries += JT[i].MBBs.size();

  unsigned EntrySize = MJTI->getEntrySize();

  // Just allocate space for all the jump tables now.  We will fix up the actual
  // MBB entries in the tables after we emit the code for each block, since then
  // we will know the final locations of the MBBs in memory.
  JumpTable = MJTI;
  JumpTableBase = allocateSpace(NumEntries * EntrySize, MJTI->getAlignment());
}
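
// emitJumpTableInfo - Fill in the memory reserved by initJumpTableInfo,
// writing either PIC-relative offsets or absolute basic block addresses
// depending on the relocation model.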
void JITEmitter::emitJumpTableInfo(MachineJumpTableInfo *MJTI) {
  if (TheJIT->getJITInfo().hasCustomJumpTables())
    return;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty() || JumpTableBase == 0) return;

  if (TargetMachine::getRelocationModel() == Reloc::PIC_) {
    assert(MJTI->getEntrySize() == 4 && "Cross JIT'ing?");
    // For each jump table, place the offset from the beginning of the table
    // to the target address.
    int *SlotPtr = (int*)JumpTableBase;

    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      // Store the offset of the basic block for this jump table slot in the
      // memory we allocated for the jump table in 'initJumpTableInfo'
      uintptr_t Base = (uintptr_t)SlotPtr;
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi) {
        uintptr_t MBBAddr = getMachineBasicBlockAddress(MBBs[mi]);
        *SlotPtr++ = TheJIT->getJITInfo().getPICJumpTableEntry(MBBAddr, Base);
      }
    }
  } else {
    assert(MJTI->getEntrySize() == sizeof(void*) && "Cross JIT'ing?");

    // For each jump table, map each target in the jump table to the address of
    // an emitted MachineBasicBlock.
    intptr_t *SlotPtr = (intptr_t*)JumpTableBase;

    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      // Store the address of the basic block for this jump table slot in the
      // memory we allocated for the jump table in 'initJumpTableInfo'
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi)
        *SlotPtr++ = getMachineBasicBlockAddress(MBBs[mi]);
    }
  }
}
void JITEmitter::startGVStub(const GlobalValue* GV, unsigned StubSize,
                             unsigned Alignment) {
  SavedBufferBegin = BufferBegin;
  SavedBufferEnd = BufferEnd;
  SavedCurBufferPtr = CurBufferPtr;

  BufferBegin = CurBufferPtr = MemMgr->allocateStub(GV, StubSize, Alignment);
  BufferEnd = BufferBegin+StubSize+1;
}

void JITEmitter::startGVStub(const GlobalValue* GV, void *Buffer,
                             unsigned StubSize) {
  SavedBufferBegin = BufferBegin;
  SavedBufferEnd = BufferEnd;
  SavedCurBufferPtr = CurBufferPtr;

  BufferBegin = CurBufferPtr = (uint8_t *)Buffer;
  BufferEnd = BufferBegin+StubSize+1;
}

void *JITEmitter::finishGVStub(const GlobalValue* GV) {
  NumBytes += getCurrentPCOffset();
  std::swap(SavedBufferBegin, BufferBegin);
  BufferEnd = SavedBufferEnd;
  CurBufferPtr = SavedCurBufferPtr;
  return SavedBufferBegin;
}
// getConstantPoolEntryAddress - Return the address of the 'ConstantNum' entry
// in the constant pool that was last emitted with the 'emitConstantPool'
// method.
//
uintptr_t JITEmitter::getConstantPoolEntryAddress(unsigned ConstantNum) const {
  assert(ConstantNum < ConstantPool->getConstants().size() &&
         "Invalid ConstantPoolIndex!");
  return ConstPoolAddresses[ConstantNum];
}
// getJumpTableEntryAddress - Return the address of the JumpTable with index
// 'Index' in the jump table that was last initialized with 'initJumpTableInfo'
//
uintptr_t JITEmitter::getJumpTableEntryAddress(unsigned Index) const {
  const std::vector<MachineJumpTableEntry> &JT = JumpTable->getJumpTables();
  assert(Index < JT.size() && "Invalid jump table index!");

  unsigned Offset = 0;
  unsigned EntrySize = JumpTable->getEntrySize();

  for (unsigned i = 0; i < Index; ++i)
    Offset += JT[i].MBBs.size();

  Offset *= EntrySize;

  return (uintptr_t)((char *)JumpTableBase + Offset);
}
//===----------------------------------------------------------------------===//
//  Public interface to this file
//===----------------------------------------------------------------------===//

JITCodeEmitter *JIT::createEmitter(JIT &jit, JITMemoryManager *JMM) {
  return new JITEmitter(jit, JMM);
}
// getPointerToNamedFunction - This function is used as a global wrapper to
// JIT::getPointerToNamedFunction for the purpose of resolving symbols when
// bugpoint is debugging the JIT. In that scenario, we are loading an .so and
// need to resolve function(s) that are being mis-codegenerated, so we need to
// resolve their addresses at runtime, and this is the way to do it.
extern "C" {
  void *getPointerToNamedFunction(const char *Name) {
    if (Function *F = TheJIT->FindFunctionNamed(Name))
      return TheJIT->getPointerToFunction(F);
    return TheJIT->getPointerToNamedFunction(Name);
  }
}
// getPointerToFunctionOrStub - If the specified function has been
// code-gen'd, return a pointer to the function.  If not, compile it, or use
// a stub to implement lazy compilation if available.
//
void *JIT::getPointerToFunctionOrStub(Function *F) {
  // If we have already code generated the function, just return the address.
  if (void *Addr = getPointerToGlobalIfAvailable(F))
    return Addr;

  // Get a stub if the target supports it.
  assert(isa<JITEmitter>(JCE) && "Unexpected MCE?");
  JITEmitter *JE = cast<JITEmitter>(getCodeEmitter());
  return JE->getJITResolver().getFunctionStub(F);
}
void JIT::updateFunctionStub(Function *F) {
  // Get the empty stub we generated earlier.
  assert(isa<JITEmitter>(JCE) && "Unexpected MCE?");
  JITEmitter *JE = cast<JITEmitter>(getCodeEmitter());
  void *Stub = JE->getJITResolver().getFunctionStub(F);

  // Tell the target jit info to rewrite the stub at the specified address,
  // rather than creating a new one.
  void *Addr = getPointerToGlobalIfAvailable(F);
  getJITInfo().emitFunctionStubAtAddr(F, Addr, Stub, *getCodeEmitter());
}
/// updateDlsymStubTable - Emit the data necessary to relocate the stubs
/// that were emitted during code generation.
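///
/// The table emitted below is laid out as: a 32-bit record count, one 32-bit
/// string offset per record, one pointer-sized entry per record (low bit set
/// when the target is a Function), followed by the NUL-terminated names.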
void JIT::updateDlsymStubTable() {
  assert(isa<JITEmitter>(JCE) && "Unexpected MCE?");
  JITEmitter *JE = cast<JITEmitter>(getCodeEmitter());

  SmallVector<GlobalValue*, 8> GVs;
  SmallVector<void*, 8> Ptrs;
  const StringMap<void *> &ExtFns = JE->getExternalFnStubs();

  JE->getJITResolver().getRelocatableGVs(GVs, Ptrs);

  unsigned nStubs = GVs.size() + ExtFns.size();

  // If there are no relocatable stubs, return.
  if (nStubs == 0)
    return;

  // If there are no new relocatable stubs, return.
  void *CurTable = JE->getMemMgr()->getDlsymTable();
  if (CurTable && (*(unsigned *)CurTable == nStubs))
    return;

  // Calculate the size of the stub info
  unsigned offset = 4 + 4 * nStubs + sizeof(intptr_t) * nStubs;

  SmallVector<unsigned, 8> Offsets;
  for (unsigned i = 0; i != GVs.size(); ++i) {
    Offsets.push_back(offset);
    offset += GVs[i]->getName().size() + 1;
  }
  for (StringMapConstIterator<void*> i = ExtFns.begin(), e = ExtFns.end();
       i != e; ++i) {
    Offsets.push_back(offset);
    offset += strlen(i->first()) + 1;
  }

  // Allocate space for the new "stub", which contains the dlsym table.
  JE->startGVStub(0, offset, 4);

  // Emit the number of records
  JE->emitInt32(nStubs);

  // Emit the string offsets
  for (unsigned i = 0; i != nStubs; ++i)
    JE->emitInt32(Offsets[i]);

  // Emit the pointers.  Verify that they are at least 2-byte aligned, and set
  // the low bit to 0 == GV, 1 == Function, so that the client code doing the
  // relocation can write the relocated pointer at the appropriate place in
  // the stub.
  for (unsigned i = 0; i != GVs.size(); ++i) {
    intptr_t Ptr = (intptr_t)Ptrs[i];
    assert((Ptr & 1) == 0 && "Stub pointers must be at least 2-byte aligned!");

    if (isa<Function>(GVs[i]))
      Ptr |= 1;

    if (sizeof(Ptr) == 8)
      JE->emitInt64(Ptr);
    else
      JE->emitInt32(Ptr);
  }
  for (StringMapConstIterator<void*> i = ExtFns.begin(), e = ExtFns.end();
       i != e; ++i) {
    intptr_t Ptr = (intptr_t)i->second | 1;

    if (sizeof(Ptr) == 8)
      JE->emitInt64(Ptr);
    else
      JE->emitInt32(Ptr);
  }

  // Emit the strings.
  for (unsigned i = 0; i != GVs.size(); ++i)
    JE->emitString(GVs[i]->getName());
  for (StringMapConstIterator<void*> i = ExtFns.begin(), e = ExtFns.end();
       i != e; ++i)
    JE->emitString(i->first());

  // Tell the JIT memory manager where it is.  The JIT Memory Manager will
  // deallocate space for the old one, if one existed.
  JE->getMemMgr()->SetDlsymTable(JE->finishGVStub(0));
}
/// freeMachineCodeForFunction - release machine code memory for given Function.
///
void JIT::freeMachineCodeForFunction(Function *F) {

  // Delete translation for this from the ExecutionEngine, so it will get
  // retranslated next time it is used.
  void *OldPtr = updateGlobalMapping(F, 0);

  if (OldPtr)
    TheJIT->NotifyFreeingMachineCode(*F, OldPtr);

  // Free the actual memory for the function body and related stuff.
  assert(isa<JITEmitter>(JCE) && "Unexpected MCE?");
  cast<JITEmitter>(JCE)->deallocateMemForFunction(F);
}