//===-- GCStrategy.cpp - Garbage collection infrastructure ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements target- and collector-independent garbage collection
// infrastructure.
//
// MachineCodeAnalysis identifies the GC safe points in the machine code. Roots
// are identified in SelectionDAGISel.
//
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

namespace {
  /// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
  /// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
  /// directed by the GCStrategy. It also performs automatic root
  /// initialization and custom intrinsic lowering.
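  ///
  /// For illustration (a sketch based on the llvm.gcread/llvm.gcwrite
  /// signatures in the LLVM GC documentation of this era; the actual types
  /// depend on the front end), the default lowering turns the write barrier
  ///   call void @llvm.gcwrite(i8* %val, i8* %obj, i8** %field)
  /// into a plain store
  ///   store i8* %val, i8** %field
  /// and the read barrier
  ///   %v = call i8* @llvm.gcread(i8* %obj, i8** %field)
  /// into a plain load
  ///   %v = load i8** %field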
  class LowerIntrinsics : public FunctionPass {
    static bool NeedsDefaultLoweringPass(const GCStrategy &C);
    static bool NeedsCustomLoweringPass(const GCStrategy &C);
    static bool CouldBecomeSafePoint(Instruction *I);
    bool PerformDefaultLowering(Function &F, GCStrategy &Coll);
    static bool InsertRootInitializers(Function &F,
                                       AllocaInst **Roots, unsigned Count);

  public:
    static char ID;

    LowerIntrinsics();
    const char *getPassName() const;
    void getAnalysisUsage(AnalysisUsage &AU) const;

    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);
  };
  /// MachineCodeAnalysis - This is a target-independent pass over the machine
  /// function representation to identify safe points for the garbage collector
  /// in the machine code. It inserts labels at safe points and populates a
  /// GCMetadata record for each function.
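  ///
  /// Per safe point it records a label, and per GC root it records the root's
  /// offset in the stack frame; a GCMetadataPrinter can later emit these for
  /// the collector runtime. (Descriptive summary of the pass below, not an
  /// additional guarantee.)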
  class MachineCodeAnalysis : public MachineFunctionPass {
    const TargetMachine *TM;
    GCFunctionInfo *FI;
    MachineModuleInfo *MMI;
    const TargetInstrInfo *TII;

    void FindSafePoints(MachineFunction &MF);
    void VisitCallPoint(MachineBasicBlock::iterator MI);
    MCSymbol *InsertLabel(MachineBasicBlock &MBB,
                          MachineBasicBlock::iterator MI,
                          DebugLoc DL) const;

    void FindStackOffsets(MachineFunction &MF);

  public:
    static char ID;

    MachineCodeAnalysis();
    const char *getPassName() const;
    void getAnalysisUsage(AnalysisUsage &AU) const;

    bool runOnMachineFunction(MachineFunction &MF);
  };

}

// -----------------------------------------------------------------------------
GCStrategy::GCStrategy() :
  NeededSafePoints(0),
  CustomReadBarriers(false),
  CustomWriteBarriers(false),
  CustomRoots(false),
  InitRoots(true),
  UsesMetadata(false)
{}
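
// A collector plugin typically overrides these defaults in its own
// constructor. A hypothetical subclass, for illustration only (the name MyGC
// is invented here; see the in-tree collectors for real examples):
//
//   class MyGC : public GCStrategy {
//   public:
//     MyGC() {
//       InitRoots = true;                      // null-initialize roots
//       UsesMetadata = true;                   // emit GC metadata tables
//       NeededSafePoints = 1 << GC::PostCall;  // label the return address
//     }
//   };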

GCStrategy::~GCStrategy() {
  for (iterator I = begin(), E = end(); I != E; ++I)
    delete *I;

  Functions.clear();
}

bool GCStrategy::initializeCustomLowering(Module &M) { return false; }

bool GCStrategy::performCustomLowering(Function &F) {
  dbgs() << "gc " << getName() << " must override performCustomLowering.\n";
  llvm_unreachable(0);
  return 0;
}

GCFunctionInfo *GCStrategy::insertFunctionInfo(const Function &F) {
  GCFunctionInfo *FI = new GCFunctionInfo(F, *this);
  Functions.push_back(FI);
  return FI;
}
// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() {
  return new LowerIntrinsics();
}

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics()
  : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

const char *LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTree>();
}
/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  // FIXME: This is rather antisocial in the context of a JIT since it performs
  //        work against the entire module. But this cannot be done at
  //        runFunction time (initializeCustomLowering likely needs to change
  //        the module).
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  bool MadeChange = false;
  for (GCModuleInfo::iterator I = MI->begin(), E = MI->end(); I != E; ++I)
    if (NeedsCustomLoweringPass(**I))
      if ((*I)->initializeCustomLowering(M))
        MadeChange = true;

  return MadeChange;
}
bool LowerIntrinsics::InsertRootInitializers(Function &F, AllocaInst **Roots,
                                             unsigned Count) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP)) ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst*,16> InitedRoots;
  for (; !CouldBecomeSafePoint(IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
          dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
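  // Each root not already initialized before a potential safe point gets a
  // null store placed immediately after its alloca, conceptually:
  //   %root = alloca i8*
  //   store i8* null, i8** %root
  // (an illustrative IR sketch; the actual pointee type is taken from the
  // alloca's type).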
  bool MadeChange = false;

  for (AllocaInst **I = Roots, **E = Roots + Count; I != E; ++I)
    if (!InitedRoots.count(*I)) {
      StoreInst* SI = new StoreInst(ConstantPointerNull::get(cast<PointerType>(
                        cast<PointerType>((*I)->getType())->getElementType())),
                        *I);
      SI->insertAfter(*I);
      MadeChange = true;
    }

  return MadeChange;
}
bool LowerIntrinsics::NeedsDefaultLoweringPass(const GCStrategy &C) {
  // Default lowering is necessary only if read or write barriers have a
  // default action. The default for roots is no action.
  return !C.customWriteBarrier()
      || !C.customReadBarrier()
      || C.initializeRoots();
}

bool LowerIntrinsics::NeedsCustomLoweringPass(const GCStrategy &C) {
  // Custom lowering is only necessary if enabled for some action.
  return C.customWriteBarrier()
      || C.customReadBarrier()
      || C.customRoots();
}
/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
bool LowerIntrinsics::CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) ||
      isa<StoreInst>(I) || isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (unsigned IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}
/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
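///
/// A function opts into GC lowering by naming a collector strategy in its
/// definition, for example (illustrative; "mygc" is a placeholder name that
/// must match a registered GCStrategy):
///   define i8* @foo() gc "mygc" { ... }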
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  bool MadeChange = false;

  if (NeedsDefaultLoweringPass(S))
    MadeChange |= PerformDefaultLowering(F, S);

  bool UseCustomLoweringPass = NeedsCustomLoweringPass(S);
  if (UseCustomLoweringPass)
    MadeChange |= S.performCustomLowering(F);

  // Custom lowering may modify the CFG, so dominators must be recomputed.
  if (UseCustomLoweringPass) {
    if (DominatorTree *DT = getAnalysisIfAvailable<DominatorTree>())
      DT->DT->recalculate(F);
  }

  return MadeChange;
}
bool LowerIntrinsics::PerformDefaultLowering(Function &F, GCStrategy &S) {
  bool LowerWr = !S.customWriteBarrier();
  bool LowerRd = !S.customReadBarrier();
  bool InitRoots = S.initializeRoots();

  SmallVector<AllocaInst*, 32> Roots;

  bool MadeChange = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) {
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) {
        Function *F = CI->getCalledFunction();
        switch (F->getIntrinsicID()) {
        case Intrinsic::gcwrite:
          if (LowerWr) {
            // Replace a write barrier with a simple store.
            Value *St = new StoreInst(CI->getArgOperand(0),
                                      CI->getArgOperand(2), CI);
            CI->replaceAllUsesWith(St);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcread:
          if (LowerRd) {
            // Replace a read barrier with a simple load.
            Value *Ld = new LoadInst(CI->getArgOperand(1), "", CI);
            Ld->takeName(CI);
            CI->replaceAllUsesWith(Ld);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcroot:
          if (InitRoots) {
            // Initialize the GC root, but do not delete the intrinsic. The
            // backend needs the intrinsic to flag the stack slot.
            Roots.push_back(cast<AllocaInst>(
                              CI->getArgOperand(0)->stripPointerCasts()));
          }
          break;
        default:
          continue;
        }

        MadeChange = true;
      }
    }
  }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots.begin(), Roots.size());

  return MadeChange;
}
// -----------------------------------------------------------------------------

FunctionPass *llvm::createGCMachineCodeAnalysisPass() {
  return new MachineCodeAnalysis();
}

char MachineCodeAnalysis::ID = 0;
MachineCodeAnalysis::MachineCodeAnalysis()
  : MachineFunctionPass(ID) {}

const char *MachineCodeAnalysis::getPassName() const {
  return "Analyze Machine Code For Garbage Collection";
}

void MachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfo>();
  AU.addRequired<GCModuleInfo>();
}
MCSymbol *MachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                           MachineBasicBlock::iterator MI,
                                           DebugLoc DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().CreateTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}
void MachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), too, so as to bracket the call
  // instruction.
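  //
  // When both safe points are requested, the call ends up bracketed roughly as
  //   GC_LABEL <pre>    ; PreCall safe point
  //   CALL ...
  //   GC_LABEL <post>   ; PostCall safe point
  // (an illustrative sketch of the machine instruction stream, not literal
  // output).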
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  if (FI->getStrategy().needsSafePoint(GC::PreCall)) {
    MCSymbol* Label = InsertLabel(*CI->getParent(), CI, CI->getDebugLoc());
    FI->addSafePoint(GC::PreCall, Label, CI->getDebugLoc());
  }

  if (FI->getStrategy().needsSafePoint(GC::PostCall)) {
    MCSymbol* Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
    FI->addSafePoint(GC::PostCall, Label, CI->getDebugLoc());
  }
}
void MachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineFunction::iterator BBI = MF.begin(),
                                 BBE = MF.end(); BBI != BBE; ++BBI)
    for (MachineBasicBlock::iterator MI = BBI->begin(),
                                     ME = BBI->end(); MI != ME; ++MI)
      if (MI->getDesc().isCall())
        VisitCallPoint(MI);
}
void MachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = TM->getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin(),
                                      RE = FI->roots_end(); RI != RE; ++RI)
    RI->StackOffset = TFI->getFrameIndexOffset(MF, RI->Num);
}
bool MachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction()->hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(*MF.getFunction());
  if (!FI->getStrategy().needsSafePoints())
    return false;

  TM = &MF.getTarget();
  MMI = &getAnalysis<MachineModuleInfo>();
  TII = TM->getInstrInfo();

  // Find the size of the stack frame.
  FI->setFrameSize(MF.getFrameInfo()->getStackSize());

  // Find all safe points.
  FindSafePoints(MF);

  // Find the stack offsets for all roots.
  FindStackOffsets(MF);

  return false;
}