// (Export provenance — not part of the source file:)
// [DAGCombiner] Add target hook function to decide folding (mul (add x, c1), c2)
// [llvm-project.git] / llvm / lib / CodeGen / GCRootLowering.cpp
// blob 58269e172c57374017e4ef6575905190f8fa9f93
1 //===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the lowering for the gc.root mechanism.
11 //===----------------------------------------------------------------------===//
13 #include "llvm/CodeGen/GCMetadata.h"
14 #include "llvm/CodeGen/MachineFrameInfo.h"
15 #include "llvm/CodeGen/MachineFunctionPass.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/MachineModuleInfo.h"
18 #include "llvm/CodeGen/Passes.h"
19 #include "llvm/CodeGen/TargetFrameLowering.h"
20 #include "llvm/CodeGen/TargetInstrInfo.h"
21 #include "llvm/CodeGen/TargetRegisterInfo.h"
22 #include "llvm/CodeGen/TargetSubtargetInfo.h"
23 #include "llvm/IR/Dominators.h"
24 #include "llvm/IR/IntrinsicInst.h"
25 #include "llvm/IR/Module.h"
26 #include "llvm/InitializePasses.h"
27 #include "llvm/Support/Debug.h"
28 #include "llvm/Support/ErrorHandling.h"
29 #include "llvm/Support/raw_ostream.h"
31 using namespace llvm;
33 namespace {
35 /// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
36 /// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
37 /// directed by the GCStrategy. It also performs automatic root initialization
38 /// and custom intrinsic lowering.
39 class LowerIntrinsics : public FunctionPass {
40 bool DoLowering(Function &F, GCStrategy &S);
42 public:
43 static char ID;
45 LowerIntrinsics();
46 StringRef getPassName() const override;
47 void getAnalysisUsage(AnalysisUsage &AU) const override;
49 bool doInitialization(Module &M) override;
50 bool runOnFunction(Function &F) override;
53 /// GCMachineCodeAnalysis - This is a target-independent pass over the machine
54 /// function representation to identify safe points for the garbage collector
55 /// in the machine code. It inserts labels at safe points and populates a
56 /// GCMetadata record for each function.
57 class GCMachineCodeAnalysis : public MachineFunctionPass {
58 GCFunctionInfo *FI;
59 const TargetInstrInfo *TII;
61 void FindSafePoints(MachineFunction &MF);
62 void VisitCallPoint(MachineBasicBlock::iterator CI);
63 MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
64 const DebugLoc &DL) const;
66 void FindStackOffsets(MachineFunction &MF);
68 public:
69 static char ID;
71 GCMachineCodeAnalysis();
72 void getAnalysisUsage(AnalysisUsage &AU) const override;
74 bool runOnMachineFunction(MachineFunction &MF) override;
78 // -----------------------------------------------------------------------------
80 INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
81 false)
82 INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
83 INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)
85 FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }
87 char LowerIntrinsics::ID = 0;
88 char &llvm::GCLoweringID = LowerIntrinsics::ID;
90 LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
91 initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
94 StringRef LowerIntrinsics::getPassName() const {
95 return "Lower Garbage Collection Instructions";
98 void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
99 FunctionPass::getAnalysisUsage(AU);
100 AU.addRequired<GCModuleInfo>();
101 AU.addPreserved<DominatorTreeWrapperPass>();
104 /// doInitialization - If this module uses the GC intrinsics, find them now.
105 bool LowerIntrinsics::doInitialization(Module &M) {
106 GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
107 assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
108 for (Function &F : M)
109 if (!F.isDeclaration() && F.hasGC())
110 MI->getFunctionInfo(F); // Instantiate the GC strategy.
112 return false;
115 /// CouldBecomeSafePoint - Predicate to conservatively determine whether the
116 /// instruction could introduce a safe point.
117 static bool CouldBecomeSafePoint(Instruction *I) {
118 // The natural definition of instructions which could introduce safe points
119 // are:
121 // - call, invoke (AfterCall, BeforeCall)
122 // - phis (Loops)
123 // - invoke, ret, unwind (Exit)
125 // However, instructions as seemingly inoccuous as arithmetic can become
126 // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
127 // it is necessary to take a conservative approach.
129 if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
130 isa<LoadInst>(I))
131 return false;
133 // llvm.gcroot is safe because it doesn't do anything at runtime.
134 if (CallInst *CI = dyn_cast<CallInst>(I))
135 if (Function *F = CI->getCalledFunction())
136 if (Intrinsic::ID IID = F->getIntrinsicID())
137 if (IID == Intrinsic::gcroot)
138 return false;
140 return true;
143 static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
144 // Scroll past alloca instructions.
145 BasicBlock::iterator IP = F.getEntryBlock().begin();
146 while (isa<AllocaInst>(IP))
147 ++IP;
149 // Search for initializers in the initial BB.
150 SmallPtrSet<AllocaInst *, 16> InitedRoots;
151 for (; !CouldBecomeSafePoint(&*IP); ++IP)
152 if (StoreInst *SI = dyn_cast<StoreInst>(IP))
153 if (AllocaInst *AI =
154 dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
155 InitedRoots.insert(AI);
157 // Add root initializers.
158 bool MadeChange = false;
160 for (AllocaInst *Root : Roots)
161 if (!InitedRoots.count(Root)) {
162 new StoreInst(
163 ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
164 Root, Root->getNextNode());
165 MadeChange = true;
168 return MadeChange;
171 /// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
172 /// Leave gcroot intrinsics; the code generator needs to see those.
173 bool LowerIntrinsics::runOnFunction(Function &F) {
174 // Quick exit for functions that do not use GC.
175 if (!F.hasGC())
176 return false;
178 GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
179 GCStrategy &S = FI.getStrategy();
181 return DoLowering(F, S);
184 /// Lower barriers out of existance (if the associated GCStrategy hasn't
185 /// already done so...), and insert initializing stores to roots as a defensive
186 /// measure. Given we're going to report all roots live at all safepoints, we
187 /// need to be able to ensure each root has been initialized by the point the
188 /// first safepoint is reached. This really should have been done by the
189 /// frontend, but the old API made this non-obvious, so we do a potentially
190 /// redundant store just in case.
191 bool LowerIntrinsics::DoLowering(Function &F, GCStrategy &S) {
192 SmallVector<AllocaInst *, 32> Roots;
194 bool MadeChange = false;
195 for (BasicBlock &BB : F)
196 for (BasicBlock::iterator II = BB.begin(), E = BB.end(); II != E;) {
197 IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++);
198 if (!CI)
199 continue;
201 Function *F = CI->getCalledFunction();
202 switch (F->getIntrinsicID()) {
203 default: break;
204 case Intrinsic::gcwrite: {
205 // Replace a write barrier with a simple store.
206 Value *St = new StoreInst(CI->getArgOperand(0),
207 CI->getArgOperand(2), CI);
208 CI->replaceAllUsesWith(St);
209 CI->eraseFromParent();
210 MadeChange = true;
211 break;
213 case Intrinsic::gcread: {
214 // Replace a read barrier with a simple load.
215 Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
216 Ld->takeName(CI);
217 CI->replaceAllUsesWith(Ld);
218 CI->eraseFromParent();
219 MadeChange = true;
220 break;
222 case Intrinsic::gcroot: {
223 // Initialize the GC root, but do not delete the intrinsic. The
224 // backend needs the intrinsic to flag the stack slot.
225 Roots.push_back(
226 cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
227 break;
232 if (Roots.size())
233 MadeChange |= InsertRootInitializers(F, Roots);
235 return MadeChange;
238 // -----------------------------------------------------------------------------
240 char GCMachineCodeAnalysis::ID = 0;
241 char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;
243 INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
244 "Analyze Machine Code For Garbage Collection", false, false)
246 GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}
248 void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
249 MachineFunctionPass::getAnalysisUsage(AU);
250 AU.setPreservesAll();
251 AU.addRequired<GCModuleInfo>();
254 MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
255 MachineBasicBlock::iterator MI,
256 const DebugLoc &DL) const {
257 MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
258 BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
259 return Label;
262 void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
263 // Find the return address (next instruction), since that's what will be on
264 // the stack when the call is suspended and we need to inspect the stack.
265 MachineBasicBlock::iterator RAI = CI;
266 ++RAI;
268 MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
269 FI->addSafePoint(Label, CI->getDebugLoc());
272 void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
273 for (MachineBasicBlock &MBB : MF)
274 for (MachineBasicBlock::iterator MI = MBB.begin(), ME = MBB.end();
275 MI != ME; ++MI)
276 if (MI->isCall()) {
277 // Do not treat tail or sibling call sites as safe points. This is
278 // legal since any arguments passed to the callee which live in the
279 // remnants of the callers frame will be owned and updated by the
280 // callee if required.
281 if (MI->isTerminator())
282 continue;
283 VisitCallPoint(MI);
287 void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
288 const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
289 assert(TFI && "TargetRegisterInfo not available!");
291 for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
292 RI != FI->roots_end();) {
293 // If the root references a dead object, no need to keep it.
294 if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
295 RI = FI->removeStackRoot(RI);
296 } else {
297 Register FrameReg; // FIXME: surely GCRoot ought to store the
298 // register that the offset is from?
299 auto FrameOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
300 assert(!FrameOffset.getScalable() &&
301 "Frame offsets with a scalable component are not supported");
302 RI->StackOffset = FrameOffset.getFixed();
303 ++RI;
308 bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
309 // Quick exit for functions that do not use GC.
310 if (!MF.getFunction().hasGC())
311 return false;
313 FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
314 TII = MF.getSubtarget().getInstrInfo();
316 // Find the size of the stack frame. There may be no correct static frame
317 // size, we use UINT64_MAX to represent this.
318 const MachineFrameInfo &MFI = MF.getFrameInfo();
319 const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
320 const bool DynamicFrameSize =
321 MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(MF);
322 FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());
324 // Find all safe points.
325 if (FI->getStrategy().needsSafePoints())
326 FindSafePoints(MF);
328 // Find the concrete stack offsets for all roots (stack slots)
329 FindStackOffsets(MF);
331 return false;