// (web-scrape provenance, preserved as comments — not part of the original source)
// [InstCombine] Signed saturation tests. NFC
// [llvm-complete.git] / lib / CodeGen / SwiftErrorValueTracking.cpp
// blob c72a04276a4f28a5cd78240a4e73cfe317e100b4
1 //===-- SwiftErrorValueTracking.cpp --------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This implements a limited mem2reg-like analysis to promote uses of function
10 // arguments and allocas marked with swifterror from memory into virtual
11 // registers tracked by this class.
13 //===----------------------------------------------------------------------===//
15 #include "llvm/CodeGen/SwiftErrorValueTracking.h"
16 #include "llvm/ADT/PostOrderIterator.h"
17 #include "llvm/ADT/SmallSet.h"
18 #include "llvm/CodeGen/MachineInstrBuilder.h"
19 #include "llvm/CodeGen/MachineRegisterInfo.h"
20 #include "llvm/CodeGen/TargetInstrInfo.h"
21 #include "llvm/CodeGen/TargetLowering.h"
22 #include "llvm/IR/Value.h"
24 using namespace llvm;
26 Register SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
27 const Value *Val) {
28 auto Key = std::make_pair(MBB, Val);
29 auto It = VRegDefMap.find(Key);
30 // If this is the first use of this swifterror value in this basic block,
31 // create a new virtual register.
32 // After we processed all basic blocks we will satisfy this "upwards exposed
33 // use" by inserting a copy or phi at the beginning of this block.
34 if (It == VRegDefMap.end()) {
35 auto &DL = MF->getDataLayout();
36 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
37 auto VReg = MF->getRegInfo().createVirtualRegister(RC);
38 VRegDefMap[Key] = VReg;
39 VRegUpwardsUse[Key] = VReg;
40 return VReg;
41 } else
42 return It->second;
45 void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
46 const Value *Val, Register VReg) {
47 VRegDefMap[std::make_pair(MBB, Val)] = VReg;
50 Register SwiftErrorValueTracking::getOrCreateVRegDefAt(
51 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
52 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
53 auto It = VRegDefUses.find(Key);
54 if (It != VRegDefUses.end())
55 return It->second;
57 auto &DL = MF->getDataLayout();
58 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
59 Register VReg = MF->getRegInfo().createVirtualRegister(RC);
60 VRegDefUses[Key] = VReg;
61 setCurrentVReg(MBB, Val, VReg);
62 return VReg;
65 Register SwiftErrorValueTracking::getOrCreateVRegUseAt(
66 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
67 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
68 auto It = VRegDefUses.find(Key);
69 if (It != VRegDefUses.end())
70 return It->second;
72 Register VReg = getOrCreateVReg(MBB, Val);
73 VRegDefUses[Key] = VReg;
74 return VReg;
77 /// Set up SwiftErrorVals by going through the function. If the function has
78 /// swifterror argument, it will be the first entry.
79 void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
80 MF = &mf;
81 Fn = &MF->getFunction();
82 TLI = MF->getSubtarget().getTargetLowering();
83 TII = MF->getSubtarget().getInstrInfo();
85 if (!TLI->supportSwiftError())
86 return;
88 SwiftErrorVals.clear();
89 VRegDefMap.clear();
90 VRegUpwardsUse.clear();
91 VRegDefUses.clear();
92 SwiftErrorArg = nullptr;
94 // Check if function has a swifterror argument.
95 bool HaveSeenSwiftErrorArg = false;
96 for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
97 AI != AE; ++AI)
98 if (AI->hasSwiftErrorAttr()) {
99 assert(!HaveSeenSwiftErrorArg &&
100 "Must have only one swifterror parameter");
101 (void)HaveSeenSwiftErrorArg; // silence warning.
102 HaveSeenSwiftErrorArg = true;
103 SwiftErrorArg = &*AI;
104 SwiftErrorVals.push_back(&*AI);
107 for (const auto &LLVMBB : *Fn)
108 for (const auto &Inst : LLVMBB) {
109 if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
110 if (Alloca->isSwiftError())
111 SwiftErrorVals.push_back(Alloca);
115 bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
116 if (!TLI->supportSwiftError())
117 return false;
119 // We only need to do this when we have swifterror parameter or swifterror
120 // alloc.
121 if (SwiftErrorVals.empty())
122 return false;
124 MachineBasicBlock *MBB = &*MF->begin();
125 auto &DL = MF->getDataLayout();
126 auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
127 bool Inserted = false;
128 for (const auto *SwiftErrorVal : SwiftErrorVals) {
129 // We will always generate a copy from the argument. It is always used at
130 // least by the 'return' of the swifterror.
131 if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
132 continue;
133 Register VReg = MF->getRegInfo().createVirtualRegister(RC);
134 // Assign Undef to Vreg. We construct MI directly to make sure it works
135 // with FastISel.
136 BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
137 TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
139 setCurrentVReg(MBB, SwiftErrorVal, VReg);
140 Inserted = true;
143 return Inserted;
146 /// Propagate swifterror values through the machine function CFG.
147 void SwiftErrorValueTracking::propagateVRegs() {
148 if (!TLI->supportSwiftError())
149 return;
151 // We only need to do this when we have swifterror parameter or swifterror
152 // alloc.
153 if (SwiftErrorVals.empty())
154 return;
156 // For each machine basic block in reverse post order.
157 ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
158 for (MachineBasicBlock *MBB : RPOT) {
159 // For each swifterror value in the function.
160 for (const auto *SwiftErrorVal : SwiftErrorVals) {
161 auto Key = std::make_pair(MBB, SwiftErrorVal);
162 auto UUseIt = VRegUpwardsUse.find(Key);
163 auto VRegDefIt = VRegDefMap.find(Key);
164 bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
165 Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
166 bool DownwardDef = VRegDefIt != VRegDefMap.end();
167 assert(!(UpwardsUse && !DownwardDef) &&
168 "We can't have an upwards use but no downwards def");
170 // If there is no upwards exposed use and an entry for the swifterror in
171 // the def map for this value we don't need to do anything: We already
172 // have a downward def for this basic block.
173 if (!UpwardsUse && DownwardDef)
174 continue;
176 // Otherwise we either have an upwards exposed use vreg that we need to
177 // materialize or need to forward the downward def from predecessors.
179 // Check whether we have a single vreg def from all predecessors.
180 // Otherwise we need a phi.
181 SmallVector<std::pair<MachineBasicBlock *, Register>, 4> VRegs;
182 SmallSet<const MachineBasicBlock *, 8> Visited;
183 for (auto *Pred : MBB->predecessors()) {
184 if (!Visited.insert(Pred).second)
185 continue;
186 VRegs.push_back(std::make_pair(
187 Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
188 if (Pred != MBB)
189 continue;
190 // We have a self-edge.
191 // If there was no upwards use in this basic block there is now one: the
192 // phi needs to use it self.
193 if (!UpwardsUse) {
194 UpwardsUse = true;
195 UUseIt = VRegUpwardsUse.find(Key);
196 assert(UUseIt != VRegUpwardsUse.end());
197 UUseVReg = UUseIt->second;
201 // We need a phi node if we have more than one predecessor with different
202 // downward defs.
203 bool needPHI =
204 VRegs.size() >= 1 &&
205 std::find_if(
206 VRegs.begin(), VRegs.end(),
207 [&](const std::pair<const MachineBasicBlock *, Register> &V)
208 -> bool { return V.second != VRegs[0].second; }) !=
209 VRegs.end();
211 // If there is no upwards exposed used and we don't need a phi just
212 // forward the swifterror vreg from the predecessor(s).
213 if (!UpwardsUse && !needPHI) {
214 assert(!VRegs.empty() &&
215 "No predecessors? The entry block should bail out earlier");
216 // Just forward the swifterror vreg from the predecessor(s).
217 setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
218 continue;
221 auto DLoc = isa<Instruction>(SwiftErrorVal)
222 ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
223 : DebugLoc();
224 const auto *TII = MF->getSubtarget().getInstrInfo();
226 // If we don't need a phi create a copy to the upward exposed vreg.
227 if (!needPHI) {
228 assert(UpwardsUse);
229 assert(!VRegs.empty() &&
230 "No predecessors? Is the Calling Convention correct?");
231 Register DestReg = UUseVReg;
232 BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
233 DestReg)
234 .addReg(VRegs[0].second);
235 continue;
238 // We need a phi: if there is an upwards exposed use we already have a
239 // destination virtual register number otherwise we generate a new one.
240 auto &DL = MF->getDataLayout();
241 auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
242 Register PHIVReg =
243 UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
244 MachineInstrBuilder PHI =
245 BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
246 TII->get(TargetOpcode::PHI), PHIVReg);
247 for (auto BBRegPair : VRegs) {
248 PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
251 // We did not have a definition in this block before: store the phi's vreg
252 // as this block downward exposed def.
253 if (!UpwardsUse)
254 setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
259 void SwiftErrorValueTracking::preassignVRegs(
260 MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
261 BasicBlock::const_iterator End) {
262 if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
263 return;
265 // Iterator over instructions and assign vregs to swifterror defs and uses.
266 for (auto It = Begin; It != End; ++It) {
267 ImmutableCallSite CS(&*It);
268 if (CS) {
269 // A call-site with a swifterror argument is both use and def.
270 const Value *SwiftErrorAddr = nullptr;
271 for (auto &Arg : CS.args()) {
272 if (!Arg->isSwiftError())
273 continue;
274 // Use of swifterror.
275 assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
276 SwiftErrorAddr = &*Arg;
277 assert(SwiftErrorAddr->isSwiftError() &&
278 "Must have a swifterror value argument");
279 getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
281 if (!SwiftErrorAddr)
282 continue;
284 // Def of swifterror.
285 getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
287 // A load is a use.
288 } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
289 const Value *V = LI->getOperand(0);
290 if (!V->isSwiftError())
291 continue;
293 getOrCreateVRegUseAt(LI, MBB, V);
295 // A store is a def.
296 } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
297 const Value *SwiftErrorAddr = SI->getOperand(1);
298 if (!SwiftErrorAddr->isSwiftError())
299 continue;
301 // Def of swifterror.
302 getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
304 // A return in a swiferror returning function is a use.
305 } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
306 const Function *F = R->getParent()->getParent();
307 if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
308 continue;
310 getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);