[x86] fix assert with horizontal math + broadcast of vector (PR43402)
[llvm-core.git] / lib / CodeGen / SwiftErrorValueTracking.cpp
blob96821cadb1b6e1fc4912255101076988d0428606
1 //===-- SwiftErrorValueTracking.cpp --------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This implements a limited mem2reg-like analysis to promote uses of function
10 // arguments and allocas marked with swifterror from memory into virtual
11 // registers tracked by this class.
13 //===----------------------------------------------------------------------===//
15 #include "llvm/CodeGen/SwiftErrorValueTracking.h"
16 #include "llvm/ADT/SmallSet.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/MachineInstrBuilder.h"
19 #include "llvm/CodeGen/TargetInstrInfo.h"
20 #include "llvm/CodeGen/TargetLowering.h"
21 #include "llvm/IR/Value.h"
23 using namespace llvm;
25 Register SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
26 const Value *Val) {
27 auto Key = std::make_pair(MBB, Val);
28 auto It = VRegDefMap.find(Key);
29 // If this is the first use of this swifterror value in this basic block,
30 // create a new virtual register.
31 // After we processed all basic blocks we will satisfy this "upwards exposed
32 // use" by inserting a copy or phi at the beginning of this block.
33 if (It == VRegDefMap.end()) {
34 auto &DL = MF->getDataLayout();
35 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
36 auto VReg = MF->getRegInfo().createVirtualRegister(RC);
37 VRegDefMap[Key] = VReg;
38 VRegUpwardsUse[Key] = VReg;
39 return VReg;
40 } else
41 return It->second;
44 void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
45 const Value *Val, Register VReg) {
46 VRegDefMap[std::make_pair(MBB, Val)] = VReg;
49 Register SwiftErrorValueTracking::getOrCreateVRegDefAt(
50 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
51 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
52 auto It = VRegDefUses.find(Key);
53 if (It != VRegDefUses.end())
54 return It->second;
56 auto &DL = MF->getDataLayout();
57 const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
58 Register VReg = MF->getRegInfo().createVirtualRegister(RC);
59 VRegDefUses[Key] = VReg;
60 setCurrentVReg(MBB, Val, VReg);
61 return VReg;
64 Register SwiftErrorValueTracking::getOrCreateVRegUseAt(
65 const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
66 auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
67 auto It = VRegDefUses.find(Key);
68 if (It != VRegDefUses.end())
69 return It->second;
71 Register VReg = getOrCreateVReg(MBB, Val);
72 VRegDefUses[Key] = VReg;
73 return VReg;
76 /// Set up SwiftErrorVals by going through the function. If the function has
77 /// swifterror argument, it will be the first entry.
78 void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
79 MF = &mf;
80 Fn = &MF->getFunction();
81 TLI = MF->getSubtarget().getTargetLowering();
82 TII = MF->getSubtarget().getInstrInfo();
84 if (!TLI->supportSwiftError())
85 return;
87 SwiftErrorVals.clear();
88 VRegDefMap.clear();
89 VRegUpwardsUse.clear();
90 VRegDefUses.clear();
91 SwiftErrorArg = nullptr;
93 // Check if function has a swifterror argument.
94 bool HaveSeenSwiftErrorArg = false;
95 for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
96 AI != AE; ++AI)
97 if (AI->hasSwiftErrorAttr()) {
98 assert(!HaveSeenSwiftErrorArg &&
99 "Must have only one swifterror parameter");
100 (void)HaveSeenSwiftErrorArg; // silence warning.
101 HaveSeenSwiftErrorArg = true;
102 SwiftErrorArg = &*AI;
103 SwiftErrorVals.push_back(&*AI);
106 for (const auto &LLVMBB : *Fn)
107 for (const auto &Inst : LLVMBB) {
108 if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
109 if (Alloca->isSwiftError())
110 SwiftErrorVals.push_back(Alloca);
114 bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
115 if (!TLI->supportSwiftError())
116 return false;
118 // We only need to do this when we have swifterror parameter or swifterror
119 // alloc.
120 if (SwiftErrorVals.empty())
121 return false;
123 MachineBasicBlock *MBB = &*MF->begin();
124 auto &DL = MF->getDataLayout();
125 auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
126 bool Inserted = false;
127 for (const auto *SwiftErrorVal : SwiftErrorVals) {
128 // We will always generate a copy from the argument. It is always used at
129 // least by the 'return' of the swifterror.
130 if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
131 continue;
132 Register VReg = MF->getRegInfo().createVirtualRegister(RC);
133 // Assign Undef to Vreg. We construct MI directly to make sure it works
134 // with FastISel.
135 BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
136 TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
138 setCurrentVReg(MBB, SwiftErrorVal, VReg);
139 Inserted = true;
142 return Inserted;
/// Propagate swifterror values through the machine function CFG.
///
/// Visits blocks in reverse post order and, for each swifterror value,
/// resolves upwards-exposed uses recorded by getOrCreateVReg: either by
/// forwarding a single predecessor def, by emitting a COPY into the
/// upwards-exposed vreg, or by emitting a PHI when predecessors disagree.
void SwiftErrorValueTracking::propagateVRegs() {
  if (!TLI->supportSwiftError())
    return;

  // We only need to do this when we have swifterror parameter or swifterror
  // alloc.
  if (SwiftErrorVals.empty())
    return;

  // For each machine basic block in reverse post order.
  ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
  for (MachineBasicBlock *MBB : RPOT) {
    // For each swifterror value in the function.
    for (const auto *SwiftErrorVal : SwiftErrorVals) {
      auto Key = std::make_pair(MBB, SwiftErrorVal);
      auto UUseIt = VRegUpwardsUse.find(Key);
      auto VRegDefIt = VRegDefMap.find(Key);
      bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
      Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
      bool DownwardDef = VRegDefIt != VRegDefMap.end();
      // getOrCreateVReg always records a def alongside an upwards use, so an
      // upwards use without a downward def is impossible.
      assert(!(UpwardsUse && !DownwardDef) &&
             "We can't have an upwards use but no downwards def");

      // If there is no upwards exposed use and an entry for the swifterror in
      // the def map for this value we don't need to do anything: We already
      // have a downward def for this basic block.
      if (!UpwardsUse && DownwardDef)
        continue;

      // Otherwise we either have an upwards exposed use vreg that we need to
      // materialize or need to forward the downward def from predecessors.

      // Check whether we have a single vreg def from all predecessors.
      // Otherwise we need a phi.
      SmallVector<std::pair<MachineBasicBlock *, Register>, 4> VRegs;
      SmallSet<const MachineBasicBlock *, 8> Visited;
      for (auto *Pred : MBB->predecessors()) {
        if (!Visited.insert(Pred).second)
          continue;
        // Note: this lookup may itself create an upwards-exposed use in the
        // predecessor (including in MBB on a self-edge, handled below).
        VRegs.push_back(std::make_pair(
            Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
        if (Pred != MBB)
          continue;
        // We have a self-edge.
        // If there was no upwards use in this basic block there is now one: the
        // phi needs to use it self.
        if (!UpwardsUse) {
          UpwardsUse = true;
          UUseIt = VRegUpwardsUse.find(Key);
          assert(UUseIt != VRegUpwardsUse.end());
          UUseVReg = UUseIt->second;
        }
      }

      // We need a phi node if we have more than one predecessor with different
      // downward defs.
      bool needPHI =
          VRegs.size() >= 1 &&
          std::find_if(
              VRegs.begin(), VRegs.end(),
              [&](const std::pair<const MachineBasicBlock *, Register> &V)
                  -> bool { return V.second != VRegs[0].second; }) !=
              VRegs.end();

      // If there is no upwards exposed used and we don't need a phi just
      // forward the swifterror vreg from the predecessor(s).
      if (!UpwardsUse && !needPHI) {
        assert(!VRegs.empty() &&
               "No predecessors? The entry block should bail out earlier");
        // Just forward the swifterror vreg from the predecessor(s).
        setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
        continue;
      }

      // Use the alloca's debug location when the value is an instruction;
      // the swifterror argument has no location to attach.
      auto DLoc = isa<Instruction>(SwiftErrorVal)
                      ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
                      : DebugLoc();
      const auto *TII = MF->getSubtarget().getInstrInfo();

      // If we don't need a phi create a copy to the upward exposed vreg.
      if (!needPHI) {
        assert(UpwardsUse);
        assert(!VRegs.empty() &&
               "No predecessors? Is the Calling Convention correct?");
        Register DestReg = UUseVReg;
        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
                DestReg)
            .addReg(VRegs[0].second);
        continue;
      }

      // We need a phi: if there is an upwards exposed use we already have a
      // destination virtual register number otherwise we generate a new one.
      auto &DL = MF->getDataLayout();
      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
      Register PHIVReg =
          UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
      MachineInstrBuilder PHI =
          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
                  TII->get(TargetOpcode::PHI), PHIVReg);
      for (auto BBRegPair : VRegs) {
        PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
      }

      // We did not have a definition in this block before: store the phi's vreg
      // as this block downward exposed def.
      if (!UpwardsUse)
        setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
    }
  }
}
/// Pre-assign virtual registers to swifterror defs and uses for the
/// instructions in [\p Begin, \p End) of \p MBB.
///
/// NOTE(review): the end of this function is outside the visible chunk; the
/// code below is annotated only up to the last visible statement.
void SwiftErrorValueTracking::preassignVRegs(
    MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
    BasicBlock::const_iterator End) {
  if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
    return;

  // Iterator over instructions and assign vregs to swifterror defs and uses.
  for (auto It = Begin; It != End; ++It) {
    ImmutableCallSite CS(&*It);
    if (CS) {
      // A call-site with a swifterror argument is both use and def.
      const Value *SwiftErrorAddr = nullptr;
      for (auto &Arg : CS.args()) {
        if (!Arg->isSwiftError())
          continue;
        // Use of swifterror.
        assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
        SwiftErrorAddr = &*Arg;
        assert(SwiftErrorAddr->isSwiftError() &&
               "Must have a swifterror value argument");
        getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
      }
      // Calls without a swifterror argument need no def either.
      if (!SwiftErrorAddr)
        continue;

      // Def of swifterror.
      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);

      // A load is a use.
    } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
      const Value *V = LI->getOperand(0);
      if (!V->isSwiftError())
        continue;

      getOrCreateVRegUseAt(LI, MBB, V);

      // A store is a def.
    } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
      // Operand 1 of a store is the pointer operand.
      const Value *SwiftErrorAddr = SI->getOperand(1);
      if (!SwiftErrorAddr->isSwiftError())
        continue;

      // Def of swifterror.
      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);

      // A return in a swifterror returning function is a use.
    } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
      const Function *F = R->getParent()->getParent();
      if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
        continue;

      getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);