//===- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the VirtRegMap class.
//
// It also contains implementations of the Spiller interface, which, given a
// virtual register map and a machine function, eliminates all virtual
// references by replacing them with physical register references - adding spill
// code as necessary.
//
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/VirtRegMap.h"
#include "LiveDebugVariables.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/LiveStacks.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Pass.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <utility>

using namespace llvm;
#define DEBUG_TYPE "regalloc"
STATISTIC(NumSpillSlots, "Number of spill slots allocated");
STATISTIC(NumIdCopies,   "Number of identity moves eliminated after rewriting");
//===----------------------------------------------------------------------===//
//  VirtRegMap implementation
//===----------------------------------------------------------------------===//
char VirtRegMap::ID = 0;
INITIALIZE_PASS(VirtRegMap, "virtregmap", "Virtual Register Map", false, false)
bool VirtRegMap::runOnMachineFunction(MachineFunction &mf) {
  MRI = &mf.getRegInfo();
  TII = mf.getSubtarget().getInstrInfo();
  TRI = mf.getSubtarget().getRegisterInfo();
  MF = &mf;

  Virt2PhysMap.clear();
  Virt2StackSlotMap.clear();
  Virt2SplitMap.clear();

  grow();
  return false;
}
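// Resize all maps so they have an entry for every virtual register currently
// in the function.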
void VirtRegMap::grow() {
  unsigned NumRegs = MF->getRegInfo().getNumVirtRegs();
  Virt2PhysMap.resize(NumRegs);
  Virt2StackSlotMap.resize(NumRegs);
  Virt2SplitMap.resize(NumRegs);
}
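// Record the physical register assigned to a virtual register. Each virtual
// register may only be mapped once, and never to a reserved register.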
void VirtRegMap::assignVirt2Phys(unsigned virtReg, MCPhysReg physReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg) &&
         TargetRegisterInfo::isPhysicalRegister(physReg));
  assert(Virt2PhysMap[virtReg] == NO_PHYS_REG &&
         "attempt to assign physical register to already mapped "
         "virtual register");
  assert(!getRegInfo().isReserved(physReg) &&
         "Attempt to map virtReg to a reserved physReg");
  Virt2PhysMap[virtReg] = physReg;
}
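// Create a new spill stack slot sized and aligned for register class RC.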
unsigned VirtRegMap::createSpillSlot(const TargetRegisterClass *RC) {
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int SS = MF->getFrameInfo().CreateSpillStackObject(Size, Align);
  ++NumSpillSlots;
  return SS;
}
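// Return true if VirtReg ended up in the physical register it was hinted to
// get; a hint that is itself a virtual register is resolved through that
// register's own assignment first.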
bool VirtRegMap::hasPreferredPhys(unsigned VirtReg) {
  unsigned Hint = MRI->getSimpleHint(VirtReg);
  if (!Hint)
    return false;
  if (TargetRegisterInfo::isVirtualRegister(Hint))
    Hint = getPhys(Hint);
  return getPhys(VirtReg) == Hint;
}
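// Return true if the allocation hint for VirtReg resolves to a concrete
// physical register, i.e. the preference is already known at this point.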
bool VirtRegMap::hasKnownPreference(unsigned VirtReg) {
  std::pair<unsigned, unsigned> Hint = MRI->getRegAllocationHint(VirtReg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint.second))
    return true;
  if (TargetRegisterInfo::isVirtualRegister(Hint.second))
    return hasPhys(Hint.second);
  return false;
}
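// Assign a brand new spill slot to VirtReg and return its frame index.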
int VirtRegMap::assignVirt2StackSlot(unsigned virtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF->getRegInfo().getRegClass(virtReg);
  return Virt2StackSlotMap[virtReg] = createSpillSlot(RC);
}
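// Map VirtReg to an existing stack slot (or fixed frame index) SS instead of
// creating a new one.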
void VirtRegMap::assignVirt2StackSlot(unsigned virtReg, int SS) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((SS >= 0 ||
          (SS >= MF->getFrameInfo().getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg] = SS;
}
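// Dump the current virtual-to-physical and virtual-to-stack-slot mappings.
// Illustrative output (exact register and class names are target-dependent):
//   [%0 -> $eax] GR32
//   [%1 -> fi#1] GR32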
void VirtRegMap::print(raw_ostream &OS, const Module*) const {
  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2PhysMap[Reg] != (unsigned)VirtRegMap::NO_PHYS_REG) {
      OS << '[' << printReg(Reg, TRI) << " -> "
         << printReg(Virt2PhysMap[Reg], TRI) << "] "
         << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2StackSlotMap[Reg] != VirtRegMap::NO_STACK_SLOT) {
      OS << '[' << printReg(Reg, TRI) << " -> fi#" << Virt2StackSlotMap[Reg]
         << "] " << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }
  OS << '\n';
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtRegMap::dump() const {
  print(dbgs());
}
#endif
//===----------------------------------------------------------------------===//
//                              VirtRegRewriter
//===----------------------------------------------------------------------===//
//
// The VirtRegRewriter is the last of the register allocator passes.
// It rewrites virtual registers to physical registers as specified in the
// VirtRegMap analysis. It also updates live-in information on basic blocks
// according to LiveIntervals.
//
namespace {

class VirtRegRewriter : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  MachineRegisterInfo *MRI;
  SlotIndexes *Indexes;
  LiveIntervals *LIS;
  VirtRegMap *VRM;

  void rewrite();
  void addMBBLiveIns();
  bool readsUndefSubreg(const MachineOperand &MO) const;
  void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
  void handleIdentityCopy(MachineInstr &MI) const;
  void expandCopyBundle(MachineInstr &MI) const;
  bool subRegLiveThrough(const MachineInstr &MI, unsigned SuperPhysReg) const;

public:
  static char ID;

  VirtRegRewriter() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction&) override;

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

} // end anonymous namespace
char VirtRegRewriter::ID = 0;

char &llvm::VirtRegRewriterID = VirtRegRewriter::ID;
INITIALIZE_PASS_BEGIN(VirtRegRewriter, "virtregrewriter",
                      "Virtual Register Rewriter", false, false)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(LiveDebugVariables)
INITIALIZE_PASS_DEPENDENCY(LiveStacks)
INITIALIZE_PASS_DEPENDENCY(VirtRegMap)
INITIALIZE_PASS_END(VirtRegRewriter, "virtregrewriter",
                    "Virtual Register Rewriter", false, false)
void VirtRegRewriter::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<VirtRegMap>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
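// Top-level driver: fetch the analyses, add kill flags and block live-ins
// while virtual registers still exist, rewrite every operand, emit the
// DBG_VALUEs recorded by LiveDebugVariables, and finally drop all virtual
// register state.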
bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI = &MF->getRegInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  LIS = &getAnalysis<LiveIntervals>();
  VRM = &getAnalysis<VirtRegMap>();
  LLVM_DEBUG(dbgs() << "********** REWRITE VIRTUAL REGISTERS **********\n"
                    << "********** Function: " << MF->getName() << '\n');
  LLVM_DEBUG(VRM->dump());

  // Add kill flags while we still have virtual registers.
  LIS->addKillFlags(VRM);

  // Live-in lists on basic blocks are required for physregs.
  addMBBLiveIns();

  // Rewrite virtual registers.
  rewrite();

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers and release all the transient data.
  VRM->clearAllVirt();
  MRI->clearVirtRegs();

  return true;
}
void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
                                             unsigned PhysReg) const {
  assert(!LI.empty());
  assert(LI.hasSubRanges());

  using SubRangeIteratorPair =
      std::pair<const LiveInterval::SubRange *, LiveInterval::const_iterator>;

  SmallVector<SubRangeIteratorPair, 4> SubRanges;
  SlotIndex First;
  SlotIndex Last;
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    SubRanges.push_back(std::make_pair(&SR, SR.begin()));
    if (!First.isValid() || SR.segments.front().start < First)
      First = SR.segments.front().start;
    if (!Last.isValid() || SR.segments.back().end > Last)
      Last = SR.segments.back().end;
  }

  // Check all mbb start positions between First and Last while
  // simultaneously advancing an iterator for each subrange.
  for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
       MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
    SlotIndex MBBBegin = MBBI->first;
    // Advance all subrange iterators so that their end position is just
    // behind MBBBegin (or the iterator is at the end).
    LaneBitmask LaneMask;
    for (auto &RangeIterPair : SubRanges) {
      const LiveInterval::SubRange *SR = RangeIterPair.first;
      LiveInterval::const_iterator &SRI = RangeIterPair.second;
      while (SRI != SR->end() && SRI->end <= MBBBegin)
        ++SRI;
      if (SRI == SR->end())
        continue;
      if (SRI->start <= MBBBegin)
        LaneMask |= SR->LaneMask;
    }
    if (LaneMask.none())
      continue;
    MachineBasicBlock *MBB = MBBI->second;
    MBB->addLiveIn(PhysReg, LaneMask);
  }
}
// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
  for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
    unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
    if (MRI->reg_nodbg_empty(VirtReg))
      continue;
    LiveInterval &LI = LIS->getInterval(VirtReg);
    if (LI.empty() || LIS->intervalIsInOneMBB(LI))
      continue;
    // This is a virtual register that is live across basic blocks. Its
    // assigned PhysReg must be marked as live-in to those blocks.
    unsigned PhysReg = VRM->getPhys(VirtReg);
    assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");

    if (LI.hasSubRanges()) {
      addLiveInsForSubRanges(LI, PhysReg);
    } else {
      // Go over MBB begin positions and see if we have segments covering them.
      // The following works because segments and the MBBIndex list are both
      // sorted by slot indexes.
      SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
      for (const auto &Seg : LI) {
        I = Indexes->advanceMBBIndex(I, Seg.start);
        for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
          MachineBasicBlock *MBB = I->second;
          MBB->addLiveIn(PhysReg);
        }
      }
    }
  }

  // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
  // each MBB's LiveIns set before calling addLiveIn on them.
  for (MachineBasicBlock &MBB : *MF)
    MBB.sortUniqueLiveIns();
}
/// Returns true if the given machine operand \p MO only reads undefined lanes.
/// The function only works for use operands with a subregister set.
bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
  // Shortcut if the operand is already marked undef.
  if (MO.isUndef())
    return true;

  unsigned Reg = MO.getReg();
  const LiveInterval &LI = LIS->getInterval(Reg);
  const MachineInstr &MI = *MO.getParent();
  SlotIndex BaseIndex = LIS->getInstructionIndex(MI);
  // This code is only meant to handle reading undefined subregisters which
  // we couldn't properly detect before.
  assert(LI.liveAt(BaseIndex) &&
         "Reads of completely dead register should be marked undef already");
  unsigned SubRegIdx = MO.getSubReg();
  assert(SubRegIdx != 0 && LI.hasSubRanges());
  LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
  // See if any of the relevant subregister liveranges is defined at this point.
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((SR.LaneMask & UseMask).any() && SR.liveAt(BaseIndex))
      return false;
  }
  return true;
}
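// After rewriting, a COPY whose source and destination ended up as the same
// physical register is redundant. It is either turned into a KILL (when it
// still carries liveness information) or deleted outright.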
void VirtRegRewriter::handleIdentityCopy(MachineInstr &MI) const {
  if (!MI.isIdentityCopy())
    return;
  LLVM_DEBUG(dbgs() << "Identity copy: " << MI);
  ++NumIdCopies;

  // Copies like:
  //    %r0 = COPY undef %r0
  //    %al = COPY %al, implicit-def %eax
  // give us additional liveness information: The target (super-)register
  // must not be valid before this point. Replace the COPY with a KILL
  // instruction to maintain this information.
  if (MI.getOperand(1).isUndef() || MI.getNumOperands() > 2) {
    MI.setDesc(TII->get(TargetOpcode::KILL));
    LLVM_DEBUG(dbgs() << " replace by: " << MI);
    return;
  }

  if (Indexes)
    Indexes->removeSingleMachineInstrFromMaps(MI);
  MI.eraseFromBundle();
  LLVM_DEBUG(dbgs() << " deleted.\n");
}
/// The liverange splitting logic sometimes produces bundles of copies when
/// subregisters are involved. Expand these into a sequence of copy instructions
/// after processing the last in the bundle. Does not update LiveIntervals
/// which we shouldn't need for this instruction anymore.
void VirtRegRewriter::expandCopyBundle(MachineInstr &MI) const {
  if (!MI.isCopy())
    return;

  if (MI.isBundledWithPred() && !MI.isBundledWithSucc()) {
    SmallVector<MachineInstr *, 2> MIs({&MI});

    // Only do this when the complete bundle is made out of COPYs.
    MachineBasicBlock &MBB = *MI.getParent();
    for (MachineBasicBlock::reverse_instr_iterator I =
         std::next(MI.getReverseIterator()), E = MBB.instr_rend();
         I != E && I->isBundledWithSucc(); ++I) {
      if (!I->isCopy())
        return;
      MIs.push_back(&*I);
    }
    MachineInstr *FirstMI = MIs.back();

    auto anyRegsAlias = [](const MachineInstr *Dst,
                           ArrayRef<MachineInstr *> Srcs,
                           const TargetRegisterInfo *TRI) {
      for (const MachineInstr *Src : Srcs)
        if (Src != Dst)
          if (TRI->regsOverlap(Dst->getOperand(0).getReg(),
                               Src->getOperand(1).getReg()))
            return true;
      return false;
    };

    // If any of the destination registers in the bundle of copies alias any of
    // the source registers, try to schedule the instructions to avoid any
    // clobbering.
    for (int E = MIs.size(), PrevE = E; E > 1; PrevE = E) {
      for (int I = E; I--; )
        if (!anyRegsAlias(MIs[I], makeArrayRef(MIs).take_front(E), TRI)) {
          // MIs[I]'s destination does not overlap the source of any other
          // still-unscheduled copy, so it is safe to schedule it now; move it
          // into the already-scheduled tail of the array.
          if (I + 1 != E)
            std::swap(MIs[I], MIs[E - 1]);
          --E;
        }
      if (PrevE == E) {
        MF->getFunction().getContext().emitError(
            "register rewriting failed: cycle in copy bundle");
        break;
      }
    }

    MachineInstr *BundleStart = FirstMI;
    for (MachineInstr *BundledMI : llvm::reverse(MIs)) {
      // If instruction is in the middle of the bundle, move it before the
      // bundle starts, otherwise, just unbundle it. When we get to the last
      // instruction, the bundle will have been completely undone.
      if (BundledMI != BundleStart) {
        BundledMI->removeFromBundle();
        MBB.insert(FirstMI, BundledMI);
      } else if (BundledMI->isBundledWithSucc()) {
        BundledMI->unbundleFromSucc();
        BundleStart = &*std::next(BundledMI->getIterator());
      }

      if (Indexes && BundledMI != FirstMI)
        Indexes->insertMachineInstrInMaps(*BundledMI);
    }
  }
}
/// Check whether (part of) \p SuperPhysReg is live through \p MI.
/// \pre \p MI defines a subregister of a virtual register that
/// has been assigned to \p SuperPhysReg.
bool VirtRegRewriter::subRegLiveThrough(const MachineInstr &MI,
                                        unsigned SuperPhysReg) const {
  SlotIndex MIIndex = LIS->getInstructionIndex(MI);
  SlotIndex BeforeMIUses = MIIndex.getBaseIndex();
  SlotIndex AfterMIDefs = MIIndex.getBoundaryIndex();
  for (MCRegUnitIterator Unit(SuperPhysReg, TRI); Unit.isValid(); ++Unit) {
    const LiveRange &UnitRange = LIS->getRegUnit(*Unit);
    // If the regunit is live both before and after MI,
    // we assume it is live through.
    // Generally speaking, this is not true, because something like
    // "RU = op RU" would match that description.
    // However, we know that we are trying to assess whether
    // a def of a virtual reg, vreg, is live at the same time as RU.
    // If we are in the "RU = op RU" situation, that means that vreg
    // is defined at the same time as RU (i.e., "vreg, RU = op RU").
    // Thus, vreg and RU interfere and vreg cannot be assigned to
    // SuperPhysReg. Therefore, this situation cannot happen.
    if (UnitRange.liveAt(AfterMIDefs) && UnitRange.liveAt(BeforeMIUses))
      return true;
  }
  return false;
}
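// Walk every operand of every instruction and replace virtual register
// references with the physical register chosen by the allocator, adding the
// implicit super-register operands needed to keep liveness correct.
// Illustrative, target-dependent example: if %0 was assigned $eax, a killed
// use of %0.sub_8bit is rewritten to $al, and an implicit killed operand on
// $eax may be added when sub-register liveness is not tracked.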
void VirtRegRewriter::rewrite() {
  bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
  SmallVector<unsigned, 8> SuperDeads;
  SmallVector<unsigned, 8> SuperDefs;
  SmallVector<unsigned, 8> SuperKills;

  for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
       MBBI != MBBE; ++MBBI) {
    LLVM_DEBUG(MBBI->print(dbgs(), Indexes));
    for (MachineBasicBlock::instr_iterator
           MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
      MachineInstr *MI = &*MII;
      ++MII;

      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
           MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        MachineOperand &MO = *MOI;

        // Make sure MRI knows about registers clobbered by regmasks.
        if (MO.isRegMask())
          MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());

        if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
          continue;
        unsigned VirtReg = MO.getReg();
        unsigned PhysReg = VRM->getPhys(VirtReg);
        assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
               "Instruction uses unmapped VirtReg");
        assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");

        // Preserve semantics of sub-register operands.
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0) {
          if (NoSubRegLiveness || !MRI->shouldTrackSubRegLiveness(VirtReg)) {
            // A virtual register kill refers to the whole register, so we may
            // have to add implicit killed operands for the super-register. A
            // partial redef always kills and redefines the super-register.
            if ((MO.readsReg() && (MO.isDef() || MO.isKill())) ||
                (MO.isDef() && subRegLiveThrough(*MI, PhysReg)))
              SuperKills.push_back(PhysReg);

            if (MO.isDef()) {
              // Also add implicit defs for the super-register.
              if (MO.isDead())
                SuperDeads.push_back(PhysReg);
              else
                SuperDefs.push_back(PhysReg);
            }
          } else {
            if (MO.isUse()) {
              if (readsUndefSubreg(MO))
                // We need to add an <undef> flag if the subregister is
                // completely undefined (and we are not adding super-register
                // defs).
                MO.setIsUndef(true);
            } else if (!MO.isDead()) {
              assert(MO.isDef());
            }
          }

          // The def undef and def internal flags only make sense for
          // sub-register defs, and we are substituting a full physreg. An
          // implicit killed operand from the SuperKills list will represent the
          // partial read of the super-register.
          if (MO.isDef()) {
            MO.setIsUndef(false);
            MO.setIsInternalRead(false);
          }

          // PhysReg operands cannot have subregister indexes.
          PhysReg = TRI->getSubReg(PhysReg, SubReg);
          assert(PhysReg && "Invalid SubReg for physical register");
          MO.setSubReg(0);
        }

        // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
        // we need the inlining here.
        MO.setReg(PhysReg);
        MO.setIsRenamable(true);
      }

      // Add any missing super-register kills after rewriting the whole
      // instruction.
      while (!SuperKills.empty())
        MI->addRegisterKilled(SuperKills.pop_back_val(), TRI, true);

      while (!SuperDeads.empty())
        MI->addRegisterDead(SuperDeads.pop_back_val(), TRI, true);

      while (!SuperDefs.empty())
        MI->addRegisterDefined(SuperDefs.pop_back_val(), TRI);

      LLVM_DEBUG(dbgs() << "> " << *MI);

      expandCopyBundle(*MI);

      // We can remove identity copies right now.
      handleIdentityCopy(*MI);
    }
  }
}