//===- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map -----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the VirtRegMap class.
//
// It also contains implementations of the Spiller interface, which, given a
// virtual register map and a machine function, eliminates all virtual
// references by replacing them with physical register references - adding spill
// code as necessary.
//
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/VirtRegMap.h"
#include "LiveDebugVariables.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/LiveStacks.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Pass.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumSpillSlots, "Number of spill slots allocated");
STATISTIC(NumIdCopies, "Number of identity moves eliminated after rewriting");

//===----------------------------------------------------------------------===//
//  VirtRegMap implementation
//===----------------------------------------------------------------------===//

char VirtRegMap::ID = 0;

INITIALIZE_PASS(VirtRegMap, "virtregmap", "Virtual Register Map", false, false)

bool VirtRegMap::runOnMachineFunction(MachineFunction &mf) {
  MRI = &mf.getRegInfo();
  TII = mf.getSubtarget().getInstrInfo();
  TRI = mf.getSubtarget().getRegisterInfo();
  MF = &mf;

  Virt2PhysMap.clear();
  Virt2StackSlotMap.clear();
  Virt2SplitMap.clear();

  grow();
  return false;
}

void VirtRegMap::grow() {
  unsigned NumRegs = MF->getRegInfo().getNumVirtRegs();
  Virt2PhysMap.resize(NumRegs);
  Virt2StackSlotMap.resize(NumRegs);
  Virt2SplitMap.resize(NumRegs);
}

void VirtRegMap::assignVirt2Phys(unsigned virtReg, MCPhysReg physReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg) &&
         TargetRegisterInfo::isPhysicalRegister(physReg));
  assert(Virt2PhysMap[virtReg] == NO_PHYS_REG &&
         "attempt to assign physical register to already mapped "
         "virtual register");
  assert(!getRegInfo().isReserved(physReg) &&
         "Attempt to map virtReg to a reserved physReg");
  Virt2PhysMap[virtReg] = physReg;
}

unsigned VirtRegMap::createSpillSlot(const TargetRegisterClass *RC) {
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int SS = MF->getFrameInfo().CreateSpillStackObject(Size, Align);
  ++NumSpillSlots;
  return SS;
}
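
/// Returns true if \p VirtReg ended up in its preferred (hinted) physical
/// register. A hint that is itself a virtual register is resolved through the
/// map first.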
bool VirtRegMap::hasPreferredPhys(unsigned VirtReg) {
  unsigned Hint = MRI->getSimpleHint(VirtReg);
  if (!Hint)
    return false;
  if (TargetRegisterInfo::isVirtualRegister(Hint))
    Hint = getPhys(Hint);
  return getPhys(VirtReg) == Hint;
}
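
/// Returns true if \p VirtReg has a known register preference: either a
/// physical-register hint, or a virtual-register hint that has already been
/// assigned a physical register.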
bool VirtRegMap::hasKnownPreference(unsigned VirtReg) {
  std::pair<unsigned, unsigned> Hint = MRI->getRegAllocationHint(VirtReg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint.second))
    return true;
  if (TargetRegisterInfo::isVirtualRegister(Hint.second))
    return hasPhys(Hint.second);
  return false;
}

int VirtRegMap::assignVirt2StackSlot(unsigned virtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF->getRegInfo().getRegClass(virtReg);
  return Virt2StackSlotMap[virtReg] = createSpillSlot(RC);
}

void VirtRegMap::assignVirt2StackSlot(unsigned virtReg, int SS) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((SS >= 0 ||
          (SS >= MF->getFrameInfo().getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg] = SS;
}

void VirtRegMap::print(raw_ostream &OS, const Module*) const {
  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2PhysMap[Reg] != (unsigned)VirtRegMap::NO_PHYS_REG) {
      OS << '[' << printReg(Reg, TRI) << " -> "
         << printReg(Virt2PhysMap[Reg], TRI) << "] "
         << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2StackSlotMap[Reg] != VirtRegMap::NO_STACK_SLOT) {
      OS << '[' << printReg(Reg, TRI) << " -> fi#" << Virt2StackSlotMap[Reg]
         << "] " << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }
  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtRegMap::dump() const {
  print(dbgs());
}
#endif

//===----------------------------------------------------------------------===//
//  VirtRegRewriter
//===----------------------------------------------------------------------===//
//
// The VirtRegRewriter is the last of the register allocator passes.
// It rewrites virtual registers to physical registers as specified in the
// VirtRegMap analysis. It also updates live-in information on basic blocks
// according to LiveIntervals.
//
namespace {

class VirtRegRewriter : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  MachineRegisterInfo *MRI;
  SlotIndexes *Indexes;
  LiveIntervals *LIS;
  VirtRegMap *VRM;

  void rewrite();
  void addMBBLiveIns();
  bool readsUndefSubreg(const MachineOperand &MO) const;
  void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
  void handleIdentityCopy(MachineInstr &MI) const;
  void expandCopyBundle(MachineInstr &MI) const;
  bool subRegLiveThrough(const MachineInstr &MI, unsigned SuperPhysReg) const;

public:
  static char ID;

  VirtRegRewriter() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction&) override;

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

} // end anonymous namespace

char VirtRegRewriter::ID = 0;

char &llvm::VirtRegRewriterID = VirtRegRewriter::ID;

INITIALIZE_PASS_BEGIN(VirtRegRewriter, "virtregrewriter",
                      "Virtual Register Rewriter", false, false)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(LiveDebugVariables)
INITIALIZE_PASS_DEPENDENCY(LiveStacks)
INITIALIZE_PASS_DEPENDENCY(VirtRegMap)
INITIALIZE_PASS_END(VirtRegRewriter, "virtregrewriter",
                    "Virtual Register Rewriter", false, false)

void VirtRegRewriter::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<VirtRegMap>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI = &MF->getRegInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  LIS = &getAnalysis<LiveIntervals>();
  VRM = &getAnalysis<VirtRegMap>();
  LLVM_DEBUG(dbgs() << "********** REWRITE VIRTUAL REGISTERS **********\n"
                    << "********** Function: " << MF->getName() << '\n');
  LLVM_DEBUG(VRM->dump());

  // Add kill flags while we still have virtual registers.
  LIS->addKillFlags(VRM);

  // Live-in lists on basic blocks are required for physregs.
  addMBBLiveIns();

  // Rewrite virtual registers.
  rewrite();

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers and release all the transient data.
  VRM->clearAllVirt();
  MRI->clearVirtRegs();
  return true;
}
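
/// Add \p PhysReg as a live-in of every basic block whose start is covered by
/// one of \p LI's subranges, using the combined lane mask of the subranges
/// that are live at that point.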
void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
                                             unsigned PhysReg) const {
  assert(!LI.empty());
  assert(LI.hasSubRanges());

  using SubRangeIteratorPair =
      std::pair<const LiveInterval::SubRange *, LiveInterval::const_iterator>;

  SmallVector<SubRangeIteratorPair, 4> SubRanges;
  SlotIndex First;
  SlotIndex Last;
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    SubRanges.push_back(std::make_pair(&SR, SR.begin()));
    if (!First.isValid() || SR.segments.front().start < First)
      First = SR.segments.front().start;
    if (!Last.isValid() || SR.segments.back().end > Last)
      Last = SR.segments.back().end;
  }

  // Check all mbb start positions between First and Last while
  // simultaneously advancing an iterator for each subrange.
  for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
       MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
    SlotIndex MBBBegin = MBBI->first;
    // Advance all subrange iterators so that their end position is just
    // behind MBBBegin (or the iterator is at the end).
    LaneBitmask LaneMask;
    for (auto &RangeIterPair : SubRanges) {
      const LiveInterval::SubRange *SR = RangeIterPair.first;
      LiveInterval::const_iterator &SRI = RangeIterPair.second;
      while (SRI != SR->end() && SRI->end <= MBBBegin)
        ++SRI;
      if (SRI == SR->end())
        continue;
      if (SRI->start <= MBBBegin)
        LaneMask |= SR->LaneMask;
    }
    if (LaneMask.none())
      continue;
    MachineBasicBlock *MBB = MBBI->second;
    MBB->addLiveIn(PhysReg, LaneMask);
  }
}

// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
  for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
    unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
    if (MRI->reg_nodbg_empty(VirtReg))
      continue;
    LiveInterval &LI = LIS->getInterval(VirtReg);
    if (LI.empty() || LIS->intervalIsInOneMBB(LI))
      continue;
    // This is a virtual register that is live across basic blocks. Its
    // assigned PhysReg must be marked as live-in to those blocks.
    unsigned PhysReg = VRM->getPhys(VirtReg);
    assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");

    if (LI.hasSubRanges()) {
      addLiveInsForSubRanges(LI, PhysReg);
    } else {
      // Go over MBB begin positions and see if we have segments covering them.
      // The following works because segments and the MBBIndex list are both
      // sorted by slot indexes.
      SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
      for (const auto &Seg : LI) {
        I = Indexes->advanceMBBIndex(I, Seg.start);
        for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
          MachineBasicBlock *MBB = I->second;
          MBB->addLiveIn(PhysReg);
        }
      }
    }
  }

  // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
  // each MBB's LiveIns set before calling addLiveIn on them.
  for (MachineBasicBlock &MBB : *MF)
    MBB.sortUniqueLiveIns();
}

/// Returns true if the given machine operand \p MO only reads undefined lanes.
/// The function only works for use operands with a subregister set.
bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
  // Shortcut if the operand is already marked undef.
  if (MO.isUndef())
    return true;

  unsigned Reg = MO.getReg();
  const LiveInterval &LI = LIS->getInterval(Reg);
  const MachineInstr &MI = *MO.getParent();
  SlotIndex BaseIndex = LIS->getInstructionIndex(MI);
  // This code is only meant to handle reading undefined subregisters which
  // we couldn't properly detect before.
  assert(LI.liveAt(BaseIndex) &&
         "Reads of completely dead register should be marked undef already");
  unsigned SubRegIdx = MO.getSubReg();
  assert(SubRegIdx != 0 && LI.hasSubRanges());
  LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
  // See if any of the relevant subregister liveranges is defined at this point.
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((SR.LaneMask & UseMask).any() && SR.liveAt(BaseIndex))
      return false;
  }
  return true;
}
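
/// Handle a COPY that, after rewriting, copies a register to itself. If the
/// copy still carries liveness information (an undef source or extra implicit
/// operands), it is turned into a KILL; otherwise it is simply erased.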
void VirtRegRewriter::handleIdentityCopy(MachineInstr &MI) const {
  if (!MI.isIdentityCopy())
    return;
  LLVM_DEBUG(dbgs() << "Identity copy: " << MI);
  ++NumIdCopies;

  // Copies like:
  //    %r0 = COPY undef %r0
  //    %al = COPY %al, implicit-def %eax
  // give us additional liveness information: The target (super-)register
  // must not be valid before this point. Replace the COPY with a KILL
  // instruction to maintain this information.
  if (MI.getOperand(1).isUndef() || MI.getNumOperands() > 2) {
    MI.setDesc(TII->get(TargetOpcode::KILL));
    LLVM_DEBUG(dbgs() << "  replace by: " << MI);
    return;
  }

  if (Indexes)
    Indexes->removeSingleMachineInstrFromMaps(MI);
  MI.eraseFromBundle();
  LLVM_DEBUG(dbgs() << "  deleted.\n");
}

/// The liverange splitting logic sometimes produces bundles of copies when
/// subregisters are involved. Expand these into a sequence of copy instructions
/// after processing the last in the bundle. Does not update LiveIntervals
/// which we shouldn't need for this instruction anymore.
void VirtRegRewriter::expandCopyBundle(MachineInstr &MI) const {
  if (!MI.isCopy())
    return;

  if (MI.isBundledWithPred() && !MI.isBundledWithSucc()) {
    SmallVector<MachineInstr *, 2> MIs({&MI});

    // Only do this when the complete bundle is made out of COPYs.
    MachineBasicBlock &MBB = *MI.getParent();
    for (MachineBasicBlock::reverse_instr_iterator I =
         std::next(MI.getReverseIterator()), E = MBB.instr_rend();
         I != E && I->isBundledWithSucc(); ++I) {
      if (!I->isCopy())
        return;
      MIs.push_back(&*I);
    }
    MachineInstr *FirstMI = MIs.back();

    auto anyRegsAlias = [](const MachineInstr *Dst,
                           ArrayRef<MachineInstr *> Srcs,
                           const TargetRegisterInfo *TRI) {
      for (const MachineInstr *Src : Srcs)
        if (Src != Dst)
          if (TRI->regsOverlap(Dst->getOperand(0).getReg(),
                               Src->getOperand(1).getReg()))
            return true;
      return false;
    };

    // If any of the destination registers in the bundle of copies alias any of
    // the source registers, try to schedule the instructions to avoid any
    // clobbering.
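    // Each outer pass scans the still-unordered prefix MIs[0..E) and moves any
    // copy whose destination does not overlap a remaining source to the back
    // of that prefix, shrinking E. If a whole pass makes no progress
    // (PrevE == E), the remaining copies form a cycle and an error is emitted.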
    for (int E = MIs.size(), PrevE = E; E > 1; PrevE = E) {
      for (int I = E; I--; )
        if (!anyRegsAlias(MIs[I], makeArrayRef(MIs).take_front(E), TRI)) {
          if (I + 1 != E)
            std::swap(MIs[I], MIs[E - 1]);
          --E;
        }
      if (PrevE == E) {
        MF->getFunction().getContext().emitError(
            "register rewriting failed: cycle in copy bundle");
        break;
      }
    }

    MachineInstr *BundleStart = FirstMI;
    for (MachineInstr *BundledMI : llvm::reverse(MIs)) {
      // If instruction is in the middle of the bundle, move it before the
      // bundle starts, otherwise, just unbundle it. When we get to the last
      // instruction, the bundle will have been completely undone.
      if (BundledMI != BundleStart) {
        BundledMI->removeFromBundle();
        MBB.insert(FirstMI, BundledMI);
      } else if (BundledMI->isBundledWithSucc()) {
        BundledMI->unbundleFromSucc();
        BundleStart = &*std::next(BundledMI->getIterator());
      }

      if (Indexes && BundledMI != FirstMI)
        Indexes->insertMachineInstrInMaps(*BundledMI);
    }
  }
}

/// Check whether (part of) \p SuperPhysReg is live through \p MI.
/// \pre \p MI defines a subregister of a virtual register that
/// has been assigned to \p SuperPhysReg.
bool VirtRegRewriter::subRegLiveThrough(const MachineInstr &MI,
                                        unsigned SuperPhysReg) const {
  SlotIndex MIIndex = LIS->getInstructionIndex(MI);
  SlotIndex BeforeMIUses = MIIndex.getBaseIndex();
  SlotIndex AfterMIDefs = MIIndex.getBoundaryIndex();
  for (MCRegUnitIterator Unit(SuperPhysReg, TRI); Unit.isValid(); ++Unit) {
    const LiveRange &UnitRange = LIS->getRegUnit(*Unit);
    // If the regunit is live both before and after MI,
    // we assume it is live through.
    // Generally speaking, this is not true, because something like
    // "RU = op RU" would match that description.
    // However, we know that we are trying to assess whether
    // a def of a virtual reg, vreg, is live at the same time as RU.
    // If we are in the "RU = op RU" situation, that means that vreg
    // is defined at the same time as RU (i.e., "vreg, RU = op RU").
    // Thus, vreg and RU interfere and vreg cannot be assigned to
    // SuperPhysReg. Therefore, this situation cannot happen.
    if (UnitRange.liveAt(AfterMIDefs) && UnitRange.liveAt(BeforeMIUses))
      return true;
  }
  return false;
}
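
/// Rewrite every virtual-register operand in the function to the physical
/// register chosen by the allocator, folding sub-register indexes into the
/// physical register and adding implicit super-register kill/dead/def
/// operands where needed.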
void VirtRegRewriter::rewrite() {
  bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
  SmallVector<unsigned, 8> SuperDeads;
  SmallVector<unsigned, 8> SuperDefs;
  SmallVector<unsigned, 8> SuperKills;
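  // Implicit super-register operands discovered while rewriting an instruction
  // are collected in the vectors above and attached only after all of its
  // operands have been processed.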

  for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
       MBBI != MBBE; ++MBBI) {
    LLVM_DEBUG(MBBI->print(dbgs(), Indexes));
    for (MachineBasicBlock::instr_iterator
           MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
      MachineInstr *MI = &*MII;
      ++MII;

      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
           MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        MachineOperand &MO = *MOI;

        // Make sure MRI knows about registers clobbered by regmasks.
        if (MO.isRegMask())
          MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());

        if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
          continue;
        unsigned VirtReg = MO.getReg();
        unsigned PhysReg = VRM->getPhys(VirtReg);
        assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
               "Instruction uses unmapped VirtReg");
        assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");

        // Preserve semantics of sub-register operands.
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0) {
          if (NoSubRegLiveness || !MRI->shouldTrackSubRegLiveness(VirtReg)) {
            // A virtual register kill refers to the whole register, so we may
            // have to add implicit killed operands for the super-register. A
            // partial redef always kills and redefines the super-register.
            if ((MO.readsReg() && (MO.isDef() || MO.isKill())) ||
                (MO.isDef() && subRegLiveThrough(*MI, PhysReg)))
              SuperKills.push_back(PhysReg);

            if (MO.isDef()) {
              // Also add implicit defs for the super-register.
              if (MO.isDead())
                SuperDeads.push_back(PhysReg);
              else
                SuperDefs.push_back(PhysReg);
            }
          } else {
            if (MO.isUse()) {
              if (readsUndefSubreg(MO))
                // We need to add an <undef> flag if the subregister is
                // completely undefined (and we are not adding super-register
                // defs).
                MO.setIsUndef(true);
            } else if (!MO.isDead()) {
              assert(MO.isDef());
            }
          }

          // The def undef and def internal flags only make sense for
          // sub-register defs, and we are substituting a full physreg. An
          // implicit killed operand from the SuperKills list will represent the
          // partial read of the super-register.
          if (MO.isDef()) {
            MO.setIsUndef(false);
            MO.setIsInternalRead(false);
          }

          // PhysReg operands cannot have subregister indexes.
          PhysReg = TRI->getSubReg(PhysReg, SubReg);
          assert(PhysReg && "Invalid SubReg for physical register");
          MO.setSubReg(0);
        }

        // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
        // we need the inlining here.
        MO.setReg(PhysReg);
        MO.setIsRenamable(true);
      }

      // Add any missing super-register kills after rewriting the whole
      // instruction.
      while (!SuperKills.empty())
        MI->addRegisterKilled(SuperKills.pop_back_val(), TRI, true);

      while (!SuperDeads.empty())
        MI->addRegisterDead(SuperDeads.pop_back_val(), TRI, true);

      while (!SuperDefs.empty())
        MI->addRegisterDefined(SuperDefs.pop_back_val(), TRI);

      LLVM_DEBUG(dbgs() << "> " << *MI);

      expandCopyBundle(*MI);

      // We can remove identity copies right now.
      handleIdentityCopy(*MI);
    }
  }
}