//===- RegisterScavenging.cpp - Machine register scavenging --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

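// Illustrative usage sketch (not part of this file's logic): a pass that needs
// a scratch register around an instruction I would typically drive the
// scavenger roughly as follows; everything other than the RegScavenger API
// (the register class and the SPAdj value) is hypothetical.
//
//   RegScavenger RS;
//   RS.enterBasicBlock(MBB);     // start liveness tracking at the block top
//   RS.forward(I);               // advance liveness up to and including *I
//   unsigned Scratch = RS.FindUnusedReg(&ScratchRC);   // 0 if nothing is free
//   if (!Scratch)
//     Scratch = RS.scavengeRegister(&ScratchRC, I, /*SPAdj=*/0); // may spill
//
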
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <limits>
#include <string>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");

void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}

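/// Prepare the scavenger for (re)use on basic block \p MBB: cache the target
/// info, size the register-unit bit vectors on first use, and clear the
/// bookkeeping for previously scavenged registers. Liveness tracking itself
/// starts in enterBasicBlock()/enterBasicBlockEnd().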
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}

void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}

void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}

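/// Compute which register units the instruction at MBBI kills (including
/// def-dead operands and regmask clobbers) and which it defines, recording
/// the results in KillRegUnits and DefRegUnits for forward()/unprocess().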
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugInstr()) {
    determineKillsAndDefs();

    // Commit the changes.
    setUnused(DefRegUnits);
    setUsed(KillRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

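/// Advance MBBI to the next instruction (or to the first one when tracking
/// has not started yet) and update the liveness state for it: killed and dead
/// register units become available, defined units become used. Scavenging
/// slots whose restore point is this instruction are expired as well.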
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partially live, e.g.
        //   D0 = insert_subreg undef D0, S0
        //   ... D0
        // The problem is that the insert_subreg could be eliminated. The use
        // of D0 is then using a partially undef value. This is not *incorrect*
        // since S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

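/// Step the liveness state backwards over the instruction at MBBI and move
/// MBBI to the previous instruction; tracking stops when the beginning of
/// the block is reached. Scavenging slots restored at this instruction are
/// expired as well.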
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}

unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (unsigned Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}

BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (unsigned Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

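/// Starting from \p StartMI, scan forward over at most \p InstrLimit
/// instructions, dropping candidate registers as they are used or clobbered,
/// and return the candidate that survives the longest. \p UseMI receives a
/// position (outside any virtual-register live range) where a restore of the
/// chosen register can be placed.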
unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}

/// Given the set \p LiveOut of register units that are live at position
/// \p From, search backwards from \p From for a register in
/// \p AllocationOrder that is not used or clobbered up to the point \p To.
/// If there are multiple candidates, continue searching and pick the one
/// that stays unused/unclobbered for the longest time.
/// Returns the register and the earliest position we know it to be free, or
/// the position MBB.end() if no register is available.
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
    bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in the allocation order wasn't used so
      // far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register which is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;
      // Note: It was fine so far to start our search at From; however, now
      // that we have to spill and can only place the restore after From, also
      // add the regs used/defined by std::next(From) to the set.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg since the spilled register will
      // be useful for this other vreg as well later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
  }

  return std::make_pair(Survivor, Pos);
}

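/// Return the operand index of the first frame-index operand of \p MI.
/// Asserts if the instruction has no frame-index operand.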
static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}

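/// Make \p Reg available as a scratch register by spilling it around the
/// region from \p Before to \p UseMI: the target's saveScavengerRegister()
/// hook is tried first; otherwise the best-fitting emergency spill slot is
/// used, and the frame indices of the inserted store and reload are
/// eliminated immediately. Returns the bookkeeping entry for the chosen slot.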
RegScavenger::ScavengedInfo &
RegScavenger::spill(unsigned Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in the Manhattan metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A - NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot; the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress.
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}

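/// Make a register of class \p RC available around instruction \p I while
/// scavenging forward. An unused register is returned directly; otherwise the
/// candidate whose next use is furthest away is spilled and scheduled to be
/// restored before that use.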
unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially.
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}

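/// Make a register of class \p RC available at \p To while scavenging
/// backwards from the scavenger's current position. If no register stays free
/// over the whole range, a candidate is spilled at a position determined by
/// the backwards search and reloaded near the current position (after the
/// current instruction when \p RestoreAfter is set).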
unsigned RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?
  if (SpillBefore != MBB.end()) {
    MachineBasicBlock::iterator ReloadAfter =
      RestoreAfter ? std::next(MBBI) : MBBI;
    MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
    if (ReloadBefore != MBB.end())
      LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
    ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
    Scavenged.Restore = &*std::prev(SpillBefore);
    LiveUnits.removeReg(Reg);
    LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
               << " until " << *SpillBefore);
  } else {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
               << '\n');
  }
  return Reg;
}

/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be
/// reserved after the current instruction; otherwise it will only be reserved
/// before the current instruction.
static unsigned scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             unsigned VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However, to
  // accommodate the requirements of two-address code we also allow definitions
  // in subsequent instructions, provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
    std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                 [VReg, &TRI](const MachineOperand &MO) {
      return !MO.getParent()->readsRegister(VReg, &TRI);
    });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register, inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  unsigned SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}

/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be
        // handled in a scavenging round).
        if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
            TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        unsigned SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      unsigned Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
          TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway, so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        unsigned SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}

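/// Replace all remaining virtual registers in \p MF (typically scratch
/// registers requested by eliminateFrameIndex) with physical registers,
/// scavenging block by block, and mark the function as no longer using
/// virtual registers.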
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex needs a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compile time in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}

namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogInserter works
    // well enough to initialize the scavenger with some emergency spill slots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function.
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)