//===- RegisterScavenging.cpp - Machine register scavenging --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//
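//
// NOTE: Editorial usage sketch, not part of the original source. A typical
// client of the scavenger is a target's eliminateFrameIndex() implementation,
// which asks for a scratch register while rewriting a frame index. The names
// SomeTargetRegisterInfo and GPRRegClass below are hypothetical:
//
//   // Inside SomeTargetRegisterInfo::eliminateFrameIndex(MI, SPAdj, ...):
//   Register Scratch =
//       RS->scavengeRegister(&SomeTarget::GPRRegClass, MI.getIterator(), SPAdj);
//   // Scratch is now usable at MI; if no register was free, the scavenger
//   // inserted an emergency spill before MI and a reload before the next use.
//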
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Size the register-unit sized bit vectors for this target.
  NumRegUnits = TRI->getNumRegUnits();
  KillRegUnits.resize(NumRegUnits);
  DefRegUnits.resize(NumRegUnits);
  TmpRegUnits.resize(NumRegUnits);

  this->MBB = &MBB;

  // Forget about any previously scavenged registers.
  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (!MBB.empty()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}
void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.reset();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }
      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
void RegScavenger::forward() {
  // Move the internal iterator forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore != &MI)
      continue;

    I.Reg = 0;
    I.Restore = nullptr;
  }

  if (MI.isDebugOrPseudoInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}
Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (Register Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}
BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (Register Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugOrPseudoInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
/// Given the bitvector \p Available of free register units at position
/// \p From, search backwards to find a register that is part of \p
/// Candidates and not used/clobbered until the point \p To. If there are
/// multiple candidates, continue searching and pick the one that is not used/
/// clobbered for the longest time.
/// Returns the register and the earliest position we know it to be free, or
/// the position MBB.end() if no register is available.
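///
/// Illustrative example (editorial addition; register names are hypothetical):
/// if candidate $x1 is clobbered somewhere between \p To and \p From but $x0
/// is never touched and is also free in \p LiveOut, the search returns
/// {$x0, MBB.end()}. If every candidate is clobbered in the range, it returns
/// the candidate that stays free for the longest stretch together with the
/// earliest position at which it is known to be free, so the caller can place
/// the reload there.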
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
                      MachineBasicBlock::iterator From,
                      MachineBasicBlock::iterator To,
                      const LiveRegUnits &LiveOut,
                      ArrayRef<MCPhysReg> AllocationOrder,
                      bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  assert(From->getParent() == To->getParent() &&
         "Target instruction is in other than current basic block, use "
         "enterBasicBlockEnd first");

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in RC wasn't used so far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register which is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;

      // Note: It was fine so far to start our search at From, however now that
      // we have to spill and can only place the restore after From, add the
      // regs used/defed by std::next(From) to the set.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg since the spilled register will
      // be useful for this other vreg as well later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
    assert(I != MBB.begin() && "Did not find target instruction while "
           "iterating backwards");
  }

  return std::make_pair(Survivor, Pos);
}
static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  Align NeedAlign = TRI->getSpillAlign(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    Align A = MFI.getObjectAlign(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value());
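    // Illustrative arithmetic (editorial note, not from the original source):
    // with NeedSize = 8 and NeedAlign = 8, a candidate slot of size 16 and
    // alignment 16 scores D = (16 - 8) + (16 - 8) = 16, whereas an exactly
    // fitting slot of size 8 and alignment 8 scores D = 0 and is preferred.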
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      report_fatal_error(Twine("Error while trying to spill ") +
                         TRI->getName(Reg) + " from class " +
                         TRI->getRegClassName(&RC) +
                         ": Cannot scavenge register without an emergency "
                         "spill slot!");
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, FI, &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, FI, &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !Register::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // If we have already scavenged some registers, remove them from the
  // candidates. If we end up recursively calling eliminateFrameIndex, we don't
  // want to be clobbering previously scavenged registers or their associated
  // stack slots.
  for (ScavengedInfo &SI : Scavenged) {
    if (isRegUsed(SI.Reg)) {
      LLVM_DEBUG(
        dbgs() << "Removing " << printReg(SI.Reg, TRI) <<
        " from scavenging candidates since it was already scavenged\n");
      for (MCRegAliasIterator AI(SI.Reg, TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

  for (ScavengedInfo &SI : Scavenged) {
    assert(SI.Reg != SReg && "scavenged a previously scavenged register");
  }

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}
Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  // Found an available register?
  if (Reg != 0 && SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
                      << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  assert(Reg != 0 && "No register left to scavenge!");

  MachineBasicBlock::iterator ReloadAfter =
    RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
                    << " until " << *SpillBefore);
  return Reg;
}
/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be
/// reserved after the current instruction; otherwise it will only be reserved
/// before the current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();

#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef = llvm::find_if(
      MRI.def_operands(VReg), [VReg, &TRI](const MachineOperand &MO) {
        return !MO.getParent()->readsRegister(VReg, &TRI);
      });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is needed.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be
        // handled in a scavenging round).
        if (!Register::isVirtualRegister(Reg) ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Register::isVirtualRegister(Reg) ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg())
        NextInstructionReadsVReg = true;
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex need a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compiletime in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
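
// Editorial note (not from the original source): in the normal codegen
// pipeline this entry point is roughly driven by the prolog/epilog insertion
// pass after frame indices have been eliminated, along the lines of:
//
//   RegScavenger RS;
//   // ... eliminateFrameIndex() calls may have left scratch vregs behind ...
//   scavengeFrameVirtualRegs(MF, RS);   // assign them to physical registers
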
namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. This is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spillslots
    // for the function.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)