//===- RegisterScavenging.cpp - Machine register scavenging --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//
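//
// Illustrative usage sketch (not part of the original file): a frame-index
// elimination pass typically walks a block bottom-up with the scavenger in
// sync and asks it for a scratch register where one is needed. Roughly:
//
//   RegScavenger RS;
//   RS.enterBasicBlockEnd(MBB);
//   // ... walk instructions backwards, calling RS.backward() to keep the
//   // tracked liveness in sync; when a scratch register is required:
//   Register Scratch = RS.scavengeRegisterBackwards(
//       RegClass, MI.getIterator(), /*RestoreAfter=*/false, /*SPAdj=*/0);
//   // If no register is free, the scavenger inserts an emergency spill and
//   // reload around the use (see RegScavenger::spill below).
//
// Names such as RegClass and MI above are placeholders for illustration only.
//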
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  NumRegUnits = TRI->getNumRegUnits();
  KillRegUnits.resize(NumRegUnits);
  DefRegUnits.resize(NumRegUnits);
  TmpRegUnits.resize(NumRegUnits);

  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }
}
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move the internal iterator to the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}
void RegScavenger::addRegUnits(BitVector &BV, Register Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}
void RegScavenger::removeRegUnits(BitVector &BV, Register Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.reset();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugInstr()) {
    determineKillsAndDefs();

    // Commit the changes.
    setUnused(DefRegUnits);
    setUsed(KillRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partially live: e.g.
        //   D0 = insert_subreg undef D0, S0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frame index uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}
Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (Register Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}
BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (Register Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
/// Given the bitvector \p Available of free register units at position
/// \p From, search backwards to find a register that is part of \p
/// Candidates and not used/clobbered until the point \p To. If there are
/// multiple candidates, continue searching and pick the one that is not
/// used/clobbered for the longest time.
/// Returns the register and the earliest position we know it to be free or
/// the position MBB.end() if no register is available.
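///
/// Illustration (not part of the original documentation): if R0 and R1 are
/// the only candidates and R0 is clobbered one instruction before \p To while
/// R1 is not clobbered until four instructions before \p To, neither is free
/// over the whole range, so the search continues past \p To and settles on
/// R1, the candidate that stays free for the longest stretch.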
static std::pair<MCPhysReg, MachineBasicBlock::iterator>
findSurvivorBackwards(const MachineRegisterInfo &MRI,
                      MachineBasicBlock::iterator From,
                      MachineBasicBlock::iterator To,
                      const LiveRegUnits &LiveOut,
                      ArrayRef<MCPhysReg> AllocationOrder,
                      bool RestoreAfter) {
  bool FoundTo = false;
  MCPhysReg Survivor = 0;
  MachineBasicBlock::iterator Pos;
  MachineBasicBlock &MBB = *From->getParent();
  unsigned InstrLimit = 25;
  unsigned InstrCountDown = InstrLimit;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  LiveRegUnits Used(TRI);

  for (MachineBasicBlock::iterator I = From;; --I) {
    const MachineInstr &MI = *I;

    Used.accumulate(MI);

    if (I == To) {
      // See if one of the registers in RC wasn't used so far.
      for (MCPhysReg Reg : AllocationOrder) {
        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
            LiveOut.available(Reg))
          return std::make_pair(Reg, MBB.end());
      }
      // Otherwise we will continue up to InstrLimit instructions to find
      // the register which is not defined/used for the longest time.
      FoundTo = true;
      Pos = To;
      // Note: It was fine so far to start our search at From; however, now
      // that we have to spill and can only place the restore after From, add
      // the regs used/defed by std::next(From) to the set as well.
      if (RestoreAfter)
        Used.accumulate(*std::next(From));
    }
    if (FoundTo) {
      if (Survivor == 0 || !Used.available(Survivor)) {
        MCPhysReg AvailableReg = 0;
        for (MCPhysReg Reg : AllocationOrder) {
          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
            AvailableReg = Reg;
            break;
          }
        }
        if (AvailableReg == 0)
          break;
        Survivor = AvailableReg;
      }
      if (--InstrCountDown == 0)
        break;

      // Keep searching when we find a vreg since the spilled register will
      // be useful for this other vreg as well later.
      bool FoundVReg = false;
      for (const MachineOperand &MO : MI.operands()) {
        if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) {
          FoundVReg = true;
          break;
        }
      }
      if (FoundVReg) {
        InstrCountDown = InstrLimit;
        Pos = I;
      }
      if (I == MBB.begin())
        break;
    }
  }

  return std::make_pair(Survivor, Pos);
}
static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
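    // Illustrative numbers (not from the original source): with NeedSize = 8
    // and NeedAlign = 8, a 16-byte slot aligned to 16 scores
    // D = (16 - 8) + (16 - 8) = 16, while an exact-fit 8-byte slot aligned
    // to 8 scores D = 0 and is therefore preferred.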
    unsigned D = (S - NeedSize) + (A - NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot; the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress.
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially.
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !Register::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register, there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}
Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj,
                                                 bool AllowSpill) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?
  if (SpillBefore == MBB.end()) {
    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
                      << '\n');
    return Reg;
  }

  if (!AllowSpill)
    return 0;

  MachineBasicBlock::iterator ReloadAfter =
      RestoreAfter ? std::next(MBBI) : MBBI;
  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  if (ReloadBefore != MBB.end())
    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  Scavenged.Restore = &*std::prev(SpillBefore);
  LiveUnits.removeReg(Reg);
  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
                    << " until " << *SpillBefore);
  return Reg;
}
/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be
/// reserved after the current instruction; otherwise it will only be reserved
/// before the current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();

#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg; re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif
  // We should only have one definition of the register. However, to
  // accommodate the requirements of two-address code we also allow definitions
  // in subsequent instructions, provided they also read the register. That way
  // we get a single contiguous lifetime.
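  //
  // For illustration (hypothetical MIR, not from the original source):
  //   %v = LOAD ...
  //   %v = ADD %v, 1      ; redefinition that also reads %v
  //   STORE %v, ...
  // Both defs of %v are accepted here because the second one reads %v,
  // giving one contiguous lifetime.
  //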
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
      std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                   [VReg, &TRI](const MachineOperand &MO) {
                     return !MO.getParent()->readsRegister(VReg, &TRI);
                   });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is needed.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be
        // handled in a scavenging round).
        if (!Register::isVirtualRegister(Reg) ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Register::isVirtualRegister(Reg) ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg())
        NextInstructionReadsVReg = true;
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }

  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex needs a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();

  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compile time in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. It is used for testing.
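/// (Illustrative note, not from the original source: the pass is registered
/// below under the name "scavenger-test", so it can typically be exercised on
/// MIR input with something like `llc -run-pass=scavenger-test input.mir`.)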
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of the PrologEpilogInserter works
    // well enough to initialize the scavenger with some emergency spill slots
    // for the function.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function.
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace
char ScavengerTest::ID;
INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)