//===- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file This register allocator allocates registers to a basic block at a
/// time, attempting to keep values in registers and reusing registers as
/// appropriate.
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Metadata.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <tuple>
#include <vector>

using namespace llvm;
#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {

class RegAllocFast : public MachineFunctionPass {
public:
  static char ID;

  RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}

private:
  MachineFrameInfo *MFI;
  MachineRegisterInfo *MRI;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  RegisterClassInfo RegClassInfo;
  /// Basic block currently being allocated.
  MachineBasicBlock *MBB;

  /// Maps virtual regs to the frame index where these values are spilled.
  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
  /// Everything we know about a live virtual register.
  struct LiveReg {
    MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
    unsigned VirtReg;                ///< Virtual register number.
    MCPhysReg PhysReg = 0;           ///< Currently held here.
    unsigned short LastOpNum = 0;    ///< OpNum on LastUse.
    bool Dirty = false;              ///< Register needs spill.

    explicit LiveReg(unsigned v) : VirtReg(v) {}

    unsigned getSparseSetIndex() const {
      return TargetRegisterInfo::virtReg2Index(VirtReg);
    }
  };

  using LiveRegMap = SparseSet<LiveReg>;
  /// This map contains entries for each virtual register that is currently
  /// available in a physical register.
  LiveRegMap LiveVirtRegs;

  DenseMap<unsigned, SmallVector<MachineInstr *, 4>> LiveDbgValueMap;
  /// Track the state of a physical register.
  enum RegState {
    /// A disabled register is not available for allocation, but an alias may
    /// be in use. A register can only be moved out of the disabled state if
    /// all aliases are disabled.
    regDisabled,

    /// A free register is not currently in use and can be allocated
    /// immediately without checking aliases.
    regFree,

    /// A reserved register has been assigned explicitly (e.g., setting up a
    /// call parameter), and it remains reserved until it is used.
    regReserved

    /// A register state may also be a virtual register number, indicating
    /// that the physical register is currently allocated to a virtual
    /// register. In that case, LiveVirtRegs contains the inverse mapping.
  };

  /// One of the RegState enums, or a virtreg.
  std::vector<unsigned> PhysRegState;
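  // If a physical register currently holds a virtual register, then
  // PhysRegState[PhysReg] is that virtreg number and the LiveVirtRegs entry
  // for the virtreg records the same PhysReg; dumpState() below asserts that
  // the two tables stay exact inverses of each other.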
  SmallVector<unsigned, 16> VirtDead;
  SmallVector<MachineInstr *, 32> Coalesced;
  /// Set of register units.
  using UsedInInstrSet = SparseSet<unsigned>;

  /// Set of register units that are used in the current instruction, and so
  /// cannot be allocated.
  UsedInInstrSet UsedInInstr;
  /// Mark a physreg as used in this instruction.
  void markRegUsedInInstr(MCPhysReg PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      UsedInInstr.insert(*Units);
  }
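  // Working in terms of register units means one insertion here covers every
  // physical register that overlaps PhysReg, so the check below does not need
  // to walk an explicit alias list.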
  /// Check if a physreg or any of its aliases are used in this instruction.
  bool isRegUsedInInstr(MCPhysReg PhysReg) const {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      if (UsedInInstr.count(*Units))
        return true;
    return false;
  }
  /// This flag is set when LiveRegMap will be cleared completely after
  /// spilling all live registers. LiveRegMap entries should not be erased.
  bool isBulkSpilling = false;

  enum : unsigned {
    spillClean = 1,
    spillDirty = 100,
    spillImpossible = ~0u
  };
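  // calcSpillCost() returns one of these values (or a sum over the aliases of
  // a disabled register); allocVirtReg() only takes an allocation hint when
  // its cost is below spillDirty, i.e. when honoring the hint would not force
  // a dirty register to be spilled.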
public:
  StringRef getPassName() const override { return "Fast Register Allocator"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }
  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoPHIs);
  }

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
private:
  bool runOnMachineFunction(MachineFunction &MF) override;

  void allocateBasicBlock(MachineBasicBlock &MBB);
  void handleThroughOperands(MachineInstr &MI,
                             SmallVectorImpl<unsigned> &VirtDead);
  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass &RC);
  bool isLastUseOfLocalReg(const MachineOperand &MO) const;

  void addKillFlag(const LiveReg &LRI);
  void killVirtReg(LiveRegMap::iterator LRI);
  void killVirtReg(unsigned VirtReg);
  void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

  void usePhysReg(MachineOperand &MO);
  void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
                     RegState NewState);
  unsigned calcSpillCost(MCPhysReg PhysReg) const;
  void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);
  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }

  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }
  LiveRegMap::iterator assignVirtToPhysReg(unsigned VirtReg, MCPhysReg PhysReg);
  LiveRegMap::iterator allocVirtReg(MachineInstr &MI, LiveRegMap::iterator,
                                    unsigned Hint);
  LiveRegMap::iterator defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  LiveRegMap::iterator reloadVirtReg(MachineInstr &MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  void spillAll(MachineBasicBlock::iterator MI);
  bool setPhysReg(MachineInstr &MI, unsigned OpNum, MCPhysReg PhysReg);

  void dumpState();
};

} // end anonymous namespace
char RegAllocFast::ID = 0;

INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
                false)
/// This allocates space for the specified virtual register to be held on the
/// stack.
int RegAllocFast::getStackSpaceFor(unsigned VirtReg,
                                   const TargetRegisterClass &RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  // Already has space allocated?
  if (SS != -1)
    return SS;

  // Allocate a new stack object for this spill location...
  unsigned Size = TRI->getSpillSize(RC);
  unsigned Align = TRI->getSpillAlignment(RC);
  int FrameIdx = MFI->CreateSpillStackObject(Size, Align);

  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}
/// Return true if MO is the only remaining reference to its virtual register,
/// and it is guaranteed to be a block-local register.
bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}
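// reloadVirtReg() uses this to mark the final use of a dirty, block-local
// register as a kill (or dead def), which lets the caller kill the virtreg
// instead of carrying the dirty value to the end of the block and spilling it.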
/// Set kill flags on last use of a virtual register.
void RegAllocFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    // else, don't do anything; we are probably redefining a
    // subreg of this register and given we don't track which
    // lanes are actually dead, we cannot insert a kill flag here.
    // Otherwise we may end up in a situation like this:
    // ... = (MO) physreg:sub1, implicit killed physreg
    // ... <== Here we would allow later pass to reuse physreg:sub1
    //         which is potentially wrong.
    // LR:sub0 = ...
    // ... = LR.sub1 <== This is going to use physreg:sub1
  }
}
/// Mark virtreg as no longer available.
void RegAllocFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}
/// Mark virtreg as no longer available.
void RegAllocFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}
/// This method spills the value specified by VirtReg into the corresponding
/// stack slot if needed.
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
                                unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}
/// Do the actual work of spilling.
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
                                LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
    LR.Dirty = false;
    LLVM_DEBUG(dbgs() << "Spilling " << printReg(LRI->VirtReg, TRI) << " in "
                      << printReg(LR.PhysReg, TRI));
    const TargetRegisterClass &RC = *MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    LLVM_DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, &RC, TRI);
    ++NumStores; // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
        LiveDbgValueMap[LRI->VirtReg];
    for (MachineInstr *DBG : LRIDbgValues) {
      MachineInstr *NewDV = buildDbgValueForSpill(*MBB, MI, *DBG, FI);
      assert(NewDV->getParent() == MBB && "dangling parent pointer");
      (void)NewDV;
      LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:"
                        << "\n" << *NewDV);
    }
    // Now that this register is spilled there should not be any DBG_VALUE
    // pointing to this register because they are all pointing to the spilled
    // value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}
/// Spill all dirty virtregs without killing them.
void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator I = LiveVirtRegs.begin(), E = LiveVirtRegs.end();
       I != E; ++I)
    spillVirtReg(MI, I);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}
/// Handle the direct use of a physical register. Check that the register is
/// not used by a virtreg. Kill the physreg, marking it free. This may add
/// implicit kills to MO->getParent() and invalidate MO.
void RegAllocFast::usePhysReg(MachineOperand &MO) {
  // Ignore undef uses.
  if (MO.isUndef())
    return;

  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    LLVM_FALLTHROUGH;
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      // Either PhysReg is a subregister of Alias and we mark the
      // whole register as free, or PhysReg is the superregister of
      // Alias and we mark all the aliases as disabled before freeing
      // PhysReg.
      // In the latter case, since PhysReg was disabled, this means that
      // its value is defined only by physical sub-registers. This check
      // is performed by the assert of the default case in this loop.
      // Note: The value of the superregister may only be partially
      // defined, that is why regDisabled is a valid state for aliases.
      assert((TRI->isSuperRegister(PhysReg, Alias) ||
              TRI->isSuperRegister(Alias, PhysReg)) &&
             "Instruction is not using a subregister of a reserved register");
      LLVM_FALLTHROUGH;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        PhysRegState[Alias] = regFree;
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}
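// For example, reading a 16-bit register while its 32-bit super-register is
// reserved takes the regReserved alias case above: the super-register drops to
// regFree, an implicit kill of it is added to the instruction, and unrelated
// overlapping aliases are simply moved to regDisabled.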
/// Mark PhysReg as reserved or free after spilling any virtregs. This is very
/// similar to defineVirtReg except the physreg is reserved instead of
/// allocated.
void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
                                 MCPhysReg PhysReg, RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    LLVM_FALLTHROUGH;
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      LLVM_FALLTHROUGH;
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}
/// Return the cost of spilling or clearing out PhysReg and aliases so it is
/// free for allocation. Returns 0 when PhysReg is free or disabled with all
/// aliases disabled - it can be allocated directly.
/// \returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
                      << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
                      << printReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}
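// A zero cost means PhysReg can be taken without spilling anything;
// allocVirtReg() treats that as an immediate match and otherwise keeps
// scanning for the candidate with the lowest eviction cost.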
/// This method updates local state so that we know that PhysReg is the
/// proper container for VirtReg now. The physical register must not be used
/// for anything else when this is called.
void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
  LLVM_DEBUG(dbgs() << "Assigning " << printReg(LR.VirtReg, TRI) << " to "
                    << printReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}
RegAllocFast::LiveRegMap::iterator
RegAllocFast::assignVirtToPhysReg(unsigned VirtReg, MCPhysReg PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}
/// Allocates a physical register for VirtReg.
RegAllocFast::LiveRegMap::iterator RegAllocFast::allocVirtReg(MachineInstr &MI,
    LiveRegMap::iterator LRI, unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  // Take hint when possible.
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint) &&
      MRI->isAllocatable(Hint) && RC.contains(Hint)) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  // First try to find a completely free register.
  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(&RC);
  for (MCPhysReg PhysReg : AO) {
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  LLVM_DEBUG(dbgs() << "Allocating " << printReg(VirtReg) << " from "
                    << TRI->getRegClassName(&RC) << "\n");

  unsigned BestReg = 0;
  unsigned BestCost = spillImpossible;
  for (MCPhysReg PhysReg : AO) {
    unsigned Cost = calcSpillCost(PhysReg);
    LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << "\n");
    LLVM_DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    LLVM_DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = PhysReg, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI.isInlineAsm())
    MI.emitError("inline assembly requires more registers than available");
  else
    MI.emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}
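// Allocation order above: take a compatible hint unless honoring it would
// spill a dirty register, then any completely free register, then the
// candidate with the lowest spill cost, and only report an error once every
// candidate is spillImpossible.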
/// Allocates a register for VirtReg and marks it as dirty.
RegAllocFast::LiveRegMap::iterator
RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                            unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}
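// defineVirtReg() is for operands that produce a new value: the register is
// marked dirty and nothing is loaded from the stack. reloadVirtReg() below is
// the counterpart for uses and reloads the value from its stack slot when it
// is not already live in a register.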
/// Make sure VirtReg is available in a physreg and return it.
RegAllocFast::LiveRegMap::iterator
RegAllocFast::reloadVirtReg(MachineInstr &MI, unsigned OpNum,
                            unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI.getOperand(OpNum);
  if (New) {
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
                      << printReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, &RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      LLVM_DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR killed %x, %x
    // This would cause a second reload of %x into a different register.
    LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}
/// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
/// may invalidate any operand pointers. Return true if the operand kills its
/// register.
bool RegAllocFast::setPhysReg(MachineInstr &MI, unsigned OpNum,
                              MCPhysReg PhysReg) {
  MachineOperand &MO = MI.getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    MO.setIsRenamable(true);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setIsRenamable(true);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI.addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI.addRegisterDefined(PhysReg, TRI);

  return Dead;
}
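// Both paths above mark the rewritten operand as renamable, and the
// subregister path folds the subregister index into the chosen physical
// register (or clears the operand entirely when no register was assigned).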
// Handles special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RegAllocFast::handleThroughOperands(MachineInstr &MI,
                                         SmallVectorImpl<unsigned> &VirtDead) {
  LLVM_DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
        (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      if (!MO.isTied()) continue;
      LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
                        << ") is tied to operand " << MI.findTiedOperandIdx(I)
                        << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0);
      MCPhysReg PhysReg = LRI->PhysReg;
      setPhysReg(MI, I, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
      LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, 0);
    MCPhysReg PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, I, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
                      << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned PartialDef : PartialDefs)
    markRegUsedInInstr(PartialDef);
}
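// "Through" registers here are operands whose register is live across part of
// the instruction itself: tied uses, early-clobber defs, and partial
// redefinitions. They are assigned before the normal use and def scans so that
// colliding physreg defs can be spilled and the chosen registers reserved up
// front.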
void RegAllocFast::dumpState() {
  for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
    if (PhysRegState[Reg] == regDisabled) continue;
    dbgs() << " " << printReg(Reg, TRI);
    switch(PhysRegState[Reg]) {
    case regFree:
      break;
    case regReserved:
      dbgs() << "*";
      break;
    default: {
      dbgs() << '=' << printReg(PhysRegState[Reg]);
      LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      if (I->Dirty)
        dbgs() << "*";
      assert(I->PhysReg == Reg && "Bad inverse map");
      break;
    }
    }
  }
  dbgs() << '\n';
  // Check that LiveVirtRegs is the inverse.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
       e = LiveVirtRegs.end(); i != e; ++i) {
    assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
           "Bad map key");
    assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
           "Bad map value");
    assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
  }
}
void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB.begin();

  // Add live-in registers as live.
  for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);
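  // Live-in physregs enter the block as regReserved, so they cannot be handed
  // out to virtual registers until an instruction actually reads or redefines
  // them.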
  VirtDead.clear();
  Coalesced.clear();

  // Otherwise, sequentially allocate each instruction in the MBB.
  for (MachineInstr &MI : MBB) {
    const MCInstrDesc &MCID = MI.getDesc();
    LLVM_DEBUG(dbgs() << "\n>> " << MI << "Regs:"; dumpState());

    // Debug values are not allowed to change codegen in any way.
    if (MI.isDebugValue()) {
      MachineInstr *DebugMI = &MI;
      MachineOperand &MO = DebugMI->getOperand(0);

      // Ignore DBG_VALUEs that aren't based on virtual registers. These are
      // mostly constants and frame indices.
      if (!MO.isReg())
        continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        continue;

      // See if this virtual register has already been allocated to a physical
      // register or spilled to a stack slot.
      LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
      if (LRI != LiveVirtRegs.end())
        setPhysReg(*DebugMI, 0, LRI->PhysReg);
      else {
        int SS = StackSlotForVirtReg[Reg];
        if (SS != -1) {
          // Modify DBG_VALUE now that the value is in a spill slot.
          updateDbgValueForSpill(*DebugMI, SS);
          LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:"
                            << "\t" << *DebugMI);
          continue;
        }

        // We can't allocate a physreg for a DebugValue, sorry!
        LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
        MO.setReg(0);
      }

      // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
      // that future spills of Reg will have DBG_VALUEs.
      LiveDbgValueMap[Reg].push_back(DebugMI);
      continue;
    }

    if (MI.isDebugLabel())
      continue;
    // If this is a copy, we may be able to coalesce.
    unsigned CopySrcReg = 0;
    unsigned CopyDstReg = 0;
    unsigned CopySrcSub = 0;
    unsigned CopyDstSub = 0;
    if (MI.isCopy()) {
      CopyDstReg = MI.getOperand(0).getReg();
      CopySrcReg = MI.getOperand(1).getReg();
      CopyDstSub = MI.getOperand(0).getSubReg();
      CopySrcSub = MI.getOperand(1).getSubReg();
    }
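    // CopySrcReg/CopyDstReg start out as virtual register numbers and are
    // replaced by the physical registers chosen during the use and def scans
    // below; if source and destination end up identical (including the
    // subregister indices), the copy is queued in Coalesced and erased at the
    // end of the block.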
    // Track registers used by instruction.
    UsedInInstr.clear();

    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                       MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg,
                      (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }
    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDstReg = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }
    // Allocate virtreg uses.
    for (unsigned I = 0; I != VirtOpEnd; ++I) {
      const MachineOperand &MO = MI.getOperand(I);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, CopyDstReg);
        MCPhysReg PhysReg = LRI->PhysReg;
        CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, I, PhysReg))
          killVirtReg(Reg);
      }
    }
    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MO.isTied()) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI.getNumOperands();
    if (MI.isCall()) {
      // Spill all virtregs before a call. This serves one purpose: If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots.
      // Note: although it is appealing to just consider all definitions
      // as call-clobbered, this is not correct because some of those
      // definitions may be used later on and we do not want to reuse
      // those for virtual registers in between.
      LLVM_DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);
    }
    // Allocate defs and collect dead defs.
    for (unsigned I = 0; I != DefOpEnd; ++I) {
      const MachineOperand &MO = MI.getOperand(I);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, CopySrcReg);
      MCPhysReg PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, I, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDstReg = 0; // cancel coalescing.
      } else
        CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
    }
    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned VirtReg : VirtDead)
      killVirtReg(VirtReg);
    VirtDead.clear();

    if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
      LLVM_DEBUG(dbgs() << "-- coalescing: " << MI);
      Coalesced.push_back(&MI);
    } else {
      LLVM_DEBUG(dbgs() << "<< " << MI);
    }
  }
  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB.getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCopies += Coalesced.size();

  LLVM_DEBUG(MBB.dump());
}
/// Allocates registers for a function.
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // Initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers.
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references.
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}