//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass. ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the machine instruction level pre-register allocation
// live interval splitting pass. It finds live interval barriers, i.e.
// instructions which will kill all physical registers in certain register
// classes, and splits all live intervals which cross the barrier.
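//
// Roughly, for a virtual register live across a barrier, the split inserts a
// spill before the barrier and a restore after it (an illustrative sketch
// only; the vreg and stack slot numbers are made up):
//
//     %reg1024 = ...                     %reg1024 = ...
//     ...                                store %reg1024 -> [ss#0]
//     BARRIER                   ==>      BARRIER
//     ...                                %reg1025 = load [ss#0]
//     ... = %reg1024                     ... = %reg1025
//
// The restored value is renumbered into a fresh vreg (see RenumberValno) so
// the allocator can assign it a different register, and the spill or restore
// may instead be folded into neighboring instructions or rematerialized when
// possible.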
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "pre-alloc-split"
#include "VirtRegMap.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
using namespace llvm;
static cl::opt<int> PreSplitLimit("pre-split-limit", cl::init(-1), cl::Hidden);
static cl::opt<int> DeadSplitLimit("dead-split-limit", cl::init(-1),
                                   cl::Hidden);
static cl::opt<int> RestoreFoldLimit("restore-fold-limit", cl::init(-1),
                                     cl::Hidden);
STATISTIC(NumSplits, "Number of intervals split");
STATISTIC(NumRemats, "Number of intervals split by rematerialization");
STATISTIC(NumFolds, "Number of intervals split with spill folding");
STATISTIC(NumRestoreFolds, "Number of intervals split with restore folding");
STATISTIC(NumRenumbers, "Number of intervals renumbered into new registers");
STATISTIC(NumDeadSpills, "Number of dead spills removed");
namespace {
  class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
    MachineFunction       *CurrMF;
    const TargetMachine   *TM;
    const TargetInstrInfo *TII;
    const TargetRegisterInfo* TRI;
    MachineFrameInfo      *MFI;
    MachineRegisterInfo   *MRI;
    LiveIntervals         *LIs;
    LiveStacks            *LSs;
    VirtRegMap            *VRM;
    // Barrier - Current barrier being processed.
    MachineInstr          *Barrier;

    // BarrierMBB - Basic block where the barrier resides in.
    MachineBasicBlock     *BarrierMBB;

    // BarrierIdx - Current barrier index.
    MachineInstrIndex     BarrierIdx;

    // CurrLI - Current live interval being split.
    LiveInterval          *CurrLI;

    // CurrSLI - Current stack slot live interval.
    LiveInterval          *CurrSLI;

    // CurrSValNo - Current val# for the stack slot live interval.
    VNInfo                *CurrSValNo;

    // IntervalSSMap - A map from live interval to spill slots.
    DenseMap<unsigned, int> IntervalSSMap;

    // Def2SpillMap - A map from a def instruction index to spill index.
    DenseMap<MachineInstrIndex, MachineInstrIndex> Def2SpillMap;
  public:
    static char ID;
    PreAllocSplitting() : MachineFunctionPass(&ID) {}

    virtual bool runOnMachineFunction(MachineFunction &MF);
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<LiveIntervals>();
      AU.addPreserved<LiveIntervals>();
      AU.addRequired<LiveStacks>();
      AU.addPreserved<LiveStacks>();
      AU.addPreserved<RegisterCoalescer>();
      if (StrongPHIElim)
        AU.addPreservedID(StrongPHIEliminationID);
      else
        AU.addPreservedID(PHIEliminationID);
      AU.addRequired<MachineDominatorTree>();
      AU.addRequired<MachineLoopInfo>();
      AU.addRequired<VirtRegMap>();
      AU.addPreserved<MachineDominatorTree>();
      AU.addPreserved<MachineLoopInfo>();
      AU.addPreserved<VirtRegMap>();
      MachineFunctionPass::getAnalysisUsage(AU);
    }
    virtual void releaseMemory() {
      IntervalSSMap.clear();
      Def2SpillMap.clear();
    }
    virtual const char *getPassName() const {
      return "Pre-Register Allocation Live Interval Splitting";
    }
    /// print - Implement the dump method.
    virtual void print(raw_ostream &O, const Module* M = 0) const {
      LIs->print(O, M);
    }

  private:
    MachineBasicBlock::iterator
      findNextEmptySlot(MachineBasicBlock*, MachineInstr*,
                        MachineInstrIndex&);
    MachineBasicBlock::iterator
      findSpillPoint(MachineBasicBlock*, MachineInstr*, MachineInstr*,
                     SmallPtrSet<MachineInstr*, 4>&, MachineInstrIndex&);
    MachineBasicBlock::iterator
      findRestorePoint(MachineBasicBlock*, MachineInstr*, MachineInstrIndex,
                       SmallPtrSet<MachineInstr*, 4>&, MachineInstrIndex&);
    int CreateSpillStackSlot(unsigned, const TargetRegisterClass*);

    bool IsAvailableInStack(MachineBasicBlock*, unsigned,
                            MachineInstrIndex, MachineInstrIndex,
                            MachineInstrIndex&, int&) const;

    void UpdateSpillSlotInterval(VNInfo*, MachineInstrIndex,
                                 MachineInstrIndex);
    bool SplitRegLiveInterval(LiveInterval*);

    bool SplitRegLiveIntervals(const TargetRegisterClass**,
                               SmallPtrSet<LiveInterval*, 8>&);

    bool createsNewJoin(LiveRange* LR, MachineBasicBlock* DefMBB,
                        MachineBasicBlock* BarrierMBB);
    bool Rematerialize(unsigned vreg, VNInfo* ValNo,
                       MachineInstr* DefMI,
                       MachineBasicBlock::iterator RestorePt,
                       MachineInstrIndex RestoreIdx,
                       SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    MachineInstr* FoldSpill(unsigned vreg, const TargetRegisterClass* RC,
                            MachineInstr* DefMI,
                            MachineInstr* Barrier,
                            MachineBasicBlock* MBB,
                            int& SS,
                            SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    MachineInstr* FoldRestore(unsigned vreg,
                              const TargetRegisterClass* RC,
                              MachineInstr* Barrier,
                              MachineBasicBlock* MBB,
                              int SS,
                              SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
    void RenumberValno(VNInfo* VN);
    void ReconstructLiveInterval(LiveInterval* LI);
    bool removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split);
    unsigned getNumberOfNonSpills(SmallPtrSet<MachineInstr*, 4>& MIs,
                                  unsigned Reg, int FrameIndex, bool& TwoAddr);
    VNInfo* PerformPHIConstruction(MachineBasicBlock::iterator Use,
                                   MachineBasicBlock* MBB, LiveInterval* LI,
                                   SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                   DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                   DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                   bool IsTopLevel, bool IsIntraBlock);
    VNInfo* PerformPHIConstructionFallBack(MachineBasicBlock::iterator Use,
                                   MachineBasicBlock* MBB, LiveInterval* LI,
                                   SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                   DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                   DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                   bool IsTopLevel, bool IsIntraBlock);
  };
} // end anonymous namespace

char PreAllocSplitting::ID = 0;

static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");

const PassInfo *const llvm::PreAllocSplittingID = &X;
/// findNextEmptySlot - Find a gap after the given machine instruction in the
/// instruction index map. If there isn't one, return end().
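/// (The numbering maintained by LiveIntervals leaves unused indices, e.g.
/// where instructions have been erased; reusing such a gap lets the new spill
/// or restore be assigned an index without renumbering the whole function.)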
MachineBasicBlock::iterator
PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
                                     MachineInstrIndex &SpotIndex) {
  MachineBasicBlock::iterator MII = MI;
  if (++MII != MBB->end()) {
    MachineInstrIndex Index =
      LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
    if (Index != MachineInstrIndex()) {
      SpotIndex = Index;
      return MII;
    }
  }
  return MBB->end();
}
/// findSpillPoint - Find a gap as far away from the given MI that's suitable
/// for spilling the current live interval. The index must be before any
/// defs and uses of the live interval register in the mbb. Return begin() if
/// none is found.
MachineBasicBlock::iterator
PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                  MachineInstr *DefMI,
                                  SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                  MachineInstrIndex &SpillIndex) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  MachineBasicBlock::iterator MII = MI;
  MachineBasicBlock::iterator EndPt = DefMI
    ? MachineBasicBlock::iterator(DefMI) : MBB->begin();

  while (MII != EndPt && !RefsInMBB.count(MII) &&
         MII->getOpcode() != TRI->getCallFrameSetupOpcode())
    --MII;
  if (MII == EndPt || RefsInMBB.count(MII)) return Pt;

  while (MII != EndPt && !RefsInMBB.count(MII)) {
    MachineInstrIndex Index = LIs->getInstructionIndex(MII);

    // We can't insert the spill between the barrier (a call), and its
    // corresponding call frame setup.
    if (MII->getOpcode() == TRI->getCallFrameDestroyOpcode()) {
      while (MII->getOpcode() != TRI->getCallFrameSetupOpcode()) {
        --MII;
        if (MII == EndPt)
          return Pt;
      }
      continue;
    } else if (LIs->hasGapBeforeInstr(Index)) {
      Pt = MII;
      SpillIndex = LIs->findGapBeforeInstr(Index, true);
    }

    if (RefsInMBB.count(MII))
      return Pt;

    --MII;
  }

  return Pt;
}
/// findRestorePoint - Find a gap in the instruction index map that's suitable
/// for restoring the current live interval value. The index must be before any
/// uses of the live interval register in the mbb. Return end() if none is
/// found.
MachineBasicBlock::iterator
PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
                                    MachineInstrIndex LastIdx,
                                    SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
                                    MachineInstrIndex &RestoreIndex) {
  // FIXME: Allow spill to be inserted to the beginning of the mbb. Update mbb
  // begin index accordingly.
  MachineBasicBlock::iterator Pt = MBB->end();
  MachineBasicBlock::iterator EndPt = MBB->getFirstTerminator();

  // We start at the call, so walk forward until we find the call frame teardown
  // since we can't insert restores before that. Bail if we encounter a use
  // during this time.
  MachineBasicBlock::iterator MII = MI;
  if (MII == EndPt) return Pt;

  while (MII != EndPt && !RefsInMBB.count(MII) &&
         MII->getOpcode() != TRI->getCallFrameDestroyOpcode())
    ++MII;
  if (MII == EndPt || RefsInMBB.count(MII)) return Pt;
  ++MII;

  // FIXME: Limit the number of instructions to examine to reduce
  // compile time.
  while (MII != EndPt) {
    MachineInstrIndex Index = LIs->getInstructionIndex(MII);
    if (Index > LastIdx)
      break;
    MachineInstrIndex Gap = LIs->findGapBeforeInstr(Index);

    // We can't insert a restore between the barrier (a call) and its
    // corresponding call frame teardown.
    if (MII->getOpcode() == TRI->getCallFrameSetupOpcode()) {
      do {
        if (MII == EndPt || RefsInMBB.count(MII)) return Pt;
        ++MII;
      } while (MII->getOpcode() != TRI->getCallFrameDestroyOpcode());
    } else if (Gap != MachineInstrIndex()) {
      Pt = MII;
      RestoreIndex = Gap;
    }

    if (RefsInMBB.count(MII))
      return Pt;

    ++MII;
  }

  return Pt;
}
/// CreateSpillStackSlot - Create a stack slot for the live interval being
/// split. If the live interval was previously split, just reuse the same
/// slot.
int PreAllocSplitting::CreateSpillStackSlot(unsigned Reg,
                                            const TargetRegisterClass *RC) {
  int SS;
  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
  if (I != IntervalSSMap.end()) {
    SS = I->second;
  } else {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
    IntervalSSMap[Reg] = SS;
  }

  // Create live interval for stack slot.
  CurrSLI = &LSs->getOrCreateInterval(SS, RC);
  if (CurrSLI->hasAtLeastOneValue())
    CurrSValNo = CurrSLI->getValNumInfo(0);
  else
    CurrSValNo = CurrSLI->getNextValue(MachineInstrIndex(), 0, false,
                                       LSs->getVNInfoAllocator());
  return SS;
}
/// IsAvailableInStack - Return true if register is available in a split stack
/// slot at the specified index.
bool
PreAllocSplitting::IsAvailableInStack(MachineBasicBlock *DefMBB,
                                      unsigned Reg, MachineInstrIndex DefIndex,
                                      MachineInstrIndex RestoreIndex,
                                      MachineInstrIndex &SpillIndex,
                                      int& SS) const {
  if (!DefMBB)
    return false;

  DenseMap<unsigned, int>::const_iterator I = IntervalSSMap.find(Reg);
  if (I == IntervalSSMap.end())
    return false;
  DenseMap<MachineInstrIndex, MachineInstrIndex>::const_iterator
    II = Def2SpillMap.find(DefIndex);
  if (II == Def2SpillMap.end())
    return false;

  // If last spill of def is in the same mbb as barrier mbb (where restore will
  // be), make sure it's not below the intended restore index.
  // FIXME: Undo the previous spill?
  assert(LIs->getMBBFromIndex(II->second) == DefMBB);
  if (DefMBB == BarrierMBB && II->second >= RestoreIndex)
    return false;

  SS = I->second;
  SpillIndex = II->second;
  return true;
}
/// UpdateSpillSlotInterval - Given the specified val# of the register live
/// interval being split, and the spill and restore indices, update the live
/// interval of the spill stack slot.
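///
/// The stack slot is live from the spill to the restore. In the simple
/// intra-block case (sketch; the slot number is made up):
///
///     store %reg -> [ss#0]    <-- SpillIndex: the stack slot becomes live
///     ...
///     %reg = load [ss#0]      <-- RestoreIndex: the stack slot value dies
///
/// When the spill is in an earlier block, the slot must instead be marked
/// live out of the spill block and through every block the split val# passes
/// through on the way to the restore, which is what the worklist below
/// computes.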
void
PreAllocSplitting::UpdateSpillSlotInterval(VNInfo *ValNo,
                                           MachineInstrIndex SpillIndex,
                                           MachineInstrIndex RestoreIndex) {
  assert(LIs->getMBBFromIndex(RestoreIndex) == BarrierMBB &&
         "Expect restore in the barrier mbb");

  MachineBasicBlock *MBB = LIs->getMBBFromIndex(SpillIndex);
  if (MBB == BarrierMBB) {
    // Intra-block spill + restore. We are done.
    LiveRange SLR(SpillIndex, RestoreIndex, CurrSValNo);
    CurrSLI->addRange(SLR);
    return;
  }

  SmallPtrSet<MachineBasicBlock*, 4> Processed;
  MachineInstrIndex EndIdx = LIs->getMBBEndIdx(MBB);
  LiveRange SLR(SpillIndex, LIs->getNextSlot(EndIdx), CurrSValNo);
  CurrSLI->addRange(SLR);
  Processed.insert(MBB);

  // Start from the spill mbb, figure out the extent of the spill slot's
  // live interval.
  SmallVector<MachineBasicBlock*, 4> WorkList;
  const LiveRange *LR = CurrLI->getLiveRangeContaining(SpillIndex);
  if (LR->end > EndIdx)
    // If live range extends beyond end of mbb, add successors to work list.
    for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
           SE = MBB->succ_end(); SI != SE; ++SI)
      WorkList.push_back(*SI);

  while (!WorkList.empty()) {
    MachineBasicBlock *MBB = WorkList.back();
    WorkList.pop_back();
    if (Processed.count(MBB))
      continue;
    MachineInstrIndex Idx = LIs->getMBBStartIdx(MBB);
    LR = CurrLI->getLiveRangeContaining(Idx);
    if (LR && LR->valno == ValNo) {
      EndIdx = LIs->getMBBEndIdx(MBB);
      if (Idx <= RestoreIndex && RestoreIndex < EndIdx) {
        // Spill slot live interval stops at the restore.
        LiveRange SLR(Idx, RestoreIndex, CurrSValNo);
        CurrSLI->addRange(SLR);
      } else if (LR->end > EndIdx) {
        // Live range extends beyond end of mbb, process successors.
        LiveRange SLR(Idx, LIs->getNextIndex(EndIdx), CurrSValNo);
        CurrSLI->addRange(SLR);
        for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
               SE = MBB->succ_end(); SI != SE; ++SI)
          WorkList.push_back(*SI);
      } else {
        LiveRange SLR(Idx, LR->end, CurrSValNo);
        CurrSLI->addRange(SLR);
      }
      Processed.insert(MBB);
    }
  }
}
/// PerformPHIConstruction - From properly set up use and def lists, use a PHI
/// construction algorithm to compute the ranges and valnos for an interval.
VNInfo*
PreAllocSplitting::PerformPHIConstruction(MachineBasicBlock::iterator UseI,
                                       MachineBasicBlock* MBB, LiveInterval* LI,
                                       SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                       DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                        bool IsTopLevel, bool IsIntraBlock) {
  // Return memoized result if it's available.
  if (IsTopLevel && Visited.count(UseI) && NewVNs.count(UseI))
    return NewVNs[UseI];
  else if (!IsTopLevel && IsIntraBlock && NewVNs.count(UseI))
    return NewVNs[UseI];
  else if (!IsIntraBlock && LiveOut.count(MBB))
    return LiveOut[MBB];
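  // (Intrablock queries are memoized per-instruction in NewVNs; block-level
  // queries are memoized per-block in LiveOut.)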
  // Check if our block contains any uses or defs.
  bool ContainsDefs = Defs.count(MBB);
  bool ContainsUses = Uses.count(MBB);

  VNInfo* RetVNI = 0;

  // Enumerate the cases of use/def containing blocks.
  if (!ContainsDefs && !ContainsUses) {
    return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                          NewVNs, LiveOut, Phis,
                                          IsTopLevel, IsIntraBlock);
  } else if (ContainsDefs && !ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];

    // Search for the def in this block. If we don't find it before the
    // instruction we care about, go to the fallback case. Note that that
    // should never happen: this cannot be intrablock, so use should
    // always be an end() iterator.
    assert(UseI == MBB->end() && "No use marked in intrablock");

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker))
        break;
      --Walker;
    }

    // Once we've found it, extend its VNInfo to our instruction.
    MachineInstrIndex DefIndex = LIs->getInstructionIndex(Walker);
    DefIndex = LIs->getDefIndex(DefIndex);
    MachineInstrIndex EndIndex = LIs->getMBBEndIdx(MBB);

    RetVNI = NewVNs[Walker];
    LI->addRange(LiveRange(DefIndex, LIs->getNextSlot(EndIndex), RetVNI));
  } else if (!ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // Search for the use in this block that precedes the instruction we care
    // about, going to the fallback case if we don't find it.
    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                            Uses, NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    bool found = false;
    while (Walker != MBB->begin()) {
      if (BlockUses.count(Walker)) {
        found = true;
        break;
      }
      --Walker;
    }

    // Must check begin() too.
    if (!found) {
      if (BlockUses.count(Walker))
        found = true;
      else
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);
    }

    MachineInstrIndex UseIndex = LIs->getInstructionIndex(Walker);
    UseIndex = LIs->getUseIndex(UseIndex);
    MachineInstrIndex EndIndex;
    if (IsIntraBlock) {
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LIs->getUseIndex(EndIndex);
    } else
      EndIndex = LIs->getMBBEndIdx(MBB);

    // Now, recursively phi construct the VNInfo for the use we found,
    // and then extend it to include the instruction we care about.
    RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                    NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(UseIndex, LIs->getNextSlot(EndIndex), RetVNI));

    // FIXME: Need to set kills properly for inter-block stuff.
    if (RetVNI->isKill(UseIndex)) RetVNI->removeKill(UseIndex);
    if (IsIntraBlock)
      RetVNI->addKill(EndIndex);
  } else if (ContainsDefs && ContainsUses) {
    SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[MBB];
    SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[MBB];

    // This case is basically a merging of the two preceding cases, with the
    // special note that checking for defs must take precedence over checking
    // for uses, because of two-address instructions.

    if (UseI == MBB->begin())
      return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs, Uses,
                                            NewVNs, LiveOut, Phis,
                                            IsTopLevel, IsIntraBlock);

    MachineBasicBlock::iterator Walker = UseI;
    --Walker;
    bool foundDef = false;
    bool foundUse = false;
    while (Walker != MBB->begin()) {
      if (BlockDefs.count(Walker)) {
        foundDef = true;
        break;
      } else if (BlockUses.count(Walker)) {
        foundUse = true;
        break;
      }
      --Walker;
    }

    // Must check begin() too.
    if (!foundDef && !foundUse) {
      if (BlockDefs.count(Walker))
        foundDef = true;
      else if (BlockUses.count(Walker))
        foundUse = true;
      else
        return PerformPHIConstructionFallBack(UseI, MBB, LI, Visited, Defs,
                                              Uses, NewVNs, LiveOut, Phis,
                                              IsTopLevel, IsIntraBlock);
    }

    MachineInstrIndex StartIndex = LIs->getInstructionIndex(Walker);
    StartIndex = foundDef ? LIs->getDefIndex(StartIndex) :
                            LIs->getUseIndex(StartIndex);
    MachineInstrIndex EndIndex;
    if (IsIntraBlock) {
      EndIndex = LIs->getInstructionIndex(UseI);
      EndIndex = LIs->getUseIndex(EndIndex);
    } else
      EndIndex = LIs->getMBBEndIdx(MBB);

    if (foundDef)
      RetVNI = NewVNs[Walker];
    else
      RetVNI = PerformPHIConstruction(Walker, MBB, LI, Visited, Defs, Uses,
                                      NewVNs, LiveOut, Phis, false, true);

    LI->addRange(LiveRange(StartIndex, LIs->getNextSlot(EndIndex), RetVNI));

    if (foundUse && RetVNI->isKill(StartIndex))
      RetVNI->removeKill(StartIndex);
    if (IsIntraBlock)
      RetVNI->addKill(EndIndex);
  }
  // Memoize results so we don't have to recompute them.
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;

  if (!NewVNs.count(UseI))
    NewVNs[UseI] = RetVNI;
  Visited.insert(UseI);

  return RetVNI;
}
/// PerformPHIConstructionFallBack - PerformPHIConstruction fall back path.
VNInfo*
PreAllocSplitting::PerformPHIConstructionFallBack(
                                        MachineBasicBlock::iterator UseI,
                                       MachineBasicBlock* MBB, LiveInterval* LI,
                                       SmallPtrSet<MachineInstr*, 4>& Visited,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
            DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
                                       DenseMap<MachineInstr*, VNInfo*>& NewVNs,
                                 DenseMap<MachineBasicBlock*, VNInfo*>& LiveOut,
                                    DenseMap<MachineBasicBlock*, VNInfo*>& Phis,
                                        bool IsTopLevel, bool IsIntraBlock) {
  // NOTE: Because this is the fallback case from other cases, we do NOT
  // assume that we are not intrablock here.
  if (Phis.count(MBB)) return Phis[MBB];

  MachineInstrIndex StartIndex = LIs->getMBBStartIdx(MBB);
  VNInfo *RetVNI = Phis[MBB] =
    LI->getNextValue(MachineInstrIndex(), /*FIXME*/ 0, false,
                     LIs->getVNInfoAllocator());
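  // (Creating and memoizing the phi value *before* recursing into the
  // predecessors is what terminates cycles: a CFG loop back into this block
  // hits the Phis.count check above instead of recursing forever.)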
  if (!IsIntraBlock) LiveOut[MBB] = RetVNI;

  // If there are no uses or defs between our starting point and the
  // beginning of the block, then recursively perform phi construction
  // on our predecessors.
  DenseMap<MachineBasicBlock*, VNInfo*> IncomingVNs;
  for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
    VNInfo* Incoming = PerformPHIConstruction((*PI)->end(), *PI, LI,
                                              Visited, Defs, Uses, NewVNs,
                                              LiveOut, Phis, false, false);
    if (Incoming != 0)
      IncomingVNs[*PI] = Incoming;
  }
  if (MBB->pred_size() == 1 && !RetVNI->hasPHIKill()) {
    VNInfo* OldVN = RetVNI;
    VNInfo* NewVN = IncomingVNs.begin()->second;
    VNInfo* MergedVN = LI->MergeValueNumberInto(OldVN, NewVN);
    if (MergedVN == OldVN) std::swap(OldVN, NewVN);

    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator LOI = LiveOut.begin(),
           LOE = LiveOut.end(); LOI != LOE; ++LOI)
      if (LOI->second == OldVN)
        LOI->second = MergedVN;
    for (DenseMap<MachineInstr*, VNInfo*>::iterator NVI = NewVNs.begin(),
           NVE = NewVNs.end(); NVI != NVE; ++NVI)
      if (NVI->second == OldVN)
        NVI->second = MergedVN;
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator PI = Phis.begin(),
           PE = Phis.end(); PI != PE; ++PI)
      if (PI->second == OldVN)
        PI->second = MergedVN;
    RetVNI = MergedVN;
  } else {
    // Otherwise, merge the incoming VNInfos with a phi join. Create a new
    // VNInfo to represent the joined value.
    for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator I =
           IncomingVNs.begin(), E = IncomingVNs.end(); I != E; ++I) {
      I->second->setHasPHIKill(true);
      MachineInstrIndex KillIndex = LIs->getMBBEndIdx(I->first);
      if (!I->second->isKill(KillIndex))
        I->second->addKill(KillIndex);
    }
  }
  MachineInstrIndex EndIndex;
  if (IsIntraBlock) {
    EndIndex = LIs->getInstructionIndex(UseI);
    EndIndex = LIs->getUseIndex(EndIndex);
  } else
    EndIndex = LIs->getMBBEndIdx(MBB);
  LI->addRange(LiveRange(StartIndex, LIs->getNextSlot(EndIndex), RetVNI));
  if (IsIntraBlock)
    RetVNI->addKill(EndIndex);
  // Memoize results so we don't have to recompute them.
  if (!IsIntraBlock)
    LiveOut[MBB] = RetVNI;

  if (!NewVNs.count(UseI))
    NewVNs[UseI] = RetVNI;
  Visited.insert(UseI);

  return RetVNI;
}
/// ReconstructLiveInterval - Recompute a live interval from scratch.
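/// This proceeds in four steps: clear the interval's old ranges and val#s,
/// create a fresh VNInfo for every def of the register, bucket the defs and
/// uses by basic block, and then PHI-construct from every use back to its
/// reaching definitions, finally adding single-slot ranges for any defs that
/// are left dead.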
void PreAllocSplitting::ReconstructLiveInterval(LiveInterval* LI) {
  BumpPtrAllocator& Alloc = LIs->getVNInfoAllocator();

  // Clear the old ranges and valnos.
  LI->clear();

  // Cache the uses and defs of the register.
  typedef DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> > RegMap;
  RegMap Defs, Uses;

  // Keep track of the new VNs we're creating.
  DenseMap<MachineInstr*, VNInfo*> NewVNs;
  SmallPtrSet<VNInfo*, 2> PhiVNs;
  // Cache defs, and create a new VNInfo for each def.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
         DE = MRI->def_end(); DI != DE; ++DI) {
    Defs[(*DI).getParent()].insert(&*DI);

    MachineInstrIndex DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LIs->getDefIndex(DefIdx);

    assert(DI->getOpcode() != TargetInstrInfo::PHI &&
           "Following NewVN isPHIDef flag incorrect. Fix me!");
    VNInfo* NewVN = LI->getNextValue(DefIdx, 0, true, Alloc);

    // If the def is a move, set the copy field.
    unsigned SrcReg, DstReg, SrcSubIdx, DstSubIdx;
    if (TII->isMoveInstr(*DI, SrcReg, DstReg, SrcSubIdx, DstSubIdx))
      if (DstReg == LI->reg)
        NewVN->setCopy(&*DI);

    NewVNs[&*DI] = NewVN;
  }

  // Cache uses as a separate pass from actually processing them.
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
         UE = MRI->use_end(); UI != UE; ++UI)
    Uses[(*UI).getParent()].insert(&*UI);
  // Now, actually process every use and use a phi construction algorithm
  // to walk from it to its reaching definitions, building VNInfos along
  // the way.
  DenseMap<MachineBasicBlock*, VNInfo*> LiveOut;
  DenseMap<MachineBasicBlock*, VNInfo*> Phis;
  SmallPtrSet<MachineInstr*, 4> Visited;
  for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
         UE = MRI->use_end(); UI != UE; ++UI) {
    PerformPHIConstruction(&*UI, UI->getParent(), LI, Visited, Defs,
                           Uses, NewVNs, LiveOut, Phis, true, true);
  }
  // Add ranges for dead defs.
  for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
         DE = MRI->def_end(); DI != DE; ++DI) {
    MachineInstrIndex DefIdx = LIs->getInstructionIndex(&*DI);
    DefIdx = LIs->getDefIndex(DefIdx);

    if (LI->liveAt(DefIdx)) continue;

    VNInfo* DeadVN = NewVNs[&*DI];
    LI->addRange(LiveRange(DefIdx, LIs->getNextSlot(DefIdx), DeadVN));
    DeadVN->addKill(DefIdx);
  }
}
/// RenumberValno - Split the given valno out into a new vreg, allowing it to
/// be allocated to a different register. This function creates a new vreg,
/// copies the valno and its live ranges over to the new vreg's interval,
/// removes them from the old interval, and rewrites all uses and defs of
/// the original reg to the new vreg within those ranges.
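///
/// A val# rarely moves alone: if the value is redefined by a two-address
/// instruction (e.g. the tied def in x = add x, y), the redefining val# must
/// move to the new vreg as well, so the worklist below chases kills into
/// tied defs and renumbers the whole chain together.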
void PreAllocSplitting::RenumberValno(VNInfo* VN) {
  SmallVector<VNInfo*, 4> Stack;
  SmallVector<VNInfo*, 4> VNsToCopy;
  Stack.push_back(VN);

  // Walk through and copy the valno we care about, and any other valnos
  // that are two-address redefinitions of the one we care about. These
  // will need to be rewritten as well. We also check for safety of the
  // renumbering here, by making sure that none of the valnos involved has
  // phi kills.
  while (!Stack.empty()) {
    VNInfo* OldVN = Stack.back();
    Stack.pop_back();

    // Bail out if we ever encounter a valno that has a PHI kill. We can't
    // renumber these.
    if (OldVN->hasPHIKill()) return;

    VNsToCopy.push_back(OldVN);
    // Locate two-address redefinitions.
    for (VNInfo::KillSet::iterator KI = OldVN->kills.begin(),
           KE = OldVN->kills.end(); KI != KE; ++KI) {
      assert(!KI->isPHIIndex() &&
             "VN previously reported having no PHI kills.");
      MachineInstr* MI = LIs->getInstructionFromIndex(*KI);
      unsigned DefIdx = MI->findRegisterDefOperandIdx(CurrLI->reg);
      if (DefIdx == ~0U) continue;
      if (MI->isRegTiedToUseOperand(DefIdx)) {
        VNInfo* NextVN =
          CurrLI->findDefinedVNInfoForRegInt(LIs->getDefIndex(*KI));
        if (NextVN == OldVN) continue;
        Stack.push_back(NextVN);
      }
    }
  }

  // Create the new vreg.
  unsigned NewVReg = MRI->createVirtualRegister(MRI->getRegClass(CurrLI->reg));

  // Create the new live interval.
  LiveInterval& NewLI = LIs->getOrCreateInterval(NewVReg);

  for (SmallVector<VNInfo*, 4>::iterator OI = VNsToCopy.begin(), OE =
         VNsToCopy.end(); OI != OE; ++OI) {
    VNInfo* OldVN = *OI;

    // Copy the valno over.
    VNInfo* NewVN = NewLI.createValueCopy(OldVN, LIs->getVNInfoAllocator());
    NewLI.MergeValueInAsValue(*CurrLI, OldVN, NewVN);

    // Remove the valno from the old interval.
    CurrLI->removeValNo(OldVN);
  }
  // Rewrite defs and uses. This is done in two stages to avoid invalidating
  // the reg_iterator.
  SmallVector<std::pair<MachineInstr*, unsigned>, 8> OpsToChange;

  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineOperand& MO = I.getOperand();
    MachineInstrIndex InstrIdx = LIs->getInstructionIndex(&*I);

    if ((MO.isUse() && NewLI.liveAt(LIs->getUseIndex(InstrIdx))) ||
        (MO.isDef() && NewLI.liveAt(LIs->getDefIndex(InstrIdx))))
      OpsToChange.push_back(std::make_pair(&*I, I.getOperandNo()));
  }
  for (SmallVector<std::pair<MachineInstr*, unsigned>, 8>::iterator I =
         OpsToChange.begin(), E = OpsToChange.end(); I != E; ++I) {
    MachineInstr* Inst = I->first;
    unsigned OpIdx = I->second;
    MachineOperand& MO = Inst->getOperand(OpIdx);
    MO.setReg(NewVReg);
  }

  // Grow the VirtRegMap, since we've created a new vreg.
  VRM->grow();

  // The renumbered vreg shares a stack slot with the old register.
  if (IntervalSSMap.count(CurrLI->reg))
    IntervalSSMap[NewVReg] = IntervalSSMap[CurrLI->reg];

  ++NumRenumbers;
}
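/// Rematerialize - Attempt to avoid a spill entirely by recomputing the
/// split value just before the restore point rather than reloading it from a
/// stack slot. Returns true if the interval was successfully split this way.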
bool PreAllocSplitting::Rematerialize(unsigned VReg, VNInfo* ValNo,
                                      MachineInstr* DefMI,
                                      MachineBasicBlock::iterator RestorePt,
                                      MachineInstrIndex RestoreIdx,
                                      SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock& MBB = *RestorePt->getParent();

  MachineBasicBlock::iterator KillPt = BarrierMBB->end();
  MachineInstrIndex KillIdx;
  if (!ValNo->isDefAccurate() || DefMI->getParent() == BarrierMBB)
    KillPt = findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, KillIdx);
  else
    KillPt = findNextEmptySlot(DefMI->getParent(), DefMI, KillIdx);

  if (KillPt == DefMI->getParent()->end())
    return false;

  TII->reMaterialize(MBB, RestorePt, VReg, 0, DefMI);
  LIs->InsertMachineInstrInMaps(prior(RestorePt), RestoreIdx);

  ReconstructLiveInterval(CurrLI);
  MachineInstrIndex RematIdx = LIs->getInstructionIndex(prior(RestorePt));
  RematIdx = LIs->getDefIndex(RematIdx);
  RenumberValno(CurrLI->findDefinedVNInfoForRegInt(RematIdx));

  ++NumSplits;
  ++NumRemats;
  return true;
}
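/// FoldSpill - Walk up from the barrier looking for the def of vreg and try
/// to fold the spill into that instruction as a store to the split stack
/// slot rather than emitting a separate store. Returns the folded
/// instruction, or null if folding isn't possible.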
MachineInstr* PreAllocSplitting::FoldSpill(unsigned vreg,
                                           const TargetRegisterClass* RC,
                                           MachineInstr* DefMI,
                                           MachineInstr* Barrier,
                                           MachineBasicBlock* MBB,
                                           int& SS,
                                           SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  MachineBasicBlock::iterator Pt = MBB->begin();

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty())
    return 0;

  MachineBasicBlock::iterator FoldPt = Barrier;
  while (&*FoldPt != DefMI && FoldPt != MBB->begin() &&
         !RefsInMBB.count(FoldPt))
    --FoldPt;

  int OpIdx = FoldPt->findRegisterDefOperandIdx(vreg, false);
  if (OpIdx == -1)
    return 0;

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))
    return 0;

  DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(vreg);
  if (I != IntervalSSMap.end()) {
    SS = I->second;
  } else {
    SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
  }

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
                                             FoldPt, Ops, SS);

  if (FMI) {
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);
    ++NumFolds;

    IntervalSSMap[vreg] = SS;
    CurrSLI = &LSs->getOrCreateInterval(SS, RC);
    if (CurrSLI->hasAtLeastOneValue())
      CurrSValNo = CurrSLI->getValNumInfo(0);
    else
      CurrSValNo = CurrSLI->getNextValue(MachineInstrIndex(), 0, false,
                                         LSs->getVNInfoAllocator());
  }

  return FMI;
}
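/// FoldRestore - Walk down from the barrier looking for the first use of
/// vreg and try to fold the restore into that instruction as a load from the
/// split stack slot rather than emitting a separate reload. Returns the
/// folded instruction, or null if folding isn't possible.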
MachineInstr* PreAllocSplitting::FoldRestore(unsigned vreg,
                                             const TargetRegisterClass* RC,
                                             MachineInstr* Barrier,
                                             MachineBasicBlock* MBB,
                                             int SS,
                                             SmallPtrSet<MachineInstr*, 4>& RefsInMBB) {
  if ((int)RestoreFoldLimit != -1 && RestoreFoldLimit == (int)NumRestoreFolds)
    return 0;

  // Go top down if RefsInMBB is empty.
  if (RefsInMBB.empty())
    return 0;

  // Can't fold a restore between a call stack setup and teardown.
  MachineBasicBlock::iterator FoldPt = Barrier;

  // Advance from barrier to call frame teardown.
  while (FoldPt != MBB->getFirstTerminator() &&
         FoldPt->getOpcode() != TRI->getCallFrameDestroyOpcode()) {
    if (RefsInMBB.count(FoldPt))
      return 0;

    ++FoldPt;
  }

  if (FoldPt == MBB->getFirstTerminator())
    return 0;
  else
    ++FoldPt;

  // Now find the restore point.
  while (FoldPt != MBB->getFirstTerminator() && !RefsInMBB.count(FoldPt)) {
    if (FoldPt->getOpcode() == TRI->getCallFrameSetupOpcode()) {
      while (FoldPt != MBB->getFirstTerminator() &&
             FoldPt->getOpcode() != TRI->getCallFrameDestroyOpcode()) {
        if (RefsInMBB.count(FoldPt))
          return 0;

        ++FoldPt;
      }

      if (FoldPt == MBB->getFirstTerminator())
        return 0;
    }

    ++FoldPt;
  }

  if (FoldPt == MBB->getFirstTerminator())
    return 0;

  int OpIdx = FoldPt->findRegisterUseOperandIdx(vreg, true);
  if (OpIdx == -1)
    return 0;

  SmallVector<unsigned, 1> Ops;
  Ops.push_back(OpIdx);

  if (!TII->canFoldMemoryOperand(FoldPt, Ops))
    return 0;

  MachineInstr* FMI = TII->foldMemoryOperand(*MBB->getParent(),
                                             FoldPt, Ops, SS);

  if (FMI) {
    LIs->ReplaceMachineInstrInMaps(FoldPt, FMI);
    FMI = MBB->insert(MBB->erase(FoldPt), FMI);
    ++NumRestoreFolds;
  }

  return FMI;
}
/// SplitRegLiveInterval - Split (spill and restore) the given live interval
/// so it would not cross the barrier that's being processed. Shrink wrap
/// (minimize) the live interval to the last uses.
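///
/// The strategy: locate the val# live across the barrier and a restore point
/// after it; prefer rematerializing the def at the restore point; otherwise
/// spill (after the def, or just before the barrier if the def is unknown),
/// reload at the restore point, and renumber the restored value into a fresh
/// vreg so it can be allocated independently.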
bool PreAllocSplitting::SplitRegLiveInterval(LiveInterval *LI) {
  CurrLI = LI;

  // Find live range where current interval crosses the barrier.
  LiveInterval::iterator LR =
    CurrLI->FindLiveRangeContaining(LIs->getUseIndex(BarrierIdx));
  VNInfo *ValNo = LR->valno;

  assert(!ValNo->isUnused() && "Val# is defined by a dead def?");

  MachineInstr *DefMI = ValNo->isDefAccurate()
    ? LIs->getInstructionFromIndex(ValNo->def) : NULL;

  // If this would create a new join point, do not split.
  if (DefMI && createsNewJoin(LR, DefMI->getParent(), Barrier->getParent()))
    return false;

  // Find all references in the barrier mbb.
  SmallPtrSet<MachineInstr*, 4> RefsInMBB;
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(CurrLI->reg),
         E = MRI->reg_end(); I != E; ++I) {
    MachineInstr *RefMI = &*I;
    if (RefMI->getParent() == BarrierMBB)
      RefsInMBB.insert(RefMI);
  }

  // Find a point to restore the value after the barrier.
  MachineInstrIndex RestoreIndex;
  MachineBasicBlock::iterator RestorePt =
    findRestorePoint(BarrierMBB, Barrier, LR->end, RefsInMBB, RestoreIndex);
  if (RestorePt == BarrierMBB->end())
    return false;

  if (DefMI && LIs->isReMaterializable(*LI, ValNo, DefMI))
    if (Rematerialize(LI->reg, ValNo, DefMI, RestorePt,
                      RestoreIndex, RefsInMBB))
      return true;

  // Add a spill either before the barrier or after the definition.
  MachineBasicBlock *DefMBB = DefMI ? DefMI->getParent() : NULL;
  const TargetRegisterClass *RC = MRI->getRegClass(CurrLI->reg);
  MachineInstrIndex SpillIndex;
  MachineInstr *SpillMI = NULL;
  int SS = -1;
  if (!ValNo->isDefAccurate()) {
    // If we don't know where the def is we must split just before the barrier.
    if ((SpillMI = FoldSpill(LI->reg, RC, 0, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);
    } else {
      MachineBasicBlock::iterator SpillPt =
        findSpillPoint(BarrierMBB, Barrier, NULL, RefsInMBB, SpillIndex);
      if (SpillPt == BarrierMBB->begin())
        return false; // No gap to insert spill.
      // Add spill.

      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*BarrierMBB, SpillPt, CurrLI->reg, true, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
    }
  } else if (!IsAvailableInStack(DefMBB, CurrLI->reg, ValNo->def,
                                 RestoreIndex, SpillIndex, SS)) {
    // If it's already split, just restore the value. There is no need to spill
    // the def again.
    if (!DefMI)
      return false; // Def is dead. Do nothing.

    if ((SpillMI = FoldSpill(LI->reg, RC, DefMI, Barrier,
                             BarrierMBB, SS, RefsInMBB))) {
      SpillIndex = LIs->getInstructionIndex(SpillMI);
    } else {
      // Check if it's possible to insert a spill after the def MI.
      MachineBasicBlock::iterator SpillPt;
      if (DefMBB == BarrierMBB) {
        // Add spill after the def and the last use before the barrier.
        SpillPt = findSpillPoint(BarrierMBB, Barrier, DefMI,
                                 RefsInMBB, SpillIndex);
        if (SpillPt == DefMBB->begin())
          return false; // No gap to insert spill.
      } else {
        SpillPt = findNextEmptySlot(DefMBB, DefMI, SpillIndex);
        if (SpillPt == DefMBB->end())
          return false; // No gap to insert spill.
      }
      // Add spill. The store instruction kills the register if def is before
      // the barrier in the barrier block.
      SS = CreateSpillStackSlot(CurrLI->reg, RC);
      TII->storeRegToStackSlot(*DefMBB, SpillPt, CurrLI->reg,
                               DefMBB == BarrierMBB, SS, RC);
      SpillMI = prior(SpillPt);
      LIs->InsertMachineInstrInMaps(SpillMI, SpillIndex);
    }
  }

  // Remember def instruction index to spill index mapping.
  if (DefMI && SpillMI)
    Def2SpillMap[ValNo->def] = SpillIndex;
  // Add restore.
  bool FoldedRestore = false;
  if (MachineInstr* LMI = FoldRestore(CurrLI->reg, RC, Barrier,
                                      BarrierMBB, SS, RefsInMBB)) {
    RestorePt = LMI;
    RestoreIndex = LIs->getInstructionIndex(RestorePt);
    FoldedRestore = true;
  } else {
    TII->loadRegFromStackSlot(*BarrierMBB, RestorePt, CurrLI->reg, SS, RC);
    MachineInstr *LoadMI = prior(RestorePt);
    LIs->InsertMachineInstrInMaps(LoadMI, RestoreIndex);
  }

  // Update spill stack slot live interval.
  UpdateSpillSlotInterval(ValNo, LIs->getNextSlot(LIs->getUseIndex(SpillIndex)),
                          LIs->getDefIndex(RestoreIndex));

  ReconstructLiveInterval(CurrLI);

  if (!FoldedRestore) {
    MachineInstrIndex RestoreIdx = LIs->getInstructionIndex(prior(RestorePt));
    RestoreIdx = LIs->getDefIndex(RestoreIdx);
    RenumberValno(CurrLI->findDefinedVNInfoForRegInt(RestoreIdx));
  }

  ++NumSplits;
  return true;
}
/// SplitRegLiveIntervals - Split all register live intervals that cross the
/// barrier that's being processed.
bool
PreAllocSplitting::SplitRegLiveIntervals(const TargetRegisterClass **RCs,
                                         SmallPtrSet<LiveInterval*, 8>& Split) {
  // First find all the virtual registers whose live intervals are intercepted
  // by the current barrier.
  SmallVector<LiveInterval*, 8> Intervals;
  for (const TargetRegisterClass **RC = RCs; *RC; ++RC) {
    // FIXME: If it's not safe to move any instruction that defines the barrier
    // register class, then it means there are some special dependencies which
    // codegen is not modelling. Ignore these barriers for now.
    if (!TII->isSafeToMoveRegClassDefs(*RC))
      continue;
    std::vector<unsigned> &VRs = MRI->getRegClassVirtRegs(*RC);
    for (unsigned i = 0, e = VRs.size(); i != e; ++i) {
      unsigned Reg = VRs[i];
      if (!LIs->hasInterval(Reg))
        continue;
      LiveInterval *LI = &LIs->getInterval(Reg);
      if (LI->liveAt(BarrierIdx) && !Barrier->readsRegister(Reg))
        // Virtual register live interval is intercepted by the barrier. We
        // should split and shrink wrap its interval if possible.
        Intervals.push_back(LI);
    }
  }

  // Process the affected live intervals.
  bool Change = false;
  while (!Intervals.empty()) {
    if (PreSplitLimit != -1 && (int)NumSplits == PreSplitLimit)
      break;
    else if (NumSplits == 4)
      Change |= Change;
    LiveInterval *LI = Intervals.back();
    Intervals.pop_back();
    bool result = SplitRegLiveInterval(LI);
    if (result) Split.insert(LI);
    Change |= result;
  }

  return Change;
}
unsigned PreAllocSplitting::getNumberOfNonSpills(
                                  SmallPtrSet<MachineInstr*, 4>& MIs,
                                  unsigned Reg, int FrameIndex,
                                  bool& FeedsTwoAddr) {
  unsigned NonSpills = 0;
  for (SmallPtrSet<MachineInstr*, 4>::iterator UI = MIs.begin(), UE = MIs.end();
       UI != UE; ++UI) {
    int StoreFrameIndex;
    unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
    if (StoreVReg != Reg || StoreFrameIndex != FrameIndex)
      ++NonSpills;

    int DefIdx = (*UI)->findRegisterDefOperandIdx(Reg);
    if (DefIdx != -1 && (*UI)->isRegTiedToUseOperand(DefIdx))
      FeedsTwoAddr = true;
  }

  return NonSpills;
}
/// removeDeadSpills - After doing splitting, filter through all intervals we've
/// split, and see if any of the spills are unnecessary. If so, remove them.
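///
/// The cruft the splitter leaves behind takes two shapes (sketch; the names
/// are made up):
///
///     %reg1025 = load [ss#0]             %reg1025 = load [ss#0]
///     ... = op %reg1025                  store %reg1025 -> [ss#0]
///     store %reg1025 -> [ss#0]
///
/// The left form (load-use-store) can often be folded into a single
/// memory-operand instruction; the right form (load-store, or a load with no
/// uses at all) can simply be deleted.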
bool PreAllocSplitting::removeDeadSpills(SmallPtrSet<LiveInterval*, 8>& split) {
  bool changed = false;

  // Walk over all of the live intervals that were touched by the splitter,
  // and see if we can do any DCE and/or folding.
  for (SmallPtrSet<LiveInterval*, 8>::iterator LI = split.begin(),
         LE = split.end(); LI != LE; ++LI) {
    DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> > VNUseCount;

    // First, collect all the uses of the vreg, and sort them by their
    // reaching definition (VNInfo).
    for (MachineRegisterInfo::use_iterator UI = MRI->use_begin((*LI)->reg),
           UE = MRI->use_end(); UI != UE; ++UI) {
      MachineInstrIndex index = LIs->getInstructionIndex(&*UI);
      index = LIs->getUseIndex(index);

      const LiveRange* LR = (*LI)->getLiveRangeContaining(index);
      VNUseCount[LR->valno].insert(&*UI);
    }
1251 // and/or fold them away.
1252 for (LiveInterval::vni_iterator VI
= (*LI
)->vni_begin(),
1253 VE
= (*LI
)->vni_end(); VI
!= VE
; ++VI
) {
1255 if (DeadSplitLimit
!= -1 && (int)NumDeadSpills
== DeadSplitLimit
)
1258 VNInfo
* CurrVN
= *VI
;
1260 // We don't currently try to handle definitions with PHI kills, because
1261 // it would involve processing more than one VNInfo at once.
1262 if (CurrVN
->hasPHIKill()) continue;
1264 // We also don't try to handle the results of PHI joins, since there's
1265 // no defining instruction to analyze.
1266 if (!CurrVN
->isDefAccurate() || CurrVN
->isUnused()) continue;
1268 // We're only interested in eliminating cruft introduced by the splitter,
1269 // is of the form load-use or load-use-store. First, check that the
1270 // definition is a load, and remember what stack slot we loaded it from.
1271 MachineInstr
* DefMI
= LIs
->getInstructionFromIndex(CurrVN
->def
);
1273 if (!TII
->isLoadFromStackSlot(DefMI
, FrameIndex
)) continue;
1275 // If the definition has no uses at all, just DCE it.
1276 if (VNUseCount
[CurrVN
].size() == 0) {
1277 LIs
->RemoveMachineInstrFromMaps(DefMI
);
1278 (*LI
)->removeValNo(CurrVN
);
1279 DefMI
->eraseFromParent();
1280 VNUseCount
.erase(CurrVN
);
      // Second, get the number of non-store uses of the definition, as well as
      // a flag indicating whether it feeds into a later two-address definition.
      bool FeedsTwoAddr = false;
      unsigned NonSpillCount = getNumberOfNonSpills(VNUseCount[CurrVN],
                                                    (*LI)->reg, FrameIndex,
                                                    FeedsTwoAddr);

      // If there's one non-store use and it doesn't feed a two-addr, then
      // this is a load-use-store case that we can try to fold.
      if (NonSpillCount == 1 && !FeedsTwoAddr) {
        // Start by finding the non-store use MachineInstr.
        SmallPtrSet<MachineInstr*, 4>::iterator UI = VNUseCount[CurrVN].begin();
        int StoreFrameIndex;
        unsigned StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
        while (UI != VNUseCount[CurrVN].end() &&
               (StoreVReg == (*LI)->reg && StoreFrameIndex == FrameIndex)) {
          ++UI;
          if (UI != VNUseCount[CurrVN].end())
            StoreVReg = TII->isStoreToStackSlot(*UI, StoreFrameIndex);
        }
        if (UI == VNUseCount[CurrVN].end()) continue;

        MachineInstr* use = *UI;

        // Attempt to fold it away!
        int OpIdx = use->findRegisterUseOperandIdx((*LI)->reg, false);
        if (OpIdx == -1) continue;
        SmallVector<unsigned, 1> Ops;
        Ops.push_back(OpIdx);
        if (!TII->canFoldMemoryOperand(use, Ops)) continue;

        MachineInstr* NewMI =
                          TII->foldMemoryOperand(*use->getParent()->getParent(),
                                                 use, Ops, FrameIndex);

        if (!NewMI) continue;

        // Update relevant analyses.
        LIs->RemoveMachineInstrFromMaps(DefMI);
        LIs->ReplaceMachineInstrInMaps(use, NewMI);
        (*LI)->removeValNo(CurrVN);

        DefMI->eraseFromParent();
        MachineBasicBlock* MBB = use->getParent();
        NewMI = MBB->insert(MBB->erase(use), NewMI);
        VNUseCount[CurrVN].erase(use);
        // Remove deleted instructions. Note that we need to remove them from
        // the VNInfo->use map as well, just to be safe.
        for (SmallPtrSet<MachineInstr*, 4>::iterator II =
               VNUseCount[CurrVN].begin(), IE = VNUseCount[CurrVN].end();
             II != IE; ++II) {
          for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
                 VNI = VNUseCount.begin(), VNE = VNUseCount.end(); VNI != VNE;
               ++VNI)
            if (VNI->first != CurrVN)
              VNI->second.erase(*II);
          LIs->RemoveMachineInstrFromMaps(*II);
          (*II)->eraseFromParent();
        }

        VNUseCount.erase(CurrVN);

        for (DenseMap<VNInfo*, SmallPtrSet<MachineInstr*, 4> >::iterator
               VI = VNUseCount.begin(), VE = VNUseCount.end(); VI != VE; ++VI)
          if (VI->second.erase(use))
            VI->second.insert(NewMI);

        ++NumDeadSpills;
        changed = true;
        continue;
      }

      // If there's more than one non-store instruction, we can't profitably
      // fold it, so bail.
      if (NonSpillCount) continue;

      // Otherwise, this is a load-store case, so DCE them.
      for (SmallPtrSet<MachineInstr*, 4>::iterator UI =
             VNUseCount[CurrVN].begin(), UE = VNUseCount[CurrVN].end();
           UI != UE; ++UI) {
        LIs->RemoveMachineInstrFromMaps(*UI);
        (*UI)->eraseFromParent();
      }

      VNUseCount.erase(CurrVN);

      LIs->RemoveMachineInstrFromMaps(DefMI);
      (*LI)->removeValNo(CurrVN);
      DefMI->eraseFromParent();
      ++NumDeadSpills;
      changed = true;
    }
  }

  return changed;
}
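/// createsNewJoin - Conservatively determine whether splitting the given
/// live range at the barrier would create a new join point for the value,
/// e.g. because a path from the barrier block loops back to it or re-enters
/// blocks dominated by the def. Such splits are not handled, so the caller
/// bails out when this returns true.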
bool PreAllocSplitting::createsNewJoin(LiveRange* LR,
                                       MachineBasicBlock* DefMBB,
                                       MachineBasicBlock* BarrierMBB) {
  if (DefMBB == BarrierMBB)
    return false;

  if (LR->valno->hasPHIKill())
    return false;

  MachineInstrIndex MBBEnd = LIs->getMBBEndIdx(BarrierMBB);
  if (LR->end < MBBEnd)
    return false;

  MachineLoopInfo& MLI = getAnalysis<MachineLoopInfo>();
  if (MLI.getLoopFor(DefMBB) != MLI.getLoopFor(BarrierMBB))
    return true;

  MachineDominatorTree& MDT = getAnalysis<MachineDominatorTree>();
  SmallPtrSet<MachineBasicBlock*, 4> Visited;
  typedef std::pair<MachineBasicBlock*,
                    MachineBasicBlock::succ_iterator> ItPair;
  SmallVector<ItPair, 4> Stack;
  Stack.push_back(std::make_pair(BarrierMBB, BarrierMBB->succ_begin()));

  while (!Stack.empty()) {
    ItPair P = Stack.back();
    Stack.pop_back();

    MachineBasicBlock* PredMBB = P.first;
    MachineBasicBlock::succ_iterator S = P.second;

    if (S == PredMBB->succ_end())
      continue;
    else if (Visited.count(*S)) {
      Stack.push_back(std::make_pair(PredMBB, ++S));
      continue;
    } else
      Stack.push_back(std::make_pair(PredMBB, S+1));

    MachineBasicBlock* MBB = *S;
    Visited.insert(MBB);

    if (MBB == BarrierMBB)
      return true;

    MachineDomTreeNode* DefMDTN = MDT.getNode(DefMBB);
    MachineDomTreeNode* BarrierMDTN = MDT.getNode(BarrierMBB);
    MachineDomTreeNode* MDTN = MDT.getNode(MBB)->getIDom();
    while (MDTN) {
      if (MDTN == DefMDTN)
        return true;
      else if (MDTN == BarrierMDTN)
        break;
      MDTN = MDTN->getIDom();
    }

    MBBEnd = LIs->getMBBEndIdx(MBB);
    if (LR->end > MBBEnd)
      Stack.push_back(std::make_pair(MBB, MBB->succ_begin()));
  }

  return false;
}
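/// runOnMachineFunction - The driver: walk the function depth-first and, for
/// every instruction whose descriptor reports register class barriers, split
/// all virtual registers of those classes that are live across it; then
/// clean up any spills the splitting left dead.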
bool PreAllocSplitting::runOnMachineFunction(MachineFunction &MF) {
  CurrMF = &MF;
  TM     = &MF.getTarget();
  TRI    = TM->getRegisterInfo();
  TII    = TM->getInstrInfo();
  MFI    = MF.getFrameInfo();
  MRI    = &MF.getRegInfo();
  LIs    = &getAnalysis<LiveIntervals>();
  LSs    = &getAnalysis<LiveStacks>();
  VRM    = &getAnalysis<VirtRegMap>();

  bool MadeChange = false;

  // Make sure blocks are numbered in order.
  MF.RenumberBlocks();

  MachineBasicBlock *Entry = MF.begin();
  SmallPtrSet<MachineBasicBlock*,16> Visited;

  SmallPtrSet<LiveInterval*, 8> Split;

  for (df_ext_iterator<MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*,16> >
         DFI = df_ext_begin(Entry, Visited), E = df_ext_end(Entry, Visited);
       DFI != E; ++DFI) {
    BarrierMBB = *DFI;
    for (MachineBasicBlock::iterator I = BarrierMBB->begin(),
           E = BarrierMBB->end(); I != E; ++I) {
      Barrier = &*I;
      const TargetRegisterClass **BarrierRCs =
        Barrier->getDesc().getRegClassBarriers();
      if (!BarrierRCs)
        continue;
      BarrierIdx = LIs->getInstructionIndex(Barrier);
      MadeChange |= SplitRegLiveIntervals(BarrierRCs, Split);
    }
  }

  MadeChange |= removeDeadSpills(Split);

  return MadeChange;
}