1 //===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the LiveRange and LiveInterval classes. Given some
10 // numbering of the machine instructions, an interval [i, j) is said to be a
11 // live range for register v if there is no instruction with number j' >= j
12 // such that v is live at j' and there is no instruction with number i' < i such
13 // that v is live at i'. In this implementation ranges can have holes,
14 // i.e. a range might look like [1,20), [50,65), [1000,1001). Each
15 // individual segment is represented as an instance of LiveRange::Segment,
16 // and the whole range is represented as an instance of LiveRange.
18 //===----------------------------------------------------------------------===//
20 #ifndef LLVM_CODEGEN_LIVEINTERVAL_H
21 #define LLVM_CODEGEN_LIVEINTERVAL_H
23 #include "llvm/ADT/ArrayRef.h"
24 #include "llvm/ADT/IntEqClasses.h"
25 #include "llvm/ADT/STLExtras.h"
26 #include "llvm/ADT/SmallVector.h"
27 #include "llvm/ADT/iterator_range.h"
28 #include "llvm/CodeGen/SlotIndexes.h"
29 #include "llvm/MC/LaneBitmask.h"
30 #include "llvm/Support/Allocator.h"
31 #include "llvm/Support/MathExtras.h"
32 #include <algorithm>
33 #include <cassert>
34 #include <cstddef>
35 #include <functional>
36 #include <memory>
37 #include <set>
38 #include <tuple>
39 #include <utility>
41 namespace llvm {
43 class CoalescerPair;
44 class LiveIntervals;
45 class MachineRegisterInfo;
46 class raw_ostream;
48 /// VNInfo - Value Number Information.
49 /// This class holds information about a machine-level value, including
50 /// definition and use points.
51 ///
52 class VNInfo {
53 public:
54 using Allocator = BumpPtrAllocator;
56 /// The ID number of this value.
57 unsigned id;
59 /// The index of the defining instruction.
60 SlotIndex def;
62 /// VNInfo constructor.
63 VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}
65 /// VNInfo constructor, copies values from orig, except for the value number.
66 VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}
68 /// Copy from the parameter into this VNInfo.
69 void copyFrom(VNInfo &src) {
70 def = src.def;
73 /// Returns true if this value is defined by a PHI instruction (or was,
74 /// PHI instructions may have been eliminated).
75 /// PHI-defs begin at a block boundary, all other defs begin at register or
76 /// EC slots.
77 bool isPHIDef() const { return def.isBlock(); }
79 /// Returns true if this value is unused.
80 bool isUnused() const { return !def.isValid(); }
82 /// Mark this value as unused.
83 void markUnused() { def = SlotIndex(); }
86 /// Result of a LiveRange query. This class hides the implementation details
87 /// of live ranges, and it should be used as the primary interface for
88 /// examining live ranges around instructions.
89 class LiveQueryResult {
90 VNInfo *const EarlyVal;
91 VNInfo *const LateVal;
92 const SlotIndex EndPoint;
93 const bool Kill;
95 public:
96 LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
97 bool Kill)
98 : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
101 /// Return the value that is live-in to the instruction. This is the value
102 /// that will be read by the instruction's use operands. Return NULL if no
103 /// value is live-in.
104 VNInfo *valueIn() const {
105 return EarlyVal;
108 /// Return true if the live-in value is killed by this instruction. This
109 /// means that either the live range ends at the instruction, or it changes
110 /// value.
111 bool isKill() const {
112 return Kill;
115 /// Return true if this instruction has a dead def.
116 bool isDeadDef() const {
117 return EndPoint.isDead();
120 /// Return the value leaving the instruction, if any. This can be a
121 /// live-through value, or a live def. A dead def returns NULL.
122 VNInfo *valueOut() const {
123 return isDeadDef() ? nullptr : LateVal;
126 /// Returns the value alive at the end of the instruction, if any. This can
127 /// be a live-through value, a live def or a dead def.
128 VNInfo *valueOutOrDead() const {
129 return LateVal;
132 /// Return the value defined by this instruction, if any. This includes
133 /// dead defs; it is the value created by the instruction's def operands.
134 VNInfo *valueDefined() const {
135 return EarlyVal == LateVal ? nullptr : LateVal;
138 /// Return the end point of the last live range segment to interact with
139 /// the instruction, if any.
141 /// The end point is an invalid SlotIndex only if the live range doesn't
142 /// intersect the instruction at all.
144 /// The end point may be at or past the end of the instruction's basic
145 /// block. That means the value was live out of the block.
146 SlotIndex endPoint() const {
147 return EndPoint;
151 /// This class represents the liveness of a register, stack slot, etc.
152 /// It manages an ordered list of Segment objects.
153 /// The Segments are organized in static single assignment form: at places
154 /// where a new value is defined, or where different values reach a CFG join,
155 /// a new segment with a new value number is used.
156 class LiveRange {
157 public:
158 /// This represents a simple continuous liveness interval for a value.
159 /// The start point is inclusive, the end point exclusive. These intervals
160 /// are rendered as [start,end).
161 struct Segment {
162 SlotIndex start; // Start point of the interval (inclusive)
163 SlotIndex end; // End point of the interval (exclusive)
164 VNInfo *valno = nullptr; // identifier for the value contained in this
165 // segment.
167 Segment() = default;
169 Segment(SlotIndex S, SlotIndex E, VNInfo *V)
170 : start(S), end(E), valno(V) {
171 assert(S < E && "Cannot create empty or backwards segment");
174 /// Return true if the index is covered by this segment.
175 bool contains(SlotIndex I) const {
176 return start <= I && I < end;
179 /// Return true if the given interval, [S, E), is covered by this segment.
180 bool containsInterval(SlotIndex S, SlotIndex E) const {
181 assert((S < E) && "Backwards interval?");
182 return (start <= S && S < end) && (start < E && E <= end);
185 bool operator<(const Segment &Other) const {
186 return std::tie(start, end) < std::tie(Other.start, Other.end);
188 bool operator==(const Segment &Other) const {
189 return start == Other.start && end == Other.end;
192 void dump() const;
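// A minimal sketch of the half-open [start,end) semantics above, assuming
// three valid SlotIndexes A < B < C and a value number VN:
//
//   LiveRange::Segment Seg(A, C, VN);
//   Seg.contains(A);            // true: the start point is inclusive.
//   Seg.contains(C);            // false: the end point is exclusive.
//   Seg.containsInterval(A, B); // true: [A,B) lies within [A,C).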
195 using Segments = SmallVector<Segment, 2>;
196 using VNInfoList = SmallVector<VNInfo *, 2>;
198 Segments segments; // the liveness segments
199 VNInfoList valnos; // value#'s
201 // The segment set is used temporarily to accelerate initial computation
202 // of live ranges of physical registers in computeRegUnitRange.
203 // After that the set is flushed to the segment vector and deleted.
204 using SegmentSet = std::set<Segment>;
205 std::unique_ptr<SegmentSet> segmentSet;
207 using iterator = Segments::iterator;
208 using const_iterator = Segments::const_iterator;
210 iterator begin() { return segments.begin(); }
211 iterator end() { return segments.end(); }
213 const_iterator begin() const { return segments.begin(); }
214 const_iterator end() const { return segments.end(); }
216 using vni_iterator = VNInfoList::iterator;
217 using const_vni_iterator = VNInfoList::const_iterator;
219 vni_iterator vni_begin() { return valnos.begin(); }
220 vni_iterator vni_end() { return valnos.end(); }
222 const_vni_iterator vni_begin() const { return valnos.begin(); }
223 const_vni_iterator vni_end() const { return valnos.end(); }
225 /// Constructs a new LiveRange object.
226 LiveRange(bool UseSegmentSet = false)
227 : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
228 : nullptr) {}
230 /// Constructs a new LiveRange object by copying segments and valnos from
231 /// another LiveRange.
232 LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
233 assert(Other.segmentSet == nullptr &&
234 "Copying of LiveRanges with active SegmentSets is not supported");
235 assign(Other, Allocator);
238 /// Copies values numbers and live segments from \p Other into this range.
239 void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
240 if (this == &Other)
241 return;
243 assert(Other.segmentSet == nullptr &&
244 "Copying of LiveRanges with active SegmentSets is not supported");
245 // Duplicate valnos.
246 for (const VNInfo *VNI : Other.valnos)
247 createValueCopy(VNI, Allocator);
248 // Now we can copy segments and remap their valnos.
249 for (const Segment &S : Other.segments)
250 segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
253 /// advanceTo - Advance the specified iterator to point to the Segment
254 /// containing the specified position, or end() if the position is past the
255 /// end of the range. If no Segment contains this position, but the
256 /// position is in a hole, this method returns an iterator pointing to the
257 /// Segment immediately after the hole.
258 iterator advanceTo(iterator I, SlotIndex Pos) {
259 assert(I != end());
260 if (Pos >= endIndex())
261 return end();
262 while (I->end <= Pos) ++I;
263 return I;
266 const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
267 assert(I != end());
268 if (Pos >= endIndex())
269 return end();
270 while (I->end <= Pos) ++I;
271 return I;
274 /// find - Return an iterator pointing to the first segment that ends after
275 /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
276 /// when searching large ranges.
278 /// If Pos is contained in a Segment, that segment is returned.
279 /// If Pos is in a hole, the following Segment is returned.
280 /// If Pos is beyond endIndex, end() is returned.
281 iterator find(SlotIndex Pos);
283 const_iterator find(SlotIndex Pos) const {
284 return const_cast<LiveRange*>(this)->find(Pos);
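// A small sketch of scanning segments with find(), assuming a populated
// LiveRange LR and two SlotIndexes Begin < Until:
//
//   for (LiveRange::const_iterator I = LR.find(Begin), E = LR.end();
//        I != E && I->start < Until; ++I) {
//     // Each visited segment ends after Begin and starts before Until.
//   }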
287 void clear() {
288 valnos.clear();
289 segments.clear();
292 size_t size() const {
293 return segments.size();
296 bool hasAtLeastOneValue() const { return !valnos.empty(); }
298 bool containsOneValue() const { return valnos.size() == 1; }
300 unsigned getNumValNums() const { return (unsigned)valnos.size(); }
302 /// getValNumInfo - Returns pointer to the specified val#.
304 inline VNInfo *getValNumInfo(unsigned ValNo) {
305 return valnos[ValNo];
307 inline const VNInfo *getValNumInfo(unsigned ValNo) const {
308 return valnos[ValNo];
311 /// containsValue - Returns true if VNI belongs to this range.
312 bool containsValue(const VNInfo *VNI) const {
313 return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
316 /// getNextValue - Create a new value number and return it. MIIdx specifies
317 /// the instruction that defines the value number.
318 VNInfo *getNextValue(SlotIndex def, VNInfo::Allocator &VNInfoAllocator) {
319 VNInfo *VNI =
320 new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), def);
321 valnos.push_back(VNI);
322 return VNI;
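// A brief sketch of defining a new value and giving it liveness, assuming a
// VNInfo::Allocator Alloc and SlotIndexes DefIdx < KillIdx (addSegment is
// declared later in this class):
//
//   VNInfo *VNI = LR.getNextValue(DefIdx, Alloc);
//   LR.addSegment(LiveRange::Segment(DefIdx, KillIdx, VNI));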
325 /// createDeadDef - Make sure the range has a value defined at Def.
326 /// If one already exists, return it. Otherwise allocate a new value and
327 /// add liveness for a dead def.
328 VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);
330 /// Create a def of value @p VNI. Return @p VNI. If there already exists
331 /// a definition at VNI->def, the value defined there must be @p VNI.
332 VNInfo *createDeadDef(VNInfo *VNI);
334 /// Create a copy of the given value. The new value will be identical except
335 /// for the Value number.
336 VNInfo *createValueCopy(const VNInfo *orig,
337 VNInfo::Allocator &VNInfoAllocator) {
338 VNInfo *VNI =
339 new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
340 valnos.push_back(VNI);
341 return VNI;
344 /// RenumberValues - Renumber all values in order of appearance and remove
345 /// unused values.
346 void RenumberValues();
348 /// MergeValueNumberInto - This method is called when two value numbers
349 /// are found to be equivalent. This eliminates V1, replacing all
350 /// segments with the V1 value number with the V2 value number. This can
351 /// cause merging of the V1/V2 value numbers and compaction of the value space.
352 VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);
354 /// Merge all of the live segments of a specific val# in RHS into this live
355 /// range as the specified value number. The segments in RHS are allowed
356 /// to overlap with segments in the current range; the value numbers of the
357 /// overlapped live segments are replaced with the specified value
358 /// number.
359 void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);
361 /// MergeValueInAsValue - Merge all of the segments of a specific val#
362 /// in RHS into this live range as the specified value number.
363 /// The segments in RHS are allowed to overlap with segments in the
364 /// current range, but only if the overlapping segments have the
365 /// specified value number.
366 void MergeValueInAsValue(const LiveRange &RHS,
367 const VNInfo *RHSValNo, VNInfo *LHSValNo);
369 bool empty() const { return segments.empty(); }
371 /// beginIndex - Return the lowest numbered slot covered.
372 SlotIndex beginIndex() const {
373 assert(!empty() && "Call to beginIndex() on empty range.");
374 return segments.front().start;
377 /// endIndex - Return the maximum point of the whole range,
378 /// exclusive.
379 SlotIndex endIndex() const {
380 assert(!empty() && "Call to endIndex() on empty range.");
381 return segments.back().end;
384 bool expiredAt(SlotIndex index) const {
385 return index >= endIndex();
388 bool liveAt(SlotIndex index) const {
389 const_iterator r = find(index);
390 return r != end() && r->start <= index;
393 /// Return the segment that contains the specified index, or null if there
394 /// is none.
395 const Segment *getSegmentContaining(SlotIndex Idx) const {
396 const_iterator I = FindSegmentContaining(Idx);
397 return I == end() ? nullptr : &*I;
400 /// Return the live segment that contains the specified index, or null if
401 /// there is none.
402 Segment *getSegmentContaining(SlotIndex Idx) {
403 iterator I = FindSegmentContaining(Idx);
404 return I == end() ? nullptr : &*I;
407 /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
408 VNInfo *getVNInfoAt(SlotIndex Idx) const {
409 const_iterator I = FindSegmentContaining(Idx);
410 return I == end() ? nullptr : I->valno;
413 /// getVNInfoBefore - Return the VNInfo that is live up to but not
414 /// necessarily including Idx, or NULL. Use this to find the reaching def
415 /// used by an instruction at this SlotIndex position.
416 VNInfo *getVNInfoBefore(SlotIndex Idx) const {
417 const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
418 return I == end() ? nullptr : I->valno;
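// Sketch of the difference between the two queries, assuming UseIdx is the
// SlotIndex of an instruction:
//
//   VNInfo *AtIdx  = LR.getVNInfoAt(UseIdx);     // value live at UseIdx
//   VNInfo *Before = LR.getVNInfoBefore(UseIdx); // value reaching UseIdx
//
// The results differ when a new segment starts at UseIdx, e.g. because the
// instruction there defines a new value.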
421 /// Return an iterator to the segment that contains the specified index, or
422 /// end() if there is none.
423 iterator FindSegmentContaining(SlotIndex Idx) {
424 iterator I = find(Idx);
425 return I != end() && I->start <= Idx ? I : end();
428 const_iterator FindSegmentContaining(SlotIndex Idx) const {
429 const_iterator I = find(Idx);
430 return I != end() && I->start <= Idx ? I : end();
433 /// overlaps - Return true if the intersection of the two live ranges is
434 /// not empty.
435 bool overlaps(const LiveRange &other) const {
436 if (other.empty())
437 return false;
438 return overlapsFrom(other, other.begin());
441 /// overlaps - Return true if the two ranges have overlapping segments
442 /// that are not coalescable according to CP.
444 /// Overlapping segments where one range is defined by a coalescable
445 /// copy are allowed.
446 bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
447 const SlotIndexes&) const;
449 /// overlaps - Return true if the live range overlaps an interval specified
450 /// by [Start, End).
451 bool overlaps(SlotIndex Start, SlotIndex End) const;
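// Quick sketch of the overlap queries, assuming LR and Other are LiveRanges
// and [S,E) is a slot-index interval:
//
//   bool Conflict      = LR.overlaps(Other); // any common live slot?
//   bool TouchesWindow = LR.overlaps(S, E);  // live anywhere in [S,E)?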
453 /// overlapsFrom - Return true if the intersection of the two live ranges
454 /// is not empty. The specified iterator is a hint that we can begin
455 /// scanning the Other range starting at StartPos.
456 bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;
458 /// Returns true if all segments of the @p Other live range are completely
459 /// covered by this live range.
460 /// Adjacent live ranges do not affect the covering: the live range
461 /// [1,5](5,10] covers (3,7].
462 bool covers(const LiveRange &Other) const;
464 /// Add the specified Segment to this range, merging segments as
465 /// appropriate. This returns an iterator to the inserted segment (which
466 /// may have grown since it was inserted).
467 iterator addSegment(Segment S);
469 /// Attempt to extend a value defined after @p StartIdx to include @p Use.
470 /// Both @p StartIdx and @p Use should be in the same basic block. In case
471 /// of subranges, an extension could be prevented by an explicit "undef"
472 /// caused by a <def,read-undef> on a non-overlapping lane. The list of
473 /// locations of such "undefs" should be provided in @p Undefs.
474 /// The return value is a pair: the first element is VNInfo of the value
475 /// that was extended (possibly nullptr), the second is a boolean value
476 /// indicating whether an "undef" was encountered.
477 /// If this range is live before @p Use in the basic block that starts at
478 /// @p StartIdx, and there is no intervening "undef", extend it to be live
479 /// up to @p Use, and return the pair {value, false}. If there is no
480 /// segment before @p Use and there is no "undef" between @p StartIdx and
481 /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
482 /// return {nullptr, true}.
483 std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
484 SlotIndex StartIdx, SlotIndex Kill);
486 /// Simplified version of the above "extendInBlock", which assumes that
487 /// no register lanes are undefined by <def,read-undef> operands.
488 /// If this range is live before @p Use in the basic block that starts
489 /// at @p StartIdx, extend it to be live up to @p Use, and return the
490 /// value. If there is no segment before @p Use, return nullptr.
491 VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);
493 /// join - Join two live ranges (this, and other) together. This applies
494 /// mappings to the value numbers in the LHS/RHS ranges as specified. If
495 /// the ranges are not joinable, this aborts.
496 void join(LiveRange &Other,
497 const int *ValNoAssignments,
498 const int *RHSValNoAssignments,
499 SmallVectorImpl<VNInfo *> &NewVNInfo);
501 /// True iff this live range is a single segment that lies between the
502 /// specified boundaries, exclusively. Vregs live across a backedge are not
503 /// considered local. The boundaries are expected to lie within an extended
504 /// basic block, so vregs that are not live out should contain no holes.
505 bool isLocal(SlotIndex Start, SlotIndex End) const {
506 return beginIndex() > Start.getBaseIndex() &&
507 endIndex() < End.getBoundaryIndex();
510 /// Remove the specified segment from this range. Note that the segment
511 /// must be a single Segment in its entirety.
512 void removeSegment(SlotIndex Start, SlotIndex End,
513 bool RemoveDeadValNo = false);
515 void removeSegment(Segment S, bool RemoveDeadValNo = false) {
516 removeSegment(S.start, S.end, RemoveDeadValNo);
519 /// Remove segment pointed to by iterator @p I from this range. This does
520 /// not remove dead value numbers.
521 iterator removeSegment(iterator I) {
522 return segments.erase(I);
525 /// Query Liveness at Idx.
526 /// The sub-instruction slot of Idx doesn't matter; only the instruction
527 /// it refers to is considered.
528 LiveQueryResult Query(SlotIndex Idx) const {
529 // Find the segment that enters the instruction.
530 const_iterator I = find(Idx.getBaseIndex());
531 const_iterator E = end();
532 if (I == E)
533 return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);
535 // Is this an instruction live-in segment?
536 // If Idx is the start index of a basic block, include live-in segments
537 // that start at Idx.getBaseIndex().
538 VNInfo *EarlyVal = nullptr;
539 VNInfo *LateVal = nullptr;
540 SlotIndex EndPoint;
541 bool Kill = false;
542 if (I->start <= Idx.getBaseIndex()) {
543 EarlyVal = I->valno;
544 EndPoint = I->end;
545 // Move to the potentially live-out segment.
546 if (SlotIndex::isSameInstr(Idx, I->end)) {
547 Kill = true;
548 if (++I == E)
549 return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
551 // Special case: A PHIDef value can have its def in the middle of a
552 // segment if the value happens to be live out of the layout
553 // predecessor.
554 // Such a value is not live-in.
555 if (EarlyVal->def == Idx.getBaseIndex())
556 EarlyVal = nullptr;
558 // I now points to the segment that may be live-through, or defined by
559 // this instr. Ignore segments starting after the current instr.
560 if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
561 LateVal = I->valno;
562 EndPoint = I->end;
564 return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
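// A minimal usage sketch, assuming LR is a LiveRange and MIIdx is the
// SlotIndex of a machine instruction:
//
//   LiveQueryResult LRQ = LR.Query(MIIdx);
//   if (VNInfo *In = LRQ.valueIn()) {
//     // The instruction reads the live-in value; LRQ.isKill() tells
//     // whether that value is killed here.
//   }
//   if (VNInfo *Def = LRQ.valueDefined()) {
//     // The instruction defines a new value (possibly a dead def,
//     // see LRQ.isDeadDef()).
//   }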
567 /// removeValNo - Remove all the segments defined by the specified value#.
568 /// Also remove the value# from value# list.
569 void removeValNo(VNInfo *ValNo);
571 /// Returns true if the live range is zero length, i.e. no live segments
572 /// span instructions. It doesn't pay to spill such a range.
573 bool isZeroLength(SlotIndexes *Indexes) const {
574 for (const Segment &S : segments)
575 if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
576 S.end.getBaseIndex())
577 return false;
578 return true;
581 // Returns true if any segment in the live range contains any of the
582 // provided slot indexes. Slots which occur in holes between
583 // segments will not cause the function to return true.
584 bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;
586 bool operator<(const LiveRange& other) const {
587 const SlotIndex &thisIndex = beginIndex();
588 const SlotIndex &otherIndex = other.beginIndex();
589 return thisIndex < otherIndex;
592 /// Returns true if there is an explicit "undef" between @p Begin
593 /// and @p End.
594 bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
595 SlotIndex End) const {
596 return std::any_of(Undefs.begin(), Undefs.end(),
597 [Begin,End] (SlotIndex Idx) -> bool {
598 return Begin <= Idx && Idx < End;
602 /// Flush segment set into the regular segment vector.
603 /// The method is to be called after the live range
604 /// has been created, if use of the segment set was
605 /// activated in the constructor of the live range.
606 void flushSegmentSet();
608 /// Stores, to the output iterator O, the indexes from the input index
609 /// sequence R at which this LiveRange is live.
610 /// R is a range of _ascending sorted_ _random access_ iterators
611 /// to the input indexes. Indexes stored at O are sorted in ascending order,
612 /// so the result can be used directly in a subsequent search (for example
613 /// for subranges). Returns true if at least one index was found.
614 template <typename Range, typename OutputIt>
615 bool findIndexesLiveAt(Range &&R, OutputIt O) const {
616 assert(std::is_sorted(R.begin(), R.end()));
617 auto Idx = R.begin(), EndIdx = R.end();
618 auto Seg = segments.begin(), EndSeg = segments.end();
619 bool Found = false;
620 while (Idx != EndIdx && Seg != EndSeg) {
621 // If Seg ends at or before *Idx, use binary search to find the first
622 // segment that ends above *Idx.
623 if (Seg->end <= *Idx) {
624 Seg = std::upper_bound(++Seg, EndSeg, *Idx,
625 [=](typename std::remove_reference<decltype(*Idx)>::type V,
626 const typename std::remove_reference<decltype(*Seg)>::type &S) {
627 return V < S.end;
629 if (Seg == EndSeg)
630 break;
632 auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
633 if (NotLessStart == EndIdx)
634 break;
635 auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
636 if (NotLessEnd != NotLessStart) {
637 Found = true;
638 O = std::copy(NotLessStart, NotLessEnd, O);
640 Idx = NotLessEnd;
641 ++Seg;
643 return Found;
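// Illustrative call, assuming Idxs is a SmallVector<SlotIndex, 8> already
// sorted in ascending order:
//
//   SmallVector<SlotIndex, 8> LiveIdxs;
//   if (LR.findIndexesLiveAt(Idxs, std::back_inserter(LiveIdxs))) {
//     // LiveIdxs now holds, in ascending order, the subset of Idxs at
//     // which this range is live.
//   }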
646 void print(raw_ostream &OS) const;
647 void dump() const;
649 /// Walk the range and assert if any invariants fail to hold.
651 /// Note that this is a no-op when asserts are disabled.
652 #ifdef NDEBUG
653 void verify() const {}
654 #else
655 void verify() const;
656 #endif
658 protected:
659 /// Append a segment to the list of segments.
660 void append(const LiveRange::Segment S);
662 private:
663 friend class LiveRangeUpdater;
664 void addSegmentToSet(Segment S);
665 void markValNoForDeletion(VNInfo *V);
668 inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
669 LR.print(OS);
670 return OS;
673 /// LiveInterval - This class represents the liveness of a register,
674 /// or stack slot.
675 class LiveInterval : public LiveRange {
676 public:
677 using super = LiveRange;
679 /// A live range for subregisters. The LaneMask specifies which parts of the
680 /// super register are covered by the interval.
681 /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
682 class SubRange : public LiveRange {
683 public:
684 SubRange *Next = nullptr;
685 LaneBitmask LaneMask;
687 /// Constructs a new SubRange object.
688 SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}
690 /// Constructs a new SubRange object by copying liveness from @p Other.
691 SubRange(LaneBitmask LaneMask, const LiveRange &Other,
692 BumpPtrAllocator &Allocator)
693 : LiveRange(Other, Allocator), LaneMask(LaneMask) {}
695 void print(raw_ostream &OS) const;
696 void dump() const;
699 private:
700 SubRange *SubRanges = nullptr; ///< Single linked list of subregister live
701 /// ranges.
703 public:
704 const unsigned reg; // the register or stack slot of this interval.
705 float weight; // weight of this interval
707 LiveInterval(unsigned Reg, float Weight) : reg(Reg), weight(Weight) {}
709 ~LiveInterval() {
710 clearSubRanges();
713 template<typename T>
714 class SingleLinkedListIterator {
715 T *P;
717 public:
718 SingleLinkedListIterator<T>(T *P) : P(P) {}
720 SingleLinkedListIterator<T> &operator++() {
721 P = P->Next;
722 return *this;
724 SingleLinkedListIterator<T> operator++(int) {
725 SingleLinkedListIterator res = *this;
726 ++*this;
727 return res;
729 bool operator!=(const SingleLinkedListIterator<T> &Other) {
730 return P != Other.operator->();
732 bool operator==(const SingleLinkedListIterator<T> &Other) {
733 return P == Other.operator->();
735 T &operator*() const {
736 return *P;
738 T *operator->() const {
739 return P;
743 using subrange_iterator = SingleLinkedListIterator<SubRange>;
744 using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;
746 subrange_iterator subrange_begin() {
747 return subrange_iterator(SubRanges);
749 subrange_iterator subrange_end() {
750 return subrange_iterator(nullptr);
753 const_subrange_iterator subrange_begin() const {
754 return const_subrange_iterator(SubRanges);
756 const_subrange_iterator subrange_end() const {
757 return const_subrange_iterator(nullptr);
760 iterator_range<subrange_iterator> subranges() {
761 return make_range(subrange_begin(), subrange_end());
764 iterator_range<const_subrange_iterator> subranges() const {
765 return make_range(subrange_begin(), subrange_end());
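// A short sketch of walking subregister liveness, assuming LI is a
// LiveInterval with subregister liveness tracking enabled:
//
//   if (LI.hasSubRanges()) {
//     for (const LiveInterval::SubRange &SR : LI.subranges()) {
//       // SR.LaneMask names the lanes covered by this subrange; SR itself
//       // supports the usual LiveRange queries (liveAt, Query, ...).
//     }
//   }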
768 /// Creates a new empty subregister live range. The range is added at the
769 /// beginning of the subrange list; subrange iterators stay valid.
770 SubRange *createSubRange(BumpPtrAllocator &Allocator,
771 LaneBitmask LaneMask) {
772 SubRange *Range = new (Allocator) SubRange(LaneMask);
773 appendSubRange(Range);
774 return Range;
777 /// Like createSubRange() but the new range is filled with a copy of the
778 /// liveness information in @p CopyFrom.
779 SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
780 LaneBitmask LaneMask,
781 const LiveRange &CopyFrom) {
782 SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
783 appendSubRange(Range);
784 return Range;
787 /// Returns true if subregister liveness information is available.
788 bool hasSubRanges() const {
789 return SubRanges != nullptr;
792 /// Removes all subregister liveness information.
793 void clearSubRanges();
795 /// Removes all subranges without any segments (subranges without segments
796 /// are not considered valid and should only exist temporarily).
797 void removeEmptySubRanges();
799 /// getSize - Returns the sum of the sizes of all segments in this interval.
801 unsigned getSize() const;
803 /// isSpillable - Can this interval be spilled?
804 bool isSpillable() const {
805 return weight != huge_valf;
808 /// markNotSpillable - Mark interval as not spillable
809 void markNotSpillable() {
810 weight = huge_valf;
813 /// For a given lane mask @p LaneMask, compute indexes at which the
814 /// lane is marked undefined by subregister <def,read-undef> definitions.
815 void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
816 LaneBitmask LaneMask,
817 const MachineRegisterInfo &MRI,
818 const SlotIndexes &Indexes) const;
820 /// Refines the subranges to support \p LaneMask. This may only be called
821 /// for hasSubRanges()==true. Subregister ranges are split or created
822 /// until \p LaneMask can be matched exactly. \p Apply is executed on the
823 /// matching subranges.
825 /// Example:
826 /// Given an interval with subranges with lanemasks L0F00, L00F0 and
827 /// L000F, refining for mask L0018. Will split the L00F0 lane into
828 /// L00E0 and L0010 and the L000F lane into L0007 and L0008. The Mod
829 /// function will be applied to the L0010 and L0008 subranges.
831 /// \p Indexes and \p TRI are required to clean up the VNIs that
832 /// don't define the related lane masks after they get shrunk. E.g.,
833 /// when L000F gets split into L0007 and L0008 maybe only a subset
834 /// of the VNIs that defined L000F defines L0007.
835 void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
836 std::function<void(LiveInterval::SubRange &)> Apply,
837 const SlotIndexes &Indexes,
838 const TargetRegisterInfo &TRI);
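// Illustrative call shape, assuming Allocator, Indexes and TRI are in scope
// and LI already has subranges:
//
//   LI.refineSubRanges(Allocator, Mask,
//                      [&](LiveInterval::SubRange &SR) {
//                        // Called for each subrange whose lanes, after any
//                        // splitting, fall inside Mask.
//                      },
//                      Indexes, TRI);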
840 bool operator<(const LiveInterval& other) const {
841 const SlotIndex &thisIndex = beginIndex();
842 const SlotIndex &otherIndex = other.beginIndex();
843 return std::tie(thisIndex, reg) < std::tie(otherIndex, other.reg);
846 void print(raw_ostream &OS) const;
847 void dump() const;
849 /// Walk the interval and assert if any invariants fail to hold.
851 /// Note that this is a no-op when asserts are disabled.
852 #ifdef NDEBUG
853 void verify(const MachineRegisterInfo *MRI = nullptr) const {}
854 #else
855 void verify(const MachineRegisterInfo *MRI = nullptr) const;
856 #endif
858 private:
859 /// Appends @p Range to SubRanges list.
860 void appendSubRange(SubRange *Range) {
861 Range->Next = SubRanges;
862 SubRanges = Range;
865 /// Free memory held by SubRange.
866 void freeSubRange(SubRange *S);
869 inline raw_ostream &operator<<(raw_ostream &OS,
870 const LiveInterval::SubRange &SR) {
871 SR.print(OS);
872 return OS;
875 inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
876 LI.print(OS);
877 return OS;
880 raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);
882 inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
883 return V < S.start;
886 inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
887 return S.start < V;
890 /// Helper class for performant LiveRange bulk updates.
892 /// Calling LiveRange::addSegment() repeatedly can be expensive on large
893 /// live ranges because segments after the insertion point may need to be
894 /// shifted. The LiveRangeUpdater class can defer the shifting when adding
895 /// many segments in order.
897 /// The LiveRange will be in an invalid state until flush() is called.
898 class LiveRangeUpdater {
899 LiveRange *LR;
900 SlotIndex LastStart;
901 LiveRange::iterator WriteI;
902 LiveRange::iterator ReadI;
903 SmallVector<LiveRange::Segment, 16> Spills;
904 void mergeSpills();
906 public:
907 /// Create a LiveRangeUpdater for adding segments to LR.
908 /// LR will temporarily be in an invalid state until flush() is called.
909 LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}
911 ~LiveRangeUpdater() { flush(); }
913 /// Add a segment to LR and coalesce when possible, just like
914 /// LR.addSegment(). Segments should be added in increasing start order for
915 /// best performance.
916 void add(LiveRange::Segment);
918 void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
919 add(LiveRange::Segment(Start, End, VNI));
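// Typical bulk-update pattern, sketched under the assumption that segments
// arrive in increasing start order:
//
//   LiveRangeUpdater Updater(&LR);
//   for (/* each new segment, in increasing start order */)
//     Updater.add(Start, End, VNI);
//   Updater.flush(); // also run by the destructor; LR is valid again here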
922 /// Return true if the LR is currently in an invalid state, and flush()
923 /// needs to be called.
924 bool isDirty() const { return LastStart.isValid(); }
926 /// Flush the updater state to LR so it is valid and contains all added
927 /// segments.
928 void flush();
930 /// Select a different destination live range.
931 void setDest(LiveRange *lr) {
932 if (LR != lr && isDirty())
933 flush();
934 LR = lr;
937 /// Get the current destination live range.
938 LiveRange *getDest() const { return LR; }
940 void dump() const;
941 void print(raw_ostream&) const;
944 inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
945 X.print(OS);
946 return OS;
949 /// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
950 /// LiveInterval into equivalence classes of connected components. A
951 /// LiveInterval that has multiple connected components can be broken into
952 /// multiple LiveIntervals.
954 /// Given a LiveInterval that may have multiple connected components, run:
956 /// unsigned numComps = ConEQ.Classify(LI);
957 /// if (numComps > 1) {
958 /// // allocate numComps-1 new LiveIntervals into LIV[1..]
959 /// ConEQ.Distribute(LI, LIV, MRI);
960 /// }
962 class ConnectedVNInfoEqClasses {
963 LiveIntervals &LIS;
964 IntEqClasses EqClass;
966 public:
967 explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}
969 /// Classify the values in \p LR into connected components.
970 /// Returns the number of connected components.
971 unsigned Classify(const LiveRange &LR);
973 /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
974 /// the equivalence class assigned to the VNI.
975 unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }
977 /// Distribute the values in \p LI into a separate LiveInterval
978 /// for each connected component. LIV must have an empty LiveInterval for
979 /// each additional connected component. The first connected component is
980 /// left in \p LI.
981 void Distribute(LiveInterval &LI, LiveInterval *LIV[],
982 MachineRegisterInfo &MRI);
985 } // end namespace llvm
987 #endif // LLVM_CODEGEN_LIVEINTERVAL_H