//===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the LiveRange and LiveInterval classes. Given some
// numbering of each of the machine instructions, an interval [i, j) is said to
// be a live range for register v if there is no instruction with number j' >= j
// such that v is live at j' and there is no instruction with number i' < i such
// that v is live at i'. In this implementation ranges can have holes,
// i.e. a range might look like [1,20), [50,65), [1000,1001). Each
// individual segment is represented as an instance of LiveRange::Segment,
// and the whole range is represented as an instance of LiveRange.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_LIVEINTERVAL_H
#define LLVM_CODEGEN_LIVEINTERVAL_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/IntEqClasses.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <functional>
#include <memory>
#include <set>
#include <tuple>
#include <utility>

namespace llvm {

class CoalescerPair;
class LiveIntervals;
class MachineRegisterInfo;
class raw_ostream;
/// VNInfo - Value Number Information.
/// This class holds information about a machine level value, including
/// definition and use points.
///
class VNInfo {
public:
  using Allocator = BumpPtrAllocator;

  /// The ID number of this value.
  unsigned id;

  /// The index of the defining instruction.
  SlotIndex def;

  /// VNInfo constructor.
  VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}

  /// VNInfo constructor, copies values from orig, except for the value number.
  VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}

  /// Copy from the parameter into this VNInfo.
  void copyFrom(VNInfo &src) {
    def = src.def;
  }

  /// Returns true if this value is defined by a PHI instruction (or was,
  /// PHI instructions may have been eliminated).
  /// PHI-defs begin at a block boundary, all other defs begin at register or
  /// EC slots.
  bool isPHIDef() const { return def.isBlock(); }

  /// Returns true if this value is unused.
  bool isUnused() const { return !def.isValid(); }

  /// Mark this value as unused.
  void markUnused() { def = SlotIndex(); }
};
/// Result of a LiveRange query. This class hides the implementation details
/// of live ranges, and it should be used as the primary interface for
/// examining live ranges around instructions.
class LiveQueryResult {
  VNInfo *const EarlyVal;
  VNInfo *const LateVal;
  const SlotIndex EndPoint;
  const bool Kill;

public:
  LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
                  bool Kill)
      : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill) {}

  /// Return the value that is live-in to the instruction. This is the value
  /// that will be read by the instruction's use operands. Return NULL if no
  /// value is live-in.
  VNInfo *valueIn() const {
    return EarlyVal;
  }

  /// Return true if the live-in value is killed by this instruction. This
  /// means that either the live range ends at the instruction, or it changes
  /// value.
  bool isKill() const {
    return Kill;
  }

  /// Return true if this instruction has a dead def.
  bool isDeadDef() const {
    return EndPoint.isDead();
  }

  /// Return the value leaving the instruction, if any. This can be a
  /// live-through value, or a live def. A dead def returns NULL.
  VNInfo *valueOut() const {
    return isDeadDef() ? nullptr : LateVal;
  }

  /// Returns the value alive at the end of the instruction, if any. This can
  /// be a live-through value, a live def or a dead def.
  VNInfo *valueOutOrDead() const {
    return LateVal;
  }

  /// Return the value defined by this instruction, if any. This includes
  /// dead defs, it is the value created by the instruction's def operands.
  VNInfo *valueDefined() const {
    return EarlyVal == LateVal ? nullptr : LateVal;
  }

  /// Return the end point of the last live range segment to interact with
  /// the instruction, if any.
  ///
  /// The end point is an invalid SlotIndex only if the live range doesn't
  /// intersect the instruction at all.
  ///
  /// The end point may be at or past the end of the instruction's basic
  /// block. That means the value was live out of the block.
  SlotIndex endPoint() const {
    return EndPoint;
  }
};
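
// Illustrative usage sketch (not part of the upstream header): examining
// liveness around a single instruction. Assumes an existing LiveRange `LR`
// and the SlotIndex `Idx` of some instruction; both names are placeholders.
//
//   LiveQueryResult LRQ = LR.Query(Idx);
//   if (VNInfo *LiveIn = LRQ.valueIn()) {
//     // A value is live-in and read by the instruction's use operands.
//     if (LRQ.isKill()) {
//       // The live range ends, or changes value, at this instruction.
//     }
//   }
//   if (VNInfo *Def = LRQ.valueDefined()) {
//     // The instruction defines a new value (it may be a dead def; check
//     // LRQ.isDeadDef()).
//   }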
/// This class represents the liveness of a register, stack slot, etc.
/// It manages an ordered list of Segment objects.
/// The Segments are organized in a static single assignment form: At places
/// where a new value is defined or different values reach a CFG join, a new
/// segment with a new value number is used.
class LiveRange {
public:
  /// This represents a simple continuous liveness interval for a value.
  /// The start point is inclusive, the end point exclusive. These intervals
  /// are rendered as [start,end).
  struct Segment {
    SlotIndex start;         // Start point of the interval (inclusive)
    SlotIndex end;           // End point of the interval (exclusive)
    VNInfo *valno = nullptr; // identifier for the value contained in this
                             // segment.

    Segment() = default;

    Segment(SlotIndex S, SlotIndex E, VNInfo *V)
        : start(S), end(E), valno(V) {
      assert(S < E && "Cannot create empty or backwards segment");
    }

    /// Return true if the index is covered by this segment.
    bool contains(SlotIndex I) const {
      return start <= I && I < end;
    }

    /// Return true if the given interval, [S, E), is covered by this segment.
    bool containsInterval(SlotIndex S, SlotIndex E) const {
      assert((S < E) && "Backwards interval?");
      return (start <= S && S < end) && (start < E && E <= end);
    }

    bool operator<(const Segment &Other) const {
      return std::tie(start, end) < std::tie(Other.start, Other.end);
    }

    bool operator==(const Segment &Other) const {
      return start == Other.start && end == Other.end;
    }

    bool operator!=(const Segment &Other) const {
      return !(*this == Other);
    }

    void dump() const;
  };
  using Segments = SmallVector<Segment, 2>;
  using VNInfoList = SmallVector<VNInfo *, 2>;

  Segments segments;  // the liveness segments
  VNInfoList valnos;  // value#'s

  // The segment set is used temporarily to accelerate initial computation
  // of live ranges of physical registers in computeRegUnitRange.
  // After that the set is flushed to the segment vector and deleted.
  using SegmentSet = std::set<Segment>;
  std::unique_ptr<SegmentSet> segmentSet;

  using iterator = Segments::iterator;
  using const_iterator = Segments::const_iterator;

  iterator begin() { return segments.begin(); }
  iterator end() { return segments.end(); }

  const_iterator begin() const { return segments.begin(); }
  const_iterator end() const { return segments.end(); }

  using vni_iterator = VNInfoList::iterator;
  using const_vni_iterator = VNInfoList::const_iterator;

  vni_iterator vni_begin() { return valnos.begin(); }
  vni_iterator vni_end() { return valnos.end(); }

  const_vni_iterator vni_begin() const { return valnos.begin(); }
  const_vni_iterator vni_end() const { return valnos.end(); }

  /// Constructs a new LiveRange object.
  LiveRange(bool UseSegmentSet = false)
      : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
                                 : nullptr) {}
  /// Constructs a new LiveRange object by copying segments and valnos from
  /// another LiveRange.
  LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    assign(Other, Allocator);
  }

  /// Copies value numbers and live segments from \p Other into this range.
  void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    if (this == &Other)
      return;

    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    // Duplicate valnos.
    for (const VNInfo *VNI : Other.valnos)
      createValueCopy(VNI, Allocator);
    // Now we can copy segments and remap their valnos.
    for (const Segment &S : Other.segments)
      segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
  }
  /// advanceTo - Advance the specified iterator to point to the Segment
  /// containing the specified position, or end() if the position is past the
  /// end of the range. If no Segment contains this position, but the
  /// position is in a hole, this method returns an iterator pointing to the
  /// Segment immediately after the hole.
  iterator advanceTo(iterator I, SlotIndex Pos) {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }

  const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }
  /// find - Return an iterator pointing to the first segment that ends after
  /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
  /// when searching large ranges.
  ///
  /// If Pos is contained in a Segment, that segment is returned.
  /// If Pos is in a hole, the following Segment is returned.
  /// If Pos is beyond endIndex, end() is returned.
  iterator find(SlotIndex Pos);

  const_iterator find(SlotIndex Pos) const {
    return const_cast<LiveRange*>(this)->find(Pos);
  }

  void clear() {
    valnos.clear();
    segments.clear();
  }

  size_t size() const {
    return segments.size();
  }

  bool hasAtLeastOneValue() const { return !valnos.empty(); }

  bool containsOneValue() const { return valnos.size() == 1; }

  unsigned getNumValNums() const { return (unsigned)valnos.size(); }

  /// getValNumInfo - Returns pointer to the specified val#.
  ///
  inline VNInfo *getValNumInfo(unsigned ValNo) {
    return valnos[ValNo];
  }
  inline const VNInfo *getValNumInfo(unsigned ValNo) const {
    return valnos[ValNo];
  }
  /// containsValue - Returns true if VNI belongs to this range.
  bool containsValue(const VNInfo *VNI) const {
    return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
  }

  /// getNextValue - Create a new value number and return it. \p def specifies
  /// the index of the instruction that defines the value number.
  VNInfo *getNextValue(SlotIndex def, VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), def);
    valnos.push_back(VNI);
    return VNI;
  }

  /// createDeadDef - Make sure the range has a value defined at Def.
  /// If one already exists, return it. Otherwise allocate a new value and
  /// add liveness for a dead def.
  VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);

  /// Create a def of value @p VNI. Return @p VNI. If there already exists
  /// a definition at VNI->def, the value defined there must be @p VNI.
  VNInfo *createDeadDef(VNInfo *VNI);

  /// Create a copy of the given value. The new value will be identical except
  /// for the Value number.
  VNInfo *createValueCopy(const VNInfo *orig,
                          VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
        new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
    valnos.push_back(VNI);
    return VNI;
  }
  /// RenumberValues - Renumber all values in order of appearance and remove
  /// unused values.
  void RenumberValues();

  /// MergeValueNumberInto - This method is called when two value numbers
  /// are found to be equivalent. This eliminates V1, replacing all
  /// segments with the V1 value number with the V2 value number. This can
  /// cause merging of V1/V2 value numbers and compaction of the value space.
  VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);

  /// Merge all of the live segments of a specific val# in RHS into this live
  /// range as the specified value number. The segments in RHS are allowed
  /// to overlap with segments in the current range; it will replace the
  /// value numbers of the overlapped live segments with the specified value
  /// number.
  void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);

  /// MergeValueInAsValue - Merge all of the segments of a specific val#
  /// in RHS into this live range as the specified value number.
  /// The segments in RHS are allowed to overlap with segments in the
  /// current range, but only if the overlapping segments have the
  /// specified value number.
  void MergeValueInAsValue(const LiveRange &RHS,
                           const VNInfo *RHSValNo, VNInfo *LHSValNo);
  bool empty() const { return segments.empty(); }

  /// beginIndex - Return the lowest numbered slot covered.
  SlotIndex beginIndex() const {
    assert(!empty() && "Call to beginIndex() on empty range.");
    return segments.front().start;
  }

  /// endIndex - Return the maximum point of the range of the whole,
  /// exclusive.
  SlotIndex endIndex() const {
    assert(!empty() && "Call to endIndex() on empty range.");
    return segments.back().end;
  }

  bool expiredAt(SlotIndex index) const {
    return index >= endIndex();
  }

  bool liveAt(SlotIndex index) const {
    const_iterator r = find(index);
    return r != end() && r->start <= index;
  }
  /// Return the segment that contains the specified index, or null if there
  /// is none.
  const Segment *getSegmentContaining(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// Return the live segment that contains the specified index, or null if
  /// there is none.
  Segment *getSegmentContaining(SlotIndex Idx) {
    iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
  VNInfo *getVNInfoAt(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : I->valno;
  }

  /// getVNInfoBefore - Return the VNInfo that is live up to but not
  /// necessarily including Idx, or NULL. Use this to find the reaching def
  /// used by an instruction at this SlotIndex position.
  VNInfo *getVNInfoBefore(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
    return I == end() ? nullptr : I->valno;
  }
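
  // Illustrative sketch (not part of the upstream header): the difference
  // between getVNInfoAt() and getVNInfoBefore(). Assumes a LiveRange `LR`
  // and a SlotIndex `Idx` at which the register is redefined; both names
  // are placeholders.
  //
  //   VNInfo *NewVal = LR.getVNInfoAt(Idx);      // value live at Idx itself
  //   VNInfo *OldVal = LR.getVNInfoBefore(Idx);  // value reaching a use at Idx
  //
  // getVNInfoAt() looks at the segment containing Idx, so it sees the value
  // defined there; getVNInfoBefore() looks at Idx.getPrevSlot(), so it sees
  // the value that was live just before the redefinition.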
  /// Return an iterator to the segment that contains the specified index, or
  /// end() if there is none.
  iterator FindSegmentContaining(SlotIndex Idx) {
    iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }

  const_iterator FindSegmentContaining(SlotIndex Idx) const {
    const_iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }
  /// overlaps - Return true if the intersection of the two live ranges is
  /// not empty.
  bool overlaps(const LiveRange &other) const {
    if (other.empty())
      return false;
    return overlapsFrom(other, other.begin());
  }

  /// overlaps - Return true if the two ranges have overlapping segments
  /// that are not coalescable according to CP.
  ///
  /// Overlapping segments where one range is defined by a coalescable
  /// copy are allowed.
  bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
                const SlotIndexes&) const;

  /// overlaps - Return true if the live range overlaps an interval specified
  /// by [Start, End).
  bool overlaps(SlotIndex Start, SlotIndex End) const;

  /// overlapsFrom - Return true if the intersection of the two live ranges
  /// is not empty. The specified iterator is a hint that we can begin
  /// scanning the Other range starting at I.
  bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;

  /// Returns true if all segments of the @p Other live range are completely
  /// covered by this live range.
  /// Adjacent live ranges do not affect the covering: the live range
  /// [1,5](5,10] covers (3,7].
  bool covers(const LiveRange &Other) const;
  /// Add the specified Segment to this range, merging segments as
  /// appropriate. This returns an iterator to the inserted segment (which
  /// may have grown since it was inserted).
  iterator addSegment(Segment S);

  /// Attempt to extend a value defined after @p StartIdx to include @p Use.
  /// Both @p StartIdx and @p Use should be in the same basic block. In case
  /// of subranges, an extension could be prevented by an explicit "undef"
  /// caused by a <def,read-undef> on a non-overlapping lane. The list of
  /// locations of such "undefs" should be provided in @p Undefs.
  /// The return value is a pair: the first element is VNInfo of the value
  /// that was extended (possibly nullptr), the second is a boolean value
  /// indicating whether an "undef" was encountered.
  /// If this range is live before @p Use in the basic block that starts at
  /// @p StartIdx, and there is no intervening "undef", extend it to be live
  /// up to @p Use, and return the pair {value, false}. If there is no
  /// segment before @p Use and there is no "undef" between @p StartIdx and
  /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
  /// return {nullptr, true}.
  std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                        SlotIndex StartIdx, SlotIndex Kill);

  /// Simplified version of the above "extendInBlock", which assumes that
  /// no register lanes are undefined by <def,read-undef> operands.
  /// If this range is live before @p Use in the basic block that starts
  /// at @p StartIdx, extend it to be live up to @p Use, and return the
  /// value. If there is no segment before @p Use, return nullptr.
  VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);
  /// join - Join two live ranges (this, and other) together. This applies
  /// mappings to the value numbers in the LHS/RHS ranges as specified. If
  /// the ranges are not joinable, this aborts.
  void join(LiveRange &Other,
            const int *ValNoAssignments,
            const int *RHSValNoAssignments,
            SmallVectorImpl<VNInfo *> &NewVNInfo);

  /// True iff this live range is a single segment that lies between the
  /// specified boundaries, exclusively. Vregs live across a backedge are not
  /// considered local. The boundaries are expected to lie within an extended
  /// basic block, so vregs that are not live out should contain no holes.
  bool isLocal(SlotIndex Start, SlotIndex End) const {
    return beginIndex() > Start.getBaseIndex() &&
           endIndex() < End.getBoundaryIndex();
  }

  /// Remove the specified segment from this range. Note that the segment
  /// must be a single Segment in its entirety.
  void removeSegment(SlotIndex Start, SlotIndex End,
                     bool RemoveDeadValNo = false);

  void removeSegment(Segment S, bool RemoveDeadValNo = false) {
    removeSegment(S.start, S.end, RemoveDeadValNo);
  }

  /// Remove segment pointed to by iterator @p I from this range. This does
  /// not remove dead value numbers.
  iterator removeSegment(iterator I) {
    return segments.erase(I);
  }
  /// Query Liveness at Idx.
  /// The sub-instruction slot of Idx doesn't matter, only the instruction
  /// it refers to is considered.
  LiveQueryResult Query(SlotIndex Idx) const {
    // Find the segment that enters the instruction.
    const_iterator I = find(Idx.getBaseIndex());
    const_iterator E = end();
    if (I == E)
      return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);

    // Is this an instruction live-in segment?
    // If Idx is the start index of a basic block, include live-in segments
    // that start at Idx.getBaseIndex().
    VNInfo *EarlyVal = nullptr;
    VNInfo *LateVal = nullptr;
    SlotIndex EndPoint;
    bool Kill = false;
    if (I->start <= Idx.getBaseIndex()) {
      EarlyVal = I->valno;
      EndPoint = I->end;
      // Move to the potentially live-out segment.
      if (SlotIndex::isSameInstr(Idx, I->end)) {
        Kill = true;
        if (++I == E)
          return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
      }
      // Special case: A PHIDef value can have its def in the middle of a
      // segment if the value happens to be live out of the layout
      // predecessor.
      // Such a value is not live-in.
      if (EarlyVal->def == Idx.getBaseIndex())
        EarlyVal = nullptr;
    }
    // I now points to the segment that may be live-through, or defined by
    // this instr. Ignore segments starting after the current instr.
    if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
      LateVal = I->valno;
      EndPoint = I->end;
    }
    return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
  }
  /// removeValNo - Remove all the segments defined by the specified value#.
  /// Also remove the value# from value# list.
  void removeValNo(VNInfo *ValNo);

  /// Returns true if the live range is zero length, i.e. no live segments
  /// span instructions. It doesn't pay to spill such a range.
  bool isZeroLength(SlotIndexes *Indexes) const {
    for (const Segment &S : segments)
      if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
          S.end.getBaseIndex())
        return false;
    return true;
  }

  // Returns true if any segment in the live range contains any of the
  // provided slot indexes. Slots which occur in holes between
  // segments will not cause the function to return true.
  bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;
  bool operator<(const LiveRange& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return thisIndex < otherIndex;
  }

  /// Returns true if there is an explicit "undef" between @p Begin and
  /// @p End.
  bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
                 SlotIndex End) const {
    return std::any_of(Undefs.begin(), Undefs.end(),
                       [Begin, End](SlotIndex Idx) -> bool {
                         return Begin <= Idx && Idx < End;
                       });
  }

  /// Flush segment set into the regular segment vector.
  /// The method is to be called after the live range
  /// has been created, if use of the segment set was
  /// activated in the constructor of the live range.
  void flushSegmentSet();
  /// Stores indexes from the input index sequence R at which this LiveRange
  /// is live to the output O iterator.
  /// R is a range of ascending-sorted random-access iterators
  /// to the input indexes. Indexes stored at O are ascending sorted so it
  /// can be used directly in the subsequent search (for example for
  /// subranges). Returns true if at least one index was found.
  template <typename Range, typename OutputIt>
  bool findIndexesLiveAt(Range &&R, OutputIt O) const {
    assert(std::is_sorted(R.begin(), R.end()));
    auto Idx = R.begin(), EndIdx = R.end();
    auto Seg = segments.begin(), EndSeg = segments.end();
    bool Found = false;
    while (Idx != EndIdx && Seg != EndSeg) {
      // If the Seg is lower, find the first segment that is above Idx using
      // binary search.
      if (Seg->end <= *Idx) {
        Seg = std::upper_bound(
            ++Seg, EndSeg, *Idx,
            [=](typename std::remove_reference<decltype(*Idx)>::type V,
                const typename std::remove_reference<decltype(*Seg)>::type &S) {
              return V < S.end;
            });
        if (Seg == EndSeg)
          break;
      }
      auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
      if (NotLessStart == EndIdx)
        break;
      auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
      if (NotLessEnd != NotLessStart) {
        Found = true;
        O = std::copy(NotLessStart, NotLessEnd, O);
      }
      Idx = NotLessEnd;
      ++Seg;
    }
    return Found;
  }
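
  // Illustrative sketch (not part of the upstream header): collecting the
  // subset of a sorted index list at which a live range `LR` is live.
  // Assumes an ascending-sorted container `UseSlots` of SlotIndex; the names
  // are placeholders.
  //
  //   SmallVector<SlotIndex, 8> LiveSlots;
  //   if (LR.findIndexesLiveAt(UseSlots, std::back_inserter(LiveSlots))) {
  //     // LiveSlots now holds, in ascending order, exactly those entries of
  //     // UseSlots that fall inside a segment of LR.
  //   }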
  void print(raw_ostream &OS) const;
  void dump() const;

  /// Walk the range and assert if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  void verify() const {}
#else
  void verify() const;
#endif

protected:
  /// Append a segment to the list of segments.
  void append(const LiveRange::Segment S);

private:
  friend class LiveRangeUpdater;
  void addSegmentToSet(Segment S);
  void markValNoForDeletion(VNInfo *V);
};

inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
  LR.print(OS);
  return OS;
}
/// LiveInterval - This class represents the liveness of a register,
/// or stack slot.
class LiveInterval : public LiveRange {
public:
  using super = LiveRange;

  /// A live range for subregisters. The LaneMask specifies which parts of the
  /// super register are covered by the interval.
  /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
  class SubRange : public LiveRange {
  public:
    SubRange *Next = nullptr;
    LaneBitmask LaneMask;

    /// Constructs a new SubRange object.
    SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}

    /// Constructs a new SubRange object by copying liveness from @p Other.
    SubRange(LaneBitmask LaneMask, const LiveRange &Other,
             BumpPtrAllocator &Allocator)
        : LiveRange(Other, Allocator), LaneMask(LaneMask) {}

    void print(raw_ostream &OS) const;
    void dump() const;
  };
private:
  SubRange *SubRanges = nullptr; ///< Single linked list of subregister live
                                 /// ranges.

public:
  const unsigned reg;  // the register or stack slot of this interval.
  float weight;        // weight of this interval

  LiveInterval(unsigned Reg, float Weight) : reg(Reg), weight(Weight) {}

  ~LiveInterval() {
    clearSubRanges();
  }
  template<typename T>
  class SingleLinkedListIterator {
    T *P;

  public:
    SingleLinkedListIterator<T>(T *P) : P(P) {}

    SingleLinkedListIterator<T> &operator++() {
      P = P->Next;
      return *this;
    }
    SingleLinkedListIterator<T> operator++(int) {
      SingleLinkedListIterator res = *this;
      ++*this;
      return res;
    }
    bool operator!=(const SingleLinkedListIterator<T> &Other) {
      return P != Other.operator->();
    }
    bool operator==(const SingleLinkedListIterator<T> &Other) {
      return P == Other.operator->();
    }
    T &operator*() const {
      return *P;
    }
    T *operator->() const {
      return P;
    }
  };
  using subrange_iterator = SingleLinkedListIterator<SubRange>;
  using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;

  subrange_iterator subrange_begin() {
    return subrange_iterator(SubRanges);
  }
  subrange_iterator subrange_end() {
    return subrange_iterator(nullptr);
  }

  const_subrange_iterator subrange_begin() const {
    return const_subrange_iterator(SubRanges);
  }
  const_subrange_iterator subrange_end() const {
    return const_subrange_iterator(nullptr);
  }

  iterator_range<subrange_iterator> subranges() {
    return make_range(subrange_begin(), subrange_end());
  }

  iterator_range<const_subrange_iterator> subranges() const {
    return make_range(subrange_begin(), subrange_end());
  }
  /// Creates a new empty subregister live range. The range is added at the
  /// beginning of the subrange list; subrange iterators stay valid.
  SubRange *createSubRange(BumpPtrAllocator &Allocator,
                           LaneBitmask LaneMask) {
    SubRange *Range = new (Allocator) SubRange(LaneMask);
    appendSubRange(Range);
    return Range;
  }

  /// Like createSubRange() but the new range is filled with a copy of the
  /// liveness information in @p CopyFrom.
  SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
                               LaneBitmask LaneMask,
                               const LiveRange &CopyFrom) {
    SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
    appendSubRange(Range);
    return Range;
  }

  /// Returns true if subregister liveness information is available.
  bool hasSubRanges() const {
    return SubRanges != nullptr;
  }

  /// Removes all subregister liveness information.
  void clearSubRanges();

  /// Removes all subranges without any segments (subranges without segments
  /// are not considered valid and should only exist temporarily).
  void removeEmptySubRanges();
  /// getSize - Returns the sum of sizes of all the LiveRange's segments.
  unsigned getSize() const;

  /// isSpillable - Can this interval be spilled?
  bool isSpillable() const {
    return weight != huge_valf;
  }

  /// markNotSpillable - Mark interval as not spillable.
  void markNotSpillable() {
    weight = huge_valf;
  }

  /// For a given lane mask @p LaneMask, compute indexes at which the
  /// lane is marked undefined by subregister <def,read-undef> definitions.
  void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                             LaneBitmask LaneMask,
                             const MachineRegisterInfo &MRI,
                             const SlotIndexes &Indexes) const;
  /// Refines the subranges to support \p LaneMask. This may only be called
  /// for LI.hasSubRanges() == true. Subregister ranges are split or created
  /// until \p LaneMask can be matched exactly. \p Apply is executed on the
  /// matching subranges.
  ///
  /// Example:
  ///    Given an interval with subranges with lanemasks L0F00, L00F0 and
  ///    L000F, refining for mask L0018. Will split the L00F0 lane into
  ///    L00E0 and L0010 and the L000F lane into L0007 and L0008. The \p Apply
  ///    function will be applied to the L0010 and L0008 subranges.
  ///
  /// \p Indexes and \p TRI are required to clean up the VNIs that
  /// don't define the related lane masks after they get shrunk. E.g.,
  /// when L000F gets split into L0007 and L0008 maybe only a subset
  /// of the VNIs that defined L000F defines L0007.
  void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
                       std::function<void(LiveInterval::SubRange &)> Apply,
                       const SlotIndexes &Indexes,
                       const TargetRegisterInfo &TRI);
  bool operator<(const LiveInterval& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return std::tie(thisIndex, reg) < std::tie(otherIndex, other.reg);
  }

  void print(raw_ostream &OS) const;
  void dump() const;

  /// Walks the interval and asserts if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  void verify(const MachineRegisterInfo *MRI = nullptr) const {}
#else
  void verify(const MachineRegisterInfo *MRI = nullptr) const;
#endif
private:
  /// Appends @p Range to SubRanges list.
  void appendSubRange(SubRange *Range) {
    Range->Next = SubRanges;
    SubRanges = Range;
  }

  /// Free memory held by SubRange.
  void freeSubRange(SubRange *S);
};
inline raw_ostream &operator<<(raw_ostream &OS,
                               const LiveInterval::SubRange &SR) {
  SR.print(OS);
  return OS;
}

inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
  LI.print(OS);
  return OS;
}

raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);

inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
  return V < S.start;
}

inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
  return S.start < V;
}
/// Helper class for performant LiveRange bulk updates.
///
/// Calling LiveRange::addSegment() repeatedly can be expensive on large
/// live ranges because segments after the insertion point may need to be
/// shifted. The LiveRangeUpdater class can defer the shifting when adding
/// many segments in order.
///
/// The LiveRange will be in an invalid state until flush() is called.
class LiveRangeUpdater {
  LiveRange *LR;
  SlotIndex LastStart;
  LiveRange::iterator WriteI;
  LiveRange::iterator ReadI;
  SmallVector<LiveRange::Segment, 16> Spills;
  void mergeSpills();

public:
  /// Create a LiveRangeUpdater for adding segments to LR.
  /// LR will temporarily be in an invalid state until flush() is called.
  LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}

  ~LiveRangeUpdater() { flush(); }

  /// Add a segment to LR and coalesce when possible, just like
  /// LR.addSegment(). Segments should be added in increasing start order for
  /// best performance.
  void add(LiveRange::Segment);

  void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
    add(LiveRange::Segment(Start, End, VNI));
  }
  /// Return true if the LR is currently in an invalid state, and flush()
  /// needs to be called.
  bool isDirty() const { return LastStart.isValid(); }

  /// Flush the updater state to LR so it is valid and contains all added
  /// segments.
  void flush();

  /// Select a different destination live range.
  void setDest(LiveRange *lr) {
    if (LR != lr && isDirty())
      flush();
    LR = lr;
  }

  /// Get the current destination live range.
  LiveRange *getDest() const { return LR; }

  void dump() const;
  void print(raw_ostream&) const;
};
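
// Illustrative sketch (not part of the upstream header): bulk-adding segments
// to a live range. Assumes an existing LiveRange `LR`, a value number `VNI`,
// and SlotIndexes `A` < `B` < `C` < `D`; all names are placeholders.
//
//   LiveRangeUpdater Updater(&LR);
//   Updater.add(A, B, VNI);  // add segments in increasing start order
//   Updater.add(C, D, VNI);
//   Updater.flush();         // LR stays invalid until flush() (or destruction)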
inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
  X.print(OS);
  return OS;
}

/// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
/// LiveInterval into equivalence classes of connected components. A
/// LiveInterval that has multiple connected components can be broken into
/// multiple LiveIntervals.
///
/// Given a LiveInterval that may have multiple connected components, run:
///
///   unsigned numComps = ConEQ.Classify(LI);
///   if (numComps > 1) {
///     // allocate numComps-1 new LiveIntervals into LIS[1..]
///     ConEQ.Distribute(LIS);
///   }
class ConnectedVNInfoEqClasses {
  LiveIntervals &LIS;
  IntEqClasses EqClass;

public:
  explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}

  /// Classify the values in \p LR into connected components.
  /// Returns the number of connected components.
  unsigned Classify(const LiveRange &LR);

  /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
  /// the equivalence class assigned to the VNI.
  unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }

  /// Distribute values in \p LI into separate LiveIntervals, one
  /// for each connected component. LIV must have an empty LiveInterval for
  /// each additional connected component. The first connected component is
  /// left in \p LI.
  void Distribute(LiveInterval &LI, LiveInterval *LIV[],
                  MachineRegisterInfo &MRI);
};
} // end namespace llvm

#endif // LLVM_CODEGEN_LIVEINTERVAL_H