//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is an extremely simple MachineInstr-level copy propagation pass.
//
// This pass forwards the source of COPYs to the users of their destinations
// when doing so is legal.  For example:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg1
//
// If
//   - %reg0 has not been clobbered by the time of the use of %reg1
//   - the register class constraints are satisfied
//   - the COPY def is the only value that reaches OP
// then this pass replaces the above with:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg0
//
// This pass also removes some redundant COPYs.  For example:
//
//    %R1 = COPY %R0
//    ... // No clobber of %R1
//    %R0 = COPY %R1 <<< Removed
//
// or
//
//    %R1 = COPY %R0
//    ... // No clobber of %R0
//    %R1 = COPY %R0 <<< Removed
//
// or
//
//    $R0 = OP ...
//    ... // No read/clobber of $R0 and $R1
//    $R1 = COPY $R0 // $R0 is killed
//    Replace $R0 with $R1 and remove the COPY
//    $R1 = OP ...
//    ...
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "machine-cp"

STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
STATISTIC(SpillageChainsLength, "Length of spillage chains");
STATISTIC(NumSpillageChains, "Number of spillage chains");
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");
static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
                                     cl::Hidden);

static cl::opt<cl::boolOrDefault>
    EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
namespace {

static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
                                                 const TargetInstrInfo &TII,
                                                 bool UseCopyInstr) {
  if (UseCopyInstr)
    return TII.isCopyInstr(MI);

  if (MI.isCopy())
    return std::optional<DestSourcePair>(
        DestSourcePair{MI.getOperand(0), MI.getOperand(1)});

  return std::nullopt;
}

class CopyTracker {
  struct CopyInfo {
    MachineInstr *MI, *LastSeenUseInCopy;
    SmallVector<MCRegister, 4> DefRegs;
    bool Avail;
  };

  /// Copy information indexed by register unit.
  DenseMap<MCRegister, CopyInfo> Copies;

public:
  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<MCRegister> Regs,
                           const TargetRegisterInfo &TRI) {
    for (MCRegister Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnit Unit : TRI.regunits(Reg)) {
        auto CI = Copies.find(Unit);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }
  /// Remove register from copy maps.
  void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                          const TargetInstrInfo &TII, bool UseCopyInstr) {
    // Since Reg might be a subreg of some registers, invalidating Reg alone is
    // not enough. We have to find the COPY that defines Reg or the registers
    // defined by Reg and invalidate all of them. Similarly, we must invalidate
    // all of the subregisters used in the source of the COPY.
    SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
    auto InvalidateCopy = [&](MachineInstr *MI) {
      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MI, TII, UseCopyInstr);
      assert(CopyOperands && "Expect copy");

      auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
      auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
      RegUnitsToInvalidate.insert(Dest.begin(), Dest.end());
      RegUnitsToInvalidate.insert(Src.begin(), Src.end());
    };

    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI)
          InvalidateCopy(MI);
        if (MachineInstr *MI = I->second.LastSeenUseInCopy)
          InvalidateCopy(MI);
      }
    }
    for (MCRegUnit Unit : RegUnitsToInvalidate)
      Copies.erase(Unit);
  }
  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                       const TargetInstrInfo &TII, bool UseCopyInstr) {
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        // When we clobber the source of a copy, we need to clobber everything
        // it defined.
        markRegsUnavailable(I->second.DefRegs, TRI);
        // When we clobber the destination of a copy, we need to clobber the
        // whole register it defined.
        if (MachineInstr *MI = I->second.MI) {
          std::optional<DestSourcePair> CopyOperands =
              isCopyInstr(*MI, TII, UseCopyInstr);
          markRegsUnavailable({CopyOperands->Destination->getReg().asMCReg()},
                              TRI);
        }
        // Now we can erase the copy.
        Copies.erase(I);
      }
    }
  }
  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
                 const TargetInstrInfo &TII, bool UseCopyInstr) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*MI, TII, UseCopyInstr);
    assert(CopyOperands && "Tracking non-copy?");

    MCRegister Src = CopyOperands->Source->getReg().asMCReg();
    MCRegister Def = CopyOperands->Destination->getReg().asMCReg();

    // Remember Def is defined by the copy.
    for (MCRegUnit Unit : TRI.regunits(Def))
      Copies[Unit] = {MI, nullptr, {}, true};

    // Remember source that's copied to Def. Once it's clobbered, then
    // it's no longer available for copy propagation.
    for (MCRegUnit Unit : TRI.regunits(Src)) {
      auto I = Copies.insert({Unit, {nullptr, nullptr, {}, false}});
      auto &Copy = I.first->second;
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
      Copy.LastSeenUseInCopy = MI;
    }
  }
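  // Illustrative sketch (assumed x86 registers, not from the original
  // source): after trackCopy() sees "$eax = COPY $ebx", every regunit of
  // $eax maps to {MI, Avail = true}, and every regunit of $ebx records $eax
  // in DefRegs with LastSeenUseInCopy pointing at the COPY. Clobbering $ebx
  // later therefore marks the copy into $eax unavailable, while clobbering
  // $eax erases the tracked entry entirely.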
  bool hasAnyCopies() {
    return !Copies.empty();
  }

  MachineInstr *findCopyForUnit(MCRegister RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }
  MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
    return findCopyForUnit(RU, TRI, true);
  }
  MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);

    if (!AvailCopy)
      return nullptr;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailSrc, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }
  MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
                              const TargetRegisterInfo &TRI,
                              const TargetInstrInfo &TII, bool UseCopyInstr) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy =
        findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);

    if (!AvailCopy)
      return nullptr;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailDef, Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }
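  // A minimal usage sketch (assumed caller and registers, not from the
  // original source): when forwardUses() wants to rewrite a use of $ecx it
  // calls findAvailCopy(UseMI, $ecx, ...) and only gets a hit if a tracked
  // "$ecx = COPY $eax" is still available and no regmask between that COPY
  // and UseMI clobbers $eax or $ecx. findAvailBackwardCopy() above answers
  // the mirrored question for propagateDefs(): find a later COPY whose source
  // covers the register that \p I defines.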
  // Find last COPY that defines Reg before Current MachineInstr.
  MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
                                      MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end() || !CI->second.Avail)
      return nullptr;

    MachineInstr *DefCopy = CI->second.MI;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*DefCopy, TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(Def, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
                    Current.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(Def)) {
            LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
                              << printReg(Def, &TRI) << "\n");
            return nullptr;
          }

    return DefCopy;
  }
  // Find last COPY that uses Reg.
  MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end())
      return nullptr;
    return CI->second.LastSeenUseInCopy;
  }

  void clear() {
    Copies.clear();
  }
};
class MachineCopyPropagation : public MachineFunctionPass {
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  const MachineRegisterInfo *MRI = nullptr;

  // If true, use TII.isCopyInstr to recognize copy-like instructions instead
  // of only plain COPYs.
  bool UseCopyInstr;

public:
  static char ID; // Pass identification, replacement for typeid

  MachineCopyPropagation(bool CopyInstr = false)
      : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
  void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void EliminateSpillageCopies(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, Register Def);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB.
  DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed = false;
};

} // end anonymous namespace

char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)
void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
                                          DebugType DT) {
  // If 'Reg' is defined by a copy, the copy is no longer a candidate
  // for elimination. If a copy is "read" by a debug user, record the user
  // for propagation.
  for (MCRegUnit Unit : TRI->regunits(Reg)) {
    if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].insert(&Reader);
      }
    }
  }
}
/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
/// This fact may have been obscured by sub register usage or may not be true
/// at all even though Src and Def are subregisters of the registers used in
/// PreviousCopy.  e.g.
/// isNopCopy("ecx = COPY eax", AX, CX) == true
/// isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      MCRegister Def, const TargetRegisterInfo *TRI,
                      const TargetInstrInfo *TII, bool UseCopyInstr) {

  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
  MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
  MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
  if (Src == PreviousSrc && Def == PreviousDef)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}
/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Def. This may happen indirectly by
/// copying the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
                                              MCRegister Src, MCRegister Def) {
  // Avoid eliminating a copy from/to a reserved register as we cannot predict
  // the value (Example: The sparc zero register is writable but stays zero).
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy =
      Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
  if (!PrevCopy)
    return false;

  auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
  // Check that the existing copy uses the correct sub registers.
  if (PrevCopyOperands->Destination->isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  assert(CopyOperands);

  Register CopyDef = CopyOperands->Destination->getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  // Clear undef flag from remaining copy if needed.
  if (!CopyOperands->Source->isUndef()) {
    PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
        .setIsUndef(false);
  }

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}
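// Illustrative sketch (assumed x86 registers, not from the original source):
//   $ecx = COPY $eax
//   ...              ; nothing clobbers $eax or $ecx
//   $ax = COPY $cx   ; NOP: isNopCopy() matches the sub-register indices,
//                    ; so eraseIfRedundant() deletes this COPY and clears any
//                    ; stale kill flags of $ax between the two instructions.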
bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register Def = CopyOperands->Destination->getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);

  // We don't process further if UseI is a COPY, since forward copy propagation
  // should handle that.
  return false;
}
/// Decide whether we should forward the source of \param Copy to its use in
/// \param UseI based on the physical register class constraints of the opcode
/// and avoiding introducing more cross-class COPYs.
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register CopySrcReg = CopyOperands->Source->getReg();

  // If the new register meets the opcode register constraints, then allow
  // forwarding.
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
  if (!UseICopyOperands)
    return false;

  /// COPYs don't have register class constraints, so if the user instruction
  /// is a COPY, we just try to avoid introducing additional cross-class
  /// COPYs.  For example:
  ///
  ///   RegClassA = COPY RegClassB  // Copy parameter
  ///   ...
  ///   RegClassB = COPY RegClassA  // UseI parameter
  ///
  /// which after forwarding becomes
  ///
  ///   RegClassA = COPY RegClassB
  ///   ...
  ///   RegClassB = COPY RegClassB
  ///
  /// so we have reduced the number of cross-class COPYs and potentially
  /// introduced a nop COPY that can be removed.

  // Allow forwarding if src and dst belong to any common class, so long as
  // they don't belong to any (possibly smaller) common class that requires
  // copies to go via a different class.
  Register UseDstReg = UseICopyOperands->Destination->getReg();

  bool Found = false;
  bool IsCrossClass = false;
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
      Found = true;
      if (TRI->getCrossCopyRegClass(RC) != RC) {
        IsCrossClass = true;
        break;
      }
    }
  }
  if (!Found)
    return false;
  if (!IsCrossClass)
    return true;

  // The forwarded copy would be cross-class. Only do this if the original copy
  // was also cross-class.
  Register CopyDstReg = CopyOperands->Destination->getReg();
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
        TRI->getCrossCopyRegClass(RC) != RC)
      return true;
  }
  return false;
}
/// Check that \p MI does not have implicit uses that overlap with its \p Use
/// operand (the register being replaced), since these can sometimes be
/// implicitly tied to other operands.  For example, on AMDGPU:
///
/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
///
/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
/// way of knowing we need to update the latter when updating the former.
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}
/// For an MI that has multiple definitions, check whether \p MI has
/// a definition that overlaps with another of its definitions.
/// For example, on ARM: umull r9, r9, lr, r0
/// The umull instruction is unpredictable unless RdHi and RdLo are different.
bool MachineCopyPropagation::hasOverlappingMultipleDef(
    const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
  for (const MachineOperand &MIDef : MI.defs()) {
    if ((&MIDef != &MODef) && MIDef.isReg() &&
        TRI->regsOverlap(Def, MIDef.getReg()))
      return true;
  }

  return false;
}
/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit vreg uses that have an active COPY
  // instruction that defines the physical register allocated to them.
  // Replace the vreg with the source of the active COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands, since doing so can cause problems
    // with the machine verifier: it doesn't treat undef reads as reads, so we
    // can end up with a live range that ends on an undef read, which the
    // verifier rejects because the live range doesn't end on a real read.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register CopyDstReg = CopyOperands->Destination->getReg();
    const MachineOperand &CopySrc = *CopyOperands->Source;
    Register CopySrcReg = CopySrc.getReg();

    Register ForwardedReg = CopySrcReg;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDstReg) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
      if (!ForwardedReg) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n  "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n     with " << printReg(ForwardedReg, TRI)
                      << "\n     in " << MI << "     from " << *Copy);

    MOUse.setReg(ForwardedReg);

    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);
    MOUse.setIsUndef(CopySrc.isUndef());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
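// Illustrative sketch of the sub-register path above (assumed x86 registers,
// not from the original source):
//   renamable $rcx = COPY renamable $rax
//   ...                                   ; $rax and $rcx not clobbered
//   ... = OP renamable $ecx
// forwardUses() notices the use is $ecx rather than $rcx, maps $ecx's
// sub-register index onto the COPY source, and rewrites the use to
// "... = OP renamable $eax", clearing any stale kill flags of $rax between
// the COPY and the rewritten use.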
void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    // Analyze copies (which don't overlap themselves).
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();

      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
               "MachineCopyPropagation should be run after register allocation!");

        MCRegister Def = RegDef.asMCReg();
        MCRegister Src = RegSrc.asMCReg();

        // The two copies cancel out and the source of the first copy
        // hasn't been overridden, eliminate the second one. e.g.
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %eax = COPY %ecx
        // =>
        //  %ecx = COPY %eax
        //
        // or
        //
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %ecx = COPY %eax
        // =>
        //  %ecx = COPY %eax
        if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
          continue;

        forwardUses(MI);

        // Src may have been changed by forwardUses()
        CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
        Src = CopyOperands->Source->getReg().asMCReg();

        // If Src is defined by a previous copy, the previous copy cannot be
        // eliminated.
        ReadRegister(Src, MI, RegularUse);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.readsReg())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          ReadRegister(Reg, MI, RegularUse);
        }

        LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());

        // Copy is now a candidate for deletion.
        if (!MRI->isReserved(Def))
          MaybeDeadCopies.insert(&MI);

        // If 'Def' is previously source of another copy, then this earlier
        // copy's source is no longer available. e.g.
        // %xmm9 = copy %xmm2
        // ...
        // %xmm2 = copy %xmm0
        // ...
        // %xmm2 = copy %xmm9
        Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.isDef())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        }

        Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);

        continue;
      }
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    forwardUses(MI);

    // Not a copy.
    SmallVector<Register, 2> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI.operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Reg.isVirtual() &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        Defs.push_back(Reg.asMCReg());
        continue;
      } else if (MO.readsReg())
        ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers.  Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        std::optional<DestSourcePair> CopyOperands =
            isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
        MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Make sure we invalidate any entries in the copy maps before erasing
        // the instruction.
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy definition or reading the Defs is no longer available.
    for (MCRegister Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
  }

  // If MBB doesn't have successors, delete the copies whose defs are not used.
  // If MBB does have successors, then conservatively assume the defs are
  // live-out since we don't want to trust live-in lists.
  if (MBB.succ_empty()) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());

      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
      assert(CopyOperands);

      Register SrcReg = CopyOperands->Source->getReg();
      Register DestReg = CopyOperands->Destination->getReg();
      assert(!MRI->isReserved(DestReg));

      // Update matching debug values, if any.
      SmallVector<MachineInstr *> MaybeDeadDbgUsers(
          CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
      MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
                               MaybeDeadDbgUsers);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
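// A small worked example for the dead-copy handling above (assumed registers
// and opcodes, not from the original source): in a block with no successors,
//   $esi = COPY $edi
//   RET64
// the COPY's destination is never read again, so it survives in
// MaybeDeadCopies and is deleted at the end of the block; if the block had
// successors, the def would conservatively be treated as live-out and kept.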
static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
                                       const MachineRegisterInfo &MRI,
                                       const TargetInstrInfo &TII) {
  Register Def = CopyOperands.Destination->getReg();
  Register Src = CopyOperands.Source->getReg();

  if (!Def || !Src)
    return false;

  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;

  return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
}
void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // Only handle defs that are marked renamable, i.e. that originally came
    // from a virtual register.
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailBackwardCopy(
        MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    Register Src = CopyOperands->Source->getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n     with " << printReg(Def, TRI) << "\n     in "
                      << MI << "     from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(CopyOperands->Destination->isRenamable());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}
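// Illustrative sketch of a successful backward propagation (assumed AArch64
// registers and opcode, not from the original source):
//   renamable $x8 = LDRXui ...
//   ...                                   ; no use or clobber of $x8 / $x0
//   $x0 = COPY killed renamable $x8
// propagateDefs() rewrites the def to the COPY destination,
//   $x0 = LDRXui ...
// and records the COPY in MaybeDeadCopies so that
// BackwardCopyPropagateBlock() can delete it afterwards.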
void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
    // Ignore non-trivial COPYs.
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands && MI.getNumOperands() == 2) {
      Register DefReg = CopyOperands->Destination->getReg();
      Register SrcReg = CopyOperands->Source->getReg();

      if (!TRI->regsOverlap(DefReg, SrcReg)) {
        // Unlike forward cp, we don't invoke propagateDefs here,
        // just let forward cp do COPY-to-COPY propagation.
        if (isBackwardPropagatableCopy(*CopyOperands, *MRI, *TII)) {
          Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
          continue;
        }
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    propagateDefs(MI);
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                   UseCopyInstr);

      if (MO.readsReg()) {
        if (MO.isDebug()) {
          // Check if the register in the debug instruction is utilized
          // in a copy instruction, so we can update the debug info if the
          // register is changed.
          for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
            if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
              CopyDbgUsers[Copy].insert(&MI);
            }
          }
        } else {
          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
        }
      }
    }
  }

  for (auto *Copy : MaybeDeadCopies) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
                                                  CopyDbgUsers[Copy].end());

    MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
    DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
    DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
    MachineInstr *Leader) {
  auto &SC = SpillChain[Leader];
  auto &RC = ReloadChain[Leader];
  for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
    (*I)->dump();
  for (MachineInstr *MI : RC)
    MI->dump();
}
// Remove spill-reload like copy chains. For example
// r0 = COPY r1
// r1 = COPY r2
// r2 = COPY r3
// r3 = COPY r4
// <def-use r4>
// r4 = COPY r3
// r3 = COPY r2
// r2 = COPY r1
// r1 = COPY r0
// will be folded into
// r0 = COPY r1
// r1 = COPY r4
// <def-use r4>
// r4 = COPY r1
// r1 = COPY r0
// TODO: Currently we don't track usage of r0 outside the chain, so we
// conservatively keep its value as it was before the rewrite.
//
// The algorithm is trying to keep
// property#1: No Def of spill COPY in the chain is used or defined until the
// paired reload COPY in the chain uses the Def.
//
// property#2: No Source of COPY in the chain is used or defined until the next
// COPY in the chain defines the Source, except the innermost spill-reload
// pair.
//
// The algorithm works by checking every COPY inside the MBB, assuming the COPY
// is a reload COPY, and then trying to find the paired spill COPY by searching
// backward for the COPY that defines the Src of the reload COPY. If such a
// pair is found, it either joins an existing chain or starts a new one,
// depending on whether the last available COPY uses the Def of the reload
// COPY.
// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...) to
// find the last COPY that defines Reg, and
// CopyTracker::findLastSeenUseInCopy(Reg, ...) to find the last COPY that uses
// Reg. When we encounter a non-COPY instruction, we check the registers in its
// operands. If such a Reg is defined by a COPY, we untrack it via
// CopyTracker::clobberRegister(Reg, ...).
void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
  // ChainLeader maps an MI inside a spill-reload chain to its innermost reload
  // COPY. Thus we can track if an MI belongs to an existing spill-reload chain.
  DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
  // SpillChain maps the innermost reload COPY of a spill-reload chain to the
  // sequence of COPYs that forms the spills of that chain.
  // ReloadChain maps the innermost reload COPY of a spill-reload chain to the
  // sequence of COPYs that forms the reloads of that chain.
  DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
  // If a COPY's Source is used or defined before the next COPY in the chain
  // defines the Source, we put the COPY in this set to keep property#2.
  DenseSet<const MachineInstr *> CopySourceInvalid;

  auto TryFoldSpillageCopies =
      [&, this](const SmallVectorImpl<MachineInstr *> &SC,
                const SmallVectorImpl<MachineInstr *> &RC) {
        assert(SC.size() == RC.size() && "Spill-reload should be paired");

        // We need at least 3 pairs of copies for the transformation to apply,
        // because the first outermost pair cannot be removed since we don't
        // recolor outside of the chain, and we need at least one temporary
        // spill slot to shorten the chain. If we only have a chain of two
        // pairs, we already have the shortest sequence this code can handle:
        // the outermost pair for the temporary spill slot, and the pair that
        // uses that temporary spill slot for the other end of the chain.
        // TODO: We might be able to simplify to one spill-reload pair by
        // collecting more information about the outermost COPY.
        if (SC.size() <= 2)
          return;

        // If property#2 is violated, we don't fold the chain.
        for (const MachineInstr *Spill : drop_begin(SC))
          if (CopySourceInvalid.count(Spill))
            return;

        for (const MachineInstr *Reload : drop_end(RC))
          if (CopySourceInvalid.count(Reload))
            return;

        auto CheckCopyConstraint = [this](Register Def, Register Src) {
          for (const TargetRegisterClass *RC : TRI->regclasses()) {
            if (RC->contains(Def) && RC->contains(Src))
              return true;
          }
          return false;
        };

        auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
                            const MachineOperand *New) {
          for (MachineOperand &MO : MI->operands()) {
            if (&MO == Old)
              MO.setReg(New->getReg());
          }
        };

        std::optional<DestSourcePair> InnerMostSpillCopy =
            isCopyInstr(*SC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostSpillCopy =
            isCopyInstr(*SC.back(), *TII, UseCopyInstr);
        std::optional<DestSourcePair> InnerMostReloadCopy =
            isCopyInstr(*RC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostReloadCopy =
            isCopyInstr(*RC.back(), *TII, UseCopyInstr);
        if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
                                 InnerMostSpillCopy->Source->getReg()) ||
            !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
                                 OuterMostReloadCopy->Destination->getReg()))
          return;

        SpillageChainsLength += SC.size() + RC.size();
        NumSpillageChains += 1;
        UpdateReg(SC[0], InnerMostSpillCopy->Destination,
                  OuterMostSpillCopy->Source);
        UpdateReg(RC[0], InnerMostReloadCopy->Source,
                  OuterMostReloadCopy->Destination);

        for (size_t I = 1; I < SC.size() - 1; ++I) {
          SC[I]->eraseFromParent();
          RC[I]->eraseFromParent();
        }
      };

  auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
    if (MaybeCopy.getNumImplicitOperands() > 0)
      return false;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
    if (!CopyOperands)
      return false;

    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    return Src && Def && !TRI->regsOverlap(Src, Def) &&
           CopyOperands->Source->isRenamable() &&
           CopyOperands->Destination->isRenamable();
  };

  auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
                                     const MachineInstr &Reload) {
    if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
      return false;
    std::optional<DestSourcePair> SpillCopy =
        isCopyInstr(Spill, *TII, UseCopyInstr);
    std::optional<DestSourcePair> ReloadCopy =
        isCopyInstr(Reload, *TII, UseCopyInstr);
    if (!SpillCopy || !ReloadCopy)
      return false;
    return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
           SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
  };

  auto IsChainedCopy = [&, this](const MachineInstr &Prev,
                                 const MachineInstr &Current) {
    if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
      return false;
    std::optional<DestSourcePair> PrevCopy =
        isCopyInstr(Prev, *TII, UseCopyInstr);
    std::optional<DestSourcePair> CurrentCopy =
        isCopyInstr(Current, *TII, UseCopyInstr);
    if (!PrevCopy || !CurrentCopy)
      return false;
    return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
  };

  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);

    // Update track information via non-copy instruction.
    SmallSet<Register, 8> RegsToClobber;
    if (!CopyOperands) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        MachineInstr *LastUseCopy =
            Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
        if (LastUseCopy) {
          LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
          LLVM_DEBUG(LastUseCopy->dump());
          LLVM_DEBUG(dbgs() << "might be invalidated by\n");
          LLVM_DEBUG(MI.dump());
          CopySourceInvalid.insert(LastUseCopy);
        }
        // Must be noted Tracker.clobberRegister(Reg, ...) removes tracking of
        // Reg, i.e., the COPY that defines Reg is removed from the mapping as
        // well as marking COPYs that use Reg unavailable.
        // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
        // defined by a previous COPY, since we don't want to make COPYs that
        // use Reg unavailable.
        if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
                                          UseCopyInstr))
          // Thus we can keep the property#1.
          RegsToClobber.insert(Reg);
      }
      for (Register Reg : RegsToClobber) {
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
                          << "\n");
      }
      continue;
    }

    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    // Check if we can find a pair spill-reload copy.
    LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
    LLVM_DEBUG(MI.dump());
    MachineInstr *MaybeSpill =
        Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII,
                                      UseCopyInstr);
    bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
    if (!MaybeSpillIsChained && MaybeSpill &&
        IsSpillReloadPair(*MaybeSpill, MI)) {
      // Check if we already have an existing chain. Now we have a
      // spill-reload pair:
      //   L2: r2 = COPY r3
      //   L5: r3 = COPY r2
      // Looking for a valid COPY before L5 which uses r3. This can be several
      // cases:
      // 1) No COPY is found, e.g. r3 is only def-used between (L2, L5); we
      //    create a new chain for L2 and L5.
      // 2)   L2: r2 = COPY r3
      //      L5: r3 = COPY r2
      //    Such COPY is found and is L2, we create a new chain for L2 and L5.
      // 3)   L2: r2 = COPY r3
      //      L3: r1 = COPY r3
      //      L5: r3 = COPY r2
      //    Such COPY is found and is L3, which is not chained to L5;
      //    we create a new chain for L2 and L5.
      // 4)   L2: r2 = COPY r3
      //      L3: r1 = COPY r3
      //      L4: r3 = COPY r1
      //      L5: r3 = COPY r2
      //    Such COPY won't be found since L4 defines r3. We create a new chain
      //    for L2 and L5.
      // 5)   L2: r2 = COPY r3
      //      L3: r3 = COPY r1
      //      L4: r1 = COPY r3
      //      L5: r3 = COPY r2
      //    Such COPY is found and is L4 which belongs to an existing chain, we
      //    add L2 and L5 to this chain.
      LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
      LLVM_DEBUG(MaybeSpill->dump());
      MachineInstr *MaybePrevReload =
          Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
      auto Leader = ChainLeader.find(MaybePrevReload);
      MachineInstr *L = nullptr;
      if (Leader == ChainLeader.end() ||
          (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
        L = &MI;
        assert(!SpillChain.count(L) &&
               "SpillChain should not have contained newly found chain");
      } else {
        assert(MaybePrevReload &&
               "Found a valid leader through nullptr should not happen");
        L = Leader->second;
        assert(SpillChain[L].size() > 0 &&
               "Existing chain's length should be larger than zero");
      }
      assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
             "Newly found paired spill-reload should not belong to any chain "
             "at this point");
      ChainLeader.insert({MaybeSpill, L});
      ChainLeader.insert({&MI, L});
      SpillChain[L].push_back(MaybeSpill);
      ReloadChain[L].push_back(&MI);
      LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
      LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
    } else if (MaybeSpill && !MaybeSpillIsChained) {
      // MaybeSpill is unable to pair with MI. That is to say, adding MI would
      // make the chain invalid.
      // The COPY that defines Src is no longer considered a candidate for a
      // valid chain, since we expect the Def of a spill COPY not to be used by
      // any COPY instruction until the paired reload COPY. For example:
      //   L1: r1 = COPY r2
      //   L2: r3 = COPY r1
      //   L3: r2 = COPY r1
      // Here L2 uses the Def of L1 without being its reload, so once we see
      // L2 we stop tracking L1, and
      // L1 and L3 can't be a valid spill-reload pair.
      // Thus we keep the property#1.
      LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
      LLVM_DEBUG(MaybeSpill->dump());
      LLVM_DEBUG(MI.dump());
      Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
      LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
                        << "\n");
    }
    Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
  }
  for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
    auto &SC = I->second;
    assert(ReloadChain.count(I->first) &&
           "Reload chain of the same leader should exist");
    auto &RC = ReloadChain[I->first];
    TryFoldSpillageCopies(SC, RC);
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
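// Worked sketch of the bookkeeping above (assumed registers, not from the
// original source): for the chain
//   S1: r0 = COPY r1, S2: r1 = COPY r2, S3: r2 = COPY r3
//   <def-use r3>
//   R3: r3 = COPY r2, R2: r2 = COPY r1, R1: r1 = COPY r0
// the leader is R3 (the innermost reload), SpillChain[R3] = [S3, S2, S1] and
// ReloadChain[R3] = [R3, R2, R1]. TryFoldSpillageCopies() then rewrites S3 to
// "r1 = COPY r3" and R3 to "r3 = COPY r1", erases S2 and R2, and keeps the
// outermost pair S1/R1 untouched.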
bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  bool isSpillageCopyElimEnabled = false;
  switch (EnableSpillageCopyElimination) {
  case cl::BOU_UNSET:
    isSpillageCopyElimEnabled =
        MF.getSubtarget().enableSpillageCopyElimination();
    break;
  case cl::BOU_TRUE:
    isSpillageCopyElimEnabled = true;
    break;
  case cl::BOU_FALSE:
    isSpillageCopyElimEnabled = false;
    break;
  }

  Changed = false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  MRI = &MF.getRegInfo();

  for (MachineBasicBlock &MBB : MF) {
    if (isSpillageCopyElimEnabled)
      EliminateSpillageCopies(MBB);
    BackwardCopyPropagateBlock(MBB);
    ForwardCopyPropagateBlock(MBB);
  }

  return Changed;
}

MachineFunctionPass *
llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
  return new MachineCopyPropagation(UseCopyInstr);
}