//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is an extremely simple MachineInstr-level copy propagation pass.
//
// This pass forwards the source of COPYs to the users of their destinations
// when doing so is legal. For example:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg1
//
// If
//   - %reg0 has not been clobbered by the time of the use of %reg1
//   - the register class constraints are satisfied
//   - the COPY def is the only value that reaches OP
// then this pass replaces the above with:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg0
//
// This pass also removes some redundant COPYs. For example:
//
//   %R1 = COPY %R0
//   ... // No clobber of %R1
//   %R0 = COPY %R1 <<< Removed
//
// or
//
//   %R1 = COPY %R0
//   ... // No clobber of %R0
//   %R1 = COPY %R0 <<< Removed
//
// or
//
//   $R0 = OP ...
//   ... // No read/clobber of $R0 and $R1
//   $R1 = COPY $R0 // $R0 is killed
//   Replace $R0 with $R1 and remove the COPY
//   $R1 = OP ...
//   ...
//
//===----------------------------------------------------------------------===//
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>
#include <optional>

using namespace llvm;

#define DEBUG_TYPE "machine-cp"
STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
STATISTIC(SpillageChainsLength, "Length of spillage chains");
STATISTIC(NumSpillageChains, "Number of spillage chains");
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");

static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
                                     cl::Hidden);
static cl::opt<cl::boolOrDefault>
    EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
namespace {

static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
                                                 const TargetInstrInfo &TII,
                                                 bool UseCopyInstr) {
  if (UseCopyInstr)
    return TII.isCopyInstr(MI);

  if (MI.isCopy())
    return std::optional<DestSourcePair>(
        DestSourcePair{MI.getOperand(0), MI.getOperand(1)});

  return std::nullopt;
}

class CopyTracker {
  struct CopyInfo {
    MachineInstr *MI, *LastSeenUseInCopy;
    SmallVector<MCRegister, 4> DefRegs;
    bool Avail;
  };

  DenseMap<MCRegister, CopyInfo> Copies;

public:
  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<MCRegister> Regs,
                           const TargetRegisterInfo &TRI) {
    for (MCRegister Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnit Unit : TRI.regunits(Reg)) {
        auto CI = Copies.find(Unit);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }
  /// Remove register from copy maps.
  void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                          const TargetInstrInfo &TII, bool UseCopyInstr) {
    // Since Reg might be a subreg of some registers, invalidating only Reg is
    // not enough. We have to find the COPY that defines Reg or the registers
    // defined by Reg, and invalidate all of them.
    SmallSet<MCRegister, 8> RegsToInvalidate;
    RegsToInvalidate.insert(Reg);
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI) {
          std::optional<DestSourcePair> CopyOperands =
              isCopyInstr(*MI, TII, UseCopyInstr);
          assert(CopyOperands && "Expect copy");

          RegsToInvalidate.insert(
              CopyOperands->Destination->getReg().asMCReg());
          RegsToInvalidate.insert(CopyOperands->Source->getReg().asMCReg());
        }
        RegsToInvalidate.insert(I->second.DefRegs.begin(),
                                I->second.DefRegs.end());
      }
    }
    for (MCRegister InvalidReg : RegsToInvalidate)
      for (MCRegUnit Unit : TRI.regunits(InvalidReg))
        Copies.erase(Unit);
  }
  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
                       const TargetInstrInfo &TII, bool UseCopyInstr) {
    for (MCRegUnit Unit : TRI.regunits(Reg)) {
      auto I = Copies.find(Unit);
      if (I != Copies.end()) {
        // When we clobber the source of a copy, we need to clobber everything
        // it defined.
        markRegsUnavailable(I->second.DefRegs, TRI);
        // When we clobber the destination of a copy, we need to clobber the
        // whole register it defined.
        if (MachineInstr *MI = I->second.MI) {
          std::optional<DestSourcePair> CopyOperands =
              isCopyInstr(*MI, TII, UseCopyInstr);
          markRegsUnavailable({CopyOperands->Destination->getReg().asMCReg()},
                              TRI);
        }
        // Now we can erase the copy.
        Copies.erase(I);
      }
    }
  }
  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
                 const TargetInstrInfo &TII, bool UseCopyInstr) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*MI, TII, UseCopyInstr);
    assert(CopyOperands && "Tracking non-copy?");

    MCRegister Src = CopyOperands->Source->getReg().asMCReg();
    MCRegister Def = CopyOperands->Destination->getReg().asMCReg();

    // Remember Def is defined by the copy.
    for (MCRegUnit Unit : TRI.regunits(Def))
      Copies[Unit] = {MI, nullptr, {}, true};

    // Remember source that's copied to Def. Once it's clobbered, then
    // it's no longer available for copy propagation.
    for (MCRegUnit Unit : TRI.regunits(Src)) {
      auto I = Copies.insert({Unit, {nullptr, nullptr, {}, false}});
      auto &Copy = I.first->second;
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
      Copy.LastSeenUseInCopy = MI;
    }
  }
  bool hasAnyCopies() {
    return !Copies.empty();
  }

  MachineInstr *findCopyForUnit(MCRegister RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }
  MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
    return findCopyForUnit(RU, TRI, true);
  }
  MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);

    if (!AvailCopy)
      return nullptr;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailSrc, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }
  MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
                              const TargetRegisterInfo &TRI,
                              const TargetInstrInfo &TII, bool UseCopyInstr) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    MachineInstr *AvailCopy =
        findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);

    if (!AvailCopy)
      return nullptr;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailDef, Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }
  // Find last COPY that defines Reg before Current MachineInstr.
  MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
                                      MCRegister Reg,
                                      const TargetRegisterInfo &TRI,
                                      const TargetInstrInfo &TII,
                                      bool UseCopyInstr) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end() || !CI->second.Avail)
      return nullptr;

    MachineInstr *DefCopy = CI->second.MI;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*DefCopy, TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(Def, Reg))
      return nullptr;

    for (const MachineInstr &MI :
         make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
                    Current.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(Def)) {
            LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
                              << printReg(Def, &TRI) << "\n");
            return nullptr;
          }

    return DefCopy;
  }
  // Find last COPY that uses Reg.
  MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnit RU = *TRI.regunits(Reg).begin();
    auto CI = Copies.find(RU);
    if (CI == Copies.end())
      return nullptr;
    return CI->second.LastSeenUseInCopy;
  }

  void clear() {
    Copies.clear();
  }
};
class MachineCopyPropagation : public MachineFunctionPass {
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  const MachineRegisterInfo *MRI = nullptr;

  // If true, use TargetInstrInfo::isCopyInstr() to also recognize
  // target-specific copy-like instructions, not just plain COPYs.
  bool UseCopyInstr;

public:
  static char ID; // Pass identification, replacement for typeid

  MachineCopyPropagation(bool CopyInstr = false)
      : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  typedef enum { DebugUse = false, RegularUse = true } DebugType;
  void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
  void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void EliminateSpillageCopies(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, Register Def);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB.
  DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed = false;
};

} // end anonymous namespace
char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)
void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
                                          DebugType DT) {
  // If 'Reg' is defined by a copy, the copy is no longer a candidate
  // for elimination. If a copy is "read" by a debug user, record the user
  // for propagation.
  for (MCRegUnit Unit : TRI->regunits(Reg)) {
    if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].insert(&Reader);
      }
    }
  }
}
/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
/// This fact may have been obscured by sub register usage or may not be true
/// at all even though Src and Def are subregisters of the registers used in
/// PreviousCopy. e.g.
/// isNopCopy("ecx = COPY eax", AX, CX) == true
/// isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      MCRegister Def, const TargetRegisterInfo *TRI,
                      const TargetInstrInfo *TII, bool UseCopyInstr) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
  MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
  MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
  if (Src == PreviousSrc && Def == PreviousDef)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}
/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Def; this may happen indirectly by
/// copying the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
                                              MCRegister Src, MCRegister Def) {
  // Avoid eliminating a copy from/to a reserved register as we cannot predict
  // the value (Example: The sparc zero register is writable but stays zero).
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy =
      Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
  if (!PrevCopy)
    return false;

  auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
  // Check that the existing copy uses the correct sub registers.
  if (PrevCopyOperands->Destination->isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  assert(CopyOperands);

  Register CopyDef = CopyOperands->Destination->getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  // Clear undef flag from remaining copy if needed.
  if (!CopyOperands->Source->isUndef()) {
    PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
        .setIsUndef(false);
  }

  Copy.eraseFromParent();
  ++NumDeletes;
  return true;
}
bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register Def = CopyOperands->Destination->getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);

  // We don't process further if UseI is a COPY, since forward copy propagation
  // should handle that.
  return false;
}
/// Decide whether we should forward the source of \param Copy to its use in
/// \param UseI based on the physical register class constraints of the opcode
/// and avoiding introducing more cross-class COPYs.
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register CopySrcReg = CopyOperands->Source->getReg();

  // If the new register meets the opcode register constraints, then allow
  // forwarding.
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
  if (!UseICopyOperands)
    return false;

  /// COPYs don't have register class constraints, so if the user instruction
  /// is a COPY, we just try to avoid introducing additional cross-class
  /// COPYs. For example:
  ///
  ///   RegClassA = COPY RegClassB  // Copy parameter
  ///   ...
  ///   RegClassB = COPY RegClassA  // UseI parameter
  ///
  /// which after forwarding becomes
  ///
  ///   RegClassA = COPY RegClassB
  ///   ...
  ///   RegClassB = COPY RegClassB
  ///
  /// so we have reduced the number of cross-class COPYs and potentially
  /// introduced a nop COPY that can be removed.

  // Allow forwarding if src and dst belong to any common class, so long as they
  // don't belong to any (possibly smaller) common class that requires copies to
  // go via a different class.
  Register UseDstReg = UseICopyOperands->Destination->getReg();
  bool Found = false;
  bool IsCrossClass = false;
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
      Found = true;
      if (TRI->getCrossCopyRegClass(RC) != RC) {
        IsCrossClass = true;
        break;
      }
    }
  }
  if (!Found)
    return false;
  if (!IsCrossClass)
    return true;
  // The forwarded copy would be cross-class. Only do this if the original copy
  // was also cross-class.
  Register CopyDstReg = CopyOperands->Destination->getReg();
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
        TRI->getCrossCopyRegClass(RC) != RC)
      return true;
  }
  return false;
}
/// Check that \p MI does not have implicit uses that overlap with its \p Use
/// operand (the register being replaced), since these can sometimes be
/// implicitly tied to other operands. For example, on AMDGPU:
///
///   V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
///
/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
/// way of knowing we need to update the latter when updating the former.
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}
/// For an MI that has multiple definitions, check whether \p MI has
/// a definition that overlaps with another of its definitions.
/// For example, on ARM: umull r9, r9, lr, r0
/// The umull instruction is unpredictable unless RdHi and RdLo are different.
bool MachineCopyPropagation::hasOverlappingMultipleDef(
    const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
  for (const MachineOperand &MIDef : MI.defs()) {
    if ((&MIDef != &MODef) && MIDef.isReg() &&
        TRI->regsOverlap(Def, MIDef.getReg()))
      return true;
  }

  return false;
}
/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit vreg uses that have an active COPY
  // instruction that defines the physical register allocated to them.
  // Replace the vreg with the source of the active COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands since doing so can cause problems
    // with the machine verifier, since it doesn't treat undef reads as reads,
    // so we can end up with a live range that ends on an undef read, leading to
    // an error that the live range doesn't end on a read of the live range
    // it ends on.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register CopyDstReg = CopyOperands->Destination->getReg();
    const MachineOperand &CopySrc = *CopyOperands->Source;
    Register CopySrcReg = CopySrc.getReg();

    Register ForwardedReg = CopySrcReg;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDstReg) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
      if (!ForwardedReg) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(ForwardedReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(ForwardedReg);

    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);
    MOUse.setIsUndef(CopySrc.isUndef());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    // Analyze copies (which don't overlap themselves).
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();

      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
               "MachineCopyPropagation should be run after register allocation!");

        MCRegister Def = RegDef.asMCReg();
        MCRegister Src = RegSrc.asMCReg();

        // The two copies cancel out and the source of the first copy
        // hasn't been overridden, eliminate the second one. e.g.
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %eax = COPY %ecx
        // =>
        //  %ecx = COPY %eax
        //
        // or
        //
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %ecx = COPY %eax
        // =>
        //  %ecx = COPY %eax
        if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
          continue;

        forwardUses(MI);

        // Src may have been changed by forwardUses().
        CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
        Src = CopyOperands->Source->getReg().asMCReg();

        // If Src is defined by a previous copy, the previous copy cannot be
        // eliminated.
        ReadRegister(Src, MI, RegularUse);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.readsReg())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          ReadRegister(Reg, MI, RegularUse);
        }

        LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());

        // Copy is now a candidate for deletion.
        if (!MRI->isReserved(Def))
          MaybeDeadCopies.insert(&MI);

        // If 'Def' is previously source of another copy, then this earlier copy's
        // source is no longer available. e.g.
        // %xmm9 = copy %xmm2
        // ...
        // %xmm2 = copy %xmm0
        // ...
        // %xmm2 = copy %xmm9
        Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.isDef())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        }

        Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
        continue;
      }
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    forwardUses(MI);

    // Not a copy.
    SmallVector<Register, 2> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI.operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Reg.isVirtual() &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        Defs.push_back(Reg.asMCReg());
        continue;
      } else if (MO.readsReg())
        ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers. Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        std::optional<DestSourcePair> CopyOperands =
            isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
        MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Make sure we invalidate any entries in the copy maps before erasing
        // the instruction.
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy definition or reading the Defs is no longer available.
    for (MCRegister Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
  }

  // If MBB doesn't have successors, delete the copies whose defs are not used.
  // If MBB does have successors, then conservatively assume the defs are
  // live-out since we don't want to trust live-in lists.
  if (MBB.succ_empty()) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());

      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
      assert(CopyOperands);

      Register SrcReg = CopyOperands->Source->getReg();
      Register DestReg = CopyOperands->Destination->getReg();
      assert(!MRI->isReserved(DestReg));

      // Update matching debug values, if any.
      SmallVector<MachineInstr *> MaybeDeadDbgUsers(
          CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
      MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
                               MaybeDeadDbgUsers);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
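
// Illustrative regmask interaction handled above (informal sketch, register
// names arbitrary): a call's regmask operand clobbers, say, $eax. If an earlier
//   $eax = COPY $ebx
// is still in MaybeDeadCopies (its destination was never read), the copy is
// deleted, and the tracker entries for the clobbered register are invalidated
// first so the maps never point at an erased instruction.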
static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
                                       const MachineRegisterInfo &MRI,
                                       const TargetInstrInfo &TII) {
  Register Def = CopyOperands.Destination->getReg();
  Register Src = CopyOperands.Source->getReg();

  if (!Def || !Src)
    return false;

  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;

  return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
}
void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // We only handle the register if it is marked renamable (i.e. it came from
    // a vreg).
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailBackwardCopy(
        MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    Register Src = CopyOperands->Source->getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n with " << printReg(Def, TRI) << "\n in "
                      << MI << " from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(CopyOperands->Destination->isRenamable());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}
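
// Illustrative example of the backward propagation above (a sketch; register
// names are arbitrary):
//   renamable $ebx = OP ...
//   ... no read or clobber of $ebx or $eax ...
//   renamable $eax = COPY killed renamable $ebx
// The def of OP is rewritten to $eax, and the COPY becomes a deletion
// candidate that BackwardCopyPropagateBlock() erases once the block has been
// scanned.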
void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
    // Ignore non-trivial COPYs.
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands && MI.getNumOperands() == 2) {
      Register DefReg = CopyOperands->Destination->getReg();
      Register SrcReg = CopyOperands->Source->getReg();

      if (!TRI->regsOverlap(DefReg, SrcReg)) {
        // Unlike forward cp, we don't invoke propagateDefs here,
        // just let forward cp do COPY-to-COPY propagation.
        if (isBackwardPropagatableCopy(*CopyOperands, *MRI, *TII)) {
          Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
          continue;
        }
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    propagateDefs(MI);
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                   UseCopyInstr);

      if (MO.readsReg()) {
        if (MO.isDebug()) {
          // Check if the register in the debug instruction is utilized
          // in a copy instruction, so we can update the debug info if the
          // register is changed.
          for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
            if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
              CopyDbgUsers[Copy].insert(&MI);
            }
          }
        } else {
          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
        }
      }
    }
  }

  for (auto *Copy : MaybeDeadCopies) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
                                                  CopyDbgUsers[Copy].end());

    MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
    DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
    DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
    MachineInstr *Leader) {
  auto &SC = SpillChain[Leader];
  auto &RC = ReloadChain[Leader];
  for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
    (*I)->dump();
  for (MachineInstr *MI : RC)
    MI->dump();
}
// Remove spill-reload like copy chains. For example
// r0 = COPY r1
// r1 = COPY r2
// r2 = COPY r3
// r3 = COPY r4
// <def-use r4>
// r4 = COPY r3
// r3 = COPY r2
// r2 = COPY r1
// r1 = COPY r0
// will be folded into
// r0 = COPY r1
// r1 = COPY r4
// <def-use r4>
// r4 = COPY r1
// r1 = COPY r0
// TODO: Currently we don't track usage of r0 outside the chain, so we
// conservatively keep its value as it was before the rewrite.
//
// The algorithm tries to maintain
// property#1: No Def of a spill COPY in the chain is used or defined until the
// paired reload COPY in the chain uses the Def.
//
// property#2: No Source of a COPY in the chain is used or defined until the
// next COPY in the chain defines the Source, except the innermost spill-reload
// pair.
//
// The algorithm works by checking every COPY inside the MBB, assuming the COPY
// is a reload COPY, and then trying to find the paired spill COPY by searching
// backward for the COPY that defines the Src of the reload COPY. If such a
// pair is found, it either joins an existing chain or starts a new one,
// depending on whether the last available COPY uses the Def of the reload
// COPY.
// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...) to
// find the last COPY that defines Reg, and CopyTracker::findLastSeenUseInCopy(
// Reg, ...) to find the last COPY that uses Reg. When we encounter a non-COPY
// instruction, we check the registers in its operands. If such a Reg is
// defined by a COPY, we untrack this Reg via
// CopyTracker::clobberRegister(Reg, ...).
void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
  // ChainLeader maps MI inside a spill-reload chain to its innermost reload COPY.
  // Thus we can track if a MI belongs to an existing spill-reload chain.
  DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
  // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
  // of COPYs that forms spills of a spill-reload chain.
  // ReloadChain maps innermost reload COPY of a spill-reload chain to a
  // sequence of COPYs that forms reloads of a spill-reload chain.
  DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
  // If a COPY's Source has use or def until next COPY defines the Source,
  // we put the COPY in this set to keep property#2.
  DenseSet<const MachineInstr *> CopySourceInvalid;
  auto TryFoldSpillageCopies =
      [&, this](const SmallVectorImpl<MachineInstr *> &SC,
                const SmallVectorImpl<MachineInstr *> &RC) {
        assert(SC.size() == RC.size() && "Spill-reload should be paired");

        // We need at least 3 pairs of copies for the transformation to apply,
        // because the first outermost pair cannot be removed since we don't
        // recolor outside of the chain and that we need at least one temporary
        // spill slot to shorten the chain. If we only have a chain of two
        // pairs, we already have the shortest sequence this code can handle:
        // the outermost pair for the temporary spill slot, and the pair that
        // use that temporary spill slot for the other end of the chain.
        // TODO: We might be able to simplify to one spill-reload pair if
        // collecting more information about the outermost COPY.
        if (SC.size() <= 2)
          return;

        // If we violate property#2, we don't fold the chain.
        for (const MachineInstr *Spill : make_range(SC.begin() + 1, SC.end()))
          if (CopySourceInvalid.count(Spill))
            return;

        for (const MachineInstr *Reload : make_range(RC.begin(), RC.end() - 1))
          if (CopySourceInvalid.count(Reload))
            return;

        auto CheckCopyConstraint = [this](Register Def, Register Src) {
          for (const TargetRegisterClass *RC : TRI->regclasses()) {
            if (RC->contains(Def) && RC->contains(Src))
              return true;
          }
          return false;
        };

        auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
                            const MachineOperand *New) {
          for (MachineOperand &MO : MI->operands()) {
            if (&MO == Old)
              MO.setReg(New->getReg());
          }
        };

        std::optional<DestSourcePair> InnerMostSpillCopy =
            isCopyInstr(*SC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostSpillCopy =
            isCopyInstr(*SC.back(), *TII, UseCopyInstr);
        std::optional<DestSourcePair> InnerMostReloadCopy =
            isCopyInstr(*RC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostReloadCopy =
            isCopyInstr(*RC.back(), *TII, UseCopyInstr);
        if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
                                 InnerMostSpillCopy->Source->getReg()) ||
            !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
                                 OuterMostReloadCopy->Destination->getReg()))
          return;

        SpillageChainsLength += SC.size() + RC.size();
        NumSpillageChains += 1;
        UpdateReg(SC[0], InnerMostSpillCopy->Destination,
                  OuterMostSpillCopy->Source);
        UpdateReg(RC[0], InnerMostReloadCopy->Source,
                  OuterMostReloadCopy->Destination);

        for (size_t I = 1; I < SC.size() - 1; ++I) {
          SC[I]->eraseFromParent();
          RC[I]->eraseFromParent();
          NumDeletes += 2;
        }
      };
  auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
    if (MaybeCopy.getNumImplicitOperands() > 0)
      return false;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
    if (!CopyOperands)
      return false;
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    return Src && Def && !TRI->regsOverlap(Src, Def) &&
           CopyOperands->Source->isRenamable() &&
           CopyOperands->Destination->isRenamable();
  };

  auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
                                     const MachineInstr &Reload) {
    if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
      return false;
    std::optional<DestSourcePair> SpillCopy =
        isCopyInstr(Spill, *TII, UseCopyInstr);
    std::optional<DestSourcePair> ReloadCopy =
        isCopyInstr(Reload, *TII, UseCopyInstr);
    if (!SpillCopy || !ReloadCopy)
      return false;
    return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
           SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
  };

  auto IsChainedCopy = [&, this](const MachineInstr &Prev,
                                 const MachineInstr &Current) {
    if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
      return false;
    std::optional<DestSourcePair> PrevCopy =
        isCopyInstr(Prev, *TII, UseCopyInstr);
    std::optional<DestSourcePair> CurrentCopy =
        isCopyInstr(Current, *TII, UseCopyInstr);
    if (!PrevCopy || !CurrentCopy)
      return false;
    return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
  };
  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);

    // Update track information via non-copy instruction.
    SmallSet<Register, 8> RegsToClobber;
    if (!CopyOperands) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        MachineInstr *LastUseCopy =
            Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
        if (LastUseCopy) {
          LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
          LLVM_DEBUG(LastUseCopy->dump());
          LLVM_DEBUG(dbgs() << "might be invalidated by\n");
          LLVM_DEBUG(MI.dump());
          CopySourceInvalid.insert(LastUseCopy);
        }
        // Note that Tracker.clobberRegister(Reg, ...) removes tracking of
        // Reg: the COPY that defines Reg is removed from the mapping, and
        // COPYs that use Reg are marked unavailable.
        // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
        // defined by a previous COPY, since we don't want to mark COPYs that
        // use Reg unavailable.
        if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
                                          UseCopyInstr))
          // Thus we can keep the property#1.
          RegsToClobber.insert(Reg);
      }
      for (Register Reg : RegsToClobber) {
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
                          << "\n");
      }
      continue;
    }
= CopyOperands
->Source
->getReg();
1282 Register Def
= CopyOperands
->Destination
->getReg();
1283 // Check if we can find a pair spill-reload copy.
1284 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1285 LLVM_DEBUG(MI
.dump());
1286 MachineInstr
*MaybeSpill
=
1287 Tracker
.findLastSeenDefInCopy(MI
, Src
.asMCReg(), *TRI
, *TII
, UseCopyInstr
);
1288 bool MaybeSpillIsChained
= ChainLeader
.count(MaybeSpill
);
1289 if (!MaybeSpillIsChained
&& MaybeSpill
&&
1290 IsSpillReloadPair(*MaybeSpill
, MI
)) {
1291 // Check if we already have an existing chain. Now we have a
1292 // spill-reload pair.
1295 // Looking for a valid COPY before L5 which uses r3.
1296 // This can be serverial cases.
1298 // No COPY is found, which can be r3 is def-use between (L2, L5), we
1299 // create a new chain for L2 and L5.
1303 // Such COPY is found and is L2, we create a new chain for L2 and L5.
1308 // we create a new chain for L2 and L5.
1314 // Such COPY won't be found since L4 defines r3. we create a new chain
1321 // COPY is found and is L4 which belongs to an existing chain, we add
1322 // L2 and L5 to this chain.
1323 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1324 LLVM_DEBUG(MaybeSpill
->dump());
1325 MachineInstr
*MaybePrevReload
=
1326 Tracker
.findLastSeenUseInCopy(Def
.asMCReg(), *TRI
);
1327 auto Leader
= ChainLeader
.find(MaybePrevReload
);
1328 MachineInstr
*L
= nullptr;
1329 if (Leader
== ChainLeader
.end() ||
1330 (MaybePrevReload
&& !IsChainedCopy(*MaybePrevReload
, MI
))) {
1332 assert(!SpillChain
.count(L
) &&
1333 "SpillChain should not have contained newly found chain");
1335 assert(MaybePrevReload
&&
1336 "Found a valid leader through nullptr should not happend");
1338 assert(SpillChain
[L
].size() > 0 &&
1339 "Existing chain's length should be larger than zero");
1341 assert(!ChainLeader
.count(&MI
) && !ChainLeader
.count(MaybeSpill
) &&
1342 "Newly found paired spill-reload should not belong to any chain "
1344 ChainLeader
.insert({MaybeSpill
, L
});
1345 ChainLeader
.insert({&MI
, L
});
1346 SpillChain
[L
].push_back(MaybeSpill
);
1347 ReloadChain
[L
].push_back(&MI
);
1348 LLVM_DEBUG(dbgs() << "MCP: Chain " << L
<< " now is:\n");
1349 LLVM_DEBUG(printSpillReloadChain(SpillChain
, ReloadChain
, L
));
1350 } else if (MaybeSpill
&& !MaybeSpillIsChained
) {
1351 // MaybeSpill is unable to pair with MI. That's to say adding MI makes
1352 // the chain invalid.
1353 // The COPY defines Src is no longer considered as a candidate of a
1354 // valid chain. Since we expect the Def of a spill copy isn't used by
1355 // any COPY instruction until a reload copy. For example:
1362 // L1 and L3 can't be a valid spill-reload pair.
1363 // Thus we keep the property#1.
1364 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1365 LLVM_DEBUG(MaybeSpill
->dump());
1366 LLVM_DEBUG(MI
.dump());
1367 Tracker
.clobberRegister(Src
.asMCReg(), *TRI
, *TII
, UseCopyInstr
);
1368 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src
, TRI
)
1371 Tracker
.trackCopy(&MI
, *TRI
, *TII
, UseCopyInstr
);
  for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
    auto &SC = I->second;
    assert(ReloadChain.count(I->first) &&
           "Reload chain of the same leader should exist");
    auto &RC = ReloadChain[I->first];
    TryFoldSpillageCopies(SC, RC);
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
&MF
) {
1388 if (skipFunction(MF
.getFunction()))
1391 bool isSpillageCopyElimEnabled
= false;
1392 switch (EnableSpillageCopyElimination
) {
1394 isSpillageCopyElimEnabled
=
1395 MF
.getSubtarget().enableSpillageCopyElimination();
1398 isSpillageCopyElimEnabled
= true;
1401 isSpillageCopyElimEnabled
= false;
1407 TRI
= MF
.getSubtarget().getRegisterInfo();
1408 TII
= MF
.getSubtarget().getInstrInfo();
1409 MRI
= &MF
.getRegInfo();
1411 for (MachineBasicBlock
&MBB
: MF
) {
1412 if (isSpillageCopyElimEnabled
)
1413 EliminateSpillageCopies(MBB
);
1414 BackwardCopyPropagateBlock(MBB
);
1415 ForwardCopyPropagateBlock(MBB
);
1421 MachineFunctionPass
*
1422 llvm::createMachineCopyPropagationPass(bool UseCopyInstr
= false) {
1423 return new MachineCopyPropagation(UseCopyInstr
);