#define DEBUG_TYPE "machine-cp"

STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
STATISTIC(SpillageChainsLength, "Length of spillage chains");
STATISTIC(NumSpillageChains, "Number of spillage chains");
87 "Controls which register COPYs are forwarded");
static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
                                                 const TargetInstrInfo &TII,
                                                 bool UseCopyInstr) {
  if (UseCopyInstr)
    return TII.isCopyInstr(MI);
  if (MI.isCopy())
    return std::optional<DestSourcePair>(
        DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
  return std::nullopt;
}
  /// Return the cached set of register units preserved by \p RegMaskOp,
  /// computing and caching it on first use.
  BitVector &getPreservedRegUnits(const MachineOperand &RegMaskOp,
                                  const TargetRegisterInfo &TRI) {
    const uint32_t *RegMask = RegMaskOp.getRegMask();
    auto Existing = RegMaskToPreservedRegUnits.find(RegMask);
    if (Existing != RegMaskToPreservedRegUnits.end())
      return Existing->second;

    BitVector &PreservedRegUnits = RegMaskToPreservedRegUnits[RegMask];
    PreservedRegUnits.resize(TRI.getNumRegUnits());
    for (unsigned SafeReg = 0, E = TRI.getNumRegs(); SafeReg < E; ++SafeReg)
      if (!RegMaskOp.clobbersPhysReg(SafeReg))
        for (auto SafeUnit : TRI.regunits(SafeReg))
          PreservedRegUnits.set(SafeUnit);
    return PreservedRegUnits;
  }
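// Minimal usage sketch (mirrors the regmask handling in
// ForwardCopyPropagateBlock() further down): the cached BitVector answers
// "which register units survive this regmask?" without rescanning every
// register each time the same mask is encountered.
//
//   BitVector &Preserved = Tracker.getPreservedRegUnits(*RegMask, *TRI);
//   for (MCRegUnit Unit : TRI->regunits(Reg))
//     if (!Preserved.test(Unit))
//       Tracker.clobberRegUnit(Unit, *TRI, *TII, UseCopyInstr);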
        auto CI = Copies.find(Unit);
        if (CI != Copies.end())
          CI->second.Avail = false;
      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MI, TII, UseCopyInstr);
      assert(CopyOperands && "Expect copy");

      auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
      auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
      RegUnitsToInvalidate.insert(Dest.begin(), Dest.end());
      RegUnitsToInvalidate.insert(Src.begin(), Src.end());
    for (MCRegUnit Unit : RegUnitsToInvalidate)
      Copies.erase(Unit);
    // When we clobber the source of a copy, we need to clobber everything it
    // defined.
    markRegsUnavailable(I->second.DefRegs, TRI);
    // When we clobber the destination of a copy, we need to clobber the whole
    // register it defined.
    if (MachineInstr *MI = I->second.MI) {
      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MI, TII, UseCopyInstr);

      MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
      MCRegister Src = CopyOperands->Source->getReg().asMCReg();

      markRegsUnavailable(Def, TRI);
      for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
        auto SrcCopy = Copies.find(SrcUnit);
        if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
          // Erase the record that Src defined Def; if nothing else is
          // recorded for this unit, drop the entry entirely.
          for (auto itr = SrcCopy->second.DefRegs.begin();
               itr != SrcCopy->second.DefRegs.end(); itr++) {
            if (*itr == Def) {
              SrcCopy->second.DefRegs.erase(itr);
              if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI)
                Copies.erase(SrcCopy);
              break;
            }
          }
        }
      }
      clobberRegUnit(Unit, TRI, TII, UseCopyInstr);
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register Src = CopyOperands->Source->getReg();

    I->second.SrcUsers.insert(&MI);

    return I->second.SrcUsers;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*MI, TII, UseCopyInstr);
    assert(CopyOperands && "Tracking non-copy?");

    MCRegister Src = CopyOperands->Source->getReg().asMCReg();
    MCRegister Def = CopyOperands->Destination->getReg().asMCReg();

    // Remember Def is defined by the copy.
    for (MCRegUnit Unit : TRI.regunits(Def))
      Copies[Unit] = {MI, nullptr, {}, {}, true};

    // Remember the source that is copied to Def. Once it is clobbered, it is
    // no longer available for copy propagation.
    for (MCRegUnit Unit : TRI.regunits(Src)) {
      auto &Copy = Copies[Unit];
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
      Copy.LastSeenUseInCopy = MI;
    }
  bool hasAnyCopies() { return !Copies.empty(); }
  MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
    return findCopyForUnit(RU, TRI, true);
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailSrc, Reg))
      return nullptr;

    // Bail out if a regmask between the copy and its use clobbers either side.
    if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
      return nullptr;
    MachineInstr *AvailCopy =
        findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*AvailCopy, TII, UseCopyInstr);
    Register AvailSrc = CopyOperands->Source->getReg();
    Register AvailDef = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(AvailDef, Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
    if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
      return nullptr;
    auto CI = Copies.find(RU);
    if (CI == Copies.end() || !CI->second.Avail)
      return nullptr;

    MachineInstr *DefCopy = CI->second.MI;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*DefCopy, TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    if (!TRI.isSubRegisterEq(Def, Reg))
      return nullptr;

    if (MO.clobbersPhysReg(Def))
      return nullptr;
    auto CI = Copies.find(RU);
    if (CI == Copies.end())
      return nullptr;
    return CI->second.LastSeenUseInCopy;
  MachineCopyPropagation(bool CopyInstr = false)
      : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

  typedef enum { DebugUse = false, RegularUse = true } DebugType;
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool Changed = false;
char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].insert(&Reader);
      }
void MachineCopyPropagation::readSuccessorLiveIns(
    const MachineBasicBlock &MBB) {
  if (MaybeDeadCopies.empty())
    return;
  // A copy whose destination is live-in to a successor is not dead.
  for (const MachineBasicBlock *Succ : MBB.successors())
    for (const auto &LI : Succ->liveins())
      for (MCRegUnit Unit : TRI->regunits(LI.PhysReg))
        if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
          MaybeDeadCopies.remove(Copy);
}
/// Return true if \p PreviousCopy did copy register \p Src to register \p Def,
/// possibly obscured by sub-register usage, e.g.
///   isNopCopy("ecx = COPY eax", AX, CX) == true
///   isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      MCRegister Def, const TargetRegisterInfo *TRI,
                      const TargetInstrInfo *TII, bool UseCopyInstr) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
  MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
  MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
  if (Src == PreviousSrc && Def == PreviousDef)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
                                              MCRegister Src, MCRegister Def) {
  // Avoid eliminating a copy from/to a reserved register: its value cannot be
  // predicted.
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy =
      Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
  if (!PrevCopy)
    return false;

  auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
  // Check that the existing copy uses the correct sub-registers.
  if (PrevCopyOperands->Destination->isDead())
    return false;

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register CopyDef = CopyOperands->Destination->getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  // Clear the undef flag from the remaining copy if needed.
  if (!CopyOperands->Source->isUndef()) {
    PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
        .setIsUndef(false);
  }

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}
bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register Def = CopyOperands->Destination->getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);
  return false;
}
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {
  std::optional<DestSourcePair> CopyOperands =
      isCopyInstr(Copy, *TII, UseCopyInstr);
  Register CopySrcReg = CopyOperands->Source->getReg();

  // If the new register meets the opcode register constraints, allow
  // forwarding.
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
  if (!UseICopyOperands)
    return false;

  // If the user is itself a COPY, avoid introducing an additional cross-class
  // copy: only forward when the original copy was already cross-class.
  Register UseDstReg = UseICopyOperands->Destination->getReg();

  bool IsCrossClass = false;
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
      if (TRI->getCrossCopyRegClass(RC) != RC) {
        IsCrossClass = true;
        break;
      }
    }
  }
  if (!IsCrossClass)
    return false;

  Register CopyDstReg = CopyOperands->Destination->getReg();
  for (const TargetRegisterClass *RC : TRI->regclasses()) {
    if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
        TRI->getCrossCopyRegClass(RC) != RC)
      return true;
  }
  return false;
}
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}
bool MachineCopyPropagation::hasOverlappingMultipleDef(
    const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
  for (const MachineOperand &MIDef : MI.all_defs())
    if ((&MIDef != &MODef) && MIDef.isReg() &&
        TRI->regsOverlap(Def, MIDef.getReg()))
      return true;
  return false;
}
bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
                                               const MachineOperand &CopySrc) {
  assert(CopySrc.isReg() && "Expected a register operand");
  for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
    if (hasImplicitOverlap(*SrcUser, CopySrc))
      return false;

    for (MachineOperand &MO : SrcUser->uses()) {
      if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
        continue;
      if (MO.isTied() || !MO.isRenamable() ||
          !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
                                              MO.getOperandNo()))
        return false;
    }
  }
  return true;
}
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for uses whose defining COPY is still available and replace them with
  // the COPY's source register.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // ... (tied, undef, implicit, def, and non-renamable operands are skipped)

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register CopyDstReg = CopyOperands->Destination->getReg();
    Register CopySrcReg = CopyOperands->Source->getReg();

    Register ForwardedReg = CopySrcReg;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDstReg) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
      if (!ForwardedReg) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Don't forward if MI is itself a copy that partially overwrites the
    // source we are about to use; the tracker cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg, nullptr)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n  "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n     with " << printReg(CopySrcReg, TRI)
                      << "\n     in " << MI << "     from " << *Copy);

    MOUse.setReg(ForwardedReg);

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    // Analyze copies (which don't overlap themselves).
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();

      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
               "MachineCopyPropagation should be run after register allocation!");

        MCRegister Def = RegDef.asMCReg();
        MCRegister Src = RegSrc.asMCReg();

        // If the copy is redundant with an earlier copy in either direction,
        // erase it.
        if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
          continue;

        forwardUses(MI);

        // Src may have been changed by forwardUses().
        CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
        Src = CopyOperands->Source->getReg().asMCReg();

        // If Src is defined by a previous copy, that copy cannot be eliminated.
        ReadRegister(Src, MI, RegularUse);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.readsReg())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          ReadRegister(Reg, MI, RegularUse);
        }

        // Copy is now a candidate for deletion.
        if (!MRI->isReserved(Def))
          MaybeDeadCopies.insert(&MI);
        // If 'Def' was previously the source of another copy, that earlier
        // copy's source is no longer available.
        Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
        for (const MachineOperand &MO : MI.implicit_operands()) {
          if (!MO.isReg() || !MO.isDef())
            continue;
          MCRegister Reg = MO.getReg().asMCReg();
          if (!Reg)
            continue;
          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        }

        Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);

        continue;
      }
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        // A tied earlyclobber is also read by this instruction, so make sure
        // it is not removed as dead later.
        if (MO.isTied())
          ReadRegister(Reg, MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
      }
978 "MachineCopyPropagation should be run after register allocation!");
980 if (MO.isDef() && !MO.isEarlyClobber()) {
982 if (!
MRI->isConstantPhysReg(Reg)) {
986 }
else if (MO.readsReg())
987 ReadRegister(
Reg.asMCReg(),
MI, MO.isDebug() ? DebugUse : RegularUse);
995 Tracker.getPreservedRegUnits(*RegMask, *
TRI);
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        std::optional<DestSourcePair> CopyOperands =
            isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
        MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Invalidate all entries in the copy map which are not preserved by
        // this register mask.
        for (unsigned RegUnit : TRI->regunits(Reg))
          if (!PreservedRegUnits.test(RegUnit))
            Tracker.clobberRegUnit(RegUnit, *TRI, *TII, UseCopyInstr);

        // erase() returns the iterator following the erased element.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy definition or use of the Defs is no longer available.
    for (MCRegister Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
  }
  bool TracksLiveness = MRI->tracksLiveness();

  // If liveness is tracked, use successor live-in lists to tell which copies
  // are really dead.
  if (TracksLiveness)
    readSuccessorLiveIns(MBB);

  if (MBB.succ_empty() || TracksLiveness) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());

      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
      assert(CopyOperands);

      Register SrcReg = CopyOperands->Source->getReg();
      Register DestReg = CopyOperands->Destination->getReg();

      // Update matching debug values, if any.
      SmallVector<MachineInstr *> MaybeDeadDbgUsers(
          CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
      MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
                               MaybeDeadDbgUsers);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
                                       const MachineRegisterInfo &MRI) {
  // ...
  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;
void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);
    // ... (skip operands that are not plain, renamable register defs)

    MachineInstr *Copy = Tracker.findAvailBackwardCopy(
        MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    Register Src = CopyOperands->Source->getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    if (!canUpdateSrcUsers(*Copy, *CopyOperands->Source))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n     with " << printReg(Def, TRI) << "\n     in "
                      << MI << "     from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(CopyOperands->Destination->isRenamable());

    // Rewrite the remembered users of Src as well.
    for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
      for (MachineOperand &MO : SrcUser->uses()) {
        if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
          continue;
        MO.setReg(Def);
        MO.setIsRenamable(CopyOperands->Destination->isRenamable());
      }
    }

    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}
void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
    // Ignore non-trivial COPYs.
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register DefReg = CopyOperands->Destination->getReg();
      Register SrcReg = CopyOperands->Source->getReg();

      if (!TRI->regsOverlap(DefReg, SrcReg)) {
        // ...
        Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
        continue;
      }
    }
    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    propagateDefs(MI);
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                   UseCopyInstr);

      if (MO.readsReg()) {
        if (MO.isDebug()) {
          // If a debug instruction reads a register that a tracked copy
          // defines, remember it so its debug info can be updated later.
          for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
            if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
              CopyDbgUsers[Copy].insert(&MI);
            }
          }
        } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI, *TII,
                                          UseCopyInstr)) {
          // If the source users cannot be tracked, invalidate the register.
          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
        }
      }
    }
  }
  for (auto *Copy : MaybeDeadCopies) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
                                                  CopyDbgUsers[Copy].end());

    MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
1249 auto &SC = SpillChain[Leader];
1250 auto &RC = ReloadChain[Leader];
1251 for (
auto I = SC.rbegin(), E = SC.rend();
I != E; ++
I)
void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
  // ChainLeader maps each instruction of a spill-reload chain to the chain's
  // leader; SpillChain/ReloadChain map a leader to its spill/reload copies;
  // CopySourceInvalid records COPYs whose source may have been clobbered.
  DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
  DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
  DenseSet<MachineInstr *> CopySourceInvalid;

  auto TryFoldSpillageCopies =
      [&, this](const SmallVectorImpl<MachineInstr *> &SC,
                const SmallVectorImpl<MachineInstr *> &RC) {
        assert(SC.size() == RC.size() && "Spill-reload should be paired");

        // At least three pairs are needed: the outermost pair cannot be
        // removed, and one temporary spill slot is needed to shorten the path.
        if (SC.size() <= 2)
          return;

        for (const MachineInstr *Spill : drop_begin(SC))
          if (CopySourceInvalid.count(Spill))
            return;

        for (const MachineInstr *Reload : drop_end(RC))
          if (CopySourceInvalid.count(Reload))
            return;

        auto CheckCopyConstraint = [this](Register Def, Register Src) {
          for (const TargetRegisterClass *RC : TRI->regclasses()) {
            if (RC->contains(Def) && RC->contains(Src))
              return true;
          }
          return false;
        };

        auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
                            const MachineOperand *New) {
          for (MachineOperand &MO : MI->operands()) {
            if (&MO == Old)
              MO.setReg(New->getReg());
          }
        };

        std::optional<DestSourcePair> InnerMostSpillCopy =
            isCopyInstr(*SC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostSpillCopy =
            isCopyInstr(*SC.back(), *TII, UseCopyInstr);
        std::optional<DestSourcePair> InnerMostReloadCopy =
            isCopyInstr(*RC[0], *TII, UseCopyInstr);
        std::optional<DestSourcePair> OuterMostReloadCopy =
            isCopyInstr(*RC.back(), *TII, UseCopyInstr);
        if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
                                 InnerMostSpillCopy->Source->getReg()) ||
            !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
                                 OuterMostReloadCopy->Destination->getReg()))
          return;

        SpillageChainsLength += SC.size() + RC.size();
        NumSpillageChains += 1;
        UpdateReg(SC[0], InnerMostSpillCopy->Destination,
                  OuterMostSpillCopy->Source);
        UpdateReg(RC[0], InnerMostReloadCopy->Source,
                  OuterMostReloadCopy->Destination);

        for (size_t I = 1; I < SC.size() - 1; ++I) {
          SC[I]->eraseFromParent();
          RC[I]->eraseFromParent();
        }
      };
  auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
    if (MaybeCopy.getNumImplicitOperands() > 0)
      return false;
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
    if (!CopyOperands)
      return false;

    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    return Src && Def && !TRI->regsOverlap(Src, Def) &&
           CopyOperands->Source->isRenamable() &&
           CopyOperands->Destination->isRenamable();
  };
  auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
                                     const MachineInstr &Reload) {
    if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
      return false;
    std::optional<DestSourcePair> SpillCopy =
        isCopyInstr(Spill, *TII, UseCopyInstr);
    std::optional<DestSourcePair> ReloadCopy =
        isCopyInstr(Reload, *TII, UseCopyInstr);
    if (!SpillCopy || !ReloadCopy)
      return false;
    return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
           SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
  };
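  // Worked example of the pairing predicate above (register names invented
  // for illustration only):
  //
  //   Spill : $x9 = COPY $x0    ; SpillCopy:  Source = $x0, Destination = $x9
  //   Reload: $x0 = COPY $x9    ; ReloadCopy is the mirror image of the spill
  //
  // IsSpillReloadPair returns true exactly when the reload undoes the spill,
  // which is the property the chain folding below relies on.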
  auto IsChainedCopy = [&, this](const MachineInstr &Prev,
                                 const MachineInstr &Current) {
    if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
      return false;
    std::optional<DestSourcePair> PrevCopy =
        isCopyInstr(Prev, *TII, UseCopyInstr);
    std::optional<DestSourcePair> CurrentCopy =
        isCopyInstr(Current, *TII, UseCopyInstr);
    if (!PrevCopy || !CurrentCopy)
      return false;
    return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
  };
  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);

    // Update tracking information via a non-copy instruction.
    SmallSet<Register, 8> RegsToClobber;
    if (!CopyOperands) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        MachineInstr *LastUseCopy =
            Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
        if (LastUseCopy)
          CopySourceInvalid.insert(LastUseCopy);
        // Only clobber Reg if it is defined by a previous COPY; otherwise we
        // would needlessly mark COPYs that merely use Reg as unavailable.
        if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
                                          UseCopyInstr))
          RegsToClobber.insert(Reg);
      }
      for (Register Reg : RegsToClobber) {
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
        LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
                          << "\n");
      }
      continue;
    }
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();

    // Check if we can find a paired spill-reload copy.
    LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
    LLVM_DEBUG(MI.dump());
    MachineInstr *MaybeSpill = Tracker.findLastSeenDefInCopy(
        MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
    bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
    if (!MaybeSpillIsChained && MaybeSpill &&
        IsSpillReloadPair(*MaybeSpill, MI)) {
      MachineInstr *MaybePrevReload =
          Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
      auto Leader = ChainLeader.find(MaybePrevReload);
      MachineInstr *L = nullptr;
      if (Leader == ChainLeader.end() ||
          (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
        L = &MI;
        assert(!SpillChain.count(L) &&
               "SpillChain should not have contained newly found chain");
      } else {
        assert(MaybePrevReload &&
               "Found a valid leader through nullptr should not happend");
        L = Leader->second;
        assert(SpillChain[L].size() > 0 &&
               "Existing chain's length should be larger than zero");
      }
      assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
             "Newly found paired spill-reload should not belong to any chain "
             "at this point");
      ChainLeader.insert({MaybeSpill, L});
      ChainLeader.insert({&MI, L});
      SpillChain[L].push_back(MaybeSpill);
      ReloadChain[L].push_back(&MI);
    } else if (MaybeSpill && !MaybeSpillIsChained) {
      // Not a spill-reload pair: any chain that runs through Src can no
      // longer be extended, so drop its tracking.
      Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
    }
    Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
  }
  for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
    auto &SC = I->second;
    assert(ReloadChain.count(I->first) &&
           "Reload chain of the same leader should exist");
    auto &RC = ReloadChain[I->first];
    TryFoldSpillageCopies(SC, RC);
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  bool isSpillageCopyElimEnabled = false;
  switch (EnableSpillageCopyElimination) {
  case cl::BOU_UNSET:
    isSpillageCopyElimEnabled =
        MF.getSubtarget().enableSpillageCopyElimination();
    break;
  case cl::BOU_TRUE:
    isSpillageCopyElimEnabled = true;
    break;
  case cl::BOU_FALSE:
    isSpillageCopyElimEnabled = false;
    break;
  }

  Changed = false;
  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  MRI = &MF.getRegInfo();

  for (MachineBasicBlock &MBB : MF) {
    if (isSpillageCopyElimEnabled)
      EliminateSpillageCopies(MBB);
    BackwardCopyPropagateBlock(MBB);
    ForwardCopyPropagateBlock(MBB);
  }

  return Changed;
}
MachineFunctionPass *llvm::createMachineCopyPropagationPass(bool UseCopyInstr) {
  return new MachineCopyPropagation(UseCopyInstr);
}
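// Hedged usage sketch: a target pipeline would typically schedule the pass via
// this factory; the insertion point shown here is illustrative only, not taken
// from any particular target.
//
//   // e.g. inside a TargetPassConfig subclass:
//   addPass(createMachineCopyPropagationPass(/*UseCopyInstr=*/true));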