Commit 7e5c267

[GlobalISel][NFC] Clean up and modernize the indexed load/store combines.
Use wrappers and helpers to tidy it up, and remove some debug prints.
Parent: 79af1cb

2 files changed (+36, −96 lines)
llvm/include/llvm/CodeGen/GlobalISel/CombinerHelper.h

Lines changed: 3 additions & 2 deletions
@@ -19,6 +19,7 @@
 
 #include "llvm/ADT/DenseMap.h"
 #include "llvm/ADT/SmallVector.h"
+#include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h"
 #include "llvm/CodeGen/LowLevelType.h"
 #include "llvm/CodeGen/Register.h"
 #include "llvm/IR/InstrTypes.h"
@@ -821,14 +822,14 @@ class CombinerHelper {
   /// can be usefully and legally folded into it as a post-indexing operation.
   ///
   /// \returns true if a candidate is found.
-  bool findPostIndexCandidate(MachineInstr &MI, Register &Addr, Register &Base,
+  bool findPostIndexCandidate(GLoadStore &MI, Register &Addr, Register &Base,
                               Register &Offset);
 
   /// Given a non-indexed load or store instruction \p MI, find an offset that
   /// can be usefully and legally folded into it as a pre-indexing operation.
   ///
   /// \returns true if a candidate is found.
-  bool findPreIndexCandidate(MachineInstr &MI, Register &Addr, Register &Base,
+  bool findPreIndexCandidate(GLoadStore &MI, Register &Addr, Register &Base,
                              Register &Offset);
 
   /// Helper function for matchLoadOrCombine. Searches for Registers

llvm/lib/CodeGen/GlobalISel/CombinerHelper.cpp

Lines changed: 33 additions & 94 deletions
@@ -7,6 +7,7 @@
 //===----------------------------------------------------------------------===//
 #include "llvm/CodeGen/GlobalISel/CombinerHelper.h"
 #include "llvm/ADT/APFloat.h"
+#include "llvm/ADT/STLExtras.h"
 #include "llvm/ADT/SetVector.h"
 #include "llvm/ADT/SmallBitVector.h"
 #include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
@@ -944,159 +945,97 @@ void CombinerHelper::applySextInRegOfLoad(
   MI.eraseFromParent();
 }
 
-bool CombinerHelper::findPostIndexCandidate(MachineInstr &MI, Register &Addr,
+bool CombinerHelper::findPostIndexCandidate(GLoadStore &LdSt, Register &Addr,
                                             Register &Base, Register &Offset) {
-  auto &MF = *MI.getParent()->getParent();
+  auto &MF = *LdSt.getParent()->getParent();
   const auto &TLI = *MF.getSubtarget().getTargetLowering();
 
-#ifndef NDEBUG
-  unsigned Opcode = MI.getOpcode();
-  assert(Opcode == TargetOpcode::G_LOAD || Opcode == TargetOpcode::G_SEXTLOAD ||
-         Opcode == TargetOpcode::G_ZEXTLOAD || Opcode == TargetOpcode::G_STORE);
-#endif
+  Base = LdSt.getPointerReg();
 
-  Base = MI.getOperand(1).getReg();
-  MachineInstr *BaseDef = MRI.getUniqueVRegDef(Base);
-  if (BaseDef && BaseDef->getOpcode() == TargetOpcode::G_FRAME_INDEX)
+  if (getOpcodeDef(TargetOpcode::G_FRAME_INDEX, Base, MRI))
     return false;
 
-  LLVM_DEBUG(dbgs() << "Searching for post-indexing opportunity for: " << MI);
   // FIXME: The following use traversal needs a bail out for pathological cases.
   for (auto &Use : MRI.use_nodbg_instructions(Base)) {
-    if (Use.getOpcode() != TargetOpcode::G_PTR_ADD)
+    auto *PtrAdd = dyn_cast<GPtrAdd>(&Use);
+    if (!PtrAdd)
       continue;
 
-    Offset = Use.getOperand(2).getReg();
+    Offset = PtrAdd->getOffsetReg();
     if (!ForceLegalIndexing &&
-        !TLI.isIndexingLegal(MI, Base, Offset, /*IsPre*/ false, MRI)) {
-      LLVM_DEBUG(dbgs() << "    Ignoring candidate with illegal addrmode: "
-                        << Use);
+        !TLI.isIndexingLegal(LdSt, Base, Offset, /*IsPre*/ false, MRI))
       continue;
-    }
 
     // Make sure the offset calculation is before the potentially indexed op.
-    // FIXME: we really care about dependency here. The offset calculation might
-    // be movable.
-    MachineInstr *OffsetDef = MRI.getUniqueVRegDef(Offset);
-    if (!OffsetDef || !dominates(*OffsetDef, MI)) {
-      LLVM_DEBUG(dbgs() << "    Ignoring candidate with offset after mem-op: "
-                        << Use);
+    MachineInstr *OffsetDef = MRI.getVRegDef(Offset);
+    if (!dominates(*OffsetDef, LdSt))
      continue;
-    }
 
     // FIXME: check whether all uses of Base are load/store with foldable
     // addressing modes. If so, using the normal addr-modes is better than
     // forming an indexed one.
-
-    bool MemOpDominatesAddrUses = true;
-    for (auto &PtrAddUse :
-         MRI.use_nodbg_instructions(Use.getOperand(0).getReg())) {
-      if (!dominates(MI, PtrAddUse)) {
-        MemOpDominatesAddrUses = false;
-        break;
-      }
-    }
-
-    if (!MemOpDominatesAddrUses) {
-      LLVM_DEBUG(
-          dbgs() << "    Ignoring candidate as memop does not dominate uses: "
-                 << Use);
+    if (any_of(MRI.use_nodbg_instructions(PtrAdd->getReg(0)),
+               [&](MachineInstr &PtrAddUse) {
+                 return !dominates(LdSt, PtrAddUse);
+               }))
      continue;
-    }
 
-    LLVM_DEBUG(dbgs() << "    Found match: " << Use);
-    Addr = Use.getOperand(0).getReg();
+    Addr = PtrAdd->getReg(0);
     return true;
   }
 
   return false;
 }
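The hand-rolled flag-and-break dominance loop above is gone in favor of llvm::any_of, which is what the new STLExtras.h include is for. A minimal standalone sketch of the idiom (the data here is illustrative, not from the patch):

#include "llvm/ADT/STLExtras.h"

#include <vector>

// any_of(Range, Pred) collapses the "bool Flag = false; for (...) { if (...)
// { Flag = true; break; } }" pattern into a single readable call.
static bool hasNegative(const std::vector<int> &Vals) {
  return llvm::any_of(Vals, [](int V) { return V < 0; });
}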

-bool CombinerHelper::findPreIndexCandidate(MachineInstr &MI, Register &Addr,
+bool CombinerHelper::findPreIndexCandidate(GLoadStore &LdSt, Register &Addr,
                                            Register &Base, Register &Offset) {
-  auto &MF = *MI.getParent()->getParent();
+  auto &MF = *LdSt.getParent()->getParent();
   const auto &TLI = *MF.getSubtarget().getTargetLowering();
 
-#ifndef NDEBUG
-  unsigned Opcode = MI.getOpcode();
-  assert(Opcode == TargetOpcode::G_LOAD || Opcode == TargetOpcode::G_SEXTLOAD ||
-         Opcode == TargetOpcode::G_ZEXTLOAD || Opcode == TargetOpcode::G_STORE);
-#endif
-
-  Addr = MI.getOperand(1).getReg();
-  MachineInstr *AddrDef = getOpcodeDef(TargetOpcode::G_PTR_ADD, Addr, MRI);
-  if (!AddrDef || MRI.hasOneNonDBGUse(Addr))
+  Addr = LdSt.getPointerReg();
+  if (!mi_match(Addr, MRI, m_GPtrAdd(m_Reg(Base), m_Reg(Offset))) ||
+      MRI.hasOneNonDBGUse(Addr))
     return false;
 
-  Base = AddrDef->getOperand(1).getReg();
-  Offset = AddrDef->getOperand(2).getReg();
-
-  LLVM_DEBUG(dbgs() << "Found potential pre-indexed load_store: " << MI);
-
   if (!ForceLegalIndexing &&
-      !TLI.isIndexingLegal(MI, Base, Offset, /*IsPre*/ true, MRI)) {
-    LLVM_DEBUG(dbgs() << "    Skipping, not legal for target");
+      !TLI.isIndexingLegal(LdSt, Base, Offset, /*IsPre*/ true, MRI))
     return false;
-  }
 
   MachineInstr *BaseDef = getDefIgnoringCopies(Base, MRI);
-  if (BaseDef->getOpcode() == TargetOpcode::G_FRAME_INDEX) {
-    LLVM_DEBUG(dbgs() << "    Skipping, frame index would need copy anyway.");
+  if (BaseDef->getOpcode() == TargetOpcode::G_FRAME_INDEX)
     return false;
-  }
 
-  if (MI.getOpcode() == TargetOpcode::G_STORE) {
+  if (auto *St = dyn_cast<GStore>(&LdSt)) {
     // Would require a copy.
-    if (Base == MI.getOperand(0).getReg()) {
-      LLVM_DEBUG(dbgs() << "    Skipping, storing base so need copy anyway.");
+    if (Base == St->getValueReg())
       return false;
-    }
 
     // We're expecting one use of Addr in MI, but it could also be the
     // value stored, which isn't actually dominated by the instruction.
-    if (MI.getOperand(0).getReg() == Addr) {
-      LLVM_DEBUG(dbgs() << "    Skipping, does not dominate all addr uses");
+    if (St->getValueReg() == Addr)
       return false;
-    }
   }
 
   // FIXME: check whether all uses of the base pointer are constant PtrAdds.
   // That might allow us to end base's liveness here by adjusting the constant.
 
-  for (auto &UseMI : MRI.use_nodbg_instructions(Addr)) {
-    if (!dominates(MI, UseMI)) {
-      LLVM_DEBUG(dbgs() << "    Skipping, does not dominate all addr uses.");
-      return false;
-    }
-  }
-
-  return true;
+  return all_of(MRI.use_nodbg_instructions(Addr),
+                [&](MachineInstr &UseMI) { return dominates(LdSt, UseMI); });
 }
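findPreIndexCandidate now destructures the address computation with mi_match instead of getOpcodeDef plus positional getOperand calls. A sketch of that pattern in isolation, assuming only the m_GPtrAdd and m_Reg matchers used above (the wrapper function itself is hypothetical):

#include "llvm/CodeGen/GlobalISel/MIPatternMatch.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"

using namespace llvm;
using namespace llvm::MIPatternMatch;

// If the instruction defining Addr is a G_PTR_ADD, bind its base and offset
// source registers to Base and Offset and return true; otherwise return
// false and leave them untouched.
static bool matchPtrAddParts(Register Addr, const MachineRegisterInfo &MRI,
                             Register &Base, Register &Offset) {
  return mi_match(Addr, MRI, m_GPtrAdd(m_Reg(Base), m_Reg(Offset)));
}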

-bool CombinerHelper::tryCombineIndexedLoadStore(MachineInstr &MI) {
-  IndexedLoadStoreMatchInfo MatchInfo;
-  if (matchCombineIndexedLoadStore(MI, MatchInfo)) {
-    applyCombineIndexedLoadStore(MI, MatchInfo);
-    return true;
-  }
-  return false;
-}
-
-bool CombinerHelper::matchCombineIndexedLoadStore(MachineInstr &MI, IndexedLoadStoreMatchInfo &MatchInfo) {
-  unsigned Opcode = MI.getOpcode();
-  if (Opcode != TargetOpcode::G_LOAD && Opcode != TargetOpcode::G_SEXTLOAD &&
-      Opcode != TargetOpcode::G_ZEXTLOAD && Opcode != TargetOpcode::G_STORE)
-    return false;
+bool CombinerHelper::matchCombineIndexedLoadStore(
+    MachineInstr &MI, IndexedLoadStoreMatchInfo &MatchInfo) {
+  auto &LdSt = cast<GLoadStore>(MI);
 
   // For now, no targets actually support these opcodes so don't waste time
   // running these unless we're forced to for testing.
   if (!ForceLegalIndexing)
     return false;
 
-  MatchInfo.IsPre = findPreIndexCandidate(MI, MatchInfo.Addr, MatchInfo.Base,
+  MatchInfo.IsPre = findPreIndexCandidate(LdSt, MatchInfo.Addr, MatchInfo.Base,
                                           MatchInfo.Offset);
   if (!MatchInfo.IsPre &&
-      !findPostIndexCandidate(MI, MatchInfo.Addr, MatchInfo.Base,
+      !findPostIndexCandidate(LdSt, MatchInfo.Addr, MatchInfo.Base,
                               MatchInfo.Offset))
     return false;
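One subtlety in the new matchCombineIndexedLoadStore: cast<GLoadStore>(MI) asserts rather than tests, so dropping the explicit opcode bail-out is only safe if the combine is matched solely against load/store roots, presumably how the rule is registered. A short sketch of the two casting idioms side by side (the helper is illustrative, the types are those in the diff):

#include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h"

using namespace llvm;

// cast<> documents a precondition and asserts in debug builds; dyn_cast<>
// performs a runtime check, as findPostIndexCandidate does with GPtrAdd.
static bool storesValue(MachineInstr &MI, Register Val) {
  auto &LdSt = cast<GLoadStore>(MI);      // caller guarantees a load/store
  if (auto *St = dyn_cast<GStore>(&LdSt)) // may still be a load
    return St->getValueReg() == Val;
  return false;
}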
