Commit 54f8f78

[X86] Add X86DAGToDAGISel::matchIndexRecursively helper to match/resolve address indices, scaling + displacement
This currently just splits ADD/AddLike constant offset values, but we should be able to expand on this as part of handling the regressions noticed in D155472, without adding more duplicate zext/sext code inside matchAddressRecursively.
Parent commit: 87c5a3e

llvm/lib/Target/X86/X86ISelDAGToDAG.cpp

Lines changed: 28 additions & 14 deletions
@@ -212,6 +212,8 @@ namespace {
     bool matchAddress(SDValue N, X86ISelAddressMode &AM);
     bool matchVectorAddress(SDValue N, X86ISelAddressMode &AM);
     bool matchAdd(SDValue &N, X86ISelAddressMode &AM, unsigned Depth);
+    SDValue matchIndexRecursively(SDValue N, X86ISelAddressMode &AM,
+                                  unsigned Depth);
     bool matchAddressRecursively(SDValue N, X86ISelAddressMode &AM,
                                  unsigned Depth);
     bool matchVectorAddressRecursively(SDValue N, X86ISelAddressMode &AM,
@@ -2201,6 +2203,30 @@ static bool foldMaskedShiftToBEXTR(SelectionDAG &DAG, SDValue N,
   return false;
 }
 
+// Attempt to peek further into a scaled index register, collecting additional
+// extensions / offsets / etc. Returns /p N if we can't peek any further.
+SDValue X86DAGToDAGISel::matchIndexRecursively(SDValue N,
+                                               X86ISelAddressMode &AM,
+                                               unsigned Depth) {
+  assert(AM.IndexReg.getNode() == nullptr && "IndexReg already matched");
+  assert((AM.Scale == 1 || AM.Scale == 2 || AM.Scale == 4 || AM.Scale == 8) &&
+         "Illegal index scale");
+
+  // Limit recursion.
+  if (Depth >= SelectionDAG::MaxRecursionDepth)
+    return N;
+
+  if (CurDAG->isBaseWithConstantOffset(N)) {
+    auto *AddVal = cast<ConstantSDNode>(N.getOperand(1));
+    uint64_t Offset = (uint64_t)AddVal->getSExtValue() * AM.Scale;
+    if (!foldOffsetIntoAddress(Offset, AM))
+      return matchIndexRecursively(N.getOperand(0), AM, Depth + 1);
+  }
+
+  // TODO: Handle extensions, shifted masks etc.
+  return N;
+}
+
 bool X86DAGToDAGISel::matchAddressRecursively(SDValue N, X86ISelAddressMode &AM,
                                               unsigned Depth) {
   SDLoc dl(N);
@@ -2278,21 +2304,9 @@ bool X86DAGToDAGISel::matchAddressRecursively(SDValue N, X86ISelAddressMode &AM,
       // the base doesn't end up getting used, a post-processing step
       // in MatchAddress turns (,x,2) into (x,x), which is cheaper.
       if (Val == 1 || Val == 2 || Val == 3) {
-        AM.Scale = 1 << Val;
         SDValue ShVal = N.getOperand(0);
-
-        // Okay, we know that we have a scale by now. However, if the scaled
-        // value is an add of something and a constant, we can fold the
-        // constant into the disp field here.
-        if (CurDAG->isBaseWithConstantOffset(ShVal)) {
-          AM.IndexReg = ShVal.getOperand(0);
-          auto *AddVal = cast<ConstantSDNode>(ShVal.getOperand(1));
-          uint64_t Disp = (uint64_t)AddVal->getSExtValue() << Val;
-          if (!foldOffsetIntoAddress(Disp, AM))
-            return false;
-        }
-
-        AM.IndexReg = ShVal;
+        AM.Scale = 1 << Val;
+        AM.IndexReg = matchIndexRecursively(ShVal, AM, Depth + 1);
         return false;
       }
     }
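
For illustration, here is a minimal standalone C++ sketch of the constant-offset peeling that the new matchIndexRecursively helper performs. It is not the LLVM code: Expr, AddrMode, matchIndex, and MaxRecursionDepth below are hypothetical stand-ins for SDValue, X86ISelAddressMode, the new helper, and SelectionDAG::MaxRecursionDepth, and the sketch omits the foldOffsetIntoAddress legality check that the real code performs before committing the fold.

// Simplified, standalone model of the constant-offset peeling performed by the
// new matchIndexRecursively helper. Expr and AddrMode are hypothetical
// stand-ins for SDValue and X86ISelAddressMode; the real code also verifies
// the folded offset via foldOffsetIntoAddress, which this sketch skips.
#include <cassert>
#include <cstdint>
#include <iostream>
#include <memory>

// Hypothetical expression node: either a leaf "register" or (lhs + constant).
struct Expr {
  const char *Name = nullptr;     // set for leaf registers
  std::shared_ptr<Expr> AddLHS;   // set for add-with-constant nodes
  int64_t AddConst = 0;
};

// Hypothetical x86-style address: Base + Index * Scale + Disp.
struct AddrMode {
  std::shared_ptr<Expr> IndexReg;
  unsigned Scale = 1;
  int64_t Disp = 0;
};

constexpr unsigned MaxRecursionDepth = 16;

// Peel (x + c) used as a scaled index: fold c * Scale into the displacement
// and recurse on x. Returns the deepest node usable as the index register.
std::shared_ptr<Expr> matchIndex(std::shared_ptr<Expr> N, AddrMode &AM,
                                 unsigned Depth = 0) {
  assert(!AM.IndexReg && "IndexReg already matched");
  assert((AM.Scale == 1 || AM.Scale == 2 || AM.Scale == 4 || AM.Scale == 8) &&
         "Illegal index scale");
  if (Depth >= MaxRecursionDepth)   // limit recursion, as the real helper does
    return N;
  if (N->AddLHS) {                  // N is (x + c): fold c * Scale into Disp
    AM.Disp += N->AddConst * int64_t(AM.Scale);
    return matchIndex(N->AddLHS, AM, Depth + 1);
  }
  return N;                         // leaf register: stop peeling here
}

int main() {
  // Build the index expression ((%x + 5) + 3), to be used with a scale of 4.
  auto X = std::make_shared<Expr>();
  X->Name = "%x";
  auto Inner = std::make_shared<Expr>();
  Inner->AddLHS = X;
  Inner->AddConst = 5;
  auto Outer = std::make_shared<Expr>();
  Outer->AddLHS = Inner;
  Outer->AddConst = 3;

  AddrMode AM;
  AM.Scale = 4;
  AM.IndexReg = matchIndex(Outer, AM);
  // Expected: index %x, scale 4, disp (5 + 3) * 4 = 32.
  std::cout << "index=" << AM.IndexReg->Name << " scale=" << AM.Scale
            << " disp=" << AM.Disp << "\n";
}

Compiling and running this prints index=%x scale=4 disp=32: the two constant adds (5 and 3) are peeled off the index and folded into the displacement as (5 + 3) * 4, mirroring the kind of simplification the commit moves out of matchAddressRecursively and into the shared helper.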
