@@ -458,8 +458,7 @@ static SmallBitVector isUndefVector(const Value *V,
 /// ShuffleVectorInst/getShuffleCost?
 static std::optional<TargetTransformInfo::ShuffleKind>
 isFixedVectorShuffle(ArrayRef<Value *> VL, SmallVectorImpl<int> &Mask) {
-  const auto *It =
-      find_if(VL, [](Value *V) { return isa<ExtractElementInst>(V); });
+  const auto *It = find_if(VL, IsaPred<ExtractElementInst>);
   if (It == VL.end())
     return std::nullopt;
   auto *EI0 = cast<ExtractElementInst>(*It);
@@ -4695,12 +4694,8 @@ BoUpSLP::getReorderingData(const TreeEntry &TE, bool TopToBottom) {
   // TODO: add analysis of other gather nodes with extractelement
   // instructions and other values/instructions, not only undefs.
   if ((TE.getOpcode() == Instruction::ExtractElement ||
-       (all_of(TE.Scalars,
-               [](Value *V) {
-                 return isa<UndefValue, ExtractElementInst>(V);
-               }) &&
-        any_of(TE.Scalars,
-               [](Value *V) { return isa<ExtractElementInst>(V); }))) &&
+       (all_of(TE.Scalars, IsaPred<UndefValue, ExtractElementInst>) &&
+        any_of(TE.Scalars, IsaPred<ExtractElementInst>))) &&
       all_of(TE.Scalars, [](Value *V) {
         auto *EE = dyn_cast<ExtractElementInst>(V);
         return !EE || isa<FixedVectorType>(EE->getVectorOperandType());
@@ -4721,7 +4716,7 @@ BoUpSLP::getReorderingData(const TreeEntry &TE, bool TopToBottom) {
   // might be transformed.
   int Sz = TE.Scalars.size();
   if (isSplat(TE.Scalars) && !allConstant(TE.Scalars) &&
-      count_if(TE.Scalars, UndefValue::classof) == Sz - 1) {
+      count_if(TE.Scalars, IsaPred<UndefValue>) == Sz - 1) {
     const auto *It =
         find_if(TE.Scalars, [](Value *V) { return !isConstant(V); });
     if (It == TE.Scalars.begin())
@@ -6345,11 +6340,10 @@ void BoUpSLP::buildTree_rec(ArrayRef<Value *> VL, unsigned Depth,
       UserTreeIdx.UserTE->State == TreeEntry::ScatterVectorize &&
       !(S.getOpcode() && allSameBlock(VL))) {
     assert(S.OpValue->getType()->isPointerTy() &&
-           count_if(VL, [](Value *V) { return isa<GetElementPtrInst>(V); }) >=
-               2 &&
+           count_if(VL, IsaPred<GetElementPtrInst>) >= 2 &&
            "Expected pointers only.");
     // Reset S to make it GetElementPtr kind of node.
-    const auto *It = find_if(VL, [](Value *V) { return isa<GetElementPtrInst>(V); });
+    const auto *It = find_if(VL, IsaPred<GetElementPtrInst>);
     assert(It != VL.end() && "Expected at least one GEP.");
     S = getSameOpcode(*It, *TLI);
   }
@@ -6893,17 +6887,12 @@ unsigned BoUpSLP::canMapToVector(Type *T) const {
 bool BoUpSLP::canReuseExtract(ArrayRef<Value *> VL, Value *OpValue,
                               SmallVectorImpl<unsigned> &CurrentOrder,
                               bool ResizeAllowed) const {
-  const auto *It = find_if(VL, [](Value *V) {
-    return isa<ExtractElementInst, ExtractValueInst>(V);
-  });
+  const auto *It = find_if(VL, IsaPred<ExtractElementInst, ExtractValueInst>);
   assert(It != VL.end() && "Expected at least one extract instruction.");
   auto *E0 = cast<Instruction>(*It);
-  assert(all_of(VL,
-                [](Value *V) {
-                  return isa<UndefValue, ExtractElementInst, ExtractValueInst>(
-                      V);
-                }) &&
-         "Invalid opcode");
+  assert(
+      all_of(VL, IsaPred<UndefValue, ExtractElementInst, ExtractValueInst>) &&
+      "Invalid opcode");
   // Check if all of the extracts come from the same vector and from the
   // correct offset.
   Value *Vec = E0->getOperand(0);
@@ -7575,7 +7564,7 @@ class BoUpSLP::ShuffleCostEstimator : public BaseShuffleAnalysis {
   }
 
   InstructionCost getBuildVectorCost(ArrayRef<Value *> VL, Value *Root) {
-    if ((!Root && allConstant(VL)) || all_of(VL, UndefValue::classof))
+    if ((!Root && allConstant(VL)) || all_of(VL, IsaPred<UndefValue>))
       return TTI::TCC_Free;
     auto *VecTy = FixedVectorType::get(VL.front()->getType(), VL.size());
     InstructionCost GatherCost = 0;
@@ -7743,21 +7732,20 @@ class BoUpSLP::ShuffleCostEstimator : public BaseShuffleAnalysis {
     } else if (!Root && isSplat(VL)) {
       // Found the broadcasting of the single scalar, calculate the cost as
       // the broadcast.
-      const auto *It =
-          find_if(VL, [](Value *V) { return !isa<UndefValue>(V); });
+      const auto *It = find_if_not(VL, IsaPred<UndefValue>);
       assert(It != VL.end() && "Expected at least one non-undef value.");
       // Add broadcast for non-identity shuffle only.
       bool NeedShuffle =
           count(VL, *It) > 1 &&
-          (VL.front() != *It || !all_of(VL.drop_front(), UndefValue::classof));
+          (VL.front() != *It || !all_of(VL.drop_front(), IsaPred<UndefValue>));
       if (!NeedShuffle)
         return TTI.getVectorInstrCost(Instruction::InsertElement, VecTy,
                                       CostKind, std::distance(VL.begin(), It),
                                       PoisonValue::get(VecTy), *It);
 
       SmallVector<int> ShuffleMask(VL.size(), PoisonMaskElem);
       transform(VL, ShuffleMask.begin(), [](Value *V) {
-        return isa<PoisonValue>(V) ? PoisonMaskElem : 0;
+        return isa<PoisonValue>(V) ? PoisonMaskElem : 0;
       });
       InstructionCost InsertCost = TTI.getVectorInstrCost(
           Instruction::InsertElement, VecTy, CostKind, 0,
@@ -7768,7 +7756,7 @@ class BoUpSLP::ShuffleCostEstimator : public BaseShuffleAnalysis {
           /*SubTp=*/nullptr, /*Args=*/*It);
     }
     return GatherCost +
-           (all_of(Gathers, UndefValue::classof)
+           (all_of(Gathers, IsaPred<UndefValue>)
                 ? TTI::TCC_Free
                 : R.getGatherCost(Gathers, !Root && VL.equals(Gathers)));
   };
@@ -8178,9 +8166,8 @@ class BoUpSLP::ShuffleCostEstimator : public BaseShuffleAnalysis {
         // Take credit for instruction that will become dead.
         if (EE->hasOneUse() || !PrevNodeFound) {
           Instruction *Ext = EE->user_back();
-          if (isa<SExtInst, ZExtInst>(Ext) && all_of(Ext->users(), [](User *U) {
-                return isa<GetElementPtrInst>(U);
-              })) {
+          if (isa<SExtInst, ZExtInst>(Ext) &&
+              all_of(Ext->users(), IsaPred<GetElementPtrInst>)) {
             // Use getExtractWithExtendCost() to calculate the cost of
             // extractelement/ext pair.
             Cost -=
@@ -8645,8 +8632,7 @@ BoUpSLP::getEntryCost(const TreeEntry *E, ArrayRef<Value *> VectorizedVals,
       if (I->hasOneUse()) {
         Instruction *Ext = I->user_back();
         if ((isa<SExtInst>(Ext) || isa<ZExtInst>(Ext)) &&
-            all_of(Ext->users(),
-                   [](User *U) { return isa<GetElementPtrInst>(U); })) {
+            all_of(Ext->users(), IsaPred<GetElementPtrInst>)) {
           // Use getExtractWithExtendCost() to calculate the cost of
           // extractelement/ext pair.
           InstructionCost Cost = TTI->getExtractWithExtendCost(
@@ -9130,10 +9116,7 @@ bool BoUpSLP::isFullyVectorizableTinyTree(bool ForReduction) const {
            (allConstant(TE->Scalars) || isSplat(TE->Scalars) ||
             TE->Scalars.size() < Limit ||
             ((TE->getOpcode() == Instruction::ExtractElement ||
-              all_of(TE->Scalars,
-                     [](Value *V) {
-                       return isa<ExtractElementInst, UndefValue>(V);
-                     })) &&
+              all_of(TE->Scalars, IsaPred<ExtractElementInst, UndefValue>)) &&
              isFixedVectorShuffle(TE->Scalars, Mask)) ||
             (TE->State == TreeEntry::NeedToGather &&
              TE->getOpcode() == Instruction::Load && !TE->isAltShuffle()));
@@ -9254,9 +9237,7 @@ bool BoUpSLP::isTreeTinyAndNotFullyVectorizable(bool ForReduction) const {
       all_of(VectorizableTree, [&](const std::unique_ptr<TreeEntry> &TE) {
         return (TE->State == TreeEntry::NeedToGather &&
                 TE->getOpcode() != Instruction::ExtractElement &&
-                count_if(TE->Scalars,
-                         [](Value *V) { return isa<ExtractElementInst>(V); }) <=
-                    Limit) ||
+                count_if(TE->Scalars, IsaPred<ExtractElementInst>) <= Limit) ||
                TE->getOpcode() == Instruction::PHI;
       }))
     return true;
@@ -9285,9 +9266,7 @@ bool BoUpSLP::isTreeTinyAndNotFullyVectorizable(bool ForReduction) const {
           return isa<ExtractElementInst, UndefValue>(V) ||
                  (IsAllowedSingleBVNode &&
                   !V->hasNUsesOrMore(UsesLimit) &&
-                  any_of(V->users(), [](User *U) {
-                    return isa<InsertElementInst>(U);
-                  }));
+                  any_of(V->users(), IsaPred<InsertElementInst>));
         });
       }))
     return false;
@@ -10284,7 +10263,7 @@ BoUpSLP::isGatherShuffledSingleRegisterEntry(
     }
   }
 
-  bool IsSplatOrUndefs = isSplat(VL) || all_of(VL, UndefValue::classof);
+  bool IsSplatOrUndefs = isSplat(VL) || all_of(VL, IsaPred<UndefValue>);
   // Checks if the 2 PHIs are compatible in terms of high possibility to be
   // vectorized.
   auto AreCompatiblePHIs = [&](Value *V, Value *V1) {
@@ -11261,8 +11240,7 @@ Value *BoUpSLP::vectorizeOperand(TreeEntry *E, unsigned NodeIdx,
   InstructionsState S = getSameOpcode(VL, *TLI);
   // Special processing for GEPs bundle, which may include non-gep values.
   if (!S.getOpcode() && VL.front()->getType()->isPointerTy()) {
-    const auto *It =
-        find_if(VL, [](Value *V) { return isa<GetElementPtrInst>(V); });
+    const auto *It = find_if(VL, IsaPred<GetElementPtrInst>);
     if (It != VL.end())
       S = getSameOpcode(*It, *TLI);
   }
@@ -11432,7 +11410,7 @@ ResTy BoUpSLP::processBuildVector(const TreeEntry *E, Args &...Params) {
   unsigned NumParts = TTI->getNumberOfParts(VecTy);
   if (NumParts == 0 || NumParts >= GatheredScalars.size())
     NumParts = 1;
-  if (!all_of(GatheredScalars, UndefValue::classof)) {
+  if (!all_of(GatheredScalars, IsaPred<UndefValue>)) {
     // Check for gathered extracts.
     bool Resized = false;
     ExtractShuffles =
@@ -11757,7 +11735,7 @@ ResTy BoUpSLP::processBuildVector(const TreeEntry *E, Args &...Params) {
       GatheredScalars[I] = PoisonValue::get(ScalarTy);
     }
     // Generate constants for final shuffle and build a mask for them.
-    if (!all_of(GatheredScalars, PoisonValue::classof)) {
+    if (!all_of(GatheredScalars, IsaPred<PoisonValue>)) {
       SmallVector<int> BVMask(GatheredScalars.size(), PoisonMaskElem);
       TryPackScalars(GatheredScalars, BVMask, /*IsRootPoison=*/true);
       Value *BV = ShuffleBuilder.gather(GatheredScalars, BVMask.size());
@@ -14509,7 +14487,7 @@ void BoUpSLP::computeMinimumValueSizes() {
                    return SIt != DemotedConsts.end() &&
                           is_contained(SIt->getSecond(), Idx);
                  }) ||
-          all_of(CTE->Scalars, Constant::classof))
+          all_of(CTE->Scalars, IsaPred<Constant>))
         MinBWs.try_emplace(CTE, MaxBitWidth, IsSigned);
     }
   }
@@ -15257,12 +15235,10 @@ class HorizontalReduction {
   static Value *createOp(IRBuilderBase &Builder, RecurKind RdxKind, Value *LHS,
                          Value *RHS, const Twine &Name,
                          const ReductionOpsListType &ReductionOps) {
-    bool UseSelect =
-        ReductionOps.size() == 2 ||
-        // Logical or/and.
-        (ReductionOps.size() == 1 && any_of(ReductionOps.front(), [](Value *V) {
-           return isa<SelectInst>(V);
-         }));
+    bool UseSelect = ReductionOps.size() == 2 ||
+                     // Logical or/and.
+                     (ReductionOps.size() == 1 &&
+                      any_of(ReductionOps.front(), IsaPred<SelectInst>));
     assert((!UseSelect || ReductionOps.size() != 2 ||
             isa<SelectInst>(ReductionOps[1][0])) &&
            "Expected cmp + select pairs for reduction");
@@ -15501,7 +15477,7 @@ class HorizontalReduction {
           !hasRequiredNumberOfUses(IsCmpSelMinMax, EdgeInst) ||
           !isVectorizable(RdxKind, EdgeInst) ||
           (R.isAnalyzedReductionRoot(EdgeInst) &&
-           all_of(EdgeInst->operands(), Constant::classof))) {
+           all_of(EdgeInst->operands(), IsaPred<Constant>))) {
         PossibleReducedVals.push_back(EdgeVal);
         continue;
       }
@@ -16857,9 +16833,7 @@ bool SLPVectorizerPass::vectorizeInsertElementInst(InsertElementInst *IEI,
   SmallVector<Value *, 16> BuildVectorOpds;
   SmallVector<int> Mask;
   if (!findBuildAggregate(IEI, TTI, BuildVectorOpds, BuildVectorInsts) ||
-      (llvm::all_of(
-           BuildVectorOpds,
-           [](Value *V) { return isa<ExtractElementInst, UndefValue>(V); }) &&
+      (llvm::all_of(BuildVectorOpds, IsaPred<ExtractElementInst, UndefValue>) &&
        isFixedVectorShuffle(BuildVectorOpds, Mask)))
     return false;
 
@@ -17080,10 +17054,7 @@ bool SLPVectorizerPass::vectorizeCmpInsts(iterator_range<ItT> CmpInsts,
 
 bool SLPVectorizerPass::vectorizeInserts(InstSetVector &Instructions,
                                          BasicBlock *BB, BoUpSLP &R) {
-  assert(all_of(Instructions,
-                [](auto *I) {
-                  return isa<InsertElementInst, InsertValueInst>(I);
-                }) &&
+  assert(all_of(Instructions, IsaPred<InsertElementInst, InsertValueInst>) &&
          "This function only accepts Insert instructions");
   bool OpsChanged = false;
   SmallVector<WeakTrackingVH> PostponedInsts;
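
Every hunk above makes the same mechanical substitution: a one-off lambda such as `[](Value *V) { return isa<ExtractElementInst>(V); }`, or a `Foo::classof` function pointer, is replaced by the reusable predicate object `IsaPred<Types...>` from llvm/ADT/STLExtras.h. Below is a minimal sketch of how such a predicate can be built, assuming only that `llvm::isa` is available; the names `IsaCheckPredicateSketch` and `IsaPredSketch` are illustrative stand-ins, and LLVM's actual definition may differ in detail.

#include "llvm/IR/Instructions.h" // ExtractElementInst, for the usage comment
#include "llvm/Support/Casting.h" // llvm::isa

// Hypothetical re-creation (not LLVM's actual code): a stateless function
// object whose call operator forwards to llvm::isa<>, so a single constant
// can stand in for the boilerplate lambda at every call site.
template <typename... Types> struct IsaCheckPredicateSketch {
  template <typename T> bool operator()(const T &Val) const {
    // llvm::isa accepts multiple types and returns true if Val is any of them.
    return llvm::isa<Types...>(Val);
  }
};

// Variable template: IsaPredSketch<Foo, Bar> is a ready-made predicate value.
template <typename... Types>
inline constexpr IsaCheckPredicateSketch<Types...> IsaPredSketch{};

// Usage, mirroring the first hunk:
//   const auto *It = llvm::find_if(VL, IsaPredSketch<llvm::ExtractElementInst>);

Because the predicate is a value rather than a type, it drops straight into `find_if`, `find_if_not`, `all_of`, `any_of`, and `count_if` wherever a unary predicate is expected, which is exactly the pattern this commit rewrites.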