@@ -926,32 +926,39 @@ static Value *foldIsPowerOf2OrZero(ICmpInst *Cmp0, ICmpInst *Cmp1, bool IsAnd,
 }
 
 /// Reduce a pair of compares that check if a value has exactly 1 bit set.
-/// Also used for logical and/or, must be poison safe.
+/// Also used for logical and/or, must be poison safe if range attributes are
+/// dropped.
 static Value *foldIsPowerOf2(ICmpInst *Cmp0, ICmpInst *Cmp1, bool JoinedByAnd,
-                             InstCombiner::BuilderTy &Builder) {
+                             InstCombiner::BuilderTy &Builder,
+                             InstCombinerImpl &IC) {
   // Handle 'and' / 'or' commutation: make the equality check the first operand.
   if (JoinedByAnd && Cmp1->getPredicate() == ICmpInst::ICMP_NE)
     std::swap(Cmp0, Cmp1);
   else if (!JoinedByAnd && Cmp1->getPredicate() == ICmpInst::ICMP_EQ)
     std::swap(Cmp0, Cmp1);
 
   // (X != 0) && (ctpop(X) u< 2) --> ctpop(X) == 1
-  Value *X;
-  if (JoinedByAnd &&
-      match(Cmp0, m_SpecificICmp(ICmpInst::ICMP_NE, m_Value(X), m_ZeroInt())) &&
-      match(Cmp1, m_SpecificICmp(ICmpInst::ICMP_ULT,
-                                 m_Intrinsic<Intrinsic::ctpop>(m_Specific(X)),
-                                 m_SpecificInt(2)))) {
-    Value *CtPop = Cmp1->getOperand(0);
+  CmpInst::Predicate Pred0, Pred1;
+  Value *X;
+  if (JoinedByAnd && match(Cmp0, m_ICmp(Pred0, m_Value(X), m_ZeroInt())) &&
+      match(Cmp1, m_ICmp(Pred1, m_Intrinsic<Intrinsic::ctpop>(m_Specific(X)),
+                         m_SpecificInt(2))) &&
+      Pred0 == ICmpInst::ICMP_NE && Pred1 == ICmpInst::ICMP_ULT) {
+    auto *CtPop = cast<Instruction>(Cmp1->getOperand(0));
+    // Drop range attributes and re-infer them in the next iteration.
+    CtPop->dropPoisonGeneratingAnnotations();
+    IC.addToWorklist(CtPop);
     return Builder.CreateICmpEQ(CtPop, ConstantInt::get(CtPop->getType(), 1));
   }
   // (X == 0) || (ctpop(X) u> 1) --> ctpop(X) != 1
-  if (!JoinedByAnd &&
-      match(Cmp0, m_SpecificICmp(ICmpInst::ICMP_EQ, m_Value(X), m_ZeroInt())) &&
-      match(Cmp1, m_SpecificICmp(ICmpInst::ICMP_UGT,
-                                 m_Intrinsic<Intrinsic::ctpop>(m_Specific(X)),
-                                 m_SpecificInt(1)))) {
-    Value *CtPop = Cmp1->getOperand(0);
+  if (!JoinedByAnd && match(Cmp0, m_ICmp(Pred0, m_Value(X), m_ZeroInt())) &&
+      match(Cmp1, m_ICmp(Pred1, m_Intrinsic<Intrinsic::ctpop>(m_Specific(X)),
+                         m_SpecificInt(1))) &&
+      Pred0 == ICmpInst::ICMP_EQ && Pred1 == ICmpInst::ICMP_UGT) {
+    auto *CtPop = cast<Instruction>(Cmp1->getOperand(0));
+    // Drop range attributes and re-infer them in the next iteration.
+    CtPop->dropPoisonGeneratingAnnotations();
+    IC.addToWorklist(CtPop);
     return Builder.CreateICmpNE(CtPop, ConstantInt::get(CtPop->getType(), 1));
   }
   return nullptr;
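
The hunk above rewrites an is-power-of-2 idiom built from two compares into a single ctpop compare. A minimal LLVM IR sketch of the plain 'and' form it targets (function and value names here are made up for illustration):

  declare i32 @llvm.ctpop.i32(i32)

  define i1 @is_pow2(i32 %x) {
    %nz  = icmp ne i32 %x, 0
    %pop = call i32 @llvm.ctpop.i32(i32 %x)
    %lt2 = icmp ult i32 %pop, 2
    %r   = and i1 %nz, %lt2
    ret i1 %r
  }

  ; Expected to fold to roughly:
  ;   %pop = call i32 @llvm.ctpop.i32(i32 %x)  ; any range(...) attribute dropped
  ;   %r   = icmp eq i32 %pop, 1
  ;   ret i1 %r

The second fold handles the negated 'or' form, (X == 0) || (ctpop(X) u> 1), the same way and produces an icmp ne against 1.
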
@@ -3346,7 +3353,7 @@ Value *InstCombinerImpl::foldAndOrOfICmps(ICmpInst *LHS, ICmpInst *RHS,
   if (Value *V = foldSignedTruncationCheck(LHS, RHS, I, Builder))
     return V;
 
-  if (Value *V = foldIsPowerOf2(LHS, RHS, IsAnd, Builder))
+  if (Value *V = foldIsPowerOf2(LHS, RHS, IsAnd, Builder, *this))
     return V;
 
   if (Value *V = foldPowerOf2AndShiftedMask(LHS, RHS, IsAnd, Builder))
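
Why the range attributes get dropped: for the logical (select) forms the folded compare must stay poison safe once the zero check no longer guards it. A hedged LLVM IR sketch, where the range(i32 1, 33) return attribute on the call is a hypothetical one inferred elsewhere:

  ;   %nz  = icmp ne i32 %x, 0
  ;   %pop = call range(i32 1, 33) i32 @llvm.ctpop.i32(i32 %x)
  ;   %lt2 = icmp ult i32 %pop, 2
  ;   %r   = select i1 %nz, i1 %lt2, i1 false

For %x == 0 the original select still evaluates to false even though %pop violates its range and is poison, but the folded 'icmp eq i32 %pop, 1' uses %pop unconditionally and would itself be poison. Dropping the attribute, and letting the next iteration re-infer it as the added comment says, keeps the transform poison safe.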