@@ -3096,6 +3096,58 @@ static bool isNonEqualSelect(const Value *V1, const Value *V2, unsigned Depth,
         isKnownNonEqual(SI1->getFalseValue(), V2, Depth + 1, Q);
}

+// Check to see if A is both a GEP and the incoming value for a PHI in a
+// loop, and B is either a ptr or another GEP. If the PHI has 2 incoming
+// values, one of them being the recursive GEP A and the other a ptr at the
+// same base and at the same or higher offset than B, then the loop only ever
+// moves the pointer further away from B, provided the offset of the
+// recursive GEP is greater than 0.
+static bool isNonEqualPointersWithRecursiveGEP(const Value *A, const Value *B,
+                                               const SimplifyQuery &Q) {
+  if (!A->getType()->isPointerTy() || !B->getType()->isPointerTy())
+    return false;
+
+  auto *GEPA = dyn_cast<GEPOperator>(A);
+  if (!GEPA || GEPA->getNumIndices() != 1 || !isa<Constant>(GEPA->idx_begin()))
+    return false;
+
+  // Handle 2 incoming PHI values with one being a recursive GEP.
+  auto *PN = dyn_cast<PHINode>(GEPA->getPointerOperand());
+  if (!PN || PN->getNumIncomingValues() != 2)
+    return false;
+
+  // Search for the recursive GEP as an incoming operand, and record it as
+  // Step.
+  Value *Start = nullptr;
+  Value *Step = const_cast<Value *>(A);
+  if (PN->getIncomingValue(0) == Step)
+    Start = PN->getIncomingValue(1);
+  else if (PN->getIncomingValue(1) == Step)
+    Start = PN->getIncomingValue(0);
+  else
+    return false;
+
+  // The base of the other incoming value should match the base of B.
+  // The pointers are non-equal if either of the following holds:
+  //   StartOffset >= OffsetB && StepOffset > 0, or
+  //   StartOffset <= OffsetB && StepOffset < 0.
+  // We use stripAndAccumulateInBoundsConstantOffsets to restrict the
+  // optimisation to inbounds GEPs only.
+  unsigned IndexWidth = Q.DL.getIndexTypeSizeInBits(Start->getType());
+  APInt StartOffset(IndexWidth, 0);
+  Start = Start->stripAndAccumulateInBoundsConstantOffsets(Q.DL, StartOffset);
+  APInt StepOffset(IndexWidth, 0);
+  Step = Step->stripAndAccumulateInBoundsConstantOffsets(Q.DL, StepOffset);
+
+  // Check that the base pointer of Step is the PHI itself.
+  if (Step != PN)
+    return false;
+  APInt OffsetB(IndexWidth, 0);
+  B = B->stripAndAccumulateInBoundsConstantOffsets(Q.DL, OffsetB);
+  return Start == B &&
+         ((StartOffset.sge(OffsetB) && StepOffset.isStrictlyPositive()) ||
+          (StartOffset.sle(OffsetB) && StepOffset.isNegative()));
+}
+
 /// Return true if it is known that V1 != V2.
 static bool isKnownNonEqual(const Value *V1, const Value *V2, unsigned Depth,
                             const SimplifyQuery &Q) {
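
For a concrete picture of what the new helper matches, here is a minimal C++ sketch (illustrative only, not part of the commit; that a given frontend and pipeline emit exactly this IR shape is an assumption):

// Hypothetical example. In the emitted IR, `p` becomes a PHI with two
// incoming values: Start = `base + 2` (a one-index constant inbounds GEP)
// and Step = the recursive `p + 1` GEP. The compare's operands are the
// Step GEP (A) and `base` (B, so OffsetB == 0). StartOffset (2) >= OffsetB
// (0) and StepOffset (1) > 0, so the pointers are known non-equal and the
// branch can be folded away.
bool never_meets_base(char *base, int n) {
  char *p = base + 2;
  for (int i = 0; i < n; ++i) {
    p += 1;           // recursive GEP: steps the PHI by +1 each iteration
    if (p == base)    // isKnownNonEqual proves this is never taken
      return false;
  }
  return true;
}
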
@@ -3149,6 +3201,10 @@ static bool isKnownNonEqual(const Value *V1, const Value *V2, unsigned Depth,
   if (isNonEqualSelect(V1, V2, Depth, Q) || isNonEqualSelect(V2, V1, Depth, Q))
     return true;

+  if (isNonEqualPointersWithRecursiveGEP(V1, V2, Q) ||
+      isNonEqualPointersWithRecursiveGEP(V2, V1, Q))
+    return true;
+
   return false;
 }
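
As a standalone sanity check of the final predicate (a sketch with made-up offsets, not code from the commit; it assumes only llvm::APInt from LLVM's ADT headers):

#include "llvm/ADT/APInt.h"
#include <cassert>

int main() {
  using llvm::APInt;
  // Hypothetical accumulated offsets: Start = base+4, B = base+2, Step = +1.
  unsigned IndexWidth = 64;
  APInt StartOffset(IndexWidth, 4);
  APInt OffsetB(IndexWidth, 2);
  APInt StepOffset(IndexWidth, 1);

  // The same disjunction the helper returns once the bases compare equal:
  // start at/above B and step up, or start at/below B and step down.
  bool NonEqual =
      (StartOffset.sge(OffsetB) && StepOffset.isStrictlyPositive()) ||
      (StartOffset.sle(OffsetB) && StepOffset.isNegative());
  assert(NonEqual && "offset 4 stepping by +1 never reaches offset 2");
  return 0;
}

Walking forward from offset 4 in steps of +1 visits 5, 6, 7, and so on, so the pointer can never land back on offset 2; that is exactly the case in which the helper reports the pointers as non-equal.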