@@ -2664,15 +2664,15 @@ LSRInstance::OptimizeLoopTermCond() {
   // Conservatively avoid trying to use the post-inc value in non-latch
   // exits if there may be pre-inc users in intervening blocks.
   if (LatchBlock != ExitingBlock)
-    for (IVUsers::const_iterator UI = IU.begin(), E = IU.end(); UI != E; ++UI)
+    for (const IVStrideUse &UI : IU)
       // Test if the use is reachable from the exiting block. This dominator
       // query is a conservative approximation of reachability.
-      if (&*UI != CondUse &&
-          !DT.properlyDominates(UI->getUser()->getParent(), ExitingBlock)) {
+      if (&UI != CondUse &&
+          !DT.properlyDominates(UI.getUser()->getParent(), ExitingBlock)) {
         // Conservatively assume there may be reuse if the quotient of their
         // strides could be a legal scale.
         const SCEV *A = IU.getStride(*CondUse, L);
-        const SCEV *B = IU.getStride(*UI, L);
+        const SCEV *B = IU.getStride(UI, L);
         if (!A || !B) continue;
         if (SE.getTypeSizeInBits(A->getType()) !=
             SE.getTypeSizeInBits(B->getType())) {
@@ -2693,9 +2693,9 @@ LSRInstance::OptimizeLoopTermCond() {
             C->getValue().isMinSignedValue())
           goto decline_post_inc;
         // Check for possible scaled-address reuse.
-        if (isAddressUse(TTI, UI->getUser(), UI->getOperandValToReplace())) {
+        if (isAddressUse(TTI, UI.getUser(), UI.getOperandValToReplace())) {
           MemAccessTy AccessTy = getAccessType(
-              TTI, UI->getUser(), UI->getOperandValToReplace());
+              TTI, UI.getUser(), UI.getOperandValToReplace());
           int64_t Scale = C->getSExtValue();
           if (TTI.isLegalAddressingMode(AccessTy.MemTy, /*BaseGV=*/nullptr,
                                         /*BaseOffset=*/0,
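For context on the pattern being applied: the change replaces an explicit IVUsers::const_iterator loop with a C++11 range-based for over the same collection, which turns iterator dereferences (UI->getUser(), &*UI) into direct member accesses (UI.getUser(), &UI). Below is a minimal standalone sketch of the same refactor, using a hypothetical IVStrideUse stand-in and a std::vector in place of the real llvm::IVUsers container; it is an illustration of the idiom, not LLVM code.

    #include <iostream>
    #include <vector>

    // Hypothetical stand-in for llvm::IVStrideUse, used only to illustrate
    // the iteration refactor shown in the diff above.
    struct IVStrideUse {
      int UserId;
      int getUser() const { return UserId; }
    };

    int main() {
      // Hypothetical stand-in for the IVUsers collection (IU).
      std::vector<IVStrideUse> IU = {{1}, {2}, {3}};

      // Before: explicit const_iterator loop. UI is an iterator, so member
      // access goes through operator-> (UI->getUser()) and comparing against
      // an element pointer requires &*UI.
      for (std::vector<IVStrideUse>::const_iterator UI = IU.begin(),
                                                    E = IU.end();
           UI != E; ++UI)
        std::cout << UI->getUser() << '\n';

      // After: range-based for. UI is now a const reference to the element,
      // so accesses become UI.getUser() and its address is simply &UI.
      for (const IVStrideUse &UI : IU)
        std::cout << UI.getUser() << '\n';
    }

Both loops visit the same elements in the same order; the range-based form is shorter and removes the &*UI / UI-> indirection, which is exactly the mechanical change the diff makes at each use site.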