@@ -607,36 +607,11 @@ class EarlyCSE {
     MSSA->verifyMemorySSA();
     // Removing a store here can leave MemorySSA in an unoptimized state by
     // creating MemoryPhis that have identical arguments and by creating
-    // MemoryUses whose defining access is not an actual clobber. We handle the
-    // phi case eagerly here. The non-optimized MemoryUse case is lazily
-    // updated by MemorySSA getClobberingMemoryAccess.
-    if (MemoryAccess *MA = MSSA->getMemoryAccess(Inst)) {
-      // Optimize MemoryPhi nodes that may become redundant by having all the
-      // same input values once MA is removed.
-      SmallSetVector<MemoryPhi *, 4> PhisToCheck;
-      SmallVector<MemoryAccess *, 8> WorkQueue;
-      WorkQueue.push_back(MA);
-      // Process MemoryPhi nodes in FIFO order using a ever-growing vector since
-      // we shouldn't be processing that many phis and this will avoid an
-      // allocation in almost all cases.
-      for (unsigned I = 0; I < WorkQueue.size(); ++I) {
-        MemoryAccess *WI = WorkQueue[I];
-
-        for (auto *U : WI->users())
-          if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U))
-            PhisToCheck.insert(MP);
-
-        MSSAUpdater->removeMemoryAccess(WI);
-
-        for (MemoryPhi *MP : PhisToCheck) {
-          MemoryAccess *FirstIn = MP->getIncomingValue(0);
-          if (llvm::all_of(MP->incoming_values(),
-                           [=](Use &In) { return In == FirstIn; }))
-            WorkQueue.push_back(MP);
-        }
-        PhisToCheck.clear();
-      }
-    }
+    // MemoryUses whose defining access is not an actual clobber. The phi case
+    // is handled by MemorySSA when passing OptimizePhis = true to
+    // removeMemoryAccess. The non-optimized MemoryUse case is lazily updated
+    // by MemorySSA's getClobberingMemoryAccess.
+    MSSAUpdater->removeMemoryAccess(Inst, true);
   }
 };
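For context, the phi cleanup the old code did by hand, and which OptimizePhis = true now asks the updater to perform, amounts to collapsing MemoryPhis whose incoming values are all identical and then re-checking their phi users. The toy C++ sketch below models that worklist; it is not LLVM code, and the names Node, resolve, and collapseRedundantPhis are made up for illustration.

    // Toy model of the redundant-phi collapse requested via OptimizePhis = true.
    // A phi whose incoming values all resolve to one node is replaced by that
    // node, which may in turn make the phis that use it redundant.
    #include <algorithm>
    #include <cassert>
    #include <vector>

    struct Node {                     // stand-in for a MemoryAccess
      std::vector<Node *> Incoming;   // non-empty => behaves like a MemoryPhi
      std::vector<Node *> Users;      // phis that use this node
      Node *ReplacedBy = nullptr;     // set once the node is collapsed away
    };

    static Node *resolve(Node *N) {   // follow replacements to the live value
      while (N->ReplacedBy)
        N = N->ReplacedBy;
      return N;
    }

    // Collapse Root if all incoming values resolve to one node, then re-check
    // its phi users in FIFO order (mirrors the removed worklist loop).
    static void collapseRedundantPhis(Node *Root) {
      std::vector<Node *> WorkQueue{Root};
      for (unsigned I = 0; I < WorkQueue.size(); ++I) {
        Node *Phi = WorkQueue[I];
        if (Phi->Incoming.empty() || Phi->ReplacedBy)
          continue;
        Node *First = resolve(Phi->Incoming.front());
        bool AllSame = std::all_of(Phi->Incoming.begin(), Phi->Incoming.end(),
                                   [&](Node *In) { return resolve(In) == First; });
        if (!AllSame)
          continue;
        Phi->ReplacedBy = First;      // RAUW-style replacement
        for (Node *U : Phi->Users)    // users may now be redundant too
          WorkQueue.push_back(U);
      }
    }

    int main() {
      Node A, P, Q;
      P.Incoming = {&A, &A};          // phi(A, A)  -> redundant
      Q.Incoming = {&P, &A};          // phi(P, A)  -> redundant once P collapses
      P.Users = {&Q};
      collapseRedundantPhis(&P);
      assert(resolve(&P) == &A && resolve(&Q) == &A);
      return 0;
    }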