 #include "llvm/Transforms/Scalar/GVNExpression.h"
 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
 #include "llvm/Transforms/Utils/Local.h"
-#include "llvm/Transforms/Utils/LockstepReverseIterator.h"
 #include <cassert>
 #include <cstddef>
 #include <cstdint>
@@ -97,6 +96,87 @@ static bool isMemoryInst(const Instruction *I) {
          (isa<CallInst>(I) && !cast<CallInst>(I)->doesNotAccessMemory());
 }
 
+/// Iterates through instructions in a set of blocks in reverse order from the
+/// first non-terminator. For example (assume all blocks have size n):
+///   LockstepReverseIterator I([B1, B2, B3]);
+///   *I-- = [B1[n], B2[n], B3[n]];
+///   *I-- = [B1[n-1], B2[n-1], B3[n-1]];
+///   *I-- = [B1[n-2], B2[n-2], B3[n-2]];
+///   ...
+///
+/// It continues until all blocks have been exhausted. Use \c getActiveBlocks()
+/// to determine which blocks are still going and the order they appear in the
+/// list returned by operator*.
+class LockstepReverseIterator {
+  ArrayRef<BasicBlock *> Blocks;
+  SmallSetVector<BasicBlock *, 4> ActiveBlocks;
+  SmallVector<Instruction *, 4> Insts;
+  bool Fail;
+
+public:
+  LockstepReverseIterator(ArrayRef<BasicBlock *> Blocks) : Blocks(Blocks) {
+    reset();
+  }
+
+  void reset() {
+    Fail = false;
+    ActiveBlocks.clear();
+    for (BasicBlock *BB : Blocks)
+      ActiveBlocks.insert(BB);
+    Insts.clear();
+    for (BasicBlock *BB : Blocks) {
+      if (BB->size() <= 1) {
+        // Block wasn't big enough - only contained a terminator.
+        ActiveBlocks.remove(BB);
+        continue;
+      }
+      Insts.push_back(BB->getTerminator()->getPrevNonDebugInstruction());
+    }
+    if (Insts.empty())
+      Fail = true;
+  }
+
+  bool isValid() const { return !Fail; }
+  ArrayRef<Instruction *> operator*() const { return Insts; }
+
+  // Note: This needs to return a SmallSetVector as the elements of
+  // ActiveBlocks will be later copied to Blocks using std::copy. The
+  // resultant order of elements in Blocks needs to be deterministic.
+  // Using SmallPtrSet instead causes non-deterministic order while
+  // copying. And we cannot simply sort Blocks as they need to match the
+  // corresponding Values.
+  SmallSetVector<BasicBlock *, 4> &getActiveBlocks() { return ActiveBlocks; }
+
+  void restrictToBlocks(SmallSetVector<BasicBlock *, 4> &Blocks) {
+    for (auto II = Insts.begin(); II != Insts.end();) {
+      if (!Blocks.contains((*II)->getParent())) {
+        ActiveBlocks.remove((*II)->getParent());
+        II = Insts.erase(II);
+      } else {
+        ++II;
+      }
+    }
+  }
+
+  void operator--() {
+    if (Fail)
+      return;
+    SmallVector<Instruction *, 4> NewInsts;
+    for (auto *Inst : Insts) {
+      if (Inst == &Inst->getParent()->front())
+        ActiveBlocks.remove(Inst->getParent());
+      else
+        NewInsts.push_back(Inst->getPrevNonDebugInstruction());
+    }
+    if (NewInsts.empty()) {
+      Fail = true;
+      return;
+    }
+    Insts = NewInsts;
+  }
+};
+
 //===----------------------------------------------------------------------===//
 
 /// Candidate solution for sinking. There may be different ways to
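The doc comment above states the iteration contract, but it is easiest to see with a runnable example. The following is a minimal, self-contained analogue of the lockstep reverse walk that uses plain standard-library containers in place of BasicBlock and Instruction; the names (Block, LockstepReverseWalker) are invented for illustration, and the real class's handling of terminators, debug instructions, and getActiveBlocks()/restrictToBlocks() is deliberately elided:

// A self-contained sketch of the lockstep reverse walk over plain containers.
// Block and LockstepReverseWalker are invented names; the real iterator walks
// llvm::BasicBlocks, starts before the terminator and skips debug
// instructions, all of which is elided here.
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

using Block = std::vector<std::string>;

class LockstepReverseWalker {
  std::vector<const Block *> Active; // blocks that still have elements left
  std::vector<std::size_t> Pos;      // current index into each active block
  bool Fail = false;

public:
  explicit LockstepReverseWalker(const std::vector<Block> &Blocks) {
    for (const Block &B : Blocks) {
      if (B.empty())
        continue; // nothing to visit in this block
      Active.push_back(&B);
      Pos.push_back(B.size() - 1); // start at the last element
    }
    Fail = Active.empty();
  }

  bool isValid() const { return !Fail; }

  // The current "row": the element at the current position of every block
  // that is still active, in the order the blocks were given.
  std::vector<std::string> operator*() const {
    std::vector<std::string> Row;
    for (std::size_t I = 0; I < Active.size(); ++I)
      Row.push_back((*Active[I])[Pos[I]]);
    return Row;
  }

  // Step every active block one element backwards; blocks that run out are
  // dropped, and the walk becomes invalid once no block remains.
  void operator--() {
    if (Fail)
      return;
    std::vector<const Block *> NewActive;
    std::vector<std::size_t> NewPos;
    for (std::size_t I = 0; I < Active.size(); ++I) {
      if (Pos[I] == 0)
        continue; // this block is exhausted
      NewActive.push_back(Active[I]);
      NewPos.push_back(Pos[I] - 1);
    }
    Active = std::move(NewActive);
    Pos = std::move(NewPos);
    Fail = Active.empty();
  }
};

int main() {
  std::vector<Block> Blocks = {
      {"a1", "a2", "a3"}, {"b1", "b2"}, {"c1", "c2", "c3"}};
  for (LockstepReverseWalker W(Blocks); W.isValid(); --W) {
    for (const std::string &S : *W)
      std::cout << S << ' ';
    std::cout << '\n';
  }
  // Prints "a3 b2 c3", then "a2 b1 c2", then "a1 c1" (the short block dropped out).
}

Blocks that run short simply drop out of the row, which mirrors how ActiveBlocks shrinks in the real iterator once the walk moves past the front of a basic block.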
@@ -554,11 +634,9 @@ class GVNSink {
   /// The main heuristic function. Analyze the set of instructions pointed to by
   /// LRI and return a candidate solution if these instructions can be sunk, or
   /// std::nullopt otherwise.
-  std::optional<SinkingInstructionCandidate>
-  analyzeInstructionForSinking(LockstepReverseIterator<false> &LRI,
-                               unsigned &InstNum, unsigned &MemoryInstNum,
-                               ModelledPHISet &NeededPHIs,
-                               SmallPtrSetImpl<Value *> &PHIContents);
+  std::optional<SinkingInstructionCandidate> analyzeInstructionForSinking(
+      LockstepReverseIterator &LRI, unsigned &InstNum, unsigned &MemoryInstNum,
+      ModelledPHISet &NeededPHIs, SmallPtrSetImpl<Value *> &PHIContents);
 
   /// Create a ModelledPHI for each PHI in BB, adding to PHIs.
   void analyzeInitialPHIs(BasicBlock *BB, ModelledPHISet &PHIs,
@@ -597,7 +675,7 @@ class GVNSink {
 };
 
 std::optional<SinkingInstructionCandidate>
-GVNSink::analyzeInstructionForSinking(LockstepReverseIterator<false> &LRI,
+GVNSink::analyzeInstructionForSinking(LockstepReverseIterator &LRI,
                                       unsigned &InstNum,
                                       unsigned &MemoryInstNum,
                                       ModelledPHISet &NeededPHIs,
@@ -749,7 +827,7 @@ unsigned GVNSink::sinkBB(BasicBlock *BBEnd) {
     return BB->getTerminator()->getNumSuccessors() != 1;
   });
 
-  LockstepReverseIterator<false> LRI(Preds);
+  LockstepReverseIterator LRI(Preds);
   SmallVector<SinkingInstructionCandidate, 4> Candidates;
   unsigned InstNum = 0, MemoryInstNum = 0;
   ModelledPHISet NeededPHIs;
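This last hunk restores the plain LockstepReverseIterator construction over the predecessors of BBEnd; sinkBB then walks those blocks backwards and counts how many rows of instructions form a viable sinking candidate. As a rough, self-contained sketch of that backwards candidate-counting pattern (string equality stands in for GVNSink's value-numbering comparison, so this is an analogy rather than the real heuristic, and countSinkableRows is an invented name):

// Count how many trailing "instruction" rows are common to every predecessor
// by walking all blocks backwards in lockstep. String equality stands in for
// GVNSink's value-number comparison; everything here is illustrative only.
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

using Block = std::vector<std::string>;

static unsigned countSinkableRows(const std::vector<Block> &Preds) {
  assert(!Preds.empty() && "need at least one predecessor");
  // The walk can reach back at most as far as the shortest predecessor.
  std::size_t Limit = Preds[0].size();
  for (const Block &B : Preds)
    Limit = std::min(Limit, B.size());

  unsigned Rows = 0;
  for (std::size_t Back = 1; Back <= Limit; ++Back) {
    // The candidate row is the Back-th element from the end of each block;
    // here it is "sinkable" only if all entries are identical.
    const std::string &First = Preds[0][Preds[0].size() - Back];
    bool AllEqual =
        std::all_of(Preds.begin(), Preds.end(), [&](const Block &B) {
          return B[B.size() - Back] == First;
        });
    if (!AllEqual)
      break; // stop at the first row that cannot be sunk
    ++Rows;
  }
  return Rows;
}

int main() {
  std::vector<Block> Preds = {{"x = load p", "y = add x, 1", "store y, q"},
                              {"x = load r", "y = add x, 1", "store y, q"}};
  // The last two rows match textually; the loads differ, so the walk stops.
  std::cout << countSinkableRows(Preds) << '\n'; // prints 2
}

In the real pass a row can still be sunk when operands differ across predecessors, at the cost of introducing PHIs in BBEnd; that is what the NeededPHIs bookkeeping and the candidate's cost model in the surrounding code account for.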