Skip to content

Commit 7175e17

Browse files
committed
[pmo] Eliminate dead flat namespace tuple numbering from PMOMemoryUseCollector.
TLDR: This does not eliminate the struct/tuple flat namespace from Predictable Mem Opts — just the tuple-specific flat namespace code from PMOMemoryUseCollector that we were computing and then throwing away. I explain below in more detail. First note that this is cruft from when def-init and pmo were one pass. What we were doing here was maintaining a flattened tuple namespace while we were collecting uses in PMOMemoryUseCollector. We never actually used it for anything, since we recomputed this information (including information about structs) in PMO itself! So this information was truly, completely dead. This commit removes that and related logic, and from a maintenance standpoint makes PMOMemoryUseCollector a simple visitor that doesn't have any real special logic in it beyond the tuple scalarization.
1 parent 480b433 commit 7175e17

File tree

3 files changed

+55
-115
lines changed

3 files changed

+55
-115
lines changed

lib/SILOptimizer/Mandatory/PMOMemoryUseCollector.cpp

Lines changed: 52 additions & 85 deletions
Original file line numberDiff line numberDiff line change
@@ -51,9 +51,6 @@ PMOMemoryObjectInfo::PMOMemoryObjectInfo(AllocationInst *allocation)
5151
} else {
5252
MemorySILType = cast<AllocStackInst>(MemoryInst)->getElementType();
5353
}
54-
55-
// Break down the initializer.
56-
NumElements = getElementCountRec(module, MemorySILType);
5754
}
5855

5956
SILInstruction *PMOMemoryObjectInfo::getFunctionEntryPoint() const {
@@ -207,14 +204,11 @@ class ElementUseCollector {
207204
LLVM_NODISCARD bool collectFrom();
208205

209206
private:
210-
LLVM_NODISCARD bool collectUses(SILValue Pointer, unsigned BaseEltNo);
207+
LLVM_NODISCARD bool collectUses(SILValue Pointer);
211208
LLVM_NODISCARD bool collectContainerUses(AllocBoxInst *ABI);
212-
void addElementUses(unsigned BaseEltNo, SILType UseTy, SILInstruction *User,
213-
PMOUseKind Kind);
214-
LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI,
215-
unsigned BaseEltNo);
216-
LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI,
217-
unsigned BaseEltNo);
209+
void addElementUses(SILInstruction *User, PMOUseKind Kind);
210+
LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI);
211+
LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI);
218212
};
219213
} // end anonymous namespace
220214

@@ -224,7 +218,7 @@ bool ElementUseCollector::collectFrom() {
224218
if (auto *ABI = TheMemory.getContainer()) {
225219
shouldOptimize = collectContainerUses(ABI);
226220
} else {
227-
shouldOptimize = collectUses(TheMemory.getAddress(), 0);
221+
shouldOptimize = collectUses(TheMemory.getAddress());
228222
}
229223

230224
if (!shouldOptimize)
@@ -247,51 +241,28 @@ bool ElementUseCollector::collectFrom() {
247241
/// acts on all of the aggregate elements in that value. For example, a load
248242
/// of $*(Int,Int) is a use of both Int elements of the tuple. This is a helper
249243
/// to keep the Uses data structure up to date for aggregate uses.
250-
void ElementUseCollector::addElementUses(unsigned BaseEltNo, SILType UseTy,
251-
SILInstruction *User,
244+
void ElementUseCollector::addElementUses(SILInstruction *User,
252245
PMOUseKind Kind) {
253-
// If we're in a subelement of a struct or enum, just mark the struct, not
254-
// things that come after it in a parent tuple.
255-
unsigned NumElements = 1;
256-
if (TheMemory.NumElements != 1 && !InStructSubElement)
257-
NumElements = getElementCountRec(Module, UseTy);
258-
259-
Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, NumElements));
246+
Uses.emplace_back(User, Kind);
260247
}
261248

262249
/// Given a tuple_element_addr or struct_element_addr, compute the new
263250
/// BaseEltNo implicit in the selected member, and recursively add uses of
264251
/// the instruction.
265-
bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI,
266-
unsigned BaseEltNo) {
267-
252+
bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI) {
268253
// If we're walking into a tuple within a struct or enum, don't adjust the
269254
// BaseElt. The uses hanging off the tuple_element_addr are going to be
270255
// counted as uses of the struct or enum itself.
271-
if (InStructSubElement)
272-
return collectUses(TEAI, BaseEltNo);
273-
274-
// tuple_element_addr P, 42 indexes into the current tuple element.
275-
// Recursively process its uses with the adjusted element number.
276-
unsigned FieldNo = TEAI->getFieldNo();
277-
auto T = TEAI->getOperand()->getType();
278-
if (T.is<TupleType>()) {
279-
for (unsigned i = 0; i != FieldNo; ++i) {
280-
SILType EltTy = T.getTupleElementType(i);
281-
BaseEltNo += getElementCountRec(Module, EltTy);
282-
}
283-
}
284-
285-
return collectUses(TEAI, BaseEltNo);
256+
return collectUses(TEAI);
286257
}
287258

288-
bool ElementUseCollector::collectStructElementUses(StructElementAddrInst *SEAI,
289-
unsigned BaseEltNo) {
259+
bool ElementUseCollector::collectStructElementUses(
260+
StructElementAddrInst *SEAI) {
290261
// Generally, we set the "InStructSubElement" flag and recursively process
291262
// the uses so that we know that we're looking at something within the
292263
// current element.
293264
llvm::SaveAndRestore<bool> X(InStructSubElement, true);
294-
return collectUses(SEAI, BaseEltNo);
265+
return collectUses(SEAI);
295266
}
296267

297268
bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
@@ -307,24 +278,23 @@ bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
307278
continue;
308279

309280
if (auto project = dyn_cast<ProjectBoxInst>(User)) {
310-
if (!collectUses(project, project->getFieldIndex()))
281+
if (!collectUses(project))
311282
return false;
312283
continue;
313284
}
314285

315-
// Other uses of the container are considered escapes of the values.
316-
for (unsigned field :
317-
indices(ABI->getBoxType()->getLayout()->getFields())) {
318-
addElementUses(field,
319-
ABI->getBoxType()->getFieldType(ABI->getModule(), field),
320-
User, PMOUseKind::Escape);
321-
}
286+
// Other uses of the container are considered escapes of the underlying
287+
// value.
288+
//
289+
// This will cause the dataflow to stop propagating any information at the
290+
// use block.
291+
addElementUses(User, PMOUseKind::Escape);
322292
}
323293

324294
return true;
325295
}
326296

327-
bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
297+
bool ElementUseCollector::collectUses(SILValue Pointer) {
328298
assert(Pointer->getType().isAddress() &&
329299
"Walked through the pointer to the value?");
330300
SILType PointeeType = Pointer->getType().getObjectType();
@@ -340,21 +310,21 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
340310

341311
// struct_element_addr P, #field indexes into the current element.
342312
if (auto *SEAI = dyn_cast<StructElementAddrInst>(User)) {
343-
if (!collectStructElementUses(SEAI, BaseEltNo))
313+
if (!collectStructElementUses(SEAI))
344314
return false;
345315
continue;
346316
}
347317

348318
// Instructions that compute a subelement are handled by a helper.
349319
if (auto *TEAI = dyn_cast<TupleElementAddrInst>(User)) {
350-
if (!collectTupleElementUses(TEAI, BaseEltNo))
320+
if (!collectTupleElementUses(TEAI))
351321
return false;
352322
continue;
353323
}
354324

355325
// Look through begin_access.
356326
if (auto I = dyn_cast<BeginAccessInst>(User)) {
357-
if (!collectUses(I, BaseEltNo))
327+
if (!collectUses(I))
358328
return false;
359329
continue;
360330
}
@@ -369,15 +339,15 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
369339
if (PointeeType.is<TupleType>())
370340
UsesToScalarize.push_back(User);
371341
else
372-
addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Load);
342+
addElementUses(User, PMOUseKind::Load);
373343
continue;
374344
}
375345

376-
#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
377-
if (isa<Load##Name##Inst>(User)) { \
378-
Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1)); \
379-
continue; \
380-
}
346+
#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
347+
if (isa<Load##Name##Inst>(User)) { \
348+
Uses.emplace_back(User, PMOUseKind::Load); \
349+
continue; \
350+
}
381351
#include "swift/AST/ReferenceStorage.def"
382352

383353
// Stores *to* the allocation are writes.
@@ -397,24 +367,24 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
397367
else
398368
Kind = PMOUseKind::Initialization;
399369

400-
addElementUses(BaseEltNo, PointeeType, User, Kind);
370+
addElementUses(User, Kind);
401371
continue;
402372
}
403373

404-
#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
405-
if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
406-
if (UI->getOperandNumber() == 1) { \
407-
PMOUseKind Kind; \
408-
if (InStructSubElement) \
409-
Kind = PMOUseKind::PartialStore; \
410-
else if (SI->isInitializationOfDest()) \
411-
Kind = PMOUseKind::Initialization; \
412-
else \
413-
Kind = PMOUseKind::Assign; \
414-
Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, 1)); \
415-
continue; \
416-
} \
417-
}
374+
#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
375+
if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
376+
if (UI->getOperandNumber() == 1) { \
377+
PMOUseKind Kind; \
378+
if (InStructSubElement) \
379+
Kind = PMOUseKind::PartialStore; \
380+
else if (SI->isInitializationOfDest()) \
381+
Kind = PMOUseKind::Initialization; \
382+
else \
383+
Kind = PMOUseKind::Assign; \
384+
Uses.emplace_back(User, Kind); \
385+
continue; \
386+
} \
387+
}
418388
#include "swift/AST/ReferenceStorage.def"
419389

420390
if (auto *CAI = dyn_cast<CopyAddrInst>(User)) {
@@ -439,7 +409,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
439409
else
440410
Kind = PMOUseKind::Assign;
441411

442-
addElementUses(BaseEltNo, PointeeType, User, Kind);
412+
addElementUses(User, Kind);
443413
continue;
444414
}
445415

@@ -464,8 +434,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
464434
if (InStructSubElement) {
465435
return false;
466436
}
467-
addElementUses(BaseEltNo, PointeeType, User,
468-
PMOUseKind::Initialization);
437+
addElementUses(User, PMOUseKind::Initialization);
469438
continue;
470439

471440
// Otherwise, adjust the argument index.
@@ -486,7 +455,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
486455
case ParameterConvention::Indirect_In:
487456
case ParameterConvention::Indirect_In_Constant:
488457
case ParameterConvention::Indirect_In_Guaranteed:
489-
addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::IndirectIn);
458+
addElementUses(User, PMOUseKind::IndirectIn);
490459
continue;
491460

492461
// If this is an @inout parameter, it is like both a load and store.
@@ -496,7 +465,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
496465
// mutating method, we model that as an escape of self. If an
497466
// individual sub-member is passed as inout, then we model that as an
498467
// inout use.
499-
addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::InOutUse);
468+
addElementUses(User, PMOUseKind::InOutUse);
500469
continue;
501470
}
502471
}
@@ -509,15 +478,14 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
509478
if (InStructSubElement) {
510479
return false;
511480
}
512-
Uses.push_back(
513-
PMOMemoryUse(User, PMOUseKind::Initialization, BaseEltNo, 1));
481+
Uses.push_back(PMOMemoryUse(User, PMOUseKind::Initialization));
514482
continue;
515483
}
516484

517485
// open_existential_addr is a use of the protocol value,
518486
// so it is modeled as a load.
519487
if (isa<OpenExistentialAddrInst>(User)) {
520-
Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1));
488+
Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load));
521489
// TODO: Is it safe to ignore all uses of the open_existential_addr?
522490
continue;
523491
}
@@ -538,7 +506,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
538506
continue;
539507

540508
// Otherwise, the use is something complicated, it escapes.
541-
addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Escape);
509+
addElementUses(User, PMOUseKind::Escape);
542510
}
543511

544512
// Now that we've walked all of the immediate uses, scalarize any operations
@@ -604,8 +572,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
604572
// element address computations to recursively process it. This can cause
605573
// further scalarization.
606574
if (llvm::any_of(ElementAddrs, [&](SILValue V) {
607-
return !collectTupleElementUses(cast<TupleElementAddrInst>(V),
608-
BaseEltNo);
575+
return !collectTupleElementUses(cast<TupleElementAddrInst>(V));
609576
})) {
610577
return false;
611578
}

lib/SILOptimizer/Mandatory/PMOMemoryUseCollector.h

Lines changed: 2 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -77,11 +77,6 @@ class PMOMemoryObjectInfo {
7777
/// This is the base type of the memory allocation.
7878
SILType MemorySILType;
7979

80-
/// This is the count of elements being analyzed. For memory objects that are
81-
/// tuples, this is the flattened element count. For 'self' members in init
82-
/// methods, this is the local field count (+1 for derive classes).
83-
unsigned NumElements;
84-
8580
public:
8681
PMOMemoryObjectInfo(AllocationInst *MemoryInst);
8782

@@ -104,8 +99,6 @@ class PMOMemoryObjectInfo {
10499
return dyn_cast<AllocBoxInst>(MemoryInst);
105100
}
106101

107-
unsigned getNumMemoryElements() const { return NumElements; }
108-
109102
/// getElementType - Return the swift type of the specified element.
110103
SILType getElementType(unsigned EltNo) const;
111104

@@ -155,32 +148,13 @@ struct PMOMemoryUse {
155148
/// This is what kind of access it is, load, store, escape, etc.
156149
PMOUseKind Kind;
157150

158-
/// For memory objects of (potentially recursive) tuple type, this keeps
159-
/// track of which tuple elements are affected.
160-
unsigned short FirstElement, NumElements;
161-
162-
PMOMemoryUse(SILInstruction *Inst, PMOUseKind Kind, unsigned FE, unsigned NE)
163-
: Inst(Inst), Kind(Kind), FirstElement(FE), NumElements(NE) {
164-
assert(FE == FirstElement && NumElements == NE &&
165-
"more than 64K elements not supported yet");
166-
}
151+
PMOMemoryUse(SILInstruction *Inst, PMOUseKind Kind)
152+
: Inst(Inst), Kind(Kind) {}
167153

168154
PMOMemoryUse() : Inst(nullptr) {}
169155

170156
bool isInvalid() const { return Inst == nullptr; }
171157
bool isValid() const { return Inst != nullptr; }
172-
173-
bool usesElement(unsigned i) const {
174-
return i >= FirstElement &&
175-
i < static_cast<unsigned>(FirstElement + NumElements);
176-
}
177-
178-
/// getElementBitmask - Return a bitmask with the touched tuple elements
179-
/// set.
180-
APInt getElementBitmask(unsigned NumMemoryTupleElements) const {
181-
return APInt::getBitsSet(NumMemoryTupleElements, FirstElement,
182-
FirstElement + NumElements);
183-
}
184158
};
185159

186160
/// collectPMOElementUsesFrom - Analyze all uses of the specified allocation

lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -516,8 +516,7 @@ SILValue AvailableValueAggregator::handlePrimitiveValue(SILType LoadTy,
516516
if (!Val) {
517517
auto *Load =
518518
B.createLoad(Loc, Address, LoadOwnershipQualifier::Unqualified);
519-
Uses.push_back(PMOMemoryUse(Load, PMOUseKind::Load, FirstElt,
520-
getNumSubElements(Load->getType(), M)));
519+
Uses.emplace_back(Load, PMOUseKind::Load);
521520
return Load;
522521
}
523522

0 commit comments

Comments
 (0)