using namespace swift;

//===----------------------------------------------------------------------===//
-// PMOMemoryObjectInfo Implementation
+// PMOMemoryObjectInfo Implementation
//===----------------------------------------------------------------------===//

-static unsigned getElementCountRec(SILModule &Module, SILType T) {
-  // If this is a tuple, it is always recursively flattened.
-  if (CanTupleType TT = T.getAs<TupleType>()) {
-    unsigned NumElements = 0;
-    for (unsigned i = 0, e = TT->getNumElements(); i < e; i++)
-      NumElements += getElementCountRec(Module, T.getTupleElementType(i));
-    return NumElements;
-  }
-
-  // Otherwise, it is a single element.
-  return 1;
-}
-
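
For readers unfamiliar with the deleted helper: it flattened tuples recursively and counted every non-tuple leaf as one element, so a value of type ((Int, Int), Int) occupied three flattened elements. A minimal, self-contained C++ sketch of that counting scheme, using a toy type tree instead of SILType purely for illustration:

#include <cassert>
#include <vector>

// Toy stand-in for a type: either a tuple of element types or a leaf.
struct ToyType {
  bool isTuple = false;
  std::vector<ToyType> tupleElements; // only meaningful when isTuple
};

// Mirrors the deleted getElementCountRec: tuples are recursively
// flattened (an empty tuple contributes 0); anything else is one leaf.
static unsigned countElements(const ToyType &type) {
  if (type.isTuple) {
    unsigned count = 0;
    for (const ToyType &element : type.tupleElements)
      count += countElements(element);
    return count;
  }
  return 1;
}

int main() {
  ToyType intLeaf;                          // e.g. Int
  ToyType inner{true, {intLeaf, intLeaf}};  // (Int, Int)
  ToyType outer{true, {inner, intLeaf}};    // ((Int, Int), Int)
  assert(countElements(outer) == 3);
  return 0;
}
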
PMOMemoryObjectInfo::PMOMemoryObjectInfo(AllocationInst *allocation)
    : MemoryInst(allocation) {
  auto &module = MemoryInst->getModule();
@@ -51,90 +38,12 @@ PMOMemoryObjectInfo::PMOMemoryObjectInfo(AllocationInst *allocation)
  } else {
    MemorySILType = cast<AllocStackInst>(MemoryInst)->getElementType();
  }
-
-  // Break down the initializer.
-  NumElements = getElementCountRec(module, MemorySILType);
}

SILInstruction *PMOMemoryObjectInfo::getFunctionEntryPoint() const {
  return &*getFunction().begin()->begin();
}

-/// Given a symbolic element number, return the type of the element.
-static SILType getElementTypeRec(SILModule &Module, SILType T, unsigned EltNo) {
-  // If this is a tuple type, walk into it.
-  if (CanTupleType TT = T.getAs<TupleType>()) {
-    for (unsigned i = 0, e = TT->getNumElements(); i < e; i++) {
-      auto FieldType = T.getTupleElementType(i);
-      unsigned NumFieldElements = getElementCountRec(Module, FieldType);
-      if (EltNo < NumFieldElements)
-        return getElementTypeRec(Module, FieldType, EltNo);
-      EltNo -= NumFieldElements;
-    }
-    // This can only happen if we look at a symbolic element number of an empty
-    // tuple.
-    llvm::report_fatal_error("invalid element number");
-  }
-
-  // Otherwise, it is a leaf element.
-  assert(EltNo == 0);
-  return T;
-}
-
-/// getElementTypeRec - Return the swift type of the specified element.
-SILType PMOMemoryObjectInfo::getElementType(unsigned EltNo) const {
-  auto &Module = MemoryInst->getModule();
-  return getElementTypeRec(Module, MemorySILType, EltNo);
-}
-
-/// Push the symbolic path name to the specified element number onto the
-/// specified std::string.
-static void getPathStringToElementRec(SILModule &Module, SILType T,
-                                      unsigned EltNo, std::string &Result) {
-  if (CanTupleType TT = T.getAs<TupleType>()) {
-    unsigned FieldNo = 0;
-    for (unsigned i = 0, e = TT->getNumElements(); i < e; i++) {
-      auto Field = TT->getElement(i);
-      SILType FieldTy = T.getTupleElementType(i);
-      unsigned NumFieldElements = getElementCountRec(Module, FieldTy);
-
-      if (EltNo < NumFieldElements) {
-        Result += '.';
-        if (Field.hasName())
-          Result += Field.getName().str();
-        else
-          Result += llvm::utostr(FieldNo);
-        return getPathStringToElementRec(Module, FieldTy, EltNo, Result);
-      }
-
-      EltNo -= NumFieldElements;
-
-      ++FieldNo;
-    }
-    llvm_unreachable("Element number is out of range for this type!");
-  }
-
-  // Otherwise, there are no subelements.
-  assert(EltNo == 0 && "Element count problem");
-}
-
-ValueDecl *
-PMOMemoryObjectInfo::getPathStringToElement(unsigned Element,
-                                            std::string &Result) const {
-  auto &Module = MemoryInst->getModule();
-
-  if (auto *VD = dyn_cast_or_null<ValueDecl>(getLoc().getAsASTNode<Decl>()))
-    Result = VD->getBaseName().userFacingName();
-  else
-    Result = "<unknown>";
-
-  // Get the path through a tuple, if relevant.
-  getPathStringToElementRec(Module, MemorySILType, Element, Result);
-
-  // Otherwise, we can't.
-  return nullptr;
-}
-
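
As context for the deletion above: getPathStringToElement turned a flattened element index back into a dotted access path, using tuple labels where present and positions otherwise (so element 1 of a binding `point` of type (x: Int, y: Int) printed as "point.y"). A hedged, standalone sketch of that index-to-path step for a single tuple level, with toy types rather than the compiler's API:

#include <cassert>
#include <string>
#include <vector>

// Toy tuple field: an optional label plus its flattened leaf count.
struct ToyField {
  std::string name;       // empty => unlabeled, printed as its position
  unsigned numLeaves = 1; // result of the flattening count sketched earlier
};

// Walk the fields, subtracting leaf counts until the index lands in one,
// then append either the label or the field position to the path.
static std::string appendPathComponent(const std::vector<ToyField> &fields,
                                       unsigned eltNo, std::string path) {
  unsigned fieldNo = 0;
  for (const ToyField &field : fields) {
    if (eltNo < field.numLeaves) {
      path += '.';
      path += field.name.empty() ? std::to_string(fieldNo) : field.name;
      return path;
    }
    eltNo -= field.numLeaves;
    ++fieldNo;
  }
  return path; // out of range; the real code asserted here
}

int main() {
  std::vector<ToyField> point = {{"x", 1}, {"y", 1}};
  assert(appendPathComponent(point, 1, "point") == "point.y");
  return 0;
}
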
//===----------------------------------------------------------------------===//
// Scalarization Logic
//===----------------------------------------------------------------------===//
@@ -207,14 +116,11 @@ class ElementUseCollector {
  LLVM_NODISCARD bool collectFrom();

private:
-  LLVM_NODISCARD bool collectUses(SILValue Pointer, unsigned BaseEltNo);
+  LLVM_NODISCARD bool collectUses(SILValue Pointer);
  LLVM_NODISCARD bool collectContainerUses(AllocBoxInst *ABI);
-  void addElementUses(unsigned BaseEltNo, SILType UseTy, SILInstruction *User,
-                      PMOUseKind Kind);
-  LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI,
-                                              unsigned BaseEltNo);
-  LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI,
-                                               unsigned BaseEltNo);
+  void addElementUses(SILInstruction *User, PMOUseKind Kind);
+  LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI);
+  LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI);
};
} // end anonymous namespace

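
The signature changes above, together with the Uses.emplace_back(User, Kind) calls later in this diff, imply that a recorded use no longer carries a (FirstElement, NumElements) sub-range of the allocation, only the using instruction and its kind. The real PMOMemoryUse is declared in the pass's header and is not shown in this diff; purely as a hypothetical, self-contained illustration of the simplified shape, it now amounts to something like:

#include <vector>

// Hypothetical stand-ins; the real types are SILInstruction and PMOUseKind.
struct Instruction {};
enum class UseKind { Load, Assign, Initialization, IndirectIn, InOutUse, Escape };

// After this change a use record is just (instruction, kind) -- no
// FirstElement/NumElements sub-range as in the old push_back calls.
struct UseRecord {
  Instruction *user;
  UseKind kind;
  UseRecord(Instruction *user, UseKind kind) : user(user), kind(kind) {}
};

int main() {
  std::vector<UseRecord> uses;
  Instruction someInst;
  uses.emplace_back(&someInst, UseKind::Escape); // mirrors Uses.emplace_back(User, Kind)
  return 0;
}
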
@@ -224,7 +130,7 @@ bool ElementUseCollector::collectFrom() {
  if (auto *ABI = TheMemory.getContainer()) {
    shouldOptimize = collectContainerUses(ABI);
  } else {
-    shouldOptimize = collectUses(TheMemory.getAddress(), 0);
+    shouldOptimize = collectUses(TheMemory.getAddress());
  }

  if (!shouldOptimize)
@@ -247,51 +153,28 @@ bool ElementUseCollector::collectFrom() {
/// acts on all of the aggregate elements in that value. For example, a load
/// of $*(Int,Int) is a use of both Int elements of the tuple. This is a helper
/// to keep the Uses data structure up to date for aggregate uses.
-void ElementUseCollector::addElementUses(unsigned BaseEltNo, SILType UseTy,
-                                         SILInstruction *User,
+void ElementUseCollector::addElementUses(SILInstruction *User,
                                         PMOUseKind Kind) {
-  // If we're in a subelement of a struct or enum, just mark the struct, not
-  // things that come after it in a parent tuple.
-  unsigned NumElements = 1;
-  if (TheMemory.NumElements != 1 && !InStructSubElement)
-    NumElements = getElementCountRec(Module, UseTy);
-
-  Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, NumElements));
+  Uses.emplace_back(User, Kind);
}

/// Given a tuple_element_addr or struct_element_addr, compute the new
/// BaseEltNo implicit in the selected member, and recursively add uses of
/// the instruction.
-bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI,
-                                                  unsigned BaseEltNo) {
-
+bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI) {
  // If we're walking into a tuple within a struct or enum, don't adjust the
  // BaseElt. The uses hanging off the tuple_element_addr are going to be
  // counted as uses of the struct or enum itself.
-  if (InStructSubElement)
-    return collectUses(TEAI, BaseEltNo);
-
-  // tuple_element_addr P, 42 indexes into the current tuple element.
-  // Recursively process its uses with the adjusted element number.
-  unsigned FieldNo = TEAI->getFieldNo();
-  auto T = TEAI->getOperand()->getType();
-  if (T.is<TupleType>()) {
-    for (unsigned i = 0; i != FieldNo; ++i) {
-      SILType EltTy = T.getTupleElementType(i);
-      BaseEltNo += getElementCountRec(Module, EltTy);
-    }
-  }
-
-  return collectUses(TEAI, BaseEltNo);
+  return collectUses(TEAI);
}

-bool ElementUseCollector::collectStructElementUses(StructElementAddrInst *SEAI,
-                                                   unsigned BaseEltNo) {
+bool ElementUseCollector::collectStructElementUses(
+    StructElementAddrInst *SEAI) {
  // Generally, we set the "InStructSubElement" flag and recursively process
  // the uses so that we know that we're looking at something within the
  // current element.
  llvm::SaveAndRestore<bool> X(InStructSubElement, true);
-  return collectUses(SEAI, BaseEltNo);
+  return collectUses(SEAI);
}

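
collectStructElementUses relies on llvm::SaveAndRestore, an RAII helper that sets a flag for the duration of a scope and restores the previous value on exit, so InStructSubElement is reset automatically even on early returns. A minimal self-contained sketch of that pattern (the real utility lives in llvm/Support/SaveAndRestore.h):

#include <cassert>

// Minimal RAII save-and-restore, in the spirit of llvm::SaveAndRestore:
// remember the old value, overwrite it, and put the old value back when
// the object goes out of scope (including on early return).
template <typename T>
class ScopedSaveAndRestore {
  T &slot;
  T savedValue;

public:
  ScopedSaveAndRestore(T &slot, T newValue) : slot(slot), savedValue(slot) {
    slot = newValue;
  }
  ~ScopedSaveAndRestore() { slot = savedValue; }
};

static bool inStructSubElement = false;

static void visitStructElement() {
  ScopedSaveAndRestore<bool> guard(inStructSubElement, true);
  assert(inStructSubElement); // flag is set while visiting the subelement
} // destructor restores the previous value here

int main() {
  visitStructElement();
  assert(!inStructSubElement); // restored after the scope exits
  return 0;
}
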
bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
@@ -307,24 +190,23 @@ bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
      continue;

    if (auto project = dyn_cast<ProjectBoxInst>(User)) {
-      if (!collectUses(project, project->getFieldIndex()))
+      if (!collectUses(project))
        return false;
      continue;
    }

-    // Other uses of the container are considered escapes of the values.
-    for (unsigned field :
-         indices(ABI->getBoxType()->getLayout()->getFields())) {
-      addElementUses(field,
-                     ABI->getBoxType()->getFieldType(ABI->getModule(), field),
-                     User, PMOUseKind::Escape);
-    }
+    // Other uses of the container are considered escapes of the underlying
+    // value.
+    //
+    // This will cause the dataflow to stop propagating any information at the
+    // use block.
+    addElementUses(User, PMOUseKind::Escape);
  }

  return true;
}

-bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
+bool ElementUseCollector::collectUses(SILValue Pointer) {
  assert(Pointer->getType().isAddress() &&
         "Walked through the pointer to the value?");
  SILType PointeeType = Pointer->getType().getObjectType();
@@ -340,21 +222,21 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {

    // struct_element_addr P, #field indexes into the current element.
    if (auto *SEAI = dyn_cast<StructElementAddrInst>(User)) {
-      if (!collectStructElementUses(SEAI, BaseEltNo))
+      if (!collectStructElementUses(SEAI))
        return false;
      continue;
    }

    // Instructions that compute a subelement are handled by a helper.
    if (auto *TEAI = dyn_cast<TupleElementAddrInst>(User)) {
-      if (!collectTupleElementUses(TEAI, BaseEltNo))
+      if (!collectTupleElementUses(TEAI))
        return false;
      continue;
    }

    // Look through begin_access.
    if (auto I = dyn_cast<BeginAccessInst>(User)) {
-      if (!collectUses(I, BaseEltNo))
+      if (!collectUses(I))
        return false;
      continue;
    }
@@ -369,15 +251,15 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      if (PointeeType.is<TupleType>())
        UsesToScalarize.push_back(User);
      else
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Load);
+        addElementUses(User, PMOUseKind::Load);
      continue;
    }

-#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
-    if (isa<Load##Name##Inst>(User)) { \
-      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1)); \
-      continue; \
-    }
+#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
+    if (isa<Load##Name##Inst>(User)) { \
+      Uses.emplace_back(User, PMOUseKind::Load); \
+      continue; \
+    }
#include "swift/AST/ReferenceStorage.def"
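
This is the usual X-macro pattern: the including code defines NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE and then #includes swift/AST/ReferenceStorage.def, which invokes the macro once per reference-storage kind, stamping out one isa<Load##Name##Inst> check per kind. A self-contained sketch of the same expansion technique with made-up names (the real macro and kind list live in the .def file, not shown here):

#include <cstdio>
#include <cstring>

// Stand-in for a .def file: one entry per kind. In the compiler this list
// lives in swift/AST/ReferenceStorage.def and is pulled in via #include.
#define TOY_REF_STORAGE_KINDS(X) \
  X(Weak)                        \
  X(Unowned)                     \
  X(Unmanaged)

// The consumer defines what each entry expands to, in the same spirit as the
// NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE block above.
bool classify(const char *instName) {
  // Expands to one strcmp check per kind, e.g. "LoadWeakInst".
#define TOY_KIND(Name)                                   \
  if (std::strcmp(instName, "Load" #Name "Inst") == 0) { \
    std::printf("%s is a load\n", instName);             \
    return true;                                         \
  }
  TOY_REF_STORAGE_KINDS(TOY_KIND)
#undef TOY_KIND
  return false;
}

int main() {
  classify("LoadWeakInst");  // matches the Weak entry
  classify("StoreWeakInst"); // no match
  return 0;
}
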
    // Stores *to* the allocation are writes.
@@ -397,24 +279,24 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      else
        Kind = PMOUseKind::Initialization;

-      addElementUses(BaseEltNo, PointeeType, User, Kind);
+      addElementUses(User, Kind);
      continue;
    }

-#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
-    if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
-      if (UI->getOperandNumber() == 1) { \
-        PMOUseKind Kind; \
-        if (InStructSubElement) \
-          Kind = PMOUseKind::PartialStore; \
-        else if (SI->isInitializationOfDest()) \
-          Kind = PMOUseKind::Initialization; \
-        else \
-          Kind = PMOUseKind::Assign; \
-        Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, 1)); \
-        continue; \
-      } \
-    }
+#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
+    if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
+      if (UI->getOperandNumber() == 1) { \
+        PMOUseKind Kind; \
+        if (InStructSubElement) \
+          Kind = PMOUseKind::PartialStore; \
+        else if (SI->isInitializationOfDest()) \
+          Kind = PMOUseKind::Initialization; \
+        else \
+          Kind = PMOUseKind::Assign; \
+        Uses.emplace_back(User, Kind); \
+        continue; \
+      } \
+    }
#include "swift/AST/ReferenceStorage.def"

    if (auto *CAI = dyn_cast<CopyAddrInst>(User)) {
@@ -439,7 +321,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      else
        Kind = PMOUseKind::Assign;

-      addElementUses(BaseEltNo, PointeeType, User, Kind);
+      addElementUses(User, Kind);
      continue;
    }

@@ -464,8 +346,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
        if (InStructSubElement) {
          return false;
        }
-        addElementUses(BaseEltNo, PointeeType, User,
-                       PMOUseKind::Initialization);
+        addElementUses(User, PMOUseKind::Initialization);
        continue;

        // Otherwise, adjust the argument index.
@@ -486,7 +367,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      case ParameterConvention::Indirect_In:
      case ParameterConvention::Indirect_In_Constant:
      case ParameterConvention::Indirect_In_Guaranteed:
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::IndirectIn);
+        addElementUses(User, PMOUseKind::IndirectIn);
        continue;

      // If this is an @inout parameter, it is like both a load and store.
@@ -496,7 +377,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
        // mutating method, we model that as an escape of self. If an
        // individual sub-member is passed as inout, then we model that as an
        // inout use.
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::InOutUse);
+        addElementUses(User, PMOUseKind::InOutUse);
        continue;
      }
      }
@@ -509,15 +390,14 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      if (InStructSubElement) {
        return false;
      }
-      Uses.push_back(
-          PMOMemoryUse(User, PMOUseKind::Initialization, BaseEltNo, 1));
+      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Initialization));
      continue;
    }

    // open_existential_addr is a use of the protocol value,
    // so it is modeled as a load.
    if (isa<OpenExistentialAddrInst>(User)) {
-      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1));
+      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load));
      // TODO: Is it safe to ignore all uses of the open_existential_addr?
      continue;
    }
@@ -538,7 +418,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
      continue;

    // Otherwise, the use is something complicated, it escapes.
-    addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Escape);
+    addElementUses(User, PMOUseKind::Escape);
  }

  // Now that we've walked all of the immediate uses, scalarize any operations
@@ -604,8 +484,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
  // element address computations to recursively process it. This can cause
  // further scalarization.
  if (llvm::any_of(ElementAddrs, [&](SILValue V) {
-        return !collectTupleElementUses(cast<TupleElementAddrInst>(V),
-                                        BaseEltNo);
+        return !collectTupleElementUses(cast<TupleElementAddrInst>(V));
      })) {
    return false;
  }