@@ -51,9 +51,6 @@ PMOMemoryObjectInfo::PMOMemoryObjectInfo(AllocationInst *allocation)
   } else {
     MemorySILType = cast<AllocStackInst>(MemoryInst)->getElementType();
   }
-
-  // Break down the initializer.
-  NumElements = getElementCountRec(module, MemorySILType);
 }
 
 SILInstruction *PMOMemoryObjectInfo::getFunctionEntryPoint() const {
@@ -207,14 +204,11 @@ class ElementUseCollector {
   LLVM_NODISCARD bool collectFrom();
 
 private:
-  LLVM_NODISCARD bool collectUses(SILValue Pointer, unsigned BaseEltNo);
+  LLVM_NODISCARD bool collectUses(SILValue Pointer);
   LLVM_NODISCARD bool collectContainerUses(AllocBoxInst *ABI);
-  void addElementUses(unsigned BaseEltNo, SILType UseTy, SILInstruction *User,
-                      PMOUseKind Kind);
-  LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI,
-                                              unsigned BaseEltNo);
-  LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI,
-                                               unsigned BaseEltNo);
+  void addElementUses(SILInstruction *User, PMOUseKind Kind);
+  LLVM_NODISCARD bool collectTupleElementUses(TupleElementAddrInst *TEAI);
+  LLVM_NODISCARD bool collectStructElementUses(StructElementAddrInst *SEAI);
 };
 
 } // end anonymous namespace
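
The two-argument Uses.emplace_back(User, Kind) and PMOMemoryUse(User, Kind) calls in the hunks below imply that the use record itself is being trimmed. Its definition lives in the header and is not shown in this diff, so the following is only a sketch of the shape it presumably takes after the change; the member names are assumptions, not quoted from the patch:

    // Sketch only, not taken from this patch. Assumes PMOMemoryUse is reduced
    // to the using instruction plus the kind of use, with the old
    // FirstElement/NumElements range dropped.
    struct PMOMemoryUse {
      /// The instruction that uses the memory.
      SILInstruction *Inst;

      /// The kind of use (Load, Initialization, Assign, Escape, ...).
      PMOUseKind Kind;

      PMOMemoryUse(SILInstruction *Inst, PMOUseKind Kind)
          : Inst(Inst), Kind(Kind) {}
    };
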
@@ -224,7 +218,7 @@ bool ElementUseCollector::collectFrom() {
   if (auto *ABI = TheMemory.getContainer()) {
     shouldOptimize = collectContainerUses(ABI);
   } else {
-    shouldOptimize = collectUses(TheMemory.getAddress(), 0);
+    shouldOptimize = collectUses(TheMemory.getAddress());
   }
 
   if (!shouldOptimize)
@@ -247,51 +241,28 @@ bool ElementUseCollector::collectFrom() {
 /// acts on all of the aggregate elements in that value. For example, a load
 /// of $*(Int,Int) is a use of both Int elements of the tuple. This is a helper
 /// to keep the Uses data structure up to date for aggregate uses.
-void ElementUseCollector::addElementUses(unsigned BaseEltNo, SILType UseTy,
-                                         SILInstruction *User,
+void ElementUseCollector::addElementUses(SILInstruction *User,
                                          PMOUseKind Kind) {
-  // If we're in a subelement of a struct or enum, just mark the struct, not
-  // things that come after it in a parent tuple.
-  unsigned NumElements = 1;
-  if (TheMemory.NumElements != 1 && !InStructSubElement)
-    NumElements = getElementCountRec(Module, UseTy);
-
-  Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, NumElements));
+  Uses.emplace_back(User, Kind);
 }
 
 /// Given a tuple_element_addr or struct_element_addr, compute the new
 /// BaseEltNo implicit in the selected member, and recursively add uses of
 /// the instruction.
-bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI,
-                                                  unsigned BaseEltNo) {
-
+bool ElementUseCollector::collectTupleElementUses(TupleElementAddrInst *TEAI) {
   // If we're walking into a tuple within a struct or enum, don't adjust the
   // BaseElt. The uses hanging off the tuple_element_addr are going to be
   // counted as uses of the struct or enum itself.
-  if (InStructSubElement)
-    return collectUses(TEAI, BaseEltNo);
-
-  // tuple_element_addr P, 42 indexes into the current tuple element.
-  // Recursively process its uses with the adjusted element number.
-  unsigned FieldNo = TEAI->getFieldNo();
-  auto T = TEAI->getOperand()->getType();
-  if (T.is<TupleType>()) {
-    for (unsigned i = 0; i != FieldNo; ++i) {
-      SILType EltTy = T.getTupleElementType(i);
-      BaseEltNo += getElementCountRec(Module, EltTy);
-    }
-  }
-
-  return collectUses(TEAI, BaseEltNo);
+  return collectUses(TEAI);
 }
 
-bool ElementUseCollector::collectStructElementUses(StructElementAddrInst *SEAI,
-                                                   unsigned BaseEltNo) {
+bool ElementUseCollector::collectStructElementUses(
+    StructElementAddrInst *SEAI) {
   // Generally, we set the "InStructSubElement" flag and recursively process
   // the uses so that we know that we're looking at something within the
   // current element.
   llvm::SaveAndRestore<bool> X(InStructSubElement, true);
-  return collectUses(SEAI, BaseEltNo);
+  return collectUses(SEAI);
 }
 
 bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
@@ -307,24 +278,23 @@ bool ElementUseCollector::collectContainerUses(AllocBoxInst *ABI) {
       continue;
 
     if (auto project = dyn_cast<ProjectBoxInst>(User)) {
-      if (!collectUses(project, project->getFieldIndex()))
+      if (!collectUses(project))
         return false;
       continue;
     }
 
-    // Other uses of the container are considered escapes of the values.
-    for (unsigned field :
-         indices(ABI->getBoxType()->getLayout()->getFields())) {
-      addElementUses(field,
-                     ABI->getBoxType()->getFieldType(ABI->getModule(), field),
-                     User, PMOUseKind::Escape);
-    }
+    // Other uses of the container are considered escapes of the underlying
+    // value.
+    //
+    // This will cause the dataflow to stop propagating any information at the
+    // use block.
+    addElementUses(User, PMOUseKind::Escape);
   }
 
   return true;
 }
 
-bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
+bool ElementUseCollector::collectUses(SILValue Pointer) {
   assert(Pointer->getType().isAddress() &&
          "Walked through the pointer to the value?");
   SILType PointeeType = Pointer->getType().getObjectType();
@@ -340,21 +310,21 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
 
     // struct_element_addr P, #field indexes into the current element.
     if (auto *SEAI = dyn_cast<StructElementAddrInst>(User)) {
-      if (!collectStructElementUses(SEAI, BaseEltNo))
+      if (!collectStructElementUses(SEAI))
         return false;
       continue;
     }
 
     // Instructions that compute a subelement are handled by a helper.
    if (auto *TEAI = dyn_cast<TupleElementAddrInst>(User)) {
-      if (!collectTupleElementUses(TEAI, BaseEltNo))
+      if (!collectTupleElementUses(TEAI))
        return false;
       continue;
     }
 
     // Look through begin_access.
     if (auto I = dyn_cast<BeginAccessInst>(User)) {
-      if (!collectUses(I, BaseEltNo))
+      if (!collectUses(I))
         return false;
       continue;
     }
@@ -369,15 +339,15 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       if (PointeeType.is<TupleType>())
         UsesToScalarize.push_back(User);
       else
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Load);
+        addElementUses(User, PMOUseKind::Load);
       continue;
     }
 
-#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
-  if (isa<Load##Name##Inst>(User)) { \
-    Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1)); \
-    continue; \
-  }
+#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
+  if (isa<Load##Name##Inst>(User)) { \
+    Uses.emplace_back(User, PMOUseKind::Load); \
+    continue; \
+  }
 #include "swift/AST/ReferenceStorage.def"
 
     // Stores *to* the allocation are writes.
@@ -397,24 +367,24 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       else
         Kind = PMOUseKind::Initialization;
 
-      addElementUses(BaseEltNo, PointeeType, User, Kind);
+      addElementUses(User, Kind);
       continue;
     }
 
-#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
-  if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
-    if (UI->getOperandNumber() == 1) { \
-      PMOUseKind Kind; \
-      if (InStructSubElement) \
-        Kind = PMOUseKind::PartialStore; \
-      else if (SI->isInitializationOfDest()) \
-        Kind = PMOUseKind::Initialization; \
-      else \
-        Kind = PMOUseKind::Assign; \
-      Uses.push_back(PMOMemoryUse(User, Kind, BaseEltNo, 1)); \
-      continue; \
-    } \
-  }
+#define NEVER_OR_SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \
+  if (auto *SI = dyn_cast<Store##Name##Inst>(User)) { \
+    if (UI->getOperandNumber() == 1) { \
+      PMOUseKind Kind; \
+      if (InStructSubElement) \
+        Kind = PMOUseKind::PartialStore; \
+      else if (SI->isInitializationOfDest()) \
+        Kind = PMOUseKind::Initialization; \
+      else \
+        Kind = PMOUseKind::Assign; \
+      Uses.emplace_back(User, Kind); \
+      continue; \
+    } \
+  }
 #include "swift/AST/ReferenceStorage.def"
 
     if (auto *CAI = dyn_cast<CopyAddrInst>(User)) {
@@ -439,7 +409,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       else
         Kind = PMOUseKind::Assign;
 
-      addElementUses(BaseEltNo, PointeeType, User, Kind);
+      addElementUses(User, Kind);
       continue;
     }
 
@@ -464,8 +434,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
         if (InStructSubElement) {
           return false;
         }
-        addElementUses(BaseEltNo, PointeeType, User,
-                       PMOUseKind::Initialization);
+        addElementUses(User, PMOUseKind::Initialization);
         continue;
 
         // Otherwise, adjust the argument index.
@@ -486,7 +455,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       case ParameterConvention::Indirect_In:
       case ParameterConvention::Indirect_In_Constant:
       case ParameterConvention::Indirect_In_Guaranteed:
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::IndirectIn);
+        addElementUses(User, PMOUseKind::IndirectIn);
         continue;
 
       // If this is an @inout parameter, it is like both a load and store.
@@ -496,7 +465,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       // mutating method, we model that as an escape of self. If an
       // individual sub-member is passed as inout, then we model that as an
       // inout use.
-        addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::InOutUse);
+        addElementUses(User, PMOUseKind::InOutUse);
         continue;
       }
     }
@@ -509,15 +478,14 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       if (InStructSubElement) {
         return false;
       }
-      Uses.push_back(
-          PMOMemoryUse(User, PMOUseKind::Initialization, BaseEltNo, 1));
+      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Initialization));
       continue;
     }
 
     // open_existential_addr is a use of the protocol value,
     // so it is modeled as a load.
     if (isa<OpenExistentialAddrInst>(User)) {
-      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load, BaseEltNo, 1));
+      Uses.push_back(PMOMemoryUse(User, PMOUseKind::Load));
       // TODO: Is it safe to ignore all uses of the open_existential_addr?
       continue;
     }
@@ -538,7 +506,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
       continue;
 
     // Otherwise, the use is something complicated, it escapes.
-    addElementUses(BaseEltNo, PointeeType, User, PMOUseKind::Escape);
+    addElementUses(User, PMOUseKind::Escape);
   }
 
   // Now that we've walked all of the immediate uses, scalarize any operations
@@ -604,8 +572,7 @@ bool ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) {
   // element address computations to recursively process it. This can cause
   // further scalarization.
   if (llvm::any_of(ElementAddrs, [&](SILValue V) {
-        return !collectTupleElementUses(cast<TupleElementAddrInst>(V),
-                                        BaseEltNo);
+        return !collectTupleElementUses(cast<TupleElementAddrInst>(V));
       })) {
     return false;
   }
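
For context, a minimal usage sketch of how a pass might consume the flattened use list once collection succeeds. It assumes the Inst/Kind members sketched earlier, and isOptimizableAllocation is a hypothetical helper name used purely for illustration, not code from this patch:

    // Illustration only: with per-element ranges gone, a consumer just
    // dispatches on the kind recorded for each use.
    static bool isOptimizableAllocation(llvm::ArrayRef<PMOMemoryUse> Uses) {
      for (const PMOMemoryUse &Use : Uses) {
        // Any escape of the address defeats the optimization; loads, stores,
        // and indirect argument uses remain analyzable per instruction.
        if (Use.Kind == PMOUseKind::Escape)
          return false;
      }
      return true;
    }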