Skip to content

Commit 53b14a2

Browse files
authored
Merge pull request #28653 from gottesmm/pr-48051636c430a08d11def5e474d526a5e8e1eed1
[semantic-arc-opts] Convert load [copy] -> load_borrow given single init alloc_stack.
2 parents c21b335 + 61e5653 commit 53b14a2

File tree

5 files changed

+218
-10
lines changed

5 files changed

+218
-10
lines changed

include/swift/SIL/MemAccessUtils.h

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -488,6 +488,17 @@ AccessedStorage findAccessedStorageNonNested(SILValue sourceAddr);
488488
/// uninitialized.
489489
bool memInstMustInitialize(Operand *memOper);
490490

491+
/// Is this an alloc_stack instruction that is:
///
/// 1. Only initialized once in its own def block.
/// 2. Never written to again except by destroy_addr.
///
/// On return, destroyingUsers contains the list of users that destroy the
/// alloc_stack. If the alloc_stack is destroyed in pieces, we do not guarantee
/// that the list of destroying users is a minimal jointly post-dominating set.
///
/// NOTE: if false is returned, the contents of \p destroyingUsers are
/// unspecified: destroys seen before the disqualifying use may already have
/// been appended.
bool isSingleInitAllocStack(AllocStackInst *asi,
                            SmallVectorImpl<SILInstruction *> &destroyingUsers);
501+
491502
/// Return true if the given address producer may be the source of a formal
492503
/// access (a read or write of a potentially aliased, user visible variable).
493504
///

include/swift/SIL/OwnershipUtils.h

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -183,6 +183,21 @@ class LinearLifetimeChecker {
183183
nullptr /*leakingBlocks*/)
184184
.getFoundError();
185185
}
186+
187+
  /// Convenience overload of validateLifetime that accepts plain instruction
  /// pointers and adapts them to BranchPropagatedUser arrays before
  /// delegating to the primary overload.
  ///
  /// NOTE(review): the reinterpret_casts assume BranchPropagatedUser is
  /// layout-compatible with a single SILInstruction * (i.e. it wraps exactly
  /// one pointer) -- confirm this is static_asserted where
  /// BranchPropagatedUser is defined.
  bool validateLifetime(SILValue value,
                        ArrayRef<SILInstruction *> consumingUses,
                        ArrayRef<SILInstruction *> nonConsumingUses) {
    auto *consumingUsesCast =
        reinterpret_cast<const BranchPropagatedUser *>(consumingUses.data());
    auto *nonConsumingUsesCast =
        reinterpret_cast<const BranchPropagatedUser *>(nonConsumingUses.data());
    ArrayRef<BranchPropagatedUser> consumingUsesCastArray(consumingUsesCast,
                                                          consumingUses.size());
    ArrayRef<BranchPropagatedUser> nonConsumingUsesCastArray(
        nonConsumingUsesCast, nonConsumingUses.size());
    // Delegate to the BranchPropagatedUser-based overload declared above.
    return validateLifetime(value, consumingUsesCastArray,
                            nonConsumingUsesCastArray);
  }
186201
};
187202

188203
/// Returns true if v is an address or trivial.

lib/SIL/MemAccessUtils.cpp

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
#include "swift/SIL/MemAccessUtils.h"
1616
#include "swift/SIL/ApplySite.h"
17+
#include "swift/SIL/Projection.h"
1718
#include "swift/SIL/SILGlobalVariable.h"
1819
#include "swift/SIL/SILModule.h"
1920
#include "swift/SIL/SILUndef.h"
@@ -660,3 +661,64 @@ SILBasicBlock::iterator swift::removeBeginAccess(BeginAccessInst *beginAccess) {
660661
}
661662
return beginAccess->getParent()->erase(beginAccess);
662663
}
664+
665+
/// Walk all (transitively address-projected) uses of \p asi and return true
/// only if every use falls into a known-safe category: exactly one
/// initializing write located in asi's own def block, plain reads, destroys
/// (collected into \p destroyingUsers), or dealloc_stack.
bool swift::isSingleInitAllocStack(
    AllocStackInst *asi, SmallVectorImpl<SILInstruction *> &destroyingUsers) {
  // For now, we just look through projections and rely on memInstMustInitialize
  // to classify all other uses as init or not.
  SmallVector<Operand *, 32> worklist(asi->getUses());
  bool foundInit = false;

  while (!worklist.empty()) {
    auto *use = worklist.pop_back_val();
    auto *user = use->getUser();

    if (Projection::isAddressProjection(user) ||
        isa<OpenExistentialAddrInst>(user)) {
      // Look through address projections: uses of the projected address are
      // treated as uses of the alloc_stack itself.
      for (SILValue r : user->getResults()) {
        llvm::copy(r->getUses(), std::back_inserter(worklist));
      }
      continue;
    }

    if (auto *li = dyn_cast<LoadInst>(user)) {
      // A non-take load only reads the memory, so it is always safe.
      if (li->getOwnershipQualifier() != LoadOwnershipQualifier::Take) {
        continue;
      }
      // Treat load [take] as a write: it deinitializes the memory.
      return false;
    }

    switch (user->getKind()) {
    default:
      break;
    case SILInstructionKind::DestroyAddrInst:
      // Record destroys for the caller; they bound the initialized lifetime
      // of the stack slot.
      destroyingUsers.push_back(user);
      continue;
    case SILInstructionKind::DeallocStackInst:
    case SILInstructionKind::LoadBorrowInst:
    case SILInstructionKind::DebugValueAddrInst:
      // Known-harmless uses that never write to the memory.
      continue;
    }

    // See if we have an initializer and that such initializer is in the same
    // block.
    if (memInstMustInitialize(use)) {
      // Reject a second init, or an init outside asi's def block.
      if (user->getParent() != asi->getParent() || foundInit) {
        return false;
      }

      foundInit = true;
      continue;
    }

    // Otherwise, if we have found something not in our whitelist, return false.
    return false;
  }

  // We did not find any users that we did not understand. So we can
  // conservatively return true here.
  return true;
}

lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp

Lines changed: 34 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -280,7 +280,7 @@ struct SemanticARCOptVisitor
280280
FORWARDING_TERM(CondBranch)
281281
#undef FORWARDING_TERM
282282

283-
bool isWrittenTo(LoadInst *li);
283+
bool isWrittenTo(LoadInst *li, ArrayRef<SILInstruction *> destroys);
284284

285285
bool processWorklist();
286286

@@ -664,6 +664,7 @@ bool mayFunctionMutateArgument(const AccessedStorage &storage, SILFunction &f) {
664664
// Then find our accessed storage to determine whether it provides a guarantee
665665
// for the loaded value.
666666
namespace {
667+
667668
class StorageGuaranteesLoadVisitor
668669
: public AccessUseDefChainVisitor<StorageGuaranteesLoadVisitor>
669670
{
@@ -677,12 +678,15 @@ class StorageGuaranteesLoadVisitor
677678
SILValue currentAddress;
678679

679680
Optional<bool> isWritten;
680-
681+
682+
ArrayRef<SILInstruction *> destroyValues;
683+
681684
public:
682-
StorageGuaranteesLoadVisitor(SemanticARCOptVisitor &arcOpt, LoadInst *load)
683-
: ARCOpt(arcOpt), Load(load), currentAddress(load->getOperand())
684-
{}
685-
685+
  /// \p destroyValues are the instructions that destroy the load [copy]'d
  /// value; they are later passed to the lifetime checker as non-consuming
  /// users (see visitStackAccess).
  StorageGuaranteesLoadVisitor(SemanticARCOptVisitor &arcOpt, LoadInst *load,
                               ArrayRef<SILInstruction *> destroyValues)
      : ARCOpt(arcOpt), Load(load), currentAddress(load->getOperand()),
        destroyValues(destroyValues) {}
689+
686690
void answer(bool written) {
687691
currentAddress = nullptr;
688692
isWritten = written;
@@ -790,17 +794,38 @@ class StorageGuaranteesLoadVisitor
790794
return answer(true);
791795
}
792796

797+
  /// See if we have an alloc_stack that is only written to once by an
  /// initializing instruction. If so, the stored value is live until the
  /// slot's destroy_addrs, and the load [copy] can potentially be guaranteed.
  void visitStackAccess(AllocStackInst *stack) {
    SmallVector<SILInstruction *, 8> destroyAddrs;
    bool initialAnswer = isSingleInitAllocStack(stack, destroyAddrs);
    if (!initialAnswer)
      // Not single-init: conservatively report the address as written to.
      return answer(true);

    // Then make sure that all destroys of the loaded value (our non-consuming
    // users) occur within the region bounded by the destroy_addrs of the
    // stack slot (the consuming users), i.e. the copied value is only used
    // while the slot is still initialized.
    SmallPtrSet<SILBasicBlock *, 4> visitedBlocks;
    LinearLifetimeChecker checker(visitedBlocks, ARCOpt.getDeadEndBlocks());
    // validateLifetime returns true on success. So we invert: a failed check
    // means we must treat the address as written to.
    bool foundError =
        !checker.validateLifetime(stack, destroyAddrs /*consuming users*/,
                                  destroyValues /*non consuming users*/);
    return answer(foundError);
  }
815+
793816
bool doIt() {
794817
while (currentAddress) {
795818
visit(currentAddress);
796819
}
797820
return *isWritten;
798821
}
799822
};
823+
800824
} // namespace
801825

802-
bool SemanticARCOptVisitor::isWrittenTo(LoadInst *load) {
803-
StorageGuaranteesLoadVisitor visitor(*this, load);
826+
/// Returns true if the memory \p load loads from may be written to during the
/// lifetime of the loaded value, where \p destroys are the instructions that
/// end that value's lifetime.
bool SemanticARCOptVisitor::isWrittenTo(LoadInst *load,
                                        ArrayRef<SILInstruction *> destroys) {
  // Delegate the use-def walk to the visitor; it resolves to a single bool.
  return StorageGuaranteesLoadVisitor(*this, load, destroys).doIt();
}
806831

@@ -825,7 +850,7 @@ bool SemanticARCOptVisitor::visitLoadInst(LoadInst *li) {
825850
// Then check if our address is ever written to. If it is, then we cannot use
826851
// the load_borrow because the stored value may be released during the loaded
827852
// value's live range.
828-
if (isWrittenTo(li))
853+
if (isWrittenTo(li, destroyValues))
829854
return false;
830855

831856
// Ok, we can perform our optimization. Convert the load [copy] into a

test/SILOptimizer/semantic-arc-opts.sil

Lines changed: 96 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -947,4 +947,99 @@ bb0(%0 : @guaranteed $ClassLet):
947947
destroy_value %4 : $Klass
948948
%9999 = tuple()
949949
return %9999 : $()
950-
}
950+
}
951+
952+
// CHECK-LABEL: sil [ossa] @single_init_allocstack : $@convention(thin) (@owned Klass) -> () {
// CHECK-NOT: load [copy]
// CHECK: } // end sil function 'single_init_allocstack'
// Positive test: the single store [init] is in the alloc_stack's own block
// and the copied value is destroyed before the destroy_addr, so the pass can
// turn the copying load into a borrowing one.
sil [ossa] @single_init_allocstack : $@convention(thin) (@owned Klass) -> () {
bb0(%0 : @owned $Klass):
  %1 = alloc_stack $Klass
  store %0 to [init] %1 : $*Klass
  %2 = load [copy] %1 : $*Klass

  %3 = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
  apply %3(%2) : $@convention(thin) (@guaranteed Klass) -> ()

  destroy_value %2 : $Klass
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass

  %9999 = tuple()
  return %9999 : $()
}
971+
972+
// CHECK-LABEL: sil [ossa] @multiple_init_allocstack : $@convention(thin) (@owned Klass) -> () {
// CHECK: load [copy]
// CHECK: } // end sil function 'multiple_init_allocstack'
// Negative test: the slot is re-initialized by a second store [init], so
// isSingleInitAllocStack must reject it and the copying load is kept.
sil [ossa] @multiple_init_allocstack : $@convention(thin) (@owned Klass) -> () {
bb0(%0 : @owned $Klass):
  %0a = copy_value %0 : $Klass
  %1 = alloc_stack $Klass
  store %0 to [init] %1 : $*Klass
  %2 = load [copy] %1 : $*Klass

  %3 = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
  apply %3(%2) : $@convention(thin) (@guaranteed Klass) -> ()

  destroy_value %2 : $Klass
  destroy_addr %1 : $*Klass

  store %0a to [init] %1 : $*Klass
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass

  %9999 = tuple()
  return %9999 : $()
}
995+
996+
// We could support this, but for now we are keeping things simple. If we do add
// support, this test will need to be updated.
//
// Negative test: the single store [init] is in bb1, not in the alloc_stack's
// own def block (bb0), which the analysis currently requires.
//
// CHECK-LABEL: sil [ossa] @single_init_wrongblock : $@convention(thin) (@owned Klass) -> () {
// CHECK: load [copy]
// CHECK: } // end sil function 'single_init_wrongblock'
sil [ossa] @single_init_wrongblock : $@convention(thin) (@owned Klass) -> () {
bb0(%0 : @owned $Klass):
  %1 = alloc_stack $Klass
  br bb1

bb1:
  store %0 to [init] %1 : $*Klass
  %2 = load [copy] %1 : $*Klass

  %3 = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
  apply %3(%2) : $@convention(thin) (@guaranteed Klass) -> ()

  destroy_value %2 : $Klass
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass

  %9999 = tuple()
  return %9999 : $()
}
1021+
1022+
// We could support this, but for now we are keeping things simple. If we do add
// support, this test will need to be updated.
//
// Negative test: the load [take] deinitializes the slot and is therefore
// classified as a write, so the copying load is kept.
//
// CHECK-LABEL: sil [ossa] @single_init_loadtake : $@convention(thin) (@owned Klass) -> () {
// CHECK: load [copy]
// CHECK: } // end sil function 'single_init_loadtake'
sil [ossa] @single_init_loadtake : $@convention(thin) (@owned Klass) -> () {
bb0(%0 : @owned $Klass):
  %1 = alloc_stack $Klass
  store %0 to [init] %1 : $*Klass
  %2 = load [copy] %1 : $*Klass

  %3 = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
  apply %3(%2) : $@convention(thin) (@guaranteed Klass) -> ()

  destroy_value %2 : $Klass

  %4 = load [take] %1 : $*Klass
  destroy_value %4 : $Klass
  dealloc_stack %1 : $*Klass

  %9999 = tuple()
  return %9999 : $()
}

0 commit comments

Comments
 (0)