[Mem2Reg] Skip load [take] of cast projections. #41751

Merged

56 changes: 36 additions & 20 deletions lib/SILOptimizer/Transforms/SILMem2Reg.cpp
@@ -1410,19 +1410,12 @@ class MemoryToRegisters {
 /// 1. (load %ASI)
 /// 2. (load (struct_element_addr/tuple_element_addr/unchecked_addr_cast %ASI))
 static bool isAddressForLoad(SILInstruction *load, SILBasicBlock *&singleBlock,
-                             bool &hasGuaranteedOwnership) {
-
-  if (isa<LoadInst>(load)) {
-    // SILMem2Reg is disabled when we find:
-    // (load [take] (struct_element_addr/tuple_element_addr %ASI))
-    // struct_element_addr and tuple_element_addr are lowered into
-    // struct_extract and tuple_extract and these SIL instructions have a
-    // guaranteed ownership. For replacing load's users, we need an owned value.
-    // We will need a new copy and destroy of the running val placed after the
-    // last use. This is not implemented currently.
-    if (hasGuaranteedOwnership &&
-        cast<LoadInst>(load)->getOwnershipQualifier() ==
-            LoadOwnershipQualifier::Take) {
+                             bool &involvesUntakableProjection) {
+  if (auto *li = dyn_cast<LoadInst>(load)) {
+    // SILMem2Reg is disabled when we find a load [take] of an untakable
+    // projection. See below for further discussion.
+    if (involvesUntakableProjection &&
+        li->getOwnershipQualifier() == LoadOwnershipQualifier::Take) {
       return false;
     }
     return true;
@@ -1432,17 +1425,40 @@ static bool isAddressForLoad(SILInstruction *load, SILBasicBlock *&singleBlock,
       !isa<TupleElementAddrInst>(load))
     return false;
 
-  if (isa<StructElementAddrInst>(load) || isa<TupleElementAddrInst>(load)) {
-    hasGuaranteedOwnership = true;
-  }
+  // None of the projections are lowered to owned values:
+  //
+  // struct_element_addr and tuple_element_addr instructions are lowered to
+  // struct_extract and tuple_extract instructions respectively. These both
+  // have guaranteed ownership (since they forward ownership and can only be
+  // used on a guaranteed value).
+  //
+  // unchecked_addr_cast instructions are lowered to unchecked_bitwise_cast
+  // instructions. These have unowned ownership.
+  //
+  // So in no case can a load [take] be lowered into the new projected value
+  // (some sequence of struct_extract, tuple_extract, and
+  // unchecked_bitwise_cast instructions) taking over ownership of the
+  // original value, at least not without additional changes.
+  //
+  // For example, a sequence of element_addr projections could be
+  // transformed into a sequence of destructure instructions, followed by a
+  // sequence of struct/tuple instructions where all the original values are
+  // kept in place but the taken value is "knocked out" and replaced with
+  // undef. The running value would then be set to the newly constructed
+  // "knockout" value.
+  //
+  // Alternatively, a new copy of the running value could be created and a
+  // new set of destroys placed after its last uses.
+  involvesUntakableProjection = true;
 
   // Recursively search for other (non-)loads in the instruction's uses.
-  for (auto *use : cast<SingleValueInstruction>(load)->getUses()) {
+  auto *svi = cast<SingleValueInstruction>(load);
+  for (auto *use : svi->getUses()) {
     SILInstruction *user = use->getUser();
     if (user->getParent() != singleBlock)
       singleBlock = nullptr;
 
-    if (!isAddressForLoad(user, singleBlock, hasGuaranteedOwnership))
+    if (!isAddressForLoad(user, singleBlock, involvesUntakableProjection))
       return false;
   }
   return true;
@@ -1476,8 +1492,8 @@ static bool isCaptured(AllocStackInst *asi, bool &inSingleBlock) {
       singleBlock = nullptr;
 
     // Loads are okay.
-    bool hasGuaranteedOwnership = false;
-    if (isAddressForLoad(user, singleBlock, hasGuaranteedOwnership))
+    bool involvesUntakableProjection = false;
+    if (isAddressForLoad(user, singleBlock, involvesUntakableProjection))
       continue;
 
     // We can store into an AllocStack (but not the pointer).
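
Note: the "knockout" rewrite described in the comment above is not implemented by this patch. As a rough illustration only, the SIL below sketches the idea for a two-field struct S; the names %runningVal, %taken, and %rest are hypothetical, and the exact sequence an implementation would emit may differ.

  // Hypothetical input: a load [take] of a single field of the alloc_stack.
  %v1_addr = struct_element_addr %storage : $*S, #S.v1
  %v1 = load [take] %v1_addr : $*AnyObject

  // Sketch of the rewrite: destructure the running value, forward the taken
  // field to the load's users, and rebuild the struct with undef standing in
  // for the taken field. The rebuilt value becomes the new running value.
  (%taken, %rest) = destructure_struct %runningVal : $S
  %newRunningVal = struct $S (undef : $AnyObject, %rest : $AnyObject)

The alternative mentioned in the comment, copying the running value and destroying the copy after its last uses, would keep the running value intact at the cost of an extra copy_value and compensating destroy_values.
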
112 changes: 112 additions & 0 deletions test/SILOptimizer/mem2reg_ossa_nontrivial.sil
@@ -7,8 +7,17 @@ import Swift
// Declarations //
//////////////////

typealias AnyObject = Builtin.AnyObject

class Klass {}

class X {}

struct S {
var v1: AnyObject
var v2: AnyObject
}

struct SmallCodesizeStruct {
var cls1 : Klass
var cls2 : Klass
@@ -40,6 +49,10 @@ public enum FakeOptional<T> {
case none
}

struct UInt8 {
var _value : Builtin.Int8
}

sil [ossa] @get_nontrivialstruct : $@convention(thin) () -> @owned NonTrivialStruct
sil [ossa] @get_nontrivialenum : $@convention(thin) () -> @owned NonTrivialEnum
sil [ossa] @get_optionalnontrivialstruct : $@convention(thin) () -> @owned FakeOptional<NonTrivialStruct>
@@ -1113,3 +1126,102 @@ bb9:
return %res : $()
}

// Test that a load [take] of an unchecked_addr_cast doesn't get promoted.
//
// CHECK-LABEL: sil [ossa] @load_take_unchecked_addr_cast : {{.*}} {
// CHECK: load [take]
// CHECK-LABEL: } // end sil function 'load_take_unchecked_addr_cast'
sil [ossa] @load_take_unchecked_addr_cast : $@convention(thin) (@guaranteed AnyObject) -> () {
entry(%instance : @guaranteed $AnyObject):
%copy = copy_value %instance : $AnyObject
%storage = alloc_stack $AnyObject
store %copy to [init] %storage : $*AnyObject
%cast_addr = unchecked_addr_cast %storage : $*AnyObject to $*Klass
%value = load [take] %cast_addr : $*Klass
dealloc_stack %storage : $*AnyObject
destroy_value %value : $Klass
%retval = tuple ()
return %retval : $()
}

// Don't bail if the original address is destroyed even if we see a cast.
//
// CHECK-LABEL: sil [ossa] @destroy_original_storage : {{.*}} {
// CHECK: destroy_value
// CHECK-LABEL: } // end sil function 'destroy_original_storage'
sil [ossa] @destroy_original_storage : $@convention(thin) (@guaranteed AnyObject) -> () {
entry(%instance : @guaranteed $AnyObject):
%copy = copy_value %instance : $AnyObject
%storage = alloc_stack $AnyObject
store %copy to [init] %storage : $*AnyObject
%cast_addr = unchecked_addr_cast %storage : $*AnyObject to $*Klass
%value = load [copy] %cast_addr : $*Klass
destroy_addr %storage : $*AnyObject
dealloc_stack %storage : $*AnyObject
destroy_value %value : $Klass
%retval = tuple ()
return %retval : $()
}

// Bail if the address produced by an unchecked_addr_cast is destroyed.
//
// CHECK-LABEL: sil [ossa] @destroy_addr_unchecked_addr_cast : {{.*}} {
// CHECK: unchecked_addr_cast
// CHECK: load [copy]
// CHECK-LABEL: } // end sil function 'destroy_addr_unchecked_addr_cast'
sil [ossa] @destroy_addr_unchecked_addr_cast : $@convention(thin) (@guaranteed AnyObject) -> () {
entry(%instance : @guaranteed $AnyObject):
%copy = copy_value %instance : $AnyObject
%storage = alloc_stack $AnyObject
store %copy to [init] %storage : $*AnyObject
%cast_addr = unchecked_addr_cast %storage : $*AnyObject to $*Klass
%value = load [copy] %cast_addr : $*Klass
destroy_addr %cast_addr : $*Klass
dealloc_stack %storage : $*AnyObject
destroy_value %value : $Klass
%retval = tuple ()
return %retval : $()
}

// Bail if there's a load [take] of one of multiple non-trivial fields.
//
// CHECK-LABEL: sil [ossa] @load_take_one_of_two_nontrivial_struct_fields : {{.*}} {
// CHECK: load [take]
// CHECK: destroy_addr
// CHECK-LABEL: } // end sil function 'load_take_one_of_two_nontrivial_struct_fields'
sil [ossa] @load_take_one_of_two_nontrivial_struct_fields : $@convention(thin) (@guaranteed S) -> () {
entry(%instance : @guaranteed $S):
%copy = copy_value %instance : $S
%storage = alloc_stack $S
store %copy to [init] %storage : $*S
%v1_addr = struct_element_addr %storage : $*S, #S.v1
%cast_addr = unchecked_addr_cast %v1_addr : $*AnyObject to $*Klass
%value = load [take] %cast_addr : $*Klass
%v2_addr = struct_element_addr %storage : $*S, #S.v2
destroy_addr %v2_addr : $*AnyObject
//destroy_addr %storage : $*S
dealloc_stack %storage : $*S
destroy_value %value : $Klass
%retval = tuple ()
return %retval : $()
}

// Don't bail if we happen to see an unchecked_addr_cast but then load [take]
// the original address.
//
// CHECK-LABEL: sil [ossa] @load_take_original_despite_cast : {{.*}} {
// CHECK: unchecked_bitwise_cast
// CHECK: destroy_value
// CHECK-LABEL: } // end sil function 'load_take_original_despite_cast'
sil [ossa] @load_take_original_despite_cast : $@convention(thin) (@owned AnyObject) -> () {
entry(%instance : @owned $AnyObject):
%74 = alloc_stack $AnyObject
store %instance to [init] %74 : $*AnyObject
%76 = unchecked_addr_cast %74 : $*AnyObject to $*UInt8
%77 = load [trivial] %76 : $*UInt8
%79 = load [take] %74 : $*AnyObject
destroy_value %79 : $AnyObject
dealloc_stack %74 : $*AnyObject
%82 = tuple ()
return %82 : $()
}