// RUN: %target-sil-opt %s -dead-store-elim -enable-sil-verify-all | %FileCheck %s

sil_stage canonical

import Builtin
import Swift
import SwiftShims

final class X {
  init()
}

public struct S {
  @_hasStorage var x: X { get set }
  @_hasStorage var i: Int { get set }
  init(x: X, i: Int)
}

@_hasStorage @_hasInitialValue var gg: X { get set }

@inline(never) func takex(_ x: X)

sil [noinline] @takeX : $@convention(thin) (@guaranteed X) -> ()

// Test that escape analysis does not consider an inout argument to
// escape at a call site even though its reference-type field does
// escape. Dead store elimination asks MemoryBehaviorVisitor whether
// the apply may read from the inout argument. That calls into
// canEscapeToUsePoint, which should return false because the inout
// struct itself is not exposed to the call; only its reference-type
// field is. Consequently, the first store below is dead and must be
// removed, while the store after the apply must be preserved.
//
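// Roughly the Swift-level pattern being exercised (an illustrative sketch;
// the function below and its name are assumptions, not part of this test):
//
//   func caller(s: inout S, t: S) {
//     s = t        // dead: the call below cannot read `s`
//     takex(t.x)   // only the class reference escapes, not `s` itself
//     s = t        // this store must remain
//   }
//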
// CHECK-LABEL: sil @testInoutNoEscape
// CHECK-NOT: store
// CHECK: apply
// CHECK: store
// CHECK-NOT: store
// CHECK: } // end sil function 'testInoutNoEscape'
sil @testInoutNoEscape : $@convention(thin) (@inout S, @guaranteed S) -> () {
bb0(%0 : $*S, %1 : $S):
  %4 = struct_extract %1 : $S, #S.x
  %5 = struct_element_addr %0 : $*S, #S.x
  %6 = load %5 : $*X
  strong_retain %4 : $X
  strong_release %6 : $X
  // Dead store: overwritten below and not readable by the apply.
  store %1 to %0 : $*S
  %10 = function_ref @takeX : $@convention(thin) (@guaranteed X) -> ()
  strong_retain %4 : $X
  %12 = apply %10(%4) : $@convention(thin) (@guaranteed X) -> ()
  release_value %1 : $S
  // This store must survive dead store elimination.
  store %1 to %0 : $*S
  %15 = tuple ()
  return %15 : $()
}

// =============================================================================
// Test that a store writing back into a container is not eliminated
// when the container's interior pointer later escapes into a function
// that reads from the pointer.
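//
// Roughly the Swift-level pattern being modeled (an illustrative sketch;
// the function below and its body are assumptions, not part of this test):
//
//   func testContainerPointer() -> Bool {
//     let c = TestArrayContainer()
//     c.append(42)                // make the storage array nonempty
//     c.pointer = /* interior pointer into c.storage's buffer */
//     return takeRawPtr(UnsafeMutableRawPointer(&c.pointer))
//   }
//
// Because takeRawPtr can read through the escaped address of c.pointer,
// the store to c.pointer must not be treated as dead.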

final internal class TestArrayContainer {
  @_hasStorage @_hasInitialValue internal final var pointer: UnsafeMutablePointer<Int32> { get set }
  @_hasStorage @_hasInitialValue internal final var storage: ContiguousArray<Int32> { get set }
  @_optimize(none) @inline(never) internal final func append(_ arg: Int32)
  internal final func va_list() -> UnsafeMutableRawPointer
  init()
}
sil @UnsafeMutablePointer_load_Int64 : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
// ContiguousArray.append(_:)
sil @$ss15ContiguousArrayV6appendyyxnF : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()

// Helper that reads from a raw pointer.
sil hidden [noinline] @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool {
bb0(%0 : $UnsafeMutableRawPointer):
  %1 = integer_literal $Builtin.Int64, 0
  %2 = struct $Int64 (%1 : $Builtin.Int64)
  %3 = function_ref @UnsafeMutablePointer_load_Int64 : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
  %4 = apply %3(%2, %0) : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
  %5 = integer_literal $Builtin.Int1, -1
  %6 = struct $Bool (%5 : $Builtin.Int1)
  return %6 : $Bool
}

// TestArrayContainer.append(_:)
// Helper that appends an element so the container's storage array is nonempty.
sil hidden [noinline] [Onone] @TestArrayContainer_append : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> () {
bb0(%0 : $Int32, %1 : $TestArrayContainer):
  %2 = alloc_stack $Int32
  store %0 to %2 : $*Int32
  %4 = ref_element_addr %1 : $TestArrayContainer, #TestArrayContainer.storage
  %5 = function_ref @$ss15ContiguousArrayV6appendyyxnF : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()
  %6 = apply %5<Int32>(%2, %4) : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()
  dealloc_stack %2 : $*Int32
  %8 = tuple ()
  return %8 : $()
}

// CHECK-LABEL: sil [noinline] @testContainerPointer : $@convention(thin) () -> Bool {
// CHECK: [[ALLOC:%.*]] = alloc_ref [stack] $TestArrayContainer
// CHECK: [[PTR:%.*]] = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.pointer
// CHECK: [[LOAD:%.*]] = load %{{.*}} : $*__ContiguousArrayStorageBase
// CHECK: [[ELTS:%.*]] = ref_tail_addr [[LOAD]] : $__ContiguousArrayStorageBase, $Int32
// CHECK: [[ELTPTR:%.*]] = address_to_pointer [[ELTS]] : $*Int32 to $Builtin.RawPointer
// CHECK: [[UMP:%.*]] = struct $UnsafeMutablePointer<Int32> ([[ELTPTR]] : $Builtin.RawPointer)
// CHECK: store [[UMP]] to [[PTR]] : $*UnsafeMutablePointer<Int32>
// CHECK: [[F:%.*]] = function_ref @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
// CHECK: apply [[F]](%{{.*}}) : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
// CHECK: fix_lifetime %0 : $TestArrayContainer
// CHECK-LABEL: } // end sil function 'testContainerPointer'
sil [noinline] @testContainerPointer : $@convention(thin) () -> Bool {
bb0:
  %0 = alloc_ref [stack] $TestArrayContainer
  %1 = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.pointer
  %2 = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.storage
  %3 = integer_literal $Builtin.Int32, 42
  %4 = struct $Int32 (%3 : $Builtin.Int32)
  %5 = function_ref @TestArrayContainer_append : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> ()
  %6 = apply %5(%4, %0) : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> ()
  %7 = struct_element_addr %2 : $*ContiguousArray<Int32>, #ContiguousArray._buffer
  %8 = struct_element_addr %7 : $*_ContiguousArrayBuffer<Int32>, #_ContiguousArrayBuffer._storage
  %9 = load %8 : $*__ContiguousArrayStorageBase
  %10 = ref_tail_addr %9 : $__ContiguousArrayStorageBase, $Int32
  %11 = address_to_pointer %10 : $*Int32 to $Builtin.RawPointer
  %12 = struct $UnsafeMutablePointer<Int32> (%11 : $Builtin.RawPointer)
  // This store must not be eliminated: takeRawPtr reads through the address of %1.
  store %12 to %1 : $*UnsafeMutablePointer<Int32>
  %14 = address_to_pointer %1 : $*UnsafeMutablePointer<Int32> to $Builtin.RawPointer
  %15 = struct $UnsafeMutableRawPointer (%14 : $Builtin.RawPointer)
  %16 = function_ref @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
  %17 = apply %16(%15) : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
  fix_lifetime %0 : $TestArrayContainer
  set_deallocating %0 : $TestArrayContainer
  strong_release %9 : $__ContiguousArrayStorageBase
  dealloc_ref %0 : $TestArrayContainer
  dealloc_ref [stack] %0 : $TestArrayContainer
  return %17 : $Bool
}