
Commit 3782d67

EscapeAnalysis: rewrite canEscapeToUsePoint.
Correctness: do not make any unenforced assumptions about how the connection graph is built (I don't think the previous assumption about the structure of the graph node mapped to a reference-type value would always hold if content nodes can be arbitrarily merged). Make only one assumption about the client code: the access being checked must be to some address within the provided value, not to another object indirectly reachable from that value.

Optimization: allow escape analysis to prove that an addressable object does not escape even when one of its reference-type fields escapes.
1 parent 17ab0ad commit 3782d67
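
At the source level, the optimization case matches the testInoutNoEscape scenario added by this commit: a struct passed inout whose class-reference field escapes into a call while the struct's own storage does not. A minimal Swift sketch of that shape (Ref, Container, consume, and caller are illustrative names, not code from the patch):

final class Ref {}

struct Container {
  var ref: Ref    // reference-type field that may escape on its own
  var count: Int
}

@inline(never)
func consume(_ r: Ref) {}   // may retain or publish 'r'

func caller(_ c: inout Container, _ newValue: Container) {
  c = newValue            // candidate dead store, overwritten below
  consume(newValue.ref)   // only the reference-type field escapes here,
                          // not the storage of the inout 'c' itself
  c = newValue            // the call cannot read 'c', so the first store is dead
}

With the rewritten canEscapeToUsePoint, escape analysis can report that the call does not access the inout storage, so dead store elimination may remove the first assignment.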

2 files changed (+174, -30 lines)

lib/SILOptimizer/Analysis/EscapeAnalysis.cpp

Lines changed: 36 additions & 30 deletions
@@ -2479,44 +2479,50 @@ bool EscapeAnalysis::mergeSummaryGraph(ConnectionGraph *SummaryGraph,
   return SummaryGraph->mergeFrom(Graph, Mapping);
 }
 
-bool EscapeAnalysis::canEscapeToUsePoint(SILValue V, SILNode *UsePoint,
-                                         ConnectionGraph *ConGraph) {
+// Return true if any content within the logical object pointed to by \p value
+// escapes.
+//
+// Get the value's content node and check the escaping flag on all nodes within
+// that object. An interior CG node points to content within the same object.
+bool EscapeAnalysis::canEscapeToUsePoint(SILValue value,
+                                         SILInstruction *usePoint,
+                                         ConnectionGraph *conGraph) {
 
-  assert((FullApplySite::isa(UsePoint) || isa<RefCountingInst>(UsePoint)) &&
-         "use points are only created for calls and refcount instructions");
+  assert((FullApplySite::isa(usePoint) || isa<RefCountingInst>(usePoint))
+         && "use points are only created for calls and refcount instructions");
 
-  CGNode *Node = ConGraph->getNodeOrNull(V);
-  if (!Node)
+  CGNode *node = conGraph->getValueContent(value);
+  if (!node)
     return true;
 
-  // First check if there are escape paths which we don't explicitly see
-  // in the graph.
-  if (Node->valueEscapesInsideFunction(V))
-    return true;
+  // Follow points-to edges and return true if the current 'node' may escape at
+  // 'usePoint'.
+  CGNodeWorklist worklist(conGraph);
+  while (node) {
+    // Merging arbitrary nodes is supported, which may lead to cycles of
+    // interior nodes. End the search.
+    if (!worklist.tryPush(node))
+      break;
 
-  // No hidden escapes: check if the Node is reachable from the UsePoint.
-  // Check if the object itself can escape to the called function.
-  if (ConGraph->isUsePoint(UsePoint, Node))
-    return true;
+    // First check if 'node' may escape in a way not represented by the
+    // connection graph, assuming that it may represent part of the object
+    // pointed to by 'value'. If 'node' happens to represent another object
+    // indirectly reachable from 'value', then it cannot actually escape to this
+    // usePoint, so passing the original value is still conservatively correct.
+    if (node->valueEscapesInsideFunction(value))
+      return true;
 
-  assert(isPointer(V) && "should not have a node for a non-pointer");
-
-  // Check if the object "content" can escape to the called function.
-  // This will catch cases where V is a reference and a pointer to a stored
-  // property escapes.
-  // It's also important in case of a pointer assignment, e.g.
-  //    V = V1
-  //    apply(V1)
-  // In this case the apply is only a use-point for V1 and V1's content node.
-  // As V1's content node is the same as V's content node, we also make the
-  // check for the content node.
-  CGNode *ContentNode = getValueContent(ConGraph, V);
-  if (ContentNode->valueEscapesInsideFunction(V))
-    return true;
+    // No hidden escapes; check if 'usePoint' may access memory at 'node'.
+    if (conGraph->isUsePoint(usePoint, node))
+      return true;
 
-  if (ConGraph->isUsePoint(UsePoint, ContentNode))
-    return true;
+    if (!node->isInterior())
+      break;
 
+    // Continue to check for escaping content whenever 'content' may point to
+    // the same object as 'node'.
+    node = node->getContentNodeOrNull();
+  }
   return false;
 }

Lines changed: 138 additions & 0 deletions
@@ -0,0 +1,138 @@
+// RUN: %target-sil-opt %s -dead-store-elim -enable-sil-verify-all | %FileCheck %s
+
+sil_stage canonical
+
+import Builtin
+import Swift
+import SwiftShims
+
+final class X {
+  init()
+}
+
+public struct S {
+  @_hasStorage var x: X { get set }
+  @_hasStorage var i: Int { get set }
+  init(x: X, i: Int)
+}
+
+@_hasStorage @_hasInitialValue var gg: X { get set }
+
+@inline(never) func takex(_ x: X)
+
+sil [noinline] @takeX : $@convention(thin) (@guaranteed X) -> ()
+
+// Test that escape analysis does not consider an inout argument to
+// escape at a call site even though its reference-type field does
+// escape. Dead store elimination asks MemoryBehaviorVisitor whether
+// the apply may read from the inout argument. This calls into
+// canEscapeToUsePoint, which should return false because the inout
+// struct itself is not exposed to the call; only its
+// reference-type field is.
+//
+// CHECK-LABEL: sil @testInoutNoEscape
+// CHECK-NOT: store
+// CHECK: apply
+// CHECK: store
+// CHECK-NOT: store
+// CHECK: } // end sil function 'testInoutNoEscape'
+sil @testInoutNoEscape : $@convention(thin) (@inout S, @guaranteed S) -> () {
+bb0(%0 : $*S, %1 : $S):
+  %4 = struct_extract %1 : $S, #S.x
+  %5 = struct_element_addr %0 : $*S, #S.x
+  %6 = load %5 : $*X
+  strong_retain %4 : $X
+  strong_release %6 : $X
+  store %1 to %0 : $*S
+  %10 = function_ref @takeX : $@convention(thin) (@guaranteed X) -> ()
+  strong_retain %4 : $X
+  %12 = apply %10(%4) : $@convention(thin) (@guaranteed X) -> ()
+  release_value %1 : $S
+  store %1 to %0 : $*S
+  %15 = tuple ()
+  return %15 : $()
+}
+
+// =============================================================================
+// Test that a store writing back into a container is not eliminated
+// when the container's interior pointer later escapes into a function
+// that reads from the pointer.
+
+final internal class TestArrayContainer {
+  @_hasStorage @_hasInitialValue internal final var pointer: UnsafeMutablePointer<Int32> { get set }
+  @_hasStorage @_hasInitialValue internal final var storage: ContiguousArray<Int32> { get set }
+  @_optimize(none) @inline(never) internal final func append(_ arg: Int32)
+  internal final func va_list() -> UnsafeMutableRawPointer
+  init()
+}
+
+sil @UnsafeMutablePointer_load_Int64 : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
+// ContiguousArray.append(_:)
+sil @$ss15ContiguousArrayV6appendyyxnF : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()
+
+
+// Helper that reads from a raw pointer.
+sil hidden [noinline] @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool {
+bb0(%0 : $UnsafeMutableRawPointer):
+  %1 = integer_literal $Builtin.Int64, 0
+  %2 = struct $Int64 (%1 : $Builtin.Int64)
+  %3 = function_ref @UnsafeMutablePointer_load_Int64 : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
+  %4 = apply %3(%2, %0) : $@convention(method) (Int64, UnsafeMutableRawPointer) -> Optional<UnsafeMutablePointer<Int32>>
+  %5 = integer_literal $Builtin.Int1, -1
+  %6 = struct $Bool (%5 : $Builtin.Int1)
+  return %6 : $Bool
+}
+
+// TestArrayContainer.append(_:)
+// Helper that produces a nonempty array.
+sil hidden [noinline] [Onone] @TestArrayContainer_append : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> () {
+bb0(%0 : $Int32, %1 : $TestArrayContainer):
+  %2 = alloc_stack $Int32
+  store %0 to %2 : $*Int32
+  %4 = ref_element_addr %1 : $TestArrayContainer, #TestArrayContainer.storage
+  %5 = function_ref @$ss15ContiguousArrayV6appendyyxnF : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()
+  %6 = apply %5<Int32>(%2, %4) : $@convention(method) <τ_0_0> (@in τ_0_0, @inout ContiguousArray<τ_0_0>) -> ()
+  dealloc_stack %2 : $*Int32
+  %8 = tuple ()
+  return %8 : $()
+}
+
+// CHECK-LABEL: sil [noinline] @testContainerPointer : $@convention(thin) () -> Bool {
+// CHECK: [[ALLOC:%.*]] = alloc_ref [stack] $TestArrayContainer
+// CHECK: [[PTR:%.*]] = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.pointer
+// CHECK: [[LOAD:%.*]] = load %{{.*}} : $*__ContiguousArrayStorageBase
+// CHECK: [[ELTS:%.*]] = ref_tail_addr [[LOAD]] : $__ContiguousArrayStorageBase, $Int32
+// CHECK: [[ELTPTR:%.*]] = address_to_pointer [[ELTS]] : $*Int32 to $Builtin.RawPointer
+// CHECK: [[UMP:%.*]] = struct $UnsafeMutablePointer<Int32> ([[ELTPTR]] : $Builtin.RawPointer)
+// CHECK: store [[UMP]] to [[PTR]] : $*UnsafeMutablePointer<Int32>
+// CHECK: [[F:%.*]] = function_ref @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
+// CHECK: apply [[F]](%{{.*}}) : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
+// CHECK: fix_lifetime %0 : $TestArrayContainer
+// CHECK-LABEL: } // end sil function 'testContainerPointer'
+sil [noinline] @testContainerPointer : $@convention(thin) () -> Bool {
+bb0:
+  %0 = alloc_ref [stack] $TestArrayContainer
+  %1 = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.pointer
+  %2 = ref_element_addr %0 : $TestArrayContainer, #TestArrayContainer.storage
+  %3 = integer_literal $Builtin.Int32, 42
+  %4 = struct $Int32 (%3 : $Builtin.Int32)
+  %5 = function_ref @TestArrayContainer_append : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> ()
+  %6 = apply %5(%4, %0) : $@convention(method) (Int32, @guaranteed TestArrayContainer) -> ()
+  %7 = struct_element_addr %2 : $*ContiguousArray<Int32>, #ContiguousArray._buffer
+  %8 = struct_element_addr %7 : $*_ContiguousArrayBuffer<Int32>, #_ContiguousArrayBuffer._storage
+  %9 = load %8 : $*__ContiguousArrayStorageBase
+  %10 = ref_tail_addr %9 : $__ContiguousArrayStorageBase, $Int32
+  %11 = address_to_pointer %10 : $*Int32 to $Builtin.RawPointer
+  %12 = struct $UnsafeMutablePointer<Int32> (%11 : $Builtin.RawPointer)
+  store %12 to %1 : $*UnsafeMutablePointer<Int32>
+  %14 = address_to_pointer %1 : $*UnsafeMutablePointer<Int32> to $Builtin.RawPointer
+  %15 = struct $UnsafeMutableRawPointer (%14 : $Builtin.RawPointer)
+  %16 = function_ref @takeRawPtr : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
+  %17 = apply %16(%15) : $@convention(thin) (UnsafeMutableRawPointer) -> Bool
+  fix_lifetime %0 : $TestArrayContainer
+  set_deallocating %0 : $TestArrayContainer
+  strong_release %9 : $__ContiguousArrayStorageBase
+  dealloc_ref %0 : $TestArrayContainer
+  dealloc_ref [stack] %0 : $TestArrayContainer
+  return %17 : $Bool
+}
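
For the second test, a rough Swift-level sketch of the shape being guarded against (ArrayContainer, readThrough, and useContainer are hypothetical names; the actual test is the SIL above, which reaches into ContiguousArray internals directly):

final class ArrayContainer {
  var pointer: UnsafeMutablePointer<Int32>

  init(pointer: UnsafeMutablePointer<Int32>) {
    self.pointer = pointer
  }
}

// Reads through the raw pointer, so the store that feeds it must stay.
@inline(never)
func readThrough(_ raw: UnsafeMutableRawPointer) -> Int32 {
  return raw.load(as: UnsafeMutablePointer<Int32>.self).pointee
}

func useContainer() -> Int32 {
  let buffer = UnsafeMutablePointer<Int32>.allocate(capacity: 1)
  buffer.initialize(to: 42)
  defer { buffer.deallocate() }

  let container = ArrayContainer(pointer: buffer)
  // Write the buffer pointer back into the container's field; even though
  // 'container' never escapes as a whole, this store must survive ...
  container.pointer = buffer
  // ... because the field's address escapes into a function that reads it.
  return withUnsafeMutablePointer(to: &container.pointer) { field in
    readThrough(UnsafeMutableRawPointer(field))
  }
}

This exercises the conservative side of the new traversal: content that is reachable at the use point must still be treated as potentially accessed, so the store into the pointer field is not eliminated.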
