[Exclusivity] Statically enforce exclusive access in noescape closures #10310

Merged · 1 commit · Jun 17, 2017
40 changes: 34 additions & 6 deletions lib/SILOptimizer/Analysis/AccessSummaryAnalysis.cpp
@@ -104,6 +104,7 @@ void AccessSummaryAnalysis::processArgument(FunctionInfo *info,
break;
case ValueKind::CopyAddrInst:
case ValueKind::ExistentialMetatypeInst:
case ValueKind::ValueMetatypeInst:
case ValueKind::LoadInst:
case ValueKind::OpenExistentialAddrInst:
case ValueKind::ProjectBlockStorageInst:
@@ -114,11 +115,41 @@ void AccessSummaryAnalysis::processArgument(FunctionInfo *info,
break;
default:
// TODO: These requirements should be checked for in the SIL verifier.
llvm_unreachable("Unrecognized argument use");
// This is an assertion rather than llvm_unreachable() because
// it is likely that the whitelist above of scenarios in which
// we're not generating access markers is not comprehensive.
assert(false && "Unrecognized argument use");
break;
}
}
}

#ifndef NDEBUG
/// Sanity check to make sure that a noescape partial apply is
/// ultimately used only by an apply, a try_apply, or as an argument (but not
/// as the callee) of another partial_apply.
/// TODO: This really should be checked in the SILVerifier.
static bool isExpectedUseOfNoEscapePartialApply(SILInstruction *user) {
if (!user)
return true;

// It is fine to call the partial apply
if (isa<ApplyInst>(user) || isa<TryApplyInst>(user)) {
return true;
}

if (isa<ConvertFunctionInst>(user)) {
return isExpectedUseOfNoEscapePartialApply(user->getSingleUse()->getUser());
}

if (auto *PAI = dyn_cast<PartialApplyInst>(user)) {
return user != PAI->getCallee();
}

return false;
}
#endif
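
The convert_function and partial_apply-as-argument cases above are exactly the shape exercised by the `passPartialApplyAsArgumentToPartialApply` SIL test added at the bottom of this patch. As a hedged sketch of a plausible Swift-level source for that pattern (the function names below are illustrative, not taken from the patch), an `@autoclosure` capturing an `inout` variable gets reabstracted before reaching a generic callee:

```swift
// Sketch: lowering produces a noescape partial_apply (the autoclosure
// capturing `x`) that is passed as an *argument* to another partial_apply
// (the reabstraction thunk from () -> Int to () -> T), not as its callee.
func takesAutoClosure<T: Equatable>(_ value: @autoclosure () throws -> T) {}

func caller(x: inout Int) {
  takesAutoClosure(x) // the implicit autoclosure { x } captures x @inout_aliasable
}
```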

void AccessSummaryAnalysis::processPartialApply(FunctionInfo *callerInfo,
unsigned callerArgumentIndex,
PartialApplyInst *apply,
@@ -133,12 +164,9 @@ void AccessSummaryAnalysis::processPartialApply(FunctionInfo *callerInfo,
assert(isa<FunctionRefInst>(apply->getCallee()) &&
"Noescape partial apply of non-functionref?");

// Make sure the partial_apply is used by an apply and not another
// partial_apply
SILInstruction *user = apply->getSingleUse()->getUser();
assert((isa<ApplyInst>(user) || isa<TryApplyInst>(user) ||
isa<ConvertFunctionInst>(user)) &&
"noescape partial_apply has non-apply use!");
assert(isExpectedUseOfNoEscapePartialApply(user) &&
"noescape partial_apply has unexpected use!");
(void)user;

// The arguments to partial_apply are a suffix of the arguments to the
178 changes: 167 additions & 11 deletions lib/SILOptimizer/Mandatory/DiagnoseStaticExclusivity.cpp
@@ -163,26 +163,75 @@ class AccessedStorage {
}
};

enum class RecordedAccessKind {
/// The access was for a 'begin_access' instruction in the current function
/// being checked.
BeginInstruction,

/// The access was inside a noescape closure that we either passed to a
/// function or called directly. It results from applying the summary of
/// the closure to the closure's captures.
NoescapeClosureCapture
};

/// Records an access to an address and the single subpath of projections
/// that was performed on the address, if such a single subpath exists.
class RecordedAccess {
private:
BeginAccessInst *Inst;
const IndexTrieNode *SubPath;
RecordedAccessKind RecordKind;
union {
BeginAccessInst *Inst;
struct {
SILAccessKind ClosureAccessKind;
SILLocation ClosureAccessLoc;
};
};

const IndexTrieNode *SubPath;
public:
RecordedAccess(BeginAccessInst *BAI, const IndexTrieNode *SubPath)
: Inst(BAI), SubPath(SubPath) {}
RecordedAccess(BeginAccessInst *BAI, const IndexTrieNode *SubPath) :
RecordKind(RecordedAccessKind::BeginInstruction), Inst(BAI),
SubPath(SubPath) { }

RecordedAccess(SILAccessKind ClosureAccessKind,
SILLocation ClosureAccessLoc, const IndexTrieNode *SubPath) :
RecordKind(RecordedAccessKind::NoescapeClosureCapture),
ClosureAccessKind(ClosureAccessKind), ClosureAccessLoc(ClosureAccessLoc),
SubPath(SubPath) { }

RecordedAccessKind getRecordKind() const {
return RecordKind;
}

BeginAccessInst *getInstruction() const { return Inst; }
BeginAccessInst *getInstruction() const {
assert(RecordKind == RecordedAccessKind::BeginInstruction);
return Inst;
}

SILAccessKind getAccessKind() const { return Inst->getAccessKind(); }
SILAccessKind getAccessKind() const {
switch (RecordKind) {
case RecordedAccessKind::BeginInstruction:
return Inst->getAccessKind();
case RecordedAccessKind::NoescapeClosureCapture:
return ClosureAccessKind;
};
}

SILLocation getAccessLoc() const { return Inst->getLoc(); }
SILLocation getAccessLoc() const {
switch (RecordKind) {
case RecordedAccessKind::BeginInstruction:
return Inst->getLoc();
case RecordedAccessKind::NoescapeClosureCapture:
return ClosureAccessLoc;
};
}

const IndexTrieNode *getSubPath() const { return SubPath; }
const IndexTrieNode *getSubPath() const {
return SubPath;
}
};


/// Records the in-progress accesses to a given sub path.
class SubAccessInfo {
public:
@@ -676,9 +725,11 @@ static void diagnoseExclusivityViolation(const ConflictingAccess &Violation,
diagnose(Ctx, MainAccess.getAccessLoc().getSourceLoc(), DiagnosticID,
PathDescription, AccessKindForMain);
D.highlight(RangeForMain);
tryFixItWithCallToCollectionSwapAt(FirstAccess.getInstruction(),
SecondAccess.getInstruction(),
CallsToSwap, Ctx, D);
if (SecondAccess.getRecordKind() == RecordedAccessKind::BeginInstruction) {
tryFixItWithCallToCollectionSwapAt(FirstAccess.getInstruction(),
SecondAccess.getInstruction(),
CallsToSwap, Ctx, D);
}
} else {
auto DiagnosticID = (Ctx.LangOpts.isSwiftVersion3() ?
diag::exclusivity_access_required_unknown_decl_swift3 :
@@ -868,6 +919,102 @@ Optional<RecordedAccess> shouldReportAccess(const AccessInfo &Info,
return Info.conflictsWithAccess(Kind, SubPath);
}

/// Use the summary analysis to check whether a call to the given
/// function would conflict with any in progress accesses. The starting
/// index indicates what index into the callee's parameters the
/// arguments array starts at -- this is useful for partial_apply functions,
/// which pass only a suffix of the callee's arguments at the apply site.
static void checkForViolationWithCall(
const StorageMap &Accesses, SILFunction *Callee, unsigned StartingAtIndex,
OperandValueArrayRef Arguments, AccessSummaryAnalysis *ASA,
llvm::SmallVectorImpl<ConflictingAccess> &ConflictingAccesses) {
const AccessSummaryAnalysis::FunctionSummary &FS =
ASA->getOrCreateSummary(Callee);

// For each argument in the suffix of the callee arguments being passed
// at this call site, determine whether the arguments will be accessed
// in a way that conflicts with any currently in progress accesses.
// If so, diagnose.
for (unsigned ArgumentIndex : indices(Arguments)) {
unsigned CalleeIndex = StartingAtIndex + ArgumentIndex;

const AccessSummaryAnalysis::ArgumentSummary &AS =
FS.getAccessForArgument(CalleeIndex);
Optional<SILAccessKind> Kind = AS.getAccessKind();
if (!Kind)
continue;

SILValue Argument = Arguments[ArgumentIndex];
assert(Argument->getType().isAddress());

const AccessedStorage &Storage = findAccessedStorage(Argument);
auto AccessIt = Accesses.find(Storage);
if (AccessIt == Accesses.end())
continue;
const AccessInfo &Info = AccessIt->getSecond();

// TODO: For now, treat a summarized access as an access to the whole
// address. Once the summary analysis is sensitive to stored properties,
// this should be updated to look at the subpaths from the summary.
const IndexTrieNode *SubPath = ASA->getSubPathTrieRoot();
if (auto Conflict = shouldReportAccess(Info, *Kind, SubPath)) {
SILLocation AccessLoc = AS.getAccessLoc();
const auto &SecondAccess = RecordedAccess(*Kind, AccessLoc, SubPath);
ConflictingAccesses.emplace_back(Storage, *Conflict, SecondAccess);
}
}
}

/// Look through a value passed as a function argument to determine whether
/// it is a partial_apply.
static PartialApplyInst *lookThroughForPartialApply(SILValue V) {
while (true) {
if (auto CFI = dyn_cast<ConvertFunctionInst>(V)) {
V = CFI->getOperand();
continue;
}

if (auto *PAI = dyn_cast<PartialApplyInst>(V))
return PAI;

return nullptr;
}
}

/// Checks whether any of the arguments to the apply are closures and diagnoses
/// if any of the @inout_aliasable captures passed to those closures have
/// in-progress accesses that would conflict with any access the summary
/// says the closure would perform.
static void checkForViolationsInNoEscapeClosures(
const StorageMap &Accesses, FullApplySite FAS, AccessSummaryAnalysis *ASA,
llvm::SmallVectorImpl<ConflictingAccess> &ConflictingAccesses) {

SILFunction *Callee = FAS.getCalleeFunction();
if (Callee && !Callee->empty()) {
// Check for violation with directly called closure
checkForViolationWithCall(Accesses, Callee, 0, FAS.getArguments(), ASA,
ConflictingAccesses);
}

// Check for violation with closures passed as arguments
for (SILValue Argument : FAS.getArguments()) {
auto *PAI = lookThroughForPartialApply(Argument);
if (!PAI)
continue;

SILFunction *Closure = PAI->getCalleeFunction();
if (!Closure || Closure->empty())
continue;

// Check the closure's captures, which are a suffix of the closure's
// parameters.
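// For example, if the closure takes three parameters and the partial_apply
// supplies one capture, the capture corresponds to the closure's last
// parameter, so StartIndex is 3 - 1 = 2.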
unsigned StartIndex =
Closure->getArguments().size() - PAI->getNumCallArguments();
checkForViolationWithCall(Accesses, Closure, StartIndex,
PAI->getArguments(), ASA, ConflictingAccesses);
}
}
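
Together with the direct-call case, this lets the pass statically diagnose conflicts such as the following minimal sketch (illustrative names, not from the patch), where an in-progress `modify` access for an `inout` argument overlaps the closure's summarized access to the same variable:

```swift
func callWhileModifying(_ n: inout Int, _ body: () -> Void) {
  body()
}

var x = 0
// The apply holds a 'modify' access to `x` for the inout argument while the
// noescape closure's summary records a modify of its capture of `x`;
// applying the summary at the call site exposes the conflict at compile
// time instead of leaving it to the runtime exclusivity check.
callWhileModifying(&x) { x += 1 }
```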

static void checkStaticExclusivity(SILFunction &Fn, PostOrderFunctionInfo *PO,
AccessSummaryAnalysis *ASA) {
// The implementation relies on the following SIL invariants:
@@ -966,8 +1113,17 @@ static void checkStaticExclusivity(SILFunction &Fn, PostOrderFunctionInfo *PO,
// Record calls to swap() for potential Fix-Its.
if (isCallToStandardLibrarySwap(AI, Fn.getASTContext()))
CallsToSwap.push_back(AI);
else
checkForViolationsInNoEscapeClosures(Accesses, AI, ASA,
ConflictingAccesses);
continue;
}

if (auto *TAI = dyn_cast<TryApplyInst>(&I)) {
checkForViolationsInNoEscapeClosures(Accesses, TAI, ASA,
ConflictingAccesses);
continue;
}
// Sanity check to make sure entries are properly removed.
assert((!isa<ReturnInst>(&I) || Accesses.size() == 0) &&
"Entries were not properly removed?!");
28 changes: 28 additions & 0 deletions test/SILOptimizer/access_summary_analysis.sil
@@ -200,6 +200,34 @@ bb0(%0 : $*Int, %1 : $*Int, %2 : $*Int):
return %8 : $()
}


sil @takesAutoClosureReturningGeneric : $@convention(thin) <T where T : Equatable> (@owned @callee_owned () -> (@out T, @error Error)) -> ()
sil @thunkForAutoClosure : $@convention(thin) (@owned @callee_owned () -> (Int, @error Error)) -> (@out Int, @error Error)

// CHECK-LABEL: @readsAndThrows
// CHECK-NEXT: (read)
sil private @readsAndThrows : $@convention(thin) (@inout_aliasable Int) -> (Int, @error Error) {
bb0(%0 : $*Int):
%3 = begin_access [read] [unknown] %0 : $*Int
%4 = load %3 : $*Int
end_access %3 : $*Int
return %4 : $Int
}

// CHECK-LABEL: @passPartialApplyAsArgumentToPartialApply
// CHECK-NEXT: (read)
sil hidden @passPartialApplyAsArgumentToPartialApply : $@convention(thin) (@inout_aliasable Int) -> () {
bb0(%0 : $*Int):
%2 = function_ref @takesAutoClosureReturningGeneric : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@owned @callee_owned () -> (@out τ_0_0, @error Error)) -> ()
%3 = function_ref @readsAndThrows : $@convention(thin) (@inout_aliasable Int) -> (Int, @error Error)
%4 = partial_apply %3(%0) : $@convention(thin) (@inout_aliasable Int) -> (Int, @error Error)
%5 = function_ref @thunkForAutoClosure : $@convention(thin) (@owned @callee_owned () -> (Int, @error Error)) -> (@out Int, @error Error)
%6 = partial_apply %5(%4) : $@convention(thin) (@owned @callee_owned () -> (Int, @error Error)) -> (@out Int, @error Error)
%7 = apply %2<Int>(%6) : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@owned @callee_owned () -> (@out τ_0_0, @error Error)) -> ()
%8 = tuple ()
return %8 : $()
}

// CHECK-LABEL: @selfRecursion
// CHECK-NEXT: (modify, none)
sil private @selfRecursion : $@convention(thin) (@inout_aliasable Int, Int) -> () {