Skip to content

Fix a bug related to cloning of self-referring generic closures #9018

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Apr 26, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions include/swift/SIL/SILCloner.h
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,10 @@ SILCloner<ImplClass>::postProcess(SILInstruction *Orig,
SILInstruction *Cloned) {
assert((Orig->getDebugScope() ? Cloned->getDebugScope()!=nullptr : true) &&
"cloned function dropped debug scope");
// Remove any previous mappings for the Orig instruction.
// If this is not done and there is a mapping for Orig in the map already,
// then this new mapping will be silently ignored.
// NOTE(review): map insert() does not overwrite an existing key, so without
// the erase a stale Orig->Cloned entry would survive when the same original
// instruction is post-processed more than once (as happens when cloning a
// self-referring closure) — presumably the bug this patch fixes; confirm
// against the full SILCloner.h.
InstructionMap.erase(Orig);
InstructionMap.insert(std::make_pair(Orig, Cloned));
}

Expand Down
20 changes: 17 additions & 3 deletions include/swift/SIL/TypeSubstCloner.h
Original file line number Diff line number Diff line change
Expand Up @@ -145,25 +145,39 @@ class TypeSubstCloner : public SILClonerWithScopes<ImplClass> {
// NOTE(review): this is the interior of a partial_apply visitor in
// TypeSubstCloner (truncated at both ends in this diff view) — confirm the
// enclosing signature against the full TypeSubstCloner.h.
SILValue CalleeVal = Inst->getCallee();
SILBuilderWithPostProcess<TypeSubstCloner, 4> Builder(this, Inst);
Builder.setCurrentDebugScope(super::getOpScope(Inst->getDebugScope()));
SmallVector<Substitution, 16> TempSubstList;
if (!Inlining) {
// Special case: a partial_apply of a function_ref that refers back to the
// very function being cloned (a self-referring closure).
FunctionRefInst *FRI = dyn_cast<FunctionRefInst>(CalleeVal);
if (FRI && FRI->getReferencedFunction() == Inst->getFunction()) {
auto LoweredFnTy = Builder.getFunction().getLoweredFunctionType();
auto GenSig = LoweredFnTy->getGenericSignature();
if (GenSig) {
// If the new (cloned) function is still generic, rebuild the substitution
// list for the recursive reference: take the original call's substitution
// map (keyed by the original function's generic signature) and express it
// in terms of the cloned function's generic signature.
GenSig->getSubstitutions(
Inst->getFunction()
->getLoweredFunctionType()
->getGenericSignature()
->getSubstitutionMap(Inst->getSubstitutions()),
TempSubstList);
}
// Remap each substitution through the cloner so it refers to the
// substituted types of the clone.
for (auto &Sub : TempSubstList) {
Sub = asImpl().getOpSubstitution(Sub);
}
SubstitutionList Subs = TempSubstList;
// Re-point the recursive function_ref at the cloned function and rebuild
// the partial_apply with the recomputed substitutions (previously an empty
// SubstitutionList() was passed here, dropping the substitutions).
FRI = Builder.createFunctionRef(getOpLocation(Inst->getLoc()),
&Builder.getFunction());
Builder.createPartialApply(getOpLocation(Inst->getLoc()), FRI,
getOpType(Inst->getSubstCalleeSILType()),
Subs,
Args,
getOpType(Inst->getType()));
return;
}
}

// Generic (non-self-referring) path: remap every substitution of the
// original partial_apply.
SmallVector<Substitution, 16> TempSubstList;
for (auto &Sub : Inst->getSubstitutions()) {
TempSubstList.push_back(asImpl().getOpSubstitution(Sub));
}

Builder.createPartialApply(
getOpLocation(Inst->getLoc()), getOpValue(CalleeVal),
getOpType(Inst->getSubstCalleeSILType()), TempSubstList, Args,
Expand Down
32 changes: 32 additions & 0 deletions test/SILOptimizer/specialize.sil
Original file line number Diff line number Diff line change
Expand Up @@ -582,3 +582,35 @@ bb0:
%12 = tuple ()
return %12 : $()
} // end sil function 'testGenericClosureSpecialization'

// Test a specialization of a self-recursive generic closure.

// CHECK-LABEL: sil shared @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()
// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5
// CHECK: partial_apply [[SPECIALIZED_FN]]{{.*}}({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()

// CHECK-LABEL: sil @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> ()
// Refer to the specialized version of the function
// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5
// CHECK: partial_apply [[SPECIALIZED_FN]]<R>({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()
// Self-recursive generic function used to exercise the generic specializer:
// bb1 builds (but never invokes) a closure over this very function with
// substitutions <R, Builtin.Int64>, so the specializer must correctly clone
// and rewrite the recursive function_ref/partial_apply pair (checked by the
// CHECK lines above).
sil @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> () {
bb0(%0 : $*R, %1 : $*S, %2 : $Builtin.Int64):
// Compare the incoming counter %2 against 100; if equal, skip the
// self-referring partial_apply.
%4 = integer_literal $Builtin.Int64, 100
%5 = builtin "cmp_eq_Int64"(%2 : $Builtin.Int64, %4 : $Builtin.Int64) : $Builtin.Int1
cond_br %5, bb2, bb1

bb1:
// Materialize an @in_guaranteed Builtin.Int64 argument on the stack for the
// second generic parameter of the recursive reference.
%val_storage = alloc_stack $Builtin.Int64
%val = integer_literal $Builtin.Int64, 4
store %val to %val_storage : $*Builtin.Int64
// Self-reference: partial_apply of this same function, specializing the
// second generic parameter to the concrete type Builtin.Int64.
%fn = function_ref @selfReferringGenericClosure : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
%7 = partial_apply %fn<R, Builtin.Int64>(%0, %val_storage, %4) : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
dealloc_stack %val_storage : $*Builtin.Int64
br bb3
bb2:
br bb3

bb3:
%8 = tuple ()
return %8 : $()
}