Skip to content

Commit ecaa330

Browse files
authored
Merge pull request #9018 from swiftix/partial-specialization-fixes
Fix a bug related to cloning of self-referring generic closures
2 parents 46ddcaa + d08e075 commit ecaa330

File tree

3 files changed

+53
-3
lines changed

3 files changed

+53
-3
lines changed

include/swift/SIL/SILCloner.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -384,6 +384,10 @@ SILCloner<ImplClass>::postProcess(SILInstruction *Orig,
384384
SILInstruction *Cloned) {
385385
assert((Orig->getDebugScope() ? Cloned->getDebugScope()!=nullptr : true) &&
386386
"cloned function dropped debug scope");
387+
// Remove any previous mappings for the Orig instruction.
388+
// If this is not done and there is a mapping for Orig in the map already,
389+
// then this new mapping will be silently ignored.
390+
InstructionMap.erase(Orig);
387391
InstructionMap.insert(std::make_pair(Orig, Cloned));
388392
}
389393

include/swift/SIL/TypeSubstCloner.h

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -145,25 +145,39 @@ class TypeSubstCloner : public SILClonerWithScopes<ImplClass> {
145145
SILValue CalleeVal = Inst->getCallee();
146146
SILBuilderWithPostProcess<TypeSubstCloner, 4> Builder(this, Inst);
147147
Builder.setCurrentDebugScope(super::getOpScope(Inst->getDebugScope()));
148+
SmallVector<Substitution, 16> TempSubstList;
148149
if (!Inlining) {
149150
FunctionRefInst *FRI = dyn_cast<FunctionRefInst>(CalleeVal);
150151
if (FRI && FRI->getReferencedFunction() == Inst->getFunction()) {
152+
auto LoweredFnTy = Builder.getFunction().getLoweredFunctionType();
153+
auto GenSig = LoweredFnTy->getGenericSignature();
154+
if (GenSig) {
155+
GenSig->getSubstitutions(
156+
Inst->getFunction()
157+
->getLoweredFunctionType()
158+
->getGenericSignature()
159+
->getSubstitutionMap(Inst->getSubstitutions()),
160+
TempSubstList);
161+
}
162+
for (auto &Sub : TempSubstList) {
163+
Sub = asImpl().getOpSubstitution(Sub);
164+
}
165+
SubstitutionList Subs = TempSubstList;
151166
FRI = Builder.createFunctionRef(getOpLocation(Inst->getLoc()),
152167
&Builder.getFunction());
153168
Builder.createPartialApply(getOpLocation(Inst->getLoc()), FRI,
154169
getOpType(Inst->getSubstCalleeSILType()),
155-
SubstitutionList(),
170+
Subs,
156171
Args,
157172
getOpType(Inst->getType()));
158173
return;
159174
}
160175
}
161176

162-
SmallVector<Substitution, 16> TempSubstList;
163177
for (auto &Sub : Inst->getSubstitutions()) {
164178
TempSubstList.push_back(asImpl().getOpSubstitution(Sub));
165179
}
166-
180+
167181
Builder.createPartialApply(
168182
getOpLocation(Inst->getLoc()), getOpValue(CalleeVal),
169183
getOpType(Inst->getSubstCalleeSILType()), TempSubstList, Args,

test/SILOptimizer/specialize.sil

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -582,3 +582,35 @@ bb0:
582582
%12 = tuple ()
583583
return %12 : $()
584584
} // end sil function 'testGenericClosureSpecialization'
585+
586+
// Test a specialization of a self-recursive generic closure.
587+
588+
// CHECK-LABEL: sil shared @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()
589+
// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5
590+
// CHECK: partial_apply [[SPECIALIZED_FN]]{{.*}}({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()
591+
592+
// CHECK-LABEL: sil @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> ()
593+
// Refers to the specialized version of the function.
594+
// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @_T027selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lItnny_Tp5
595+
// CHECK: partial_apply [[SPECIALIZED_FN]]<R>({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, @in_guaranteed Builtin.Int64, Builtin.Int64) -> ()
596+
sil @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> () {
597+
bb0(%0 : $*R, %1 : $*S, %2 : $Builtin.Int64):
598+
%4 = integer_literal $Builtin.Int64, 100
599+
%5 = builtin "cmp_eq_Int64"(%2 : $Builtin.Int64, %4 : $Builtin.Int64) : $Builtin.Int1
600+
cond_br %5, bb2, bb1
601+
602+
bb1:
603+
%val_storage = alloc_stack $Builtin.Int64
604+
%val = integer_literal $Builtin.Int64, 4
605+
store %val to %val_storage : $*Builtin.Int64
606+
%fn = function_ref @selfReferringGenericClosure : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
607+
%7 = partial_apply %fn<R, Builtin.Int64>(%0, %val_storage, %4) : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
608+
dealloc_stack %val_storage : $*Builtin.Int64
609+
br bb3
610+
bb2:
611+
br bb3
612+
613+
bb3:
614+
%8 = tuple ()
615+
return %8 : $()
616+
}

0 commit comments

Comments
 (0)