Skip to content

Commit a2604dc

Browse files
committed
[region-isolation] Ensure that we error if we access a Sendable field of a non-Sendable typed var and the var is captured in a closure
If the var is captured in a closure before it is transferred, it is not safe to access the Sendable field since we may race on accessing the field with an assignment to the field in another concurrency domain. rdar://115124361
1 parent 556c503 commit a2604dc

File tree

5 files changed

+839
-98
lines changed

5 files changed

+839
-98
lines changed

include/swift/SIL/SILBasicBlock.h

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -231,6 +231,52 @@ public SwiftObjectHeader {
231231
const_reverse_iterator rbegin() const { return InstList.rbegin(); }
232232
const_reverse_iterator rend() const { return InstList.rend(); }
233233

234+
/// Return a forward range over the instructions from \p inst (inclusive)
/// to the end of this block. \p inst must belong to this block.
llvm::iterator_range<iterator> getRangeStartingAtInst(SILInstruction *inst) {
  assert(inst->getParent() == this);
  return llvm::make_range(inst->getIterator(), end());
}
238+
239+
/// Return a forward range over the instructions from the start of this
/// block up to (but not including) \p inst. \p inst must belong to this
/// block.
llvm::iterator_range<iterator> getRangeEndingAtInst(SILInstruction *inst) {
  assert(inst->getParent() == this);
  return llvm::make_range(begin(), inst->getIterator());
}
243+
244+
/// Return a reverse range over the instructions from \p inst (inclusive)
/// back to the beginning of this block. \p inst must belong to this block.
llvm::iterator_range<reverse_iterator>
getReverseRangeStartingAtInst(SILInstruction *inst) {
  assert(inst->getParent() == this);
  return llvm::make_range(inst->getReverseIterator(), rend());
}
249+
250+
/// Return a reverse range over the instructions from the end of this block
/// back to (but not including) \p inst. \p inst must belong to this block.
llvm::iterator_range<reverse_iterator>
getReverseRangeEndingAtInst(SILInstruction *inst) {
  assert(inst->getParent() == this);
  return llvm::make_range(rbegin(), inst->getReverseIterator());
}
255+
256+
/// Const variant: forward range over the instructions from \p inst
/// (inclusive) to the end of this block. \p inst must belong to this block.
llvm::iterator_range<const_iterator>
getRangeStartingAtInst(SILInstruction *inst) const {
  assert(inst->getParent() == this);
  return llvm::make_range(inst->getIterator(), end());
}
261+
262+
/// Const variant: forward range over the instructions from the start of
/// this block up to (but not including) \p inst. \p inst must belong to
/// this block.
llvm::iterator_range<const_iterator>
getRangeEndingAtInst(SILInstruction *inst) const {
  assert(inst->getParent() == this);
  return llvm::make_range(begin(), inst->getIterator());
}
267+
268+
/// Const variant: reverse range over the instructions from \p inst
/// (inclusive) back to the beginning of this block. \p inst must belong to
/// this block.
llvm::iterator_range<const_reverse_iterator>
getReverseRangeStartingAtInst(SILInstruction *inst) const {
  assert(inst->getParent() == this);
  return llvm::make_range(inst->getReverseIterator(), rend());
}
273+
274+
/// Const variant: reverse range over the instructions from the end of this
/// block back to (but not including) \p inst. \p inst must belong to this
/// block.
llvm::iterator_range<const_reverse_iterator>
getReverseRangeEndingAtInst(SILInstruction *inst) const {
  assert(inst->getParent() == this);
  return llvm::make_range(rbegin(), inst->getReverseIterator());
}
279+
234280
/// Allows deleting instructions while iterating over all instructions of the
235281
/// block.
236282
///

include/swift/SILOptimizer/Utils/PartitionUtils.h

Lines changed: 139 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,70 @@ struct DenseMapInfo<swift::PartitionPrimitives::Region> {
8888

8989
namespace swift {
9090

91+
struct TransferringOperand {
92+
using ValueType = llvm::PointerIntPair<Operand *, 1>;
93+
ValueType value;
94+
95+
TransferringOperand() : value() {}
96+
TransferringOperand(Operand *op, bool isClosureCaptured)
97+
: value(op, isClosureCaptured) {}
98+
explicit TransferringOperand(Operand *op) : value(op, false) {}
99+
TransferringOperand(ValueType newValue) : value(newValue) {}
100+
101+
operator bool() const { return bool(value.getPointer()); }
102+
103+
Operand *getOperand() const { return value.getPointer(); }
104+
105+
bool isClosureCaptured() const { return value.getInt(); }
106+
107+
SILInstruction *getUser() const { return getOperand()->getUser(); }
108+
};
109+
110+
} // namespace swift
111+
112+
namespace llvm {
113+
114+
template <>
115+
struct PointerLikeTypeTraits<swift::TransferringOperand> {
116+
using TransferringOperand = swift::TransferringOperand;
117+
118+
static inline void *getAsVoidPointer(TransferringOperand ptr) {
119+
return PointerLikeTypeTraits<
120+
TransferringOperand::ValueType>::getAsVoidPointer(ptr.value);
121+
}
122+
static inline TransferringOperand getFromVoidPointer(void *ptr) {
123+
return {PointerLikeTypeTraits<
124+
TransferringOperand::ValueType>::getFromVoidPointer(ptr)};
125+
}
126+
127+
static constexpr int NumLowBitsAvailable = PointerLikeTypeTraits<
128+
TransferringOperand::ValueType>::NumLowBitsAvailable;
129+
};
130+
131+
template <>
132+
struct DenseMapInfo<swift::TransferringOperand> {
133+
using TransferringOperand = swift::TransferringOperand;
134+
using ParentInfo = DenseMapInfo<TransferringOperand::ValueType>;
135+
136+
static TransferringOperand getEmptyKey() {
137+
return TransferringOperand(ParentInfo::getEmptyKey());
138+
}
139+
static TransferringOperand getTombstoneKey() {
140+
return TransferringOperand(ParentInfo::getTombstoneKey());
141+
}
142+
143+
static unsigned getHashValue(TransferringOperand operand) {
144+
return ParentInfo::getHashValue(operand.value);
145+
}
146+
static bool isEqual(TransferringOperand LHS, TransferringOperand RHS) {
147+
return ParentInfo::isEqual(LHS.value, RHS.value);
148+
}
149+
};
150+
151+
} // namespace llvm
152+
153+
namespace swift {
154+
91155
/// PartitionOpKind represents the different kinds of PartitionOps that
92156
/// SILInstructions can be translated to
93157
enum class PartitionOpKind : uint8_t {
@@ -142,7 +206,7 @@ class PartitionOp {
142206
: opKind(opKind), opArgs({arg1}), source(sourceOperand) {
143207
assert(((opKind != PartitionOpKind::Transfer &&
144208
opKind != PartitionOpKind::UndoTransfer) ||
145-
sourceOperand) &&
209+
bool(sourceOperand)) &&
146210
"Transfer needs a sourceInst");
147211
}
148212

@@ -304,7 +368,7 @@ class Partition {
304368
/// multi map here. The implication of this is that when we are performing
305369
/// dataflow we use a union operation to combine CFG elements and just take
306370
/// the first instruction that we see.
307-
llvm::SmallDenseMap<Region, Operand *, 2> regionToTransferredOpMap;
371+
llvm::SmallDenseMap<Region, TransferringOperand, 2> regionToTransferredOpMap;
308372

309373
public:
310374
Partition() : elementToRegionMap({}), canonical(true) {}
@@ -358,12 +422,12 @@ class Partition {
358422

359423
/// Mark val as transferred. Returns true if we inserted \p
360424
/// transferOperand. We return false otherwise.
361-
bool markTransferred(Element val, Operand *transferOperand) {
425+
bool markTransferred(Element val, TransferringOperand transferredOperand) {
362426
// First see if our val is tracked. If it is not tracked, insert it and mark
363427
// its new region as transferred.
364428
if (!isTracked(val)) {
365429
elementToRegionMap.insert_or_assign(val, fresh_label);
366-
regionToTransferredOpMap.insert({fresh_label, transferOperand});
430+
regionToTransferredOpMap.insert({fresh_label, transferredOperand});
367431
fresh_label = Region(fresh_label + 1);
368432
canonical = false;
369433
return true;
@@ -373,7 +437,7 @@ class Partition {
373437
auto iter1 = elementToRegionMap.find(val);
374438
assert(iter1 != elementToRegionMap.end());
375439
auto iter2 =
376-
regionToTransferredOpMap.try_emplace(iter1->second, transferOperand);
440+
regionToTransferredOpMap.try_emplace(iter1->second, transferredOperand);
377441
return iter2.second;
378442
}
379443

@@ -532,13 +596,30 @@ class Partition {
532596

533597
os << "[";
534598
for (auto [regionNo, elementNumbers] : multimap.getRange()) {
535-
bool isTransferred = regionToTransferredOpMap.count(regionNo);
536-
os << (isTransferred ? "{" : "(");
599+
auto iter = regionToTransferredOpMap.find(regionNo);
600+
bool isTransferred = iter != regionToTransferredOpMap.end();
601+
bool isClosureCaptured =
602+
isTransferred ? iter->getSecond().isClosureCaptured() : false;
603+
604+
if (isTransferred) {
605+
os << '{';
606+
if (isClosureCaptured)
607+
os << '*';
608+
} else {
609+
os << '(';
610+
}
611+
537612
int j = 0;
538613
for (Element i : elementNumbers) {
539614
os << (j++ ? " " : "") << i;
540615
}
541-
os << (isTransferred ? "}" : ")");
616+
if (isTransferred) {
617+
if (isClosureCaptured)
618+
os << '*';
619+
os << '}';
620+
} else {
621+
os << ')';
622+
}
542623
}
543624
os << "]\n";
544625
}
@@ -552,13 +633,13 @@ class Partition {
552633

553634
/// Return the instruction that transferred \p val's region or nullptr
554635
/// otherwise.
555-
Operand *getTransferred(Element val) const {
636+
std::optional<TransferringOperand> getTransferred(Element val) const {
556637
auto iter = elementToRegionMap.find(val);
557638
if (iter == elementToRegionMap.end())
558-
return nullptr;
639+
return std::nullopt;
559640
auto iter2 = regionToTransferredOpMap.find(iter->second);
560641
if (iter2 == regionToTransferredOpMap.end())
561-
return nullptr;
642+
return std::nullopt;
562643
return iter2->second;
563644
}
564645

@@ -642,7 +723,7 @@ class Partition {
642723
//
643724
// TODO: If we just used an array for this, we could just rewrite and
644725
// re-sort and not have to deal with potential allocations.
645-
llvm::SmallDenseMap<Region, Operand *, 2> oldMap =
726+
llvm::SmallDenseMap<Region, TransferringOperand, 2> oldMap =
646727
std::move(regionToTransferredOpMap);
647728
for (auto &[oldReg, op] : oldMap) {
648729
auto iter = oldRegionToRelabeledMap.find(oldReg);
@@ -676,17 +757,19 @@ class Partition {
676757
horizontalUpdate(elementToRegionMap, snd, fstRegion);
677758
auto iter = regionToTransferredOpMap.find(sndRegion);
678759
if (iter != regionToTransferredOpMap.end()) {
679-
regionToTransferredOpMap.try_emplace(fstRegion, iter->second);
760+
auto operand = iter->second;
680761
regionToTransferredOpMap.erase(iter);
762+
regionToTransferredOpMap.try_emplace(fstRegion, operand);
681763
}
682764
} else {
683765
result = sndRegion;
684766

685767
horizontalUpdate(elementToRegionMap, fst, sndRegion);
686768
auto iter = regionToTransferredOpMap.find(fstRegion);
687769
if (iter != regionToTransferredOpMap.end()) {
688-
regionToTransferredOpMap.try_emplace(sndRegion, iter->second);
770+
auto operand = iter->second;
689771
regionToTransferredOpMap.erase(iter);
772+
regionToTransferredOpMap.try_emplace(sndRegion, operand);
690773
}
691774
}
692775

@@ -751,8 +834,8 @@ struct PartitionOpEvaluator {
751834
/// 3. The operand of the instruction that originally transferred the
752835
/// region. Can be used to get the immediate value transferred or the
753836
/// transferring instruction.
754-
std::function<void(const PartitionOp &, Element, Operand *)> failureCallback =
755-
nullptr;
837+
std::function<void(const PartitionOp &, Element, TransferringOperand)>
838+
failureCallback = nullptr;
756839

757840
/// A list of elements that cannot be transferred. Whenever we transfer, we
758841
/// check this list to see if we are transferring the element and then call
@@ -771,11 +854,21 @@ struct PartitionOpEvaluator {
771854
/// transferred.
772855
std::function<bool(Element)> isActorDerivedCallback = nullptr;
773856

857+
/// Check if the representative value of \p elt is closure captured at \p
858+
/// op.
859+
///
860+
/// NOTE: We actually just use the user of \p op in our callbacks. The reason
861+
/// why we do not just pass in that SILInstruction is that then we would need
862+
/// to access the instruction in the evaluator which creates a problem
863+
/// since the operand we pass in is a dummy operand.
864+
std::function<bool(Element elt, Operand *op)> isClosureCapturedCallback =
865+
nullptr;
866+
774867
PartitionOpEvaluator(Partition &p) : p(p) {}
775868

776869
/// A wrapper around the failure callback that checks if it is nullptr.
777870
void handleFailure(const PartitionOp &op, Element elt,
778-
Operand *transferringOp) const {
871+
TransferringOperand transferringOp) const {
779872
if (!failureCallback)
780873
return;
781874
failureCallback(op, elt, transferringOp);
@@ -797,6 +890,14 @@ struct PartitionOpEvaluator {
797890
return bool(isActorDerivedCallback) && isActorDerivedCallback(elt);
798891
}
799892

893+
/// A wraper around isClosureCapturedCallback that returns false if
894+
/// isClosureCapturedCallback is nullptr and otherwise returns
895+
/// isClosureCapturedCallback.
896+
bool isClosureCaptured(Element elt, Operand *op) const {
897+
return bool(isClosureCapturedCallback) &&
898+
isClosureCapturedCallback(elt, op);
899+
}
900+
800901
/// Apply \p op to the partition op.
801902
void apply(const PartitionOp &op) const {
802903
if (emitLog) {
@@ -821,8 +922,8 @@ struct PartitionOpEvaluator {
821922
"Assign PartitionOp's source argument should be already tracked");
822923
// If we are using a region that was transferred as our assignment source
823924
// value... emit an error.
824-
if (auto *transferringInst = p.getTransferred(op.getOpArgs()[1]))
825-
handleFailure(op, op.getOpArgs()[1], transferringInst);
925+
if (auto transferredOperand = p.getTransferred(op.getOpArgs()[1]))
926+
handleFailure(op, op.getOpArgs()[1], *transferredOperand);
826927

827928
p.elementToRegionMap.insert_or_assign(
828929
op.getOpArgs()[0], p.elementToRegionMap.at(op.getOpArgs()[1]));
@@ -863,17 +964,22 @@ struct PartitionOpEvaluator {
863964
if (isActorDerived(op.getOpArgs()[0]))
864965
return handleTransferNonTransferrable(op, op.getOpArgs()[0]);
865966

967+
// While we are checking for actor derived, also check if our value or any
968+
// value in our region is closure captured and propagate that bit in our
969+
// transferred inst.
970+
bool isClosureCapturedElt =
971+
isClosureCaptured(op.getOpArgs()[0], op.getSourceOp());
972+
866973
Region elementRegion = p.elementToRegionMap.at(op.getOpArgs()[0]);
867-
if (llvm::any_of(p.elementToRegionMap,
868-
[&](const std::pair<Element, Region> &pair) -> bool {
869-
if (pair.second != elementRegion)
870-
return false;
871-
return isActorDerived(pair.first);
872-
}))
873-
return handleTransferNonTransferrable(op, op.getOpArgs()[0]);
974+
for (const auto &pair : p.elementToRegionMap) {
975+
if (pair.second == elementRegion && isActorDerived(pair.first))
976+
return handleTransferNonTransferrable(op, op.getOpArgs()[0]);
977+
isClosureCapturedElt |= isClosureCaptured(pair.first, op.getSourceOp());
978+
}
874979

875980
// Mark op.getOpArgs()[0] as transferred.
876-
p.markTransferred(op.getOpArgs()[0], op.getSourceOp());
981+
p.markTransferred(op.getOpArgs()[0],
982+
{op.getSourceOp(), isClosureCapturedElt});
877983
return;
878984
}
879985
case PartitionOpKind::UndoTransfer: {
@@ -894,10 +1000,10 @@ struct PartitionOpEvaluator {
8941000
"Merge PartitionOp's arguments should already be tracked");
8951001

8961002
// if attempting to merge a transferred region, handle the failure
897-
if (auto *transferringInst = p.getTransferred(op.getOpArgs()[0]))
898-
handleFailure(op, op.getOpArgs()[0], transferringInst);
899-
if (auto *transferringInst = p.getTransferred(op.getOpArgs()[1]))
900-
handleFailure(op, op.getOpArgs()[1], transferringInst);
1003+
if (auto transferringOp = p.getTransferred(op.getOpArgs()[0]))
1004+
handleFailure(op, op.getOpArgs()[0], *transferringOp);
1005+
if (auto transferringOp = p.getTransferred(op.getOpArgs()[1]))
1006+
handleFailure(op, op.getOpArgs()[1], *transferringOp);
9011007

9021008
p.merge(op.getOpArgs()[0], op.getOpArgs()[1]);
9031009
return;
@@ -906,8 +1012,8 @@ struct PartitionOpEvaluator {
9061012
"Require PartitionOp should be passed 1 argument");
9071013
assert(p.elementToRegionMap.count(op.getOpArgs()[0]) &&
9081014
"Require PartitionOp's argument should already be tracked");
909-
if (auto *transferringInst = p.getTransferred(op.getOpArgs()[0]))
910-
handleFailure(op, op.getOpArgs()[0], transferringInst);
1015+
if (auto transferringOp = p.getTransferred(op.getOpArgs()[0]))
1016+
handleFailure(op, op.getOpArgs()[0], *transferringOp);
9111017
return;
9121018
}
9131019

0 commit comments

Comments
 (0)