
Commit 13b58a0

[Runtime] Fix strong and unowned refcount overflow on 32-bit.
Fix overflow detection on unowned refcounts so that we create a side table when incrementing from 126, and implement strong refcount overflow into the side table.

The unowned refcount is never supposed to reach 127, because that value (sometimes) represents the immortal refcount. We attempt to detect that case by checking newValue == Offsets::UnownedRefCountMask, but the mask is shifted, so that condition is never true. We still managed to hit the side table case when incrementing from 127, because that state looks like the immortal case. That broke when we fixed immortal side table initialization in b41079a8f54ae2d61c68cdda46c74232084af020: with that change, we now create an immortal side table when overflowing the unowned refcount, then try to increment the unowned refcount in that immortal side table, which traps.

rdar://123788910
1 parent be1551f commit 13b58a0
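To make the masking bug concrete, here is a minimal, self-contained sketch. The field width and shift below are assumed for illustration only, not the runtime's actual Offsets values; the point is that the extracted count and the in-place mask live in different "units":

#include <cassert>
#include <cstdint>

// Assumed layout for illustration: a 7-bit unowned refcount field stored
// at bit 1 of the packed bits word.
static const uint32_t Shift = 1;
static const uint32_t Mask  = 0x7Fu << Shift;   // in-place mask: 0xFE

// The getter returns the shifted-down field value, 0...127.
static uint32_t getUnownedRefCount(uint32_t bits) {
  return (bits & Mask) >> Shift;
}

int main() {
  uint32_t bits = 127u << Shift;        // field holds 127 (the immortal value)
  uint32_t newValue = getUnownedRefCount(bits);

  assert(newValue != Mask);             // old check: 0x7F vs 0xFE, never equal
  assert(newValue == (Mask >> Shift));  // fixed check: compare in field units
  return 0;
}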

5 files changed: +373 -97 lines changed

stdlib/public/SwiftShims/swift/shims/RefCount.h

Lines changed: 15 additions & 3 deletions
@@ -397,7 +397,7 @@ class RefCountBitsT {
   bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
     auto newValue = getUnownedRefCount();
     return newValue != oldValue + inc ||
-           newValue == Offsets::UnownedRefCountMask;
+           newValue == Offsets::UnownedRefCountMask >> Offsets::UnownedRefCountShift;
   }

   SWIFT_ALWAYS_INLINE
@@ -582,6 +582,12 @@ class RefCountBitsT {
     }
 #endif

+    // If we're decrementing by more than 1, then this underflow might end up
+    // subtracting away an existing 1 in UseSlowRC. Check that separately. This
+    // check should be constant folded away for the swift_release case.
+    if (dec > 1 && getUseSlowRC())
+      return false;
+
     // This deliberately underflows by borrowing from the UseSlowRC field.
     bits -= BitsType(dec) << Offsets::StrongExtraRefCountShift;
     return (SignedBitsType(bits) >= 0);
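To see the borrow this new guard blocks, a minimal sketch (the bit positions are assumed for illustration, not the runtime's actual layout): with UseSlowRC already set in the sign bit, subtracting dec << shift for dec > 1 can borrow the 1 away and make the result look non-negative, i.e. like a successful decrement:

#include <cassert>
#include <cstdint>

int main() {
  // Assumed positions for illustration only.
  const int StrongExtraRefCountShift = 33;   // count field starts here
  const uint64_t UseSlowRC = 1ull << 63;     // flag in the sign bit

  // Side-table-style bits: UseSlowRC set, 1 in the count field.
  uint64_t bits = UseSlowRC | (1ull << StrongExtraRefCountShift);

  // dec == 2: the borrow runs past the count field and consumes the
  // existing 1 in UseSlowRC, so the sign test wrongly reports success.
  uint64_t after2 = bits - (2ull << StrongExtraRefCountShift);
  assert((int64_t)after2 >= 0);

  // dec == 1 cannot borrow past the nonzero count field here, so the
  // sign bit survives and the slow path is taken as intended.
  uint64_t after1 = bits - (1ull << StrongExtraRefCountShift);
  assert((int64_t)after1 < 0);
  return 0;
}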
@@ -1007,8 +1013,8 @@ class RefCounts {
   bool doDecrementSlow(RefCountBits oldbits, uint32_t dec) {
     RefCountBits newbits;

-    // constant propagation will remove this in swift_release, it should only
-    // be present in swift_release_n
+    // Constant propagation will remove this in swift_release, it should only
+    // be present in swift_release_n.
     if (dec != 1 && oldbits.isImmortal(true)) {
       return false;
     }
@@ -1306,6 +1312,8 @@ class RefCounts {
     return refCounts.load(std::memory_order_relaxed).getBitsValue();
   }

+  void dump() const;
+
 private:
   HeapObject *getHeapObject();

@@ -1505,6 +1513,10 @@ class HeapObjectSideTableEntry {
     immutableCOWBuffer = immutable;
   }
 #endif
+
+  void dumpRefCounts() {
+    refCounts.dump();
+  }
 };
stdlib/public/runtime/RefCount.cpp

Lines changed: 86 additions & 59 deletions
@@ -14,8 +14,56 @@

 namespace swift {

-template <typename RefCountBits>
-HeapObject *RefCounts<RefCountBits>::incrementSlow(RefCountBits oldbits,
+// Return an object's side table, allocating it if necessary.
+// Returns null if the object is deiniting.
+// SideTableRefCountBits specialization intentionally does not exist.
+template <>
+HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
+{
+  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
+
+  // Preflight failures before allocating a new side table.
+  if (oldbits.hasSideTable()) {
+    // Already have a side table. Return it.
+    return oldbits.getSideTable();
+  }
+  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
+    // Already past the start of deinit. Do nothing.
+    return nullptr;
+  }
+
+  // Preflight passed. Allocate a side table.
+
+  // FIXME: custom side table allocator
+  auto side = swift_cxx_newObject<HeapObjectSideTableEntry>(getHeapObject());
+
+  auto newbits = InlineRefCountBits(side);
+
+  do {
+    if (oldbits.hasSideTable()) {
+      // Already have a side table. Return it and delete ours.
+      // Read before delete to streamline barriers.
+      auto result = oldbits.getSideTable();
+      swift_cxx_deleteObject(side);
+      return result;
+    }
+    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
+      // Already past the start of deinit. Do nothing.
+      return nullptr;
+    }
+
+    side->initRefCounts(oldbits);
+
+  } while (! refCounts.compare_exchange_weak(oldbits, newbits,
+                                             std::memory_order_release,
+                                             std::memory_order_relaxed));
+
+  return side;
+}
+
+
+template <>
+HeapObject *RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits,
                                                    uint32_t n) {
   if (oldbits.isImmortal(false)) {
     return getHeapObject();
@@ -25,21 +73,28 @@ HeapObject *RefCounts<RefCountBits>::incrementSlow(RefCountBits oldbits,
     auto side = oldbits.getSideTable();
     side->incrementStrong(n);
   }
+  else {
+    // Overflow into a new side table.
+    auto side = allocateSideTable(false);
+    side->incrementStrong(n);
+  }
+  return getHeapObject();
+}
+template <>
+HeapObject *RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits,
+                                                            uint32_t n) {
+  if (oldbits.isImmortal(false)) {
+    return getHeapObject();
+  }
   else {
     // Retain count overflow.
     swift::swift_abortRetainOverflow();
   }
   return getHeapObject();
 }
-template HeapObject *
-RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits,
-                                             uint32_t n);
-template HeapObject *
-RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits,
-                                                uint32_t n);

-template <typename RefCountBits>
-void RefCounts<RefCountBits>::incrementNonAtomicSlow(RefCountBits oldbits,
+template <>
+void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits,
                                                      uint32_t n) {
   if (oldbits.isImmortal(false)) {
     return;
@@ -48,12 +103,20 @@ void RefCounts<RefCountBits>::incrementNonAtomicSlow(RefCountBits oldbits,
     // Out-of-line slow path.
     auto side = oldbits.getSideTable();
     side->incrementStrong(n);  // FIXME: can there be a nonatomic impl?
+  } else {
+    // Overflow into a new side table.
+    auto side = allocateSideTable(false);
+    side->incrementStrong(n);  // FIXME: can there be a nonatomic impl?
+  }
+}
+template <>
+void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n) {
+  if (oldbits.isImmortal(false)) {
+    return;
   } else {
     swift::swift_abortRetainOverflow();
   }
 }
-template void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits, uint32_t n);
-template void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n);

 template <typename RefCountBits>
 bool RefCounts<RefCountBits>::tryIncrementSlow(RefCountBits oldbits) {
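One C++ mechanics note on the hunks above: the old code compiled a single generic definition and explicitly instantiated it for both bits types; the new code uses explicit specializations, so the inline and side-table cases get genuinely different bodies (only the inline one may overflow into a side table). A standalone sketch of the distinction, with illustrative names only:

#include <cstdio>

template <typename T>
struct Counter {
  void bump();   // declared for every T; bodies provided per-type below
};

// Explicit specializations: each type gets its own definition, which is
// how incrementSlow/incrementNonAtomicSlow now diverge per bits type.
template <>
void Counter<int>::bump() { std::puts("inline-style path"); }

template <>
void Counter<long>::bump() { std::puts("side-table-style path"); }

int main() {
  Counter<int>{}.bump();    // prints "inline-style path"
  Counter<long>{}.bump();   // prints "side-table-style path"
  return 0;
}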
@@ -81,53 +144,6 @@ bool RefCounts<RefCountBits>::tryIncrementNonAtomicSlow(RefCountBits oldbits) {
 template bool RefCounts<InlineRefCountBits>::tryIncrementNonAtomicSlow(InlineRefCountBits oldbits);
 template bool RefCounts<SideTableRefCountBits>::tryIncrementNonAtomicSlow(SideTableRefCountBits oldbits);

-// Return an object's side table, allocating it if necessary.
-// Returns null if the object is deiniting.
-// SideTableRefCountBits specialization intentionally does not exist.
-template <>
-HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
-{
-  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-
-  // Preflight failures before allocating a new side table.
-  if (oldbits.hasSideTable()) {
-    // Already have a side table. Return it.
-    return oldbits.getSideTable();
-  }
-  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
-    // Already past the start of deinit. Do nothing.
-    return nullptr;
-  }
-
-  // Preflight passed. Allocate a side table.
-
-  // FIXME: custom side table allocator
-  auto side = swift_cxx_newObject<HeapObjectSideTableEntry>(getHeapObject());
-
-  auto newbits = InlineRefCountBits(side);
-
-  do {
-    if (oldbits.hasSideTable()) {
-      // Already have a side table. Return it and delete ours.
-      // Read before delete to streamline barriers.
-      auto result = oldbits.getSideTable();
-      swift_cxx_deleteObject(side);
-      return result;
-    }
-    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
-      // Already past the start of deinit. Do nothing.
-      return nullptr;
-    }
-
-    side->initRefCounts(oldbits);
-
-  } while (! refCounts.compare_exchange_weak(oldbits, newbits,
-                                             std::memory_order_release,
-                                             std::memory_order_relaxed));
-  return side;
-}
-
 // SideTableRefCountBits specialization intentionally does not exist.
 template <>
 HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
@@ -183,6 +199,17 @@ bool RefCounts<InlineRefCountBits>::setIsImmutableCOWBuffer(bool immutable) {

 #endif

+template <typename RefCountBits>
+void RefCounts<RefCountBits>::dump() const {
+  printf("Location: %p\n", this);
+  printf("Strong Ref Count: %d.\n", getCount());
+  printf("Unowned Ref Count: %d.\n", getUnownedCount());
+  printf("Weak Ref Count: %d.\n", getWeakCount());
+  printf("RefCount Side Table: %p.\n", getSideTable());
+  printf("Is Deiniting: %s.\n", isDeiniting() ? "true" : "false");
+  printf("Is Immortal: %s.\n", refCounts.load().isImmortal(false) ? "true" : "false");
+}
+
 // namespace swift
 } // namespace swift
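The new dump() is a debugging aid. A hypothetical snippet of runtime-internal code showing how it might be called (the debugPrintObject name is invented here; HeapObject's refCounts field is the inline RefCounts instance):

// Hypothetical runtime-internal helper, assuming a live HeapObject.
#include "swift/Runtime/HeapObject.h"

static void debugPrintObject(swift::HeapObject *object) {
  // Prints location, strong/unowned/weak counts, the side table pointer,
  // and the deiniting/immortal flags, per the dump() definition above.
  object->refCounts.dump();
}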

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+#include <stdint.h>
+
+void *swift_retain_n(void *, uint32_t);
+void swift_release_n(void *, uint32_t);
+void *swift_nonatomic_retain_n(void *, uint32_t);
+void swift_nonatomic_release_n(void *, uint32_t);
+
+void *swift_unownedRetain_n(void *, uint32_t);
+void swift_unownedRelease_n(void *, uint32_t);
+void *swift_nonatomic_unownedRetain_n(void *, uint32_t);
+void swift_nonatomic_unownedRelease_n(void *, uint32_t);
+
+// Wrappers so we can call these from Swift without upsetting the ARC optimizer.
+void *wrapper_swift_retain_n(void *obj, uint32_t n) {
+  return swift_retain_n(obj, n);
+}
+
+void wrapper_swift_release_n(void *obj, uint32_t n) {
+  swift_release_n(obj, n);
+}
+
+void *wrapper_swift_nonatomic_retain_n(void *obj, uint32_t n) {
+  return swift_nonatomic_retain_n(obj, n);
+}
+
+void wrapper_swift_nonatomic_release_n(void *obj, uint32_t n) {
+  swift_nonatomic_release_n(obj, n);
+}
+
+void *wrapper_swift_unownedRetain_n(void *obj, uint32_t n) {
+  return swift_unownedRetain_n(obj, n);
+}
+
+void wrapper_swift_unownedRelease_n(void *obj, uint32_t n) {
+  swift_unownedRelease_n(obj, n);
+}
+
+void *wrapper_swift_nonatomic_unownedRetain_n(void *obj, uint32_t n) {
+  return swift_nonatomic_unownedRetain_n(obj, n);
+}
+
+void wrapper_swift_nonatomic_unownedRelease_n(void *obj, uint32_t n) {
+  swift_nonatomic_unownedRelease_n(obj, n);
+}
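These wrappers are presumably driven from a Swift test. As a rough illustration in C++ (the driver function and the count of 130 are assumptions, and obtaining a valid Swift heap object is out of scope here), crossing the 126 boundary would look something like:

#include <stdint.h>

extern "C" void *wrapper_swift_unownedRetain_n(void *, uint32_t);
extern "C" void wrapper_swift_unownedRelease_n(void *, uint32_t);

// Hypothetical driver: push the inline unowned refcount past 126 so the
// runtime must overflow into a side table instead of trapping, then undo.
void stressUnownedOverflow(void *swiftObject) {
  wrapper_swift_unownedRetain_n(swiftObject, 130);   // crosses 126
  wrapper_swift_unownedRelease_n(swiftObject, 130);
}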
