Skip to content

Commit 8e09cd2

Browse files
authored
Merge pull request #25864 from Catfish-Man/no-objc-complications-once-more-into-the-breach
Update fast dealloc to use new-style interposing and support objc weak refs
2 parents 82a9f13 + f36a4db commit 8e09cd2

File tree

9 files changed

+211
-118
lines changed

9 files changed

+211
-118
lines changed

include/swift/Runtime/HeapObject.h

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1086,4 +1086,53 @@ swift_getTypeName(const Metadata *type, bool qualified);
10861086

10871087
} // end namespace swift
10881088

1089+
#if SWIFT_OBJC_INTEROP
1090+
/// Standard ObjC lifecycle methods for Swift objects
1091+
#define STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS \
1092+
- (id)retain { \
1093+
auto SELF = reinterpret_cast<HeapObject *>(self); \
1094+
swift_retain(SELF); \
1095+
return self; \
1096+
} \
1097+
- (void)release { \
1098+
auto SELF = reinterpret_cast<HeapObject *>(self); \
1099+
swift_release(SELF); \
1100+
} \
1101+
- (id)autorelease { \
1102+
return _objc_rootAutorelease(self); \
1103+
} \
1104+
- (NSUInteger)retainCount { \
1105+
return swift::swift_retainCount(reinterpret_cast<HeapObject *>(self)); \
1106+
} \
1107+
- (BOOL)_isDeallocating { \
1108+
return swift_isDeallocating(reinterpret_cast<HeapObject *>(self)); \
1109+
} \
1110+
- (BOOL)_tryRetain { \
1111+
return swift_tryRetain(reinterpret_cast<HeapObject*>(self)) != nullptr; \
1112+
} \
1113+
- (BOOL)allowsWeakReference { \
1114+
return !swift_isDeallocating(reinterpret_cast<HeapObject *>(self)); \
1115+
} \
1116+
- (BOOL)retainWeakReference { \
1117+
return swift_tryRetain(reinterpret_cast<HeapObject*>(self)) != nullptr; \
1118+
} \
1119+
- (void)_setWeaklyReferenced { \
1120+
auto heapObj = reinterpret_cast<HeapObject *>(self); \
1121+
heapObj->refCounts.setPureSwiftDeallocation(false); \
1122+
} \
1123+
- (bool)_setAssociatedObject:(id)obj \
1124+
forKey:(const void *)key \
1125+
associationPolicy:(objc_AssociationPolicy)policy { \
1126+
auto heapObj = reinterpret_cast<HeapObject *>(self); \
1127+
heapObj->refCounts.setPureSwiftDeallocation(false); \
1128+
/* false to let libobjc know it still needs to associate the object */ \
1129+
return false; \
1130+
} \
1131+
- (void)dealloc { \
1132+
swift_rootObjCDealloc(reinterpret_cast<HeapObject *>(self)); \
1133+
}
1134+
1135+
#endif // SWIFT_OBJC_INTEROP
1136+
1137+
10891138
#endif // SWIFT_RUNTIME_ALLOC_H

stdlib/public/SwiftShims/RefCount.h

Lines changed: 99 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -238,14 +238,29 @@ struct RefCountBitOffsets;
238238
// 32-bit out of line
239239
template <>
240240
struct RefCountBitOffsets<8> {
241-
static const size_t IsImmortalShift = 0;
242-
static const size_t IsImmortalBitCount = 1;
243-
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
244-
245-
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
241+
/*
242+
The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
243+
field are effectively a union of two different configurations:
244+
245+
---Normal case---
246+
Bit 0: Does this object need to call out to the ObjC runtime for deallocation
247+
Bits 1-31: Unowned refcount
248+
249+
---Immortal case---
250+
All bits set, the object does not deallocate or have a refcount
251+
*/
252+
static const size_t PureSwiftDeallocShift = 0;
253+
static const size_t PureSwiftDeallocBitCount = 1;
254+
static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
255+
256+
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
246257
static const size_t UnownedRefCountBitCount = 31;
247258
static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
248259

260+
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
261+
static const size_t IsImmortalBitCount = 32;
262+
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
263+
249264
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
250265
static const size_t IsDeinitingBitCount = 1;
251266
static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -271,14 +286,18 @@ struct RefCountBitOffsets<8> {
271286
// 32-bit inline
272287
template <>
273288
struct RefCountBitOffsets<4> {
274-
static const size_t IsImmortalShift = 0;
275-
static const size_t IsImmortalBitCount = 1;
276-
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
289+
static const size_t PureSwiftDeallocShift = 0;
290+
static const size_t PureSwiftDeallocBitCount = 1;
291+
static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
277292

278-
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
293+
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
279294
static const size_t UnownedRefCountBitCount = 7;
280295
static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
281296

297+
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
298+
static const size_t IsImmortalBitCount = 8;
299+
static const uint32_t IsImmortalMask = maskForField(IsImmortal);
300+
282301
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
283302
static const size_t IsDeinitingBitCount = 1;
284303
static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -369,33 +388,56 @@ class RefCountBitsT {
369388
enum Immortal_t { Immortal };
370389

371390
LLVM_ATTRIBUTE_ALWAYS_INLINE
372-
bool isImmortal() const {
373-
return bool(getField(IsImmortal));
391+
bool isImmortal(bool checkSlowRCBit) const {
392+
if (checkSlowRCBit) {
393+
return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
394+
bool(getField(UseSlowRC));
395+
} else {
396+
return (getField(IsImmortal) == Offsets::IsImmortalMask);
397+
}
398+
}
399+
400+
LLVM_ATTRIBUTE_ALWAYS_INLINE
401+
bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
402+
auto newValue = getUnownedRefCount();
403+
return newValue != oldValue + inc ||
404+
newValue == Offsets::UnownedRefCountMask;
374405
}
375406

376407
LLVM_ATTRIBUTE_ALWAYS_INLINE
377408
void setIsImmortal(bool value) {
378-
setField(IsImmortal, value);
409+
assert(value);
410+
setField(IsImmortal, Offsets::IsImmortalMask);
379411
setField(UseSlowRC, value);
380412
}
381413

414+
LLVM_ATTRIBUTE_ALWAYS_INLINE
415+
bool pureSwiftDeallocation() const {
416+
return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
417+
}
418+
419+
LLVM_ATTRIBUTE_ALWAYS_INLINE
420+
void setPureSwiftDeallocation(bool value) {
421+
setField(PureSwiftDealloc, value);
422+
}
423+
382424
LLVM_ATTRIBUTE_ALWAYS_INLINE
383425
RefCountBitsT() = default;
384426

385427
LLVM_ATTRIBUTE_ALWAYS_INLINE
386428
constexpr
387429
RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
388430
: bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
431+
(BitsType(1) << Offsets::PureSwiftDeallocShift) |
389432
(BitsType(unownedCount) << Offsets::UnownedRefCountShift))
390433
{ }
391434

392435
LLVM_ATTRIBUTE_ALWAYS_INLINE
393436
constexpr
394437
RefCountBitsT(Immortal_t immortal)
395-
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
396-
(BitsType(2) << Offsets::UnownedRefCountShift) |
397-
(BitsType(1) << Offsets::IsImmortalShift) |
398-
(BitsType(1) << Offsets::UseSlowRCShift))
438+
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
439+
(BitsType(Offsets::IsImmortalMask)) |
440+
(BitsType(1) << Offsets::UseSlowRCShift))
399441
{ }
400442

401443
LLVM_ATTRIBUTE_ALWAYS_INLINE
@@ -433,7 +475,7 @@ class RefCountBitsT {
433475

434476
LLVM_ATTRIBUTE_ALWAYS_INLINE
435477
bool hasSideTable() const {
436-
bool hasSide = getUseSlowRC() && !isImmortal();
478+
bool hasSide = getUseSlowRC() && !isImmortal(false);
437479

438480
// Side table refcount must not point to another side table.
439481
assert((refcountIsInline || !hasSide) &&
@@ -523,7 +565,7 @@ class RefCountBitsT {
523565
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
524566
bool decrementStrongExtraRefCount(uint32_t dec) {
525567
#ifndef NDEBUG
526-
if (!hasSideTable() && !isImmortal()) {
568+
if (!hasSideTable() && !isImmortal(false)) {
527569
// Can't check these assertions with side table present.
528570

529571
if (getIsDeiniting())
@@ -558,7 +600,7 @@ class RefCountBitsT {
558600
static_assert(Offsets::UnownedRefCountBitCount +
559601
Offsets::IsDeinitingBitCount +
560602
Offsets::StrongExtraRefCountBitCount +
561-
Offsets::IsImmortalBitCount +
603+
Offsets::PureSwiftDeallocBitCount +
562604
Offsets::UseSlowRCBitCount == sizeof(bits)*8,
563605
"inspect isUniquelyReferenced after adding fields");
564606

@@ -715,7 +757,7 @@ class RefCounts {
715757

716758
void setIsImmortal(bool immortal) {
717759
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
718-
if (oldbits.isImmortal()) {
760+
if (oldbits.isImmortal(true)) {
719761
return;
720762
}
721763
RefCountBits newbits;
@@ -725,7 +767,28 @@ class RefCounts {
725767
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
726768
std::memory_order_relaxed));
727769
}
728-
770+
771+
void setPureSwiftDeallocation(bool nonobjc) {
772+
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
773+
//Immortal and no objc complications share a bit, so don't let setting
774+
//the complications one clear the immortal one
775+
if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){
776+
assert(!oldbits.hasSideTable());
777+
return;
778+
}
779+
RefCountBits newbits;
780+
do {
781+
newbits = oldbits;
782+
newbits.setPureSwiftDeallocation(nonobjc);
783+
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
784+
std::memory_order_relaxed));
785+
}
786+
787+
bool getPureSwiftDeallocation() {
788+
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
789+
return bits.pureSwiftDeallocation();
790+
}
791+
729792
// Initialize from another refcount bits.
730793
// Only inline -> out-of-line is allowed (used for new side table entries).
731794
void init(InlineRefCountBits newBits) {
@@ -740,7 +803,7 @@ class RefCounts {
740803
newbits = oldbits;
741804
bool fast = newbits.incrementStrongExtraRefCount(inc);
742805
if (SWIFT_UNLIKELY(!fast)) {
743-
if (oldbits.isImmortal())
806+
if (oldbits.isImmortal(false))
744807
return;
745808
return incrementSlow(oldbits, inc);
746809
}
@@ -753,7 +816,7 @@ class RefCounts {
753816
auto newbits = oldbits;
754817
bool fast = newbits.incrementStrongExtraRefCount(inc);
755818
if (SWIFT_UNLIKELY(!fast)) {
756-
if (oldbits.isImmortal())
819+
if (oldbits.isImmortal(false))
757820
return;
758821
return incrementNonAtomicSlow(oldbits, inc);
759822
}
@@ -771,7 +834,7 @@ class RefCounts {
771834
newbits = oldbits;
772835
bool fast = newbits.incrementStrongExtraRefCount(1);
773836
if (SWIFT_UNLIKELY(!fast)) {
774-
if (oldbits.isImmortal())
837+
if (oldbits.isImmortal(false))
775838
return true;
776839
return tryIncrementSlow(oldbits);
777840
}
@@ -788,7 +851,7 @@ class RefCounts {
788851
auto newbits = oldbits;
789852
bool fast = newbits.incrementStrongExtraRefCount(1);
790853
if (SWIFT_UNLIKELY(!fast)) {
791-
if (oldbits.isImmortal())
854+
if (oldbits.isImmortal(false))
792855
return true;
793856
return tryIncrementNonAtomicSlow(oldbits);
794857
}
@@ -824,7 +887,7 @@ class RefCounts {
824887
// Precondition: the reference count must be 1
825888
void decrementFromOneNonAtomic() {
826889
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
827-
if (bits.isImmortal()) {
890+
if (bits.isImmortal(true)) {
828891
return;
829892
}
830893
if (bits.hasSideTable())
@@ -922,7 +985,7 @@ class RefCounts {
922985
// Decrement completed normally. New refcount is not zero.
923986
deinitNow = false;
924987
}
925-
else if (oldbits.isImmortal()) {
988+
else if (oldbits.isImmortal(false)) {
926989
return false;
927990
} else if (oldbits.hasSideTable()) {
928991
// Decrement failed because we're on some other slow path.
@@ -961,7 +1024,7 @@ class RefCounts {
9611024
// Decrement completed normally. New refcount is not zero.
9621025
deinitNow = false;
9631026
}
964-
else if (oldbits.isImmortal()) {
1027+
else if (oldbits.isImmortal(false)) {
9651028
return false;
9661029
}
9671030
else if (oldbits.hasSideTable()) {
@@ -1001,7 +1064,7 @@ class RefCounts {
10011064
bool fast =
10021065
newbits.decrementStrongExtraRefCount(dec);
10031066
if (SWIFT_UNLIKELY(!fast)) {
1004-
if (oldbits.isImmortal()) {
1067+
if (oldbits.isImmortal(false)) {
10051068
return false;
10061069
}
10071070
// Slow paths include side table; deinit; underflow
@@ -1025,7 +1088,7 @@ class RefCounts {
10251088
// Increment the unowned reference count.
10261089
void incrementUnowned(uint32_t inc) {
10271090
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1028-
if (oldbits.isImmortal())
1091+
if (oldbits.isImmortal(true))
10291092
return;
10301093
RefCountBits newbits;
10311094
do {
@@ -1037,7 +1100,7 @@ class RefCounts {
10371100
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
10381101

10391102
// Check overflow and use the side table on overflow.
1040-
if (newbits.getUnownedRefCount() != oldValue + inc)
1103+
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
10411104
return incrementUnownedSlow(inc);
10421105

10431106
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
@@ -1046,7 +1109,7 @@ class RefCounts {
10461109

10471110
void incrementUnownedNonAtomic(uint32_t inc) {
10481111
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1049-
if (oldbits.isImmortal())
1112+
if (oldbits.isImmortal(true))
10501113
return;
10511114
if (oldbits.hasSideTable())
10521115
return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1056,7 +1119,7 @@ class RefCounts {
10561119
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
10571120

10581121
// Check overflow and use the side table on overflow.
1059-
if (newbits.getUnownedRefCount() != oldValue + inc)
1122+
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
10601123
return incrementUnownedSlow(inc);
10611124

10621125
refCounts.store(newbits, std::memory_order_relaxed);
@@ -1066,7 +1129,7 @@ class RefCounts {
10661129
// Return true if the caller should free the object.
10671130
bool decrementUnownedShouldFree(uint32_t dec) {
10681131
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1069-
if (oldbits.isImmortal())
1132+
if (oldbits.isImmortal(true))
10701133
return false;
10711134
RefCountBits newbits;
10721135

@@ -1094,7 +1157,7 @@ class RefCounts {
10941157

10951158
bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
10961159
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1097-
if (oldbits.isImmortal())
1160+
if (oldbits.isImmortal(true))
10981161
return false;
10991162
if (oldbits.hasSideTable())
11001163
return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1383,7 +1446,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
13831446
auto newbits = oldbits;
13841447
bool fast = newbits.decrementStrongExtraRefCount(dec);
13851448
if (!fast) {
1386-
if (oldbits.isImmortal()) {
1449+
if (oldbits.isImmortal(false)) {
13871450
return false;
13881451
}
13891452
return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);

0 commit comments

Comments
 (0)