Skip to content

Commit e3da6c3

Browse files
committed
Revert "Revert "Use the remaining half bit in the refcount to bypass ObjC deallocation overhead""
And add availability checking for back deployment. This reverts commit 817ea12. (cherry picked from commit c512946)
1 parent 500333c commit e3da6c3

File tree

4 files changed

+147
-45
lines changed

4 files changed

+147
-45
lines changed

stdlib/public/SwiftShims/RefCount.h

Lines changed: 99 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -238,14 +238,29 @@ struct RefCountBitOffsets;
238238
// 32-bit out of line
239239
template <>
240240
struct RefCountBitOffsets<8> {
241-
static const size_t IsImmortalShift = 0;
242-
static const size_t IsImmortalBitCount = 1;
243-
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
244-
245-
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
241+
/*
242+
The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
243+
field are effectively a union of two different configurations:
244+
245+
---Normal case---
246+
Bit 0: Does this object need to call out to the ObjC runtime for deallocation
247+
Bits 1-31: Unowned refcount
248+
249+
---Immortal case---
250+
All bits set, the object does not deallocate or have a refcount
251+
*/
252+
static const size_t PureSwiftDeallocShift = 0;
253+
static const size_t PureSwiftDeallocBitCount = 1;
254+
static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
255+
256+
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
246257
static const size_t UnownedRefCountBitCount = 31;
247258
static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
248259

260+
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
261+
static const size_t IsImmortalBitCount = 32;
262+
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
263+
249264
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
250265
static const size_t IsDeinitingBitCount = 1;
251266
static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -271,14 +286,18 @@ struct RefCountBitOffsets<8> {
271286
// 32-bit inline
272287
template <>
273288
struct RefCountBitOffsets<4> {
274-
static const size_t IsImmortalShift = 0;
275-
static const size_t IsImmortalBitCount = 1;
276-
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
289+
static const size_t PureSwiftDeallocShift = 0;
290+
static const size_t PureSwiftDeallocBitCount = 1;
291+
static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
277292

278-
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
293+
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
279294
static const size_t UnownedRefCountBitCount = 7;
280295
static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
281296

297+
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
298+
static const size_t IsImmortalBitCount = 8;
299+
static const uint32_t IsImmortalMask = maskForField(IsImmortal);
300+
282301
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
283302
static const size_t IsDeinitingBitCount = 1;
284303
static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -369,33 +388,56 @@ class RefCountBitsT {
369388
enum Immortal_t { Immortal };
370389

371390
LLVM_ATTRIBUTE_ALWAYS_INLINE
372-
bool isImmortal() const {
373-
return bool(getField(IsImmortal));
391+
bool isImmortal(bool checkSlowRCBit) const {
392+
if (checkSlowRCBit) {
393+
return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
394+
bool(getField(UseSlowRC));
395+
} else {
396+
return (getField(IsImmortal) == Offsets::IsImmortalMask);
397+
}
398+
}
399+
400+
LLVM_ATTRIBUTE_ALWAYS_INLINE
401+
bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
402+
auto newValue = getUnownedRefCount();
403+
return newValue != oldValue + inc ||
404+
newValue == Offsets::UnownedRefCountMask;
374405
}
375406

376407
LLVM_ATTRIBUTE_ALWAYS_INLINE
377408
void setIsImmortal(bool value) {
378-
setField(IsImmortal, value);
409+
assert(value);
410+
setField(IsImmortal, Offsets::IsImmortalMask);
379411
setField(UseSlowRC, value);
380412
}
381413

414+
LLVM_ATTRIBUTE_ALWAYS_INLINE
415+
bool pureSwiftDeallocation() const {
416+
return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
417+
}
418+
419+
LLVM_ATTRIBUTE_ALWAYS_INLINE
420+
void setPureSwiftDeallocation(bool value) {
421+
setField(PureSwiftDealloc, value);
422+
}
423+
382424
LLVM_ATTRIBUTE_ALWAYS_INLINE
383425
RefCountBitsT() = default;
384426

385427
LLVM_ATTRIBUTE_ALWAYS_INLINE
386428
constexpr
387429
RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
388430
: bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
431+
(BitsType(1) << Offsets::PureSwiftDeallocShift) |
389432
(BitsType(unownedCount) << Offsets::UnownedRefCountShift))
390433
{ }
391434

392435
LLVM_ATTRIBUTE_ALWAYS_INLINE
393436
constexpr
394437
RefCountBitsT(Immortal_t immortal)
395-
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
396-
(BitsType(2) << Offsets::UnownedRefCountShift) |
397-
(BitsType(1) << Offsets::IsImmortalShift) |
398-
(BitsType(1) << Offsets::UseSlowRCShift))
438+
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
439+
(BitsType(Offsets::IsImmortalMask)) |
440+
(BitsType(1) << Offsets::UseSlowRCShift))
399441
{ }
400442

401443
LLVM_ATTRIBUTE_ALWAYS_INLINE
@@ -433,7 +475,7 @@ class RefCountBitsT {
433475

434476
LLVM_ATTRIBUTE_ALWAYS_INLINE
435477
bool hasSideTable() const {
436-
bool hasSide = getUseSlowRC() && !isImmortal();
478+
bool hasSide = getUseSlowRC() && !isImmortal(false);
437479

438480
// Side table refcount must not point to another side table.
439481
assert((refcountIsInline || !hasSide) &&
@@ -523,7 +565,7 @@ class RefCountBitsT {
523565
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
524566
bool decrementStrongExtraRefCount(uint32_t dec) {
525567
#ifndef NDEBUG
526-
if (!hasSideTable() && !isImmortal()) {
568+
if (!hasSideTable() && !isImmortal(false)) {
527569
// Can't check these assertions with side table present.
528570

529571
if (getIsDeiniting())
@@ -558,7 +600,7 @@ class RefCountBitsT {
558600
static_assert(Offsets::UnownedRefCountBitCount +
559601
Offsets::IsDeinitingBitCount +
560602
Offsets::StrongExtraRefCountBitCount +
561-
Offsets::IsImmortalBitCount +
603+
Offsets::PureSwiftDeallocBitCount +
562604
Offsets::UseSlowRCBitCount == sizeof(bits)*8,
563605
"inspect isUniquelyReferenced after adding fields");
564606

@@ -715,7 +757,7 @@ class RefCounts {
715757

716758
void setIsImmortal(bool immortal) {
717759
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
718-
if (oldbits.isImmortal()) {
760+
if (oldbits.isImmortal(true)) {
719761
return;
720762
}
721763
RefCountBits newbits;
@@ -725,7 +767,28 @@ class RefCounts {
725767
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
726768
std::memory_order_relaxed));
727769
}
728-
770+
771+
void setPureSwiftDeallocation(bool nonobjc) {
772+
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
773+
//Immortal and no objc complications share a bit, so don't let setting
774+
//the complications one clear the immortal one
775+
if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){
776+
assert(!oldbits.hasSideTable());
777+
return;
778+
}
779+
RefCountBits newbits;
780+
do {
781+
newbits = oldbits;
782+
newbits.setPureSwiftDeallocation(nonobjc);
783+
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
784+
std::memory_order_relaxed));
785+
}
786+
787+
bool getPureSwiftDeallocation() {
788+
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
789+
return bits.pureSwiftDeallocation();
790+
}
791+
729792
// Initialize from another refcount bits.
730793
// Only inline -> out-of-line is allowed (used for new side table entries).
731794
void init(InlineRefCountBits newBits) {
@@ -740,7 +803,7 @@ class RefCounts {
740803
newbits = oldbits;
741804
bool fast = newbits.incrementStrongExtraRefCount(inc);
742805
if (SWIFT_UNLIKELY(!fast)) {
743-
if (oldbits.isImmortal())
806+
if (oldbits.isImmortal(false))
744807
return;
745808
return incrementSlow(oldbits, inc);
746809
}
@@ -753,7 +816,7 @@ class RefCounts {
753816
auto newbits = oldbits;
754817
bool fast = newbits.incrementStrongExtraRefCount(inc);
755818
if (SWIFT_UNLIKELY(!fast)) {
756-
if (oldbits.isImmortal())
819+
if (oldbits.isImmortal(false))
757820
return;
758821
return incrementNonAtomicSlow(oldbits, inc);
759822
}
@@ -771,7 +834,7 @@ class RefCounts {
771834
newbits = oldbits;
772835
bool fast = newbits.incrementStrongExtraRefCount(1);
773836
if (SWIFT_UNLIKELY(!fast)) {
774-
if (oldbits.isImmortal())
837+
if (oldbits.isImmortal(false))
775838
return true;
776839
return tryIncrementSlow(oldbits);
777840
}
@@ -788,7 +851,7 @@ class RefCounts {
788851
auto newbits = oldbits;
789852
bool fast = newbits.incrementStrongExtraRefCount(1);
790853
if (SWIFT_UNLIKELY(!fast)) {
791-
if (oldbits.isImmortal())
854+
if (oldbits.isImmortal(false))
792855
return true;
793856
return tryIncrementNonAtomicSlow(oldbits);
794857
}
@@ -824,7 +887,7 @@ class RefCounts {
824887
// Precondition: the reference count must be 1
825888
void decrementFromOneNonAtomic() {
826889
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
827-
if (bits.isImmortal()) {
890+
if (bits.isImmortal(true)) {
828891
return;
829892
}
830893
if (bits.hasSideTable())
@@ -922,7 +985,7 @@ class RefCounts {
922985
// Decrement completed normally. New refcount is not zero.
923986
deinitNow = false;
924987
}
925-
else if (oldbits.isImmortal()) {
988+
else if (oldbits.isImmortal(false)) {
926989
return false;
927990
} else if (oldbits.hasSideTable()) {
928991
// Decrement failed because we're on some other slow path.
@@ -961,7 +1024,7 @@ class RefCounts {
9611024
// Decrement completed normally. New refcount is not zero.
9621025
deinitNow = false;
9631026
}
964-
else if (oldbits.isImmortal()) {
1027+
else if (oldbits.isImmortal(false)) {
9651028
return false;
9661029
}
9671030
else if (oldbits.hasSideTable()) {
@@ -1001,7 +1064,7 @@ class RefCounts {
10011064
bool fast =
10021065
newbits.decrementStrongExtraRefCount(dec);
10031066
if (SWIFT_UNLIKELY(!fast)) {
1004-
if (oldbits.isImmortal()) {
1067+
if (oldbits.isImmortal(false)) {
10051068
return false;
10061069
}
10071070
// Slow paths include side table; deinit; underflow
@@ -1025,7 +1088,7 @@ class RefCounts {
10251088
// Increment the unowned reference count.
10261089
void incrementUnowned(uint32_t inc) {
10271090
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1028-
if (oldbits.isImmortal())
1091+
if (oldbits.isImmortal(true))
10291092
return;
10301093
RefCountBits newbits;
10311094
do {
@@ -1037,7 +1100,7 @@ class RefCounts {
10371100
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
10381101

10391102
// Check overflow and use the side table on overflow.
1040-
if (newbits.getUnownedRefCount() != oldValue + inc)
1103+
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
10411104
return incrementUnownedSlow(inc);
10421105

10431106
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
@@ -1046,7 +1109,7 @@ class RefCounts {
10461109

10471110
void incrementUnownedNonAtomic(uint32_t inc) {
10481111
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1049-
if (oldbits.isImmortal())
1112+
if (oldbits.isImmortal(true))
10501113
return;
10511114
if (oldbits.hasSideTable())
10521115
return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1056,7 +1119,7 @@ class RefCounts {
10561119
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
10571120

10581121
// Check overflow and use the side table on overflow.
1059-
if (newbits.getUnownedRefCount() != oldValue + inc)
1122+
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
10601123
return incrementUnownedSlow(inc);
10611124

10621125
refCounts.store(newbits, std::memory_order_relaxed);
@@ -1066,7 +1129,7 @@ class RefCounts {
10661129
// Return true if the caller should free the object.
10671130
bool decrementUnownedShouldFree(uint32_t dec) {
10681131
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1069-
if (oldbits.isImmortal())
1132+
if (oldbits.isImmortal(true))
10701133
return false;
10711134
RefCountBits newbits;
10721135

@@ -1094,7 +1157,7 @@ class RefCounts {
10941157

10951158
bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
10961159
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1097-
if (oldbits.isImmortal())
1160+
if (oldbits.isImmortal(true))
10981161
return false;
10991162
if (oldbits.hasSideTable())
11001163
return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1383,7 +1446,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
13831446
auto newbits = oldbits;
13841447
bool fast = newbits.decrementStrongExtraRefCount(dec);
13851448
if (!fast) {
1386-
if (oldbits.isImmortal()) {
1449+
if (oldbits.isImmortal(false)) {
13871450
return false;
13881451
}
13891452
return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);

stdlib/public/runtime/HeapObject.cpp

Lines changed: 40 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
# include <objc/message.h>
4343
# include <objc/objc.h>
4444
# include "swift/Runtime/ObjCBridge.h"
45+
# include "swift/Runtime/Once.h"
4546
#endif
4647
#include "Leaks.h"
4748

@@ -78,6 +79,34 @@ HeapObject *swift::swift_allocObject(HeapMetadata const *metadata,
7879
return _swift_allocObject(metadata, requiredSize, requiredAlignmentMask);
7980
}
8081

82+
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
83+
//We interpose objc_setAssociatedObject so that we can set a flag in
84+
//the refcount field of Swift objects to indicate that they have associations,
85+
//since we can't safely skip ObjC dealloc work if they do
86+
static objc_hook_setAssociatedObject originalAssocObjectFunc = nullptr;
87+
88+
static void _swift_setAssociatedObject_hook(
89+
id _Nonnull object,
90+
const void * _Nonnull key,
91+
id _Nullable value,
92+
objc_AssociationPolicy policy
93+
) {
94+
if (!isObjCTaggedPointerOrNull(object) &&
95+
objectUsesNativeSwiftReferenceCounting(object)) {
96+
auto heapObj = reinterpret_cast<HeapObject *>(object);
97+
heapObj->refCounts.setPureSwiftDeallocation(false);
98+
}
99+
originalAssocObjectFunc(object, key, value, policy);
100+
}
101+
102+
static void _interpose_objc_association(void *ctxt) {
103+
if (__builtin_available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *)) {
104+
objc_setHook_setAssociatedObject(_swift_setAssociatedObject_hook,
105+
&originalAssocObjectFunc);
106+
}
107+
}
108+
#endif
109+
81110
static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
82111
size_t requiredSize,
83112
size_t requiredAlignmentMask) {
@@ -90,6 +119,11 @@ static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
90119
// Linux, and macOS.
91120
new (object) HeapObject(metadata);
92121

122+
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
123+
static swift_once_t associatedObjectHookOnce;
124+
swift_once(&associatedObjectHookOnce, _interpose_objc_association, nullptr);
125+
#endif
126+
93127
// If leak tracking is enabled, start tracking this object.
94128
SWIFT_LEAKS_START_TRACKING_OBJECT(object);
95129

@@ -594,9 +628,14 @@ void swift::swift_rootObjCDealloc(HeapObject *self) {
594628
void swift::swift_deallocClassInstance(HeapObject *object,
595629
size_t allocatedSize,
596630
size_t allocatedAlignMask) {
597-
#if SWIFT_OBJC_INTEROP
631+
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
598632
// We need to let the ObjC runtime clean up any associated objects or weak
599633
// references associated with this object.
634+
if (originalAssocObjectFunc == nullptr ||
635+
!object->refCounts.getPureSwiftDeallocation()) {
636+
objc_destructInstance((id)object);
637+
}
638+
#elif SWIFT_OBJC_INTEROP
600639
objc_destructInstance((id)object);
601640
#endif
602641
swift_deallocObject(object, allocatedSize, allocatedAlignMask);

0 commit comments

Comments
 (0)