Skip to content

Commit e4989cd

Browse files
Authored by Catfish-Man
Merge pull request swiftlang#25923 from Catfish-Man/turn-fast-dealloc-back-off
Revert "Use the remaining half bit in the refcount to bypass ObjC deallocation overhead"
2 parents f029162 + a84af6f commit e4989cd

File tree

4 files changed

+45
-147
lines changed

4 files changed

+45
-147
lines changed

stdlib/public/SwiftShims/RefCount.h

Lines changed: 36 additions & 99 deletions
Original file line numberDiff line numberDiff line change
@@ -238,29 +238,14 @@ struct RefCountBitOffsets;
238238
// 32-bit out of line
239239
template <>
240240
struct RefCountBitOffsets<8> {
241-
/*
242-
The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
243-
field are effectively a union of two different configurations:
244-
245-
---Normal case---
246-
Bit 0: Does this object need to call out to the ObjC runtime for deallocation
247-
Bits 1-31: Unowned refcount
248-
249-
---Immortal case---
250-
All bits set, the object does not deallocate or have a refcount
251-
*/
252-
static const size_t PureSwiftDeallocShift = 0;
253-
static const size_t PureSwiftDeallocBitCount = 1;
254-
static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
255-
256-
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
241+
static const size_t IsImmortalShift = 0;
242+
static const size_t IsImmortalBitCount = 1;
243+
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
244+
245+
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
257246
static const size_t UnownedRefCountBitCount = 31;
258247
static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
259248

260-
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
261-
static const size_t IsImmortalBitCount = 32;
262-
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
263-
264249
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
265250
static const size_t IsDeinitingBitCount = 1;
266251
static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -286,18 +271,14 @@ struct RefCountBitOffsets<8> {
286271
// 32-bit inline
287272
template <>
288273
struct RefCountBitOffsets<4> {
289-
static const size_t PureSwiftDeallocShift = 0;
290-
static const size_t PureSwiftDeallocBitCount = 1;
291-
static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
274+
static const size_t IsImmortalShift = 0;
275+
static const size_t IsImmortalBitCount = 1;
276+
static const uint64_t IsImmortalMask = maskForField(IsImmortal);
292277

293-
static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
278+
static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
294279
static const size_t UnownedRefCountBitCount = 7;
295280
static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
296281

297-
static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
298-
static const size_t IsImmortalBitCount = 8;
299-
static const uint32_t IsImmortalMask = maskForField(IsImmortal);
300-
301282
static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
302283
static const size_t IsDeinitingBitCount = 1;
303284
static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -388,56 +369,33 @@ class RefCountBitsT {
388369
enum Immortal_t { Immortal };
389370

390371
LLVM_ATTRIBUTE_ALWAYS_INLINE
391-
bool isImmortal(bool checkSlowRCBit) const {
392-
if (checkSlowRCBit) {
393-
return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
394-
bool(getField(UseSlowRC));
395-
} else {
396-
return (getField(IsImmortal) == Offsets::IsImmortalMask);
397-
}
398-
}
399-
400-
LLVM_ATTRIBUTE_ALWAYS_INLINE
401-
bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
402-
auto newValue = getUnownedRefCount();
403-
return newValue != oldValue + inc ||
404-
newValue == Offsets::UnownedRefCountMask;
372+
bool isImmortal() const {
373+
return bool(getField(IsImmortal));
405374
}
406375

407376
LLVM_ATTRIBUTE_ALWAYS_INLINE
408377
void setIsImmortal(bool value) {
409-
assert(value);
410-
setField(IsImmortal, Offsets::IsImmortalMask);
378+
setField(IsImmortal, value);
411379
setField(UseSlowRC, value);
412380
}
413381

414-
LLVM_ATTRIBUTE_ALWAYS_INLINE
415-
bool pureSwiftDeallocation() const {
416-
return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
417-
}
418-
419-
LLVM_ATTRIBUTE_ALWAYS_INLINE
420-
void setPureSwiftDeallocation(bool value) {
421-
setField(PureSwiftDealloc, value);
422-
}
423-
424382
LLVM_ATTRIBUTE_ALWAYS_INLINE
425383
RefCountBitsT() = default;
426384

427385
LLVM_ATTRIBUTE_ALWAYS_INLINE
428386
constexpr
429387
RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
430388
: bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
431-
(BitsType(1) << Offsets::PureSwiftDeallocShift) |
432389
(BitsType(unownedCount) << Offsets::UnownedRefCountShift))
433390
{ }
434391

435392
LLVM_ATTRIBUTE_ALWAYS_INLINE
436393
constexpr
437394
RefCountBitsT(Immortal_t immortal)
438-
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
439-
(BitsType(Offsets::IsImmortalMask)) |
440-
(BitsType(1) << Offsets::UseSlowRCShift))
395+
: bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
396+
(BitsType(2) << Offsets::UnownedRefCountShift) |
397+
(BitsType(1) << Offsets::IsImmortalShift) |
398+
(BitsType(1) << Offsets::UseSlowRCShift))
441399
{ }
442400

443401
LLVM_ATTRIBUTE_ALWAYS_INLINE
@@ -475,7 +433,7 @@ class RefCountBitsT {
475433

476434
LLVM_ATTRIBUTE_ALWAYS_INLINE
477435
bool hasSideTable() const {
478-
bool hasSide = getUseSlowRC() && !isImmortal(false);
436+
bool hasSide = getUseSlowRC() && !isImmortal();
479437

480438
// Side table refcount must not point to another side table.
481439
assert((refcountIsInline || !hasSide) &&
@@ -565,7 +523,7 @@ class RefCountBitsT {
565523
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
566524
bool decrementStrongExtraRefCount(uint32_t dec) {
567525
#ifndef NDEBUG
568-
if (!hasSideTable() && !isImmortal(false)) {
526+
if (!hasSideTable() && !isImmortal()) {
569527
// Can't check these assertions with side table present.
570528

571529
if (getIsDeiniting())
@@ -600,7 +558,7 @@ class RefCountBitsT {
600558
static_assert(Offsets::UnownedRefCountBitCount +
601559
Offsets::IsDeinitingBitCount +
602560
Offsets::StrongExtraRefCountBitCount +
603-
Offsets::PureSwiftDeallocBitCount +
561+
Offsets::IsImmortalBitCount +
604562
Offsets::UseSlowRCBitCount == sizeof(bits)*8,
605563
"inspect isUniquelyReferenced after adding fields");
606564

@@ -757,7 +715,7 @@ class RefCounts {
757715

758716
void setIsImmortal(bool immortal) {
759717
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
760-
if (oldbits.isImmortal(true)) {
718+
if (oldbits.isImmortal()) {
761719
return;
762720
}
763721
RefCountBits newbits;
@@ -767,28 +725,7 @@ class RefCounts {
767725
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
768726
std::memory_order_relaxed));
769727
}
770-
771-
void setPureSwiftDeallocation(bool nonobjc) {
772-
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
773-
//Immortal and no objc complications share a bit, so don't let setting
774-
//the complications one clear the immmortal one
775-
if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){
776-
assert(!oldbits.hasSideTable());
777-
return;
778-
}
779-
RefCountBits newbits;
780-
do {
781-
newbits = oldbits;
782-
newbits.setPureSwiftDeallocation(nonobjc);
783-
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
784-
std::memory_order_relaxed));
785-
}
786-
787-
bool getPureSwiftDeallocation() {
788-
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
789-
return bits.pureSwiftDeallocation();
790-
}
791-
728+
792729
// Initialize from another refcount bits.
793730
// Only inline -> out-of-line is allowed (used for new side table entries).
794731
void init(InlineRefCountBits newBits) {
@@ -803,7 +740,7 @@ class RefCounts {
803740
newbits = oldbits;
804741
bool fast = newbits.incrementStrongExtraRefCount(inc);
805742
if (SWIFT_UNLIKELY(!fast)) {
806-
if (oldbits.isImmortal(false))
743+
if (oldbits.isImmortal())
807744
return;
808745
return incrementSlow(oldbits, inc);
809746
}
@@ -816,7 +753,7 @@ class RefCounts {
816753
auto newbits = oldbits;
817754
bool fast = newbits.incrementStrongExtraRefCount(inc);
818755
if (SWIFT_UNLIKELY(!fast)) {
819-
if (oldbits.isImmortal(false))
756+
if (oldbits.isImmortal())
820757
return;
821758
return incrementNonAtomicSlow(oldbits, inc);
822759
}
@@ -834,7 +771,7 @@ class RefCounts {
834771
newbits = oldbits;
835772
bool fast = newbits.incrementStrongExtraRefCount(1);
836773
if (SWIFT_UNLIKELY(!fast)) {
837-
if (oldbits.isImmortal(false))
774+
if (oldbits.isImmortal())
838775
return true;
839776
return tryIncrementSlow(oldbits);
840777
}
@@ -851,7 +788,7 @@ class RefCounts {
851788
auto newbits = oldbits;
852789
bool fast = newbits.incrementStrongExtraRefCount(1);
853790
if (SWIFT_UNLIKELY(!fast)) {
854-
if (oldbits.isImmortal(false))
791+
if (oldbits.isImmortal())
855792
return true;
856793
return tryIncrementNonAtomicSlow(oldbits);
857794
}
@@ -887,7 +824,7 @@ class RefCounts {
887824
// Precondition: the reference count must be 1
888825
void decrementFromOneNonAtomic() {
889826
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
890-
if (bits.isImmortal(true)) {
827+
if (bits.isImmortal()) {
891828
return;
892829
}
893830
if (bits.hasSideTable())
@@ -985,7 +922,7 @@ class RefCounts {
985922
// Decrement completed normally. New refcount is not zero.
986923
deinitNow = false;
987924
}
988-
else if (oldbits.isImmortal(false)) {
925+
else if (oldbits.isImmortal()) {
989926
return false;
990927
} else if (oldbits.hasSideTable()) {
991928
// Decrement failed because we're on some other slow path.
@@ -1024,7 +961,7 @@ class RefCounts {
1024961
// Decrement completed normally. New refcount is not zero.
1025962
deinitNow = false;
1026963
}
1027-
else if (oldbits.isImmortal(false)) {
964+
else if (oldbits.isImmortal()) {
1028965
return false;
1029966
}
1030967
else if (oldbits.hasSideTable()) {
@@ -1064,7 +1001,7 @@ class RefCounts {
10641001
bool fast =
10651002
newbits.decrementStrongExtraRefCount(dec);
10661003
if (SWIFT_UNLIKELY(!fast)) {
1067-
if (oldbits.isImmortal(false)) {
1004+
if (oldbits.isImmortal()) {
10681005
return false;
10691006
}
10701007
// Slow paths include side table; deinit; underflow
@@ -1088,7 +1025,7 @@ class RefCounts {
10881025
// Increment the unowned reference count.
10891026
void incrementUnowned(uint32_t inc) {
10901027
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1091-
if (oldbits.isImmortal(true))
1028+
if (oldbits.isImmortal())
10921029
return;
10931030
RefCountBits newbits;
10941031
do {
@@ -1100,7 +1037,7 @@ class RefCounts {
11001037
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
11011038

11021039
// Check overflow and use the side table on overflow.
1103-
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
1040+
if (newbits.getUnownedRefCount() != oldValue + inc)
11041041
return incrementUnownedSlow(inc);
11051042

11061043
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
@@ -1109,7 +1046,7 @@ class RefCounts {
11091046

11101047
void incrementUnownedNonAtomic(uint32_t inc) {
11111048
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1112-
if (oldbits.isImmortal(true))
1049+
if (oldbits.isImmortal())
11131050
return;
11141051
if (oldbits.hasSideTable())
11151052
return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1119,7 +1056,7 @@ class RefCounts {
11191056
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
11201057

11211058
// Check overflow and use the side table on overflow.
1122-
if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
1059+
if (newbits.getUnownedRefCount() != oldValue + inc)
11231060
return incrementUnownedSlow(inc);
11241061

11251062
refCounts.store(newbits, std::memory_order_relaxed);
@@ -1129,7 +1066,7 @@ class RefCounts {
11291066
// Return true if the caller should free the object.
11301067
bool decrementUnownedShouldFree(uint32_t dec) {
11311068
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1132-
if (oldbits.isImmortal(true))
1069+
if (oldbits.isImmortal())
11331070
return false;
11341071
RefCountBits newbits;
11351072

@@ -1157,7 +1094,7 @@ class RefCounts {
11571094

11581095
bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
11591096
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
1160-
if (oldbits.isImmortal(true))
1097+
if (oldbits.isImmortal())
11611098
return false;
11621099
if (oldbits.hasSideTable())
11631100
return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1446,7 +1383,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
14461383
auto newbits = oldbits;
14471384
bool fast = newbits.decrementStrongExtraRefCount(dec);
14481385
if (!fast) {
1449-
if (oldbits.isImmortal(false)) {
1386+
if (oldbits.isImmortal()) {
14501387
return false;
14511388
}
14521389
return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);

stdlib/public/runtime/HeapObject.cpp

Lines changed: 1 addition & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,6 @@
4242
# include <objc/message.h>
4343
# include <objc/objc.h>
4444
# include "swift/Runtime/ObjCBridge.h"
45-
# include "swift/Runtime/Once.h"
4645
#endif
4746
#include "Leaks.h"
4847

@@ -79,34 +78,6 @@ HeapObject *swift::swift_allocObject(HeapMetadata const *metadata,
7978
return _swift_allocObject(metadata, requiredSize, requiredAlignmentMask);
8079
}
8180

82-
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
83-
//We interpose objc_setAssociatedObject so that we can set a flag in
84-
//the refcount field of Swift objects to indicate that they have associations,
85-
//since we can't safely skip ObjC dealloc work if they do
86-
static objc_hook_setAssociatedObject originalAssocObjectFunc = nullptr;
87-
88-
static void _swift_setAssociatedObject_hook(
89-
id _Nonnull object,
90-
const void * _Nonnull key,
91-
id _Nullable value,
92-
objc_AssociationPolicy policy
93-
) {
94-
if (!isObjCTaggedPointerOrNull(object) &&
95-
objectUsesNativeSwiftReferenceCounting(object)) {
96-
auto heapObj = reinterpret_cast<HeapObject *>(object);
97-
heapObj->refCounts.setPureSwiftDeallocation(false);
98-
}
99-
originalAssocObjectFunc(object, key, value, policy);
100-
}
101-
102-
static void _interpose_objc_association(void *ctxt) {
103-
if (__builtin_available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *)) {
104-
objc_setHook_setAssociatedObject(_swift_setAssociatedObject_hook,
105-
&originalAssocObjectFunc);
106-
}
107-
}
108-
#endif
109-
11081
static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
11182
size_t requiredSize,
11283
size_t requiredAlignmentMask) {
@@ -119,11 +90,6 @@ static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
11990
// Linux, and macOS.
12091
new (object) HeapObject(metadata);
12192

122-
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
123-
static swift_once_t associatedObjectHookOnce;
124-
swift_once(&associatedObjectHookOnce, _interpose_objc_association, nullptr);
125-
#endif
126-
12793
// If leak tracking is enabled, start tracking this object.
12894
SWIFT_LEAKS_START_TRACKING_OBJECT(object);
12995

@@ -628,14 +594,9 @@ void swift::swift_rootObjCDealloc(HeapObject *self) {
628594
void swift::swift_deallocClassInstance(HeapObject *object,
629595
size_t allocatedSize,
630596
size_t allocatedAlignMask) {
631-
#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
597+
#if SWIFT_OBJC_INTEROP
632598
// We need to let the ObjC runtime clean up any associated objects or weak
633599
// references associated with this object.
634-
if (originalAssocObjectFunc == nullptr ||
635-
!object->refCounts.getPureSwiftDeallocation()) {
636-
objc_destructInstance((id)object);
637-
}
638-
#elif SWIFT_OBJC_INTEROP
639600
objc_destructInstance((id)object);
640601
#endif
641602
swift_deallocObject(object, allocatedSize, allocatedAlignMask);

0 commit comments

Comments (0)