
Commit 817ea12

Revert "Use the remaining half bit in the refcount to bypass ObjC deallocation overhead"
1 parent a83c04e commit 817ea12

4 files changed: +45 -143 lines changed

stdlib/public/SwiftShims/RefCount.h

Lines changed: 36 additions & 97 deletions
@@ -238,29 +238,14 @@ struct RefCountBitOffsets;
 // 32-bit out of line
 template <>
 struct RefCountBitOffsets<8> {
-  /*
-   The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
-   field are effectively a union of two different configurations:
-
-   ---Normal case---
-   Bit 0: Does this object need to call out to the ObjC runtime for deallocation
-   Bits 1-31: Unowned refcount
-
-   ---Immortal case---
-   All bits set, the object does not deallocate or have a refcount
-   */
-  static const size_t PureSwiftDeallocShift = 0;
-  static const size_t PureSwiftDeallocBitCount = 1;
-  static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
-
-  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
+  static const size_t IsImmortalShift = 0;
+  static const size_t IsImmortalBitCount = 1;
+  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
+
+  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
   static const size_t UnownedRefCountBitCount = 31;
   static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
-  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
-  static const size_t IsImmortalBitCount = 32;
-  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
-
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -286,18 +271,14 @@ struct RefCountBitOffsets<8> {
 // 32-bit inline
 template <>
 struct RefCountBitOffsets<4> {
-  static const size_t PureSwiftDeallocShift = 0;
-  static const size_t PureSwiftDeallocBitCount = 1;
-  static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
+  static const size_t IsImmortalShift = 0;
+  static const size_t IsImmortalBitCount = 1;
+  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
 
-  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
+  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
   static const size_t UnownedRefCountBitCount = 7;
   static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
-  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
-  static const size_t IsImmortalBitCount = 8;
-  static const uint32_t IsImmortalMask = maskForField(IsImmortal);
-
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -388,55 +369,33 @@ class RefCountBitsT {
   enum Immortal_t { Immortal };
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool isImmortal(bool checkSlowRCBit) const {
-    if (checkSlowRCBit) {
-      return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
-             bool(getField(UseSlowRC));
-    } else {
-      return (getField(IsImmortal) == Offsets::IsImmortalMask);
-    }
-  }
-
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
-    auto newValue = getUnownedRefCount();
-    return newValue != oldValue + inc ||
-           newValue == Offsets::UnownedRefCountMask;
+  bool isImmortal() const {
+    return bool(getField(IsImmortal));
   }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void setIsImmortal(bool value) {
-    setField(IsImmortal, value ? Offsets::IsImmortalMask : 0);
+    setField(IsImmortal, value);
     setField(UseSlowRC, value);
   }
 
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool pureSwiftDeallocation() const {
-    return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
-  }
-
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  void setPureSwiftDeallocation(bool value) {
-    setField(PureSwiftDealloc, value);
-  }
-
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   RefCountBitsT() = default;
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   constexpr
   RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
     : bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
-           (BitsType(1) << Offsets::PureSwiftDeallocShift) |
            (BitsType(unownedCount) << Offsets::UnownedRefCountShift))
   { }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   constexpr
   RefCountBitsT(Immortal_t immortal)
-  : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
-         (BitsType(Offsets::IsImmortalMask)) |
-         (BitsType(1) << Offsets::UseSlowRCShift))
+  : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
+         (BitsType(2) << Offsets::UnownedRefCountShift) |
+         (BitsType(1) << Offsets::IsImmortalShift) |
+         (BitsType(1) << Offsets::UseSlowRCShift))
   { }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
@@ -474,7 +433,7 @@ class RefCountBitsT {
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool hasSideTable() const {
-    bool hasSide = getUseSlowRC() && !isImmortal(false);
+    bool hasSide = getUseSlowRC() && !isImmortal();
 
     // Side table refcount must not point to another side table.
     assert((refcountIsInline || !hasSide) &&
@@ -564,7 +523,7 @@ class RefCountBitsT {
   LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool decrementStrongExtraRefCount(uint32_t dec) {
 #ifndef NDEBUG
-    if (!hasSideTable() && !isImmortal(false)) {
+    if (!hasSideTable() && !isImmortal()) {
       // Can't check these assertions with side table present.
 
       if (getIsDeiniting())
@@ -599,7 +558,7 @@ class RefCountBitsT {
   static_assert(Offsets::UnownedRefCountBitCount +
                 Offsets::IsDeinitingBitCount +
                 Offsets::StrongExtraRefCountBitCount +
-                Offsets::PureSwiftDeallocBitCount +
+                Offsets::IsImmortalBitCount +
                 Offsets::UseSlowRCBitCount == sizeof(bits)*8,
                 "inspect isUniquelyReferenced after adding fields");
 
@@ -756,7 +715,7 @@ class RefCounts {
 
   void setIsImmortal(bool immortal) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true)) {
+    if (oldbits.isImmortal()) {
       return;
     }
     RefCountBits newbits;
@@ -766,27 +725,7 @@ class RefCounts {
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
                                               std::memory_order_relaxed));
   }
-
-  void setPureSwiftDeallocation(bool nonobjc) {
-    auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    //Immortal and no objc complications share a bit, so don't let setting
-    //the complications one clear the immmortal one
-    if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){
-      return;
-    }
-    RefCountBits newbits;
-    do {
-      newbits = oldbits;
-      newbits.setPureSwiftDeallocation(nonobjc);
-    } while (!refCounts.compare_exchange_weak(oldbits, newbits,
-                                              std::memory_order_relaxed));
-  }
-
-  bool getPureSwiftDeallocation() {
-    auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    return bits.pureSwiftDeallocation();
-  }
-
+
   // Initialize from another refcount bits.
   // Only inline -> out-of-line is allowed (used for new side table entries).
   void init(InlineRefCountBits newBits) {
@@ -801,7 +740,7 @@ class RefCounts {
       newbits = oldbits;
       bool fast = newbits.incrementStrongExtraRefCount(inc);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false))
+        if (oldbits.isImmortal())
          return;
        return incrementSlow(oldbits, inc);
       }
@@ -814,7 +753,7 @@
     auto newbits = oldbits;
     bool fast = newbits.incrementStrongExtraRefCount(inc);
     if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal(false))
+      if (oldbits.isImmortal())
        return;
      return incrementNonAtomicSlow(oldbits, inc);
     }
@@ -832,7 +771,7 @@
       newbits = oldbits;
       bool fast = newbits.incrementStrongExtraRefCount(1);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false))
+        if (oldbits.isImmortal())
          return true;
        return tryIncrementSlow(oldbits);
       }
@@ -849,7 +788,7 @@
     auto newbits = oldbits;
     bool fast = newbits.incrementStrongExtraRefCount(1);
     if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal(false))
+      if (oldbits.isImmortal())
        return true;
      return tryIncrementNonAtomicSlow(oldbits);
     }
@@ -885,7 +824,7 @@
   // Precondition: the reference count must be 1
   void decrementFromOneNonAtomic() {
     auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (bits.isImmortal(true)) {
+    if (bits.isImmortal()) {
       return;
     }
     if (bits.hasSideTable())
@@ -983,7 +922,7 @@
       // Decrement completed normally. New refcount is not zero.
       deinitNow = false;
     }
-    else if (oldbits.isImmortal(false)) {
+    else if (oldbits.isImmortal()) {
       return false;
     } else if (oldbits.hasSideTable()) {
       // Decrement failed because we're on some other slow path.
@@ -1022,7 +961,7 @@
       // Decrement completed normally. New refcount is not zero.
       deinitNow = false;
     }
-    else if (oldbits.isImmortal(false)) {
+    else if (oldbits.isImmortal()) {
       return false;
     }
     else if (oldbits.hasSideTable()) {
@@ -1062,7 +1001,7 @@
       bool fast =
         newbits.decrementStrongExtraRefCount(dec);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false)) {
+        if (oldbits.isImmortal()) {
          return false;
        }
        // Slow paths include side table; deinit; underflow
@@ -1086,7 +1025,7 @@
   // Increment the unowned reference count.
   void incrementUnowned(uint32_t inc) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return;
     RefCountBits newbits;
     do {
@@ -1098,7 +1037,7 @@
       uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
 
       // Check overflow and use the side table on overflow.
-      if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
+      if (newbits.getUnownedRefCount() != oldValue + inc)
        return incrementUnownedSlow(inc);
 
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
@@ -1107,7 +1046,7 @@
 
   void incrementUnownedNonAtomic(uint32_t inc) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return;
     if (oldbits.hasSideTable())
       return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1117,7 +1056,7 @@
     uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
 
     // Check overflow and use the side table on overflow.
-    if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
+    if (newbits.getUnownedRefCount() != oldValue + inc)
       return incrementUnownedSlow(inc);
 
     refCounts.store(newbits, std::memory_order_relaxed);
@@ -1127,7 +1066,7 @@
   // Return true if the caller should free the object.
   bool decrementUnownedShouldFree(uint32_t dec) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return false;
     RefCountBits newbits;
 
@@ -1155,7 +1094,7 @@
 
   bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return false;
     if (oldbits.hasSideTable())
       return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1444,7 +1383,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
   auto newbits = oldbits;
   bool fast = newbits.decrementStrongExtraRefCount(dec);
   if (!fast) {
-    if (oldbits.isImmortal(false)) {
+    if (oldbits.isImmortal()) {
      return false;
    }
    return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);
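
What the RefCount.h side of the revert restores: bit 0 of the refcount bits is once again a dedicated one-bit IsImmortal flag, followed by the unowned refcount (31 bits in RefCountBitOffsets<8>, 7 bits in RefCountBitOffsets<4>), instead of a PureSwiftDealloc bit at bit 0 with immortality encoded as all of the low 32 (or 8) bits being set; the zero-argument isImmortal() also comes back. The snippet below is a minimal, self-contained C++ sketch of how such Shift/BitCount pairs become masks and how the flag is read and set. The constants and helper functions are stand-ins that mirror the maskForField/shiftAfterField/getField machinery in RefCount.h, not the real definitions.

  #include <cstddef>
  #include <cstdint>
  #include <cstdio>

  // Stand-ins for RefCount.h's shiftAfterField()/maskForField() macros,
  // laid out like the post-revert RefCountBitOffsets<8>: a 1-bit IsImmortal
  // flag at bit 0, then a 31-bit unowned refcount.
  constexpr std::size_t IsImmortalShift = 0;
  constexpr std::size_t IsImmortalBitCount = 1;
  constexpr uint64_t IsImmortalMask =
      ((uint64_t(1) << IsImmortalBitCount) - 1) << IsImmortalShift;

  constexpr std::size_t UnownedRefCountShift = IsImmortalShift + IsImmortalBitCount;
  constexpr std::size_t UnownedRefCountBitCount = 31;
  constexpr uint64_t UnownedRefCountMask =
      ((uint64_t(1) << UnownedRefCountBitCount) - 1) << UnownedRefCountShift;

  // Field accessors in the spirit of getField()/setField().
  constexpr uint64_t getUnowned(uint64_t bits) {
    return (bits & UnownedRefCountMask) >> UnownedRefCountShift;
  }
  constexpr bool isImmortal(uint64_t bits) {
    return (bits & IsImmortalMask) != 0;
  }

  int main() {
    uint64_t bits = uint64_t(1) << UnownedRefCountShift;   // unowned count = 1
    std::printf("unowned=%llu immortal=%d\n",
                (unsigned long long)getUnowned(bits), (int)isImmortal(bits));
    bits |= IsImmortalMask;                                 // set the immortal bit
    std::printf("immortal=%d\n", (int)isImmortal(bits));
    return 0;
  }

With immortality held in a single dedicated bit, a one-mask isImmortal() test suffices; the diff also drops isOverflowingUnownedRefCount(), which the old layout appears to have needed because a saturated unowned count could collide with the all-bits-set immortal pattern, so call sites return to the plain newValue != oldValue + inc overflow check.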

stdlib/public/runtime/HeapObject.cpp

Lines changed: 1 addition & 38 deletions
@@ -42,7 +42,6 @@
 # include <objc/message.h>
 # include <objc/objc.h>
 # include "swift/Runtime/ObjCBridge.h"
-# include "swift/Runtime/Once.h"
 #endif
 #include "Leaks.h"
 
@@ -79,32 +78,6 @@ HeapObject *swift::swift_allocObject(HeapMetadata const *metadata,
   return _swift_allocObject(metadata, requiredSize, requiredAlignmentMask);
 }
 
-#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
-//We interpose objc_setAssociatedObject so that we can set a flag in
-//the refcount field of Swift objects to indicate that they have associations,
-//since we can't safely skip ObjC dealloc work if they do
-static objc_hook_setAssociatedObject originalAssocObjectFunc = nullptr;
-
-static void _swift_setAssociatedObject_hook(
-  id _Nonnull object,
-  const void * _Nonnull key,
-  id _Nullable value,
-  objc_AssociationPolicy policy
-) {
-  if (!isObjCTaggedPointerOrNull(object) &&
-      objectUsesNativeSwiftReferenceCounting(object)) {
-    auto heapObj = reinterpret_cast<HeapObject *>(object);
-    heapObj->refCounts.setPureSwiftDeallocation(false);
-  }
-  originalAssocObjectFunc(object, key, value, policy);
-}
-
-static void _interpose_objc_association(void *ctxt) {
-  objc_setHook_setAssociatedObject(_swift_setAssociatedObject_hook,
-                                   &originalAssocObjectFunc);
-}
-#endif
-
 static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
                                        size_t requiredSize,
                                        size_t requiredAlignmentMask) {
@@ -117,11 +90,6 @@ static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
   // Linux, and macOS.
   new (object) HeapObject(metadata);
 
-#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
-  static swift_once_t associatedObjectHookOnce;
-  swift_once(&associatedObjectHookOnce, _interpose_objc_association, nullptr);
-#endif
-
   // If leak tracking is enabled, start tracking this object.
   SWIFT_LEAKS_START_TRACKING_OBJECT(object);
 
@@ -626,14 +594,9 @@ void swift::swift_rootObjCDealloc(HeapObject *self) {
 void swift::swift_deallocClassInstance(HeapObject *object,
                                        size_t allocatedSize,
                                        size_t allocatedAlignMask) {
-#if OBJC_SETASSOCIATEDOBJECTHOOK_DEFINED
+#if SWIFT_OBJC_INTEROP
   // We need to let the ObjC runtime clean up any associated objects or weak
   // references associated with this object.
-  if (originalAssocObjectFunc == nullptr ||
-      !object->refCounts.getPureSwiftDeallocation()) {
-    objc_destructInstance((id)object);
-  }
-#elif SWIFT_OBJC_INTEROP
   objc_destructInstance((id)object);
 #endif
   swift_deallocObject(object, allocatedSize, allocatedAlignMask);
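
For readers of the HeapObject.cpp half: the reverted change hooked objc_setAssociatedObject so that attaching an associated object to a native Swift instance cleared its pure-Swift-dealloc bit (such objects cannot safely skip ObjC teardown), and swift_deallocClassInstance consulted that bit before deciding whether to call objc_destructInstance. After this revert the hook is gone and objc_destructInstance is called unconditionally whenever SWIFT_OBJC_INTEROP is enabled. Below is a condensed, self-contained sketch of the interposition pattern the deleted code used, kept only to make the removed mechanism easier to read: it builds only against Apple's ObjC runtime headers, objc_setHook_setAssociatedObject is the real runtime API (newer Apple SDKs), and the Swift-internal checks from the original are reduced to a comment.

  #include <objc/runtime.h>

  // Pointer to the original objc_setAssociatedObject implementation,
  // handed back by the runtime when the hook is registered.
  static objc_hook_setAssociatedObject originalAssocObjectFunc = nullptr;

  static void setAssociatedObjectHook(id _Nonnull object,
                                      const void * _Nonnull key,
                                      id _Nullable value,
                                      objc_AssociationPolicy policy) {
    // The deleted Swift runtime code checked for a native, non-tagged Swift
    // object here and flipped a refcount bit meaning "has ObjC associations".
    originalAssocObjectFunc(object, key, value, policy);
  }

  static void installAssociatedObjectHook(void *) {
    // Registers the hook and receives the previous implementation to chain to.
    objc_setHook_setAssociatedObject(setAssociatedObjectHook,
                                     &originalAssocObjectFunc);
  }

In the reverted code this installer ran once, via swift_once, during the first object allocation; with the revert there is nothing to install.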
