Update fast dealloc to use new-style interposing and support objc weak refs #25864


Merged
49 changes: 49 additions & 0 deletions include/swift/Runtime/HeapObject.h
@@ -1086,4 +1086,53 @@ swift_getTypeName(const Metadata *type, bool qualified);

} // end namespace swift

#if SWIFT_OBJC_INTEROP
/// Standard ObjC lifecycle methods for Swift objects
#define STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS \
- (id)retain { \
  auto SELF = reinterpret_cast<HeapObject *>(self); \
  swift_retain(SELF); \
  return self; \
} \
- (void)release { \
  auto SELF = reinterpret_cast<HeapObject *>(self); \
  swift_release(SELF); \
} \
- (id)autorelease { \
  return _objc_rootAutorelease(self); \
} \
- (NSUInteger)retainCount { \
  return swift::swift_retainCount(reinterpret_cast<HeapObject *>(self)); \
} \
- (BOOL)_isDeallocating { \
  return swift_isDeallocating(reinterpret_cast<HeapObject *>(self)); \
} \
- (BOOL)_tryRetain { \
  return swift_tryRetain(reinterpret_cast<HeapObject*>(self)) != nullptr; \
} \
- (BOOL)allowsWeakReference { \
  return !swift_isDeallocating(reinterpret_cast<HeapObject *>(self)); \
} \
- (BOOL)retainWeakReference { \
  return swift_tryRetain(reinterpret_cast<HeapObject*>(self)) != nullptr; \
} \
- (void)_setWeaklyReferenced { \
  auto heapObj = reinterpret_cast<HeapObject *>(self); \
  heapObj->refCounts.setPureSwiftDeallocation(false); \
} \
- (bool)_setAssociatedObject:(id)obj \
                      forKey:(const void *)key \
           associationPolicy:(objc_AssociationPolicy)policy { \
  auto heapObj = reinterpret_cast<HeapObject *>(self); \
  heapObj->refCounts.setPureSwiftDeallocation(false); \
  /* false to let libobjc know it still needs to associate the object */ \
  return false; \
} \
- (void)dealloc { \
  swift_rootObjCDealloc(reinterpret_cast<HeapObject *>(self)); \
}

#endif // SWIFT_OBJC_INTEROP


#endif // SWIFT_RUNTIME_ALLOC_H
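
For context, the macro above is meant to be expanded verbatim inside an Objective-C @implementation so that a Swift root class answers the standard ObjC lifecycle selectors. A minimal usage sketch (the class name here is hypothetical, not the one this PR applies it to):

// Hypothetical example class; the PR expands this macro in the runtime's
// own root-class implementations.
@implementation MySwiftRootClass
STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS
@end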
135 changes: 99 additions & 36 deletions stdlib/public/SwiftShims/RefCount.h
@@ -238,14 +238,29 @@ struct RefCountBitOffsets;
// 32-bit out of line
template <>
struct RefCountBitOffsets<8> {
-  static const size_t IsImmortalShift = 0;
-  static const size_t IsImmortalBitCount = 1;
-  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
-
-  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
+  /*
+   The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
+   field are effectively a union of two different configurations:
+
+   ---Normal case---
+   Bit 0: Does this object need to call out to the ObjC runtime for deallocation
+   Bits 1-31: Unowned refcount
+
+   ---Immortal case---
+   All bits set, the object does not deallocate or have a refcount
+  */
+  static const size_t PureSwiftDeallocShift = 0;
+  static const size_t PureSwiftDeallocBitCount = 1;
+  static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
+
+  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
   static const size_t UnownedRefCountBitCount = 31;
   static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);

+  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
+  static const size_t IsImmortalBitCount = 32;
+  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
+
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
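
To make the overlap concrete: with maskForField expanded by hand for the 64-bit layout above, the immortal pattern is exactly the union of the PureSwiftDealloc bit and the unowned-refcount bits. A small standalone check (a sketch; the constants are hand-computed from the shifts and bit counts above, not taken from the PR):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t PureSwiftDeallocMask = 0x1ull;        // 1 bit at shift 0
  const uint64_t UnownedRefCountMask  = 0xFFFFFFFEull; // 31 bits at shift 1
  const uint64_t IsImmortalMask       = 0xFFFFFFFFull; // 32 bits at shift 0
  // The immortal case is "all low 32 bits set", so it overlays both
  // normal-case fields rather than consuming a bit of its own.
  assert(IsImmortalMask == (PureSwiftDeallocMask | UnownedRefCountMask));
  return 0;
}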
@@ -271,14 +286,18 @@ struct RefCountBitOffsets<8> {
// 32-bit inline
template <>
struct RefCountBitOffsets<4> {
-  static const size_t IsImmortalShift = 0;
-  static const size_t IsImmortalBitCount = 1;
-  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
+  static const size_t PureSwiftDeallocShift = 0;
+  static const size_t PureSwiftDeallocBitCount = 1;
+  static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);

-  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
+  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
   static const size_t UnownedRefCountBitCount = 7;
   static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);

+  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
+  static const size_t IsImmortalBitCount = 8;
+  static const uint32_t IsImmortalMask = maskForField(IsImmortal);

   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -369,33 +388,56 @@ class RefCountBitsT {
enum Immortal_t { Immortal };

LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool isImmortal() const {
-    return bool(getField(IsImmortal));
+  bool isImmortal(bool checkSlowRCBit) const {
+    if (checkSlowRCBit) {
+      return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
+             bool(getField(UseSlowRC));
+    } else {
+      return (getField(IsImmortal) == Offsets::IsImmortalMask);
+    }
   }

Review comment (Contributor), on isImmortal: Since this is always called with a constant true or false, it might be clearer to just split it into two functions.
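
A minimal sketch of the split the reviewer is suggesting (names hypothetical, not from this PR):

// Hypothetical refactor: one accessor per flavor, since every caller
// passes a compile-time constant for checkSlowRCBit.
LLVM_ATTRIBUTE_ALWAYS_INLINE
bool isImmortalBitsOnly() const {   // stands in for isImmortal(false)
  return getField(IsImmortal) == Offsets::IsImmortalMask;
}

LLVM_ATTRIBUTE_ALWAYS_INLINE
bool isImmortalAndSlowRC() const {  // stands in for isImmortal(true)
  return isImmortalBitsOnly() && bool(getField(UseSlowRC));
}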

+  LLVM_ATTRIBUTE_ALWAYS_INLINE
+  bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
+    auto newValue = getUnownedRefCount();
+    return newValue != oldValue + inc ||
+           newValue == Offsets::UnownedRefCountMask;
+  }

   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void setIsImmortal(bool value) {
-    setField(IsImmortal, value);
+    assert(value);
+    setField(IsImmortal, Offsets::IsImmortalMask);
     setField(UseSlowRC, value);
   }

Review comment (Contributor), on setIsImmortal: If value must be true, should this even take a parameter?
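
Following the same thought, a parameterless variant would look like this (hypothetical sketch, not part of the PR):

// Hypothetical: since value must be true, drop the parameter entirely.
LLVM_ATTRIBUTE_ALWAYS_INLINE
void makeImmortal() {
  setField(IsImmortal, Offsets::IsImmortalMask);
  setField(UseSlowRC, true);
}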

+  LLVM_ATTRIBUTE_ALWAYS_INLINE
+  bool pureSwiftDeallocation() const {
+    return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
+  }
+
+  LLVM_ATTRIBUTE_ALWAYS_INLINE
+  void setPureSwiftDeallocation(bool value) {
+    setField(PureSwiftDealloc, value);
+  }

LLVM_ATTRIBUTE_ALWAYS_INLINE
RefCountBitsT() = default;

LLVM_ATTRIBUTE_ALWAYS_INLINE
constexpr
RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
: bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
+         (BitsType(1) << Offsets::PureSwiftDeallocShift) |
(BitsType(unownedCount) << Offsets::UnownedRefCountShift))
{ }

LLVM_ATTRIBUTE_ALWAYS_INLINE
constexpr
RefCountBitsT(Immortal_t immortal)
-    : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
-           (BitsType(2) << Offsets::UnownedRefCountShift) |
-           (BitsType(1) << Offsets::IsImmortalShift) |
-           (BitsType(1) << Offsets::UseSlowRCShift))
+    : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
+           (BitsType(Offsets::IsImmortalMask)) |
+           (BitsType(1) << Offsets::UseSlowRCShift))
{ }

LLVM_ATTRIBUTE_ALWAYS_INLINE
@@ -433,7 +475,7 @@ class RefCountBitsT {

LLVM_ATTRIBUTE_ALWAYS_INLINE
bool hasSideTable() const {
-    bool hasSide = getUseSlowRC() && !isImmortal();
+    bool hasSide = getUseSlowRC() && !isImmortal(false);

// Side table refcount must not point to another side table.
assert((refcountIsInline || !hasSide) &&
@@ -523,7 +565,7 @@ class RefCountBitsT {
LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
bool decrementStrongExtraRefCount(uint32_t dec) {
#ifndef NDEBUG
-    if (!hasSideTable() && !isImmortal()) {
+    if (!hasSideTable() && !isImmortal(false)) {
// Can't check these assertions with side table present.

if (getIsDeiniting())
@@ -558,7 +600,7 @@ class RefCountBitsT {
static_assert(Offsets::UnownedRefCountBitCount +
Offsets::IsDeinitingBitCount +
Offsets::StrongExtraRefCountBitCount +
-                Offsets::IsImmortalBitCount +
+                Offsets::PureSwiftDeallocBitCount +
Offsets::UseSlowRCBitCount == sizeof(bits)*8,
"inspect isUniquelyReferenced after adding fields");

@@ -715,7 +757,7 @@ class RefCounts {

void setIsImmortal(bool immortal) {
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal()) {
+    if (oldbits.isImmortal(true)) {
return;
}
RefCountBits newbits;
@@ -725,7 +767,28 @@ class RefCounts {
} while (!refCounts.compare_exchange_weak(oldbits, newbits,
std::memory_order_relaxed));
}


+  void setPureSwiftDeallocation(bool nonobjc) {
+    auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
+    // Immortal and no-objc-complications share a bit, so don't let setting
+    // the complications one clear the immortal one.
+    if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc) {
+      assert(!oldbits.hasSideTable());
+      return;
+    }
+    RefCountBits newbits;
+    do {
+      newbits = oldbits;
+      newbits.setPureSwiftDeallocation(nonobjc);
+    } while (!refCounts.compare_exchange_weak(oldbits, newbits,
+                                              std::memory_order_relaxed));
+  }

+  bool getPureSwiftDeallocation() {
+    auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
+    return bits.pureSwiftDeallocation();
+  }
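
Tying this back to the HeapObject.h macro above: every ObjC-visible event (forming a weak reference, attaching an associated object) calls setPureSwiftDeallocation(false), and dealloc can later consult the getter to decide whether the ObjC runtime must be involved. An illustrative fragment (heapObj stands for any Swift object's HeapObject header; not code from the PR):

// e.g. what -_setWeaklyReferenced does in the macro above:
heapObj->refCounts.setPureSwiftDeallocation(false);
// ...so a later dealloc sees that the fast, pure-Swift path is off:
assert(!heapObj->refCounts.getPureSwiftDeallocation());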

// Initialize from another refcount bits.
// Only inline -> out-of-line is allowed (used for new side table entries).
void init(InlineRefCountBits newBits) {
@@ -740,7 +803,7 @@ class RefCounts {
newbits = oldbits;
bool fast = newbits.incrementStrongExtraRefCount(inc);
if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal())
+        if (oldbits.isImmortal(false))
return;
return incrementSlow(oldbits, inc);
}
@@ -753,7 +816,7 @@ class RefCounts {
auto newbits = oldbits;
bool fast = newbits.incrementStrongExtraRefCount(inc);
if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal())
+      if (oldbits.isImmortal(false))
return;
return incrementNonAtomicSlow(oldbits, inc);
}
@@ -771,7 +834,7 @@ class RefCounts {
newbits = oldbits;
bool fast = newbits.incrementStrongExtraRefCount(1);
if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal())
+        if (oldbits.isImmortal(false))
return true;
return tryIncrementSlow(oldbits);
}
@@ -788,7 +851,7 @@ class RefCounts {
auto newbits = oldbits;
bool fast = newbits.incrementStrongExtraRefCount(1);
if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal())
+      if (oldbits.isImmortal(false))
return true;
return tryIncrementNonAtomicSlow(oldbits);
}
@@ -824,7 +887,7 @@ class RefCounts {
// Precondition: the reference count must be 1
void decrementFromOneNonAtomic() {
auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (bits.isImmortal()) {
+    if (bits.isImmortal(true)) {
return;
}
if (bits.hasSideTable())
@@ -922,7 +985,7 @@ class RefCounts {
// Decrement completed normally. New refcount is not zero.
deinitNow = false;
}
-    else if (oldbits.isImmortal()) {
+    else if (oldbits.isImmortal(false)) {
return false;
} else if (oldbits.hasSideTable()) {
// Decrement failed because we're on some other slow path.
@@ -961,7 +1024,7 @@ class RefCounts {
// Decrement completed normally. New refcount is not zero.
deinitNow = false;
}
-    else if (oldbits.isImmortal()) {
+    else if (oldbits.isImmortal(false)) {
return false;
}
else if (oldbits.hasSideTable()) {
@@ -1001,7 +1064,7 @@ class RefCounts {
bool fast =
newbits.decrementStrongExtraRefCount(dec);
if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal()) {
+      if (oldbits.isImmortal(false)) {
return false;
}
// Slow paths include side table; deinit; underflow
@@ -1025,7 +1088,7 @@ class RefCounts {
// Increment the unowned reference count.
void incrementUnowned(uint32_t inc) {
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal())
+    if (oldbits.isImmortal(true))
return;
RefCountBits newbits;
do {
@@ -1037,7 +1100,7 @@ class RefCounts {
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);

// Check overflow and use the side table on overflow.
-      if (newbits.getUnownedRefCount() != oldValue + inc)
+      if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
return incrementUnownedSlow(inc);

} while (!refCounts.compare_exchange_weak(oldbits, newbits,
@@ -1046,7 +1109,7 @@ class RefCounts {

void incrementUnownedNonAtomic(uint32_t inc) {
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal())
+    if (oldbits.isImmortal(true))
return;
if (oldbits.hasSideTable())
return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1056,7 +1119,7 @@ class RefCounts {
uint32_t oldValue = newbits.incrementUnownedRefCount(inc);

// Check overflow and use the side table on overflow.
-    if (newbits.getUnownedRefCount() != oldValue + inc)
+    if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
return incrementUnownedSlow(inc);

refCounts.store(newbits, std::memory_order_relaxed);
@@ -1066,7 +1129,7 @@ class RefCounts {
// Return true if the caller should free the object.
bool decrementUnownedShouldFree(uint32_t dec) {
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal())
+    if (oldbits.isImmortal(true))
return false;
RefCountBits newbits;

@@ -1094,7 +1157,7 @@ class RefCounts {

bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal())
+    if (oldbits.isImmortal(true))
return false;
if (oldbits.hasSideTable())
return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1383,7 +1446,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
auto newbits = oldbits;
bool fast = newbits.decrementStrongExtraRefCount(dec);
if (!fast) {
-    if (oldbits.isImmortal()) {
+    if (oldbits.isImmortal(false)) {
return false;
}
return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);