@@ -238,29 +238,14 @@ struct RefCountBitOffsets;
 // 32-bit out of line
 template <>
 struct RefCountBitOffsets<8> {
-  /*
-   The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount
-   field are effectively a union of two different configurations:
-
-   ---Normal case---
-   Bit 0: Does this object need to call out to the ObjC runtime for deallocation
-   Bits 1-31: Unowned refcount
-
-   ---Immortal case---
-   All bits set, the object does not deallocate or have a refcount
-   */
-  static const size_t PureSwiftDeallocShift = 0;
-  static const size_t PureSwiftDeallocBitCount = 1;
-  static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
-
-  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
+  static const size_t IsImmortalShift = 0;
+  static const size_t IsImmortalBitCount = 1;
+  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
+
+  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
   static const size_t UnownedRefCountBitCount = 31;
   static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
-  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
-  static const size_t IsImmortalBitCount = 32;
-  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
-
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint64_t IsDeinitingMask = maskForField(IsDeiniting);
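
The offsets above are generated by the maskForField()/shiftAfterField() helpers defined earlier in RefCount.h: a field's mask is ((1 << BitCount) - 1) << Shift, and the next field begins at Shift + BitCount. The standalone sketch below spells that arithmetic out for the restored 64-bit layout; the two fields mirror the hunk above, and everything else (main, the prints) is illustrative only, not part of the header.

// Illustrative sketch only -- not part of RefCount.h.
#include <cstddef>
#include <cstdint>
#include <cstdio>

static const size_t IsImmortalShift = 0;
static const size_t IsImmortalBitCount = 1;
static const uint64_t IsImmortalMask =
    ((uint64_t(1) << IsImmortalBitCount) - 1) << IsImmortalShift;

static const size_t UnownedRefCountShift = IsImmortalShift + IsImmortalBitCount;
static const size_t UnownedRefCountBitCount = 31;
static const uint64_t UnownedRefCountMask =
    ((uint64_t(1) << UnownedRefCountBitCount) - 1) << UnownedRefCountShift;

int main() {
  // With IsImmortal back to a dedicated low bit, the unowned refcount
  // occupies bits 1-31 instead of sharing bit 0 with PureSwiftDealloc.
  printf("IsImmortalMask      = 0x%016llx\n",
         (unsigned long long)IsImmortalMask);      // 0x0000000000000001
  printf("UnownedRefCountMask = 0x%016llx\n",
         (unsigned long long)UnownedRefCountMask); // 0x00000000fffffffe
}
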
@@ -286,18 +271,14 @@ struct RefCountBitOffsets<8> {
 // 32-bit inline
 template <>
 struct RefCountBitOffsets<4> {
-  static const size_t PureSwiftDeallocShift = 0;
-  static const size_t PureSwiftDeallocBitCount = 1;
-  static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc);
+  static const size_t IsImmortalShift = 0;
+  static const size_t IsImmortalBitCount = 1;
+  static const uint64_t IsImmortalMask = maskForField(IsImmortal);
 
-  static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc);
+  static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal);
   static const size_t UnownedRefCountBitCount = 7;
   static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
-  static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount
-  static const size_t IsImmortalBitCount = 8;
-  static const uint32_t IsImmortalMask = maskForField(IsImmortal);
-
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
   static const size_t IsDeinitingBitCount = 1;
   static const uint32_t IsDeinitingMask = maskForField(IsDeiniting);
@@ -388,55 +369,33 @@ class RefCountBitsT {
   enum Immortal_t { Immortal };
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool isImmortal(bool checkSlowRCBit) const {
-    if (checkSlowRCBit) {
-      return (getField(IsImmortal) == Offsets::IsImmortalMask) &&
-             bool(getField(UseSlowRC));
-    } else {
-      return (getField(IsImmortal) == Offsets::IsImmortalMask);
-    }
-  }
-
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const {
-    auto newValue = getUnownedRefCount();
-    return newValue != oldValue + inc ||
-           newValue == Offsets::UnownedRefCountMask;
+  bool isImmortal() const {
+    return bool(getField(IsImmortal));
   }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void setIsImmortal(bool value) {
-    setField(IsImmortal, value ? Offsets::IsImmortalMask : 0);
+    setField(IsImmortal, value);
     setField(UseSlowRC, value);
   }
 
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool pureSwiftDeallocation() const {
-    return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC));
-  }
-
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  void setPureSwiftDeallocation(bool value) {
-    setField(PureSwiftDealloc, value);
-  }
-
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   RefCountBitsT() = default;
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   constexpr
   RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount)
     : bits((BitsType(strongExtraCount) << Offsets::StrongExtraRefCountShift) |
-           (BitsType(1) << Offsets::PureSwiftDeallocShift) |
            (BitsType(unownedCount) << Offsets::UnownedRefCountShift))
   { }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   constexpr
   RefCountBitsT(Immortal_t immortal)
-  : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
-         (BitsType(Offsets::IsImmortalMask)) |
-         (BitsType(1) << Offsets::UseSlowRCShift))
+  : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) |
+         (BitsType(2) << Offsets::UnownedRefCountShift) |
+         (BitsType(1) << Offsets::IsImmortalShift) |
+         (BitsType(1) << Offsets::UseSlowRCShift))
   { }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
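
With a dedicated flag, isImmortal() becomes a plain one-bit test instead of comparing a 32-bit overlapping field against the full mask, and the Immortal_t constructor can OR the fields together: strong extra count 2, unowned count 2, the IsImmortal bit, and the UseSlowRC bit. The sketch below models only the flag handling; FakeBits and the assumed UseSlowRC position are stand-ins for illustration, not the real RefCountBitsT.

// Illustrative sketch only -- a stand-in for the real RefCountBitsT.
#include <cassert>
#include <cstdint>

struct FakeBits {
  uint64_t bits = 0;

  // Assumed positions: IsImmortal in bit 0 (as in the diff),
  // UseSlowRC modeled here as the top bit.
  static constexpr uint64_t IsImmortalMask = uint64_t(1) << 0;
  static constexpr uint64_t UseSlowRCMask  = uint64_t(1) << 63;

  // The reverted check: immortality is a single dedicated bit, so no
  // checkSlowRCBit parameter and no full-mask comparison are needed.
  bool isImmortal() const { return bits & IsImmortalMask; }

  // Mirrors setIsImmortal(): the flag and UseSlowRC move together, so
  // immortal objects always route refcount operations to the slow path.
  void setIsImmortal(bool value) {
    if (value)
      bits |= (IsImmortalMask | UseSlowRCMask);
    else
      bits &= ~(IsImmortalMask | UseSlowRCMask);
  }
};

int main() {
  FakeBits b;
  assert(!b.isImmortal());
  b.setIsImmortal(true);
  assert(b.isImmortal());
  b.setIsImmortal(false);
  assert(!b.isImmortal());
}
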
@@ -474,7 +433,7 @@ class RefCountBitsT {
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool hasSideTable() const {
-    bool hasSide = getUseSlowRC() && !isImmortal(false);
+    bool hasSide = getUseSlowRC() && !isImmortal();
 
     // Side table refcount must not point to another side table.
     assert((refcountIsInline || !hasSide) &&
@@ -564,7 +523,7 @@ class RefCountBitsT {
   LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool decrementStrongExtraRefCount(uint32_t dec) {
 #ifndef NDEBUG
-    if (!hasSideTable() && !isImmortal(false)) {
+    if (!hasSideTable() && !isImmortal()) {
       // Can't check these assertions with side table present.
 
       if (getIsDeiniting())
@@ -599,7 +558,7 @@ class RefCountBitsT {
     static_assert(Offsets::UnownedRefCountBitCount +
                   Offsets::IsDeinitingBitCount +
                   Offsets::StrongExtraRefCountBitCount +
-                  Offsets::PureSwiftDeallocBitCount +
+                  Offsets::IsImmortalBitCount +
                   Offsets::UseSlowRCBitCount == sizeof(bits)*8,
                   "inspect isUniquelyReferenced after adding fields");
 
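
The static_assert above keeps the field widths honest: the named bit counts must fill the word exactly, so adding or widening a field forces a review of isUniquelyReferenced. For the 64-bit layout in this diff the sum is 1 (IsImmortal) + 31 (UnownedRefCount) + 1 (IsDeiniting) + 30 (StrongExtraRefCount) + 1 (UseSlowRC) = 64; the last three widths come from parts of the header not shown in this diff, so treat them as assumptions in the standalone check below.

// Illustrative sketch only; the last three widths are assumed from the
// surrounding header, not from this diff.
#include <cstdint>

static_assert(1   /* IsImmortal          */ +
              31  /* UnownedRefCount     */ +
              1   /* IsDeiniting         */ +
              30  /* StrongExtraRefCount */ +
              1   /* UseSlowRC           */ == sizeof(uint64_t) * 8,
              "refcount fields must fill the 64-bit word exactly");

int main() {}
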
@@ -756,7 +715,7 @@ class RefCounts {
 
   void setIsImmortal(bool immortal) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true)) {
+    if (oldbits.isImmortal()) {
       return;
     }
     RefCountBits newbits;
@@ -766,27 +725,7 @@ class RefCounts {
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
                                               std::memory_order_relaxed));
   }
-
-  void setPureSwiftDeallocation(bool nonobjc) {
-    auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    // Immortal and no objc complications share a bit, so don't let setting
-    // the complications one clear the immmortal one
-    if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){
-      return;
-    }
-    RefCountBits newbits;
-    do {
-      newbits = oldbits;
-      newbits.setPureSwiftDeallocation(nonobjc);
-    } while (!refCounts.compare_exchange_weak(oldbits, newbits,
-                                              std::memory_order_relaxed));
-  }
-
-  bool getPureSwiftDeallocation() {
-    auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    return bits.pureSwiftDeallocation();
-  }
-
+
   // Initialize from another refcount bits.
   // Only inline -> out-of-line is allowed (used for new side table entries).
   void init(InlineRefCountBits newBits) {
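
setIsImmortal() above, like the removed setPureSwiftDeallocation(), follows the usual lock-free read-modify-write shape: load the current bits, return early if nothing would change, otherwise recompute and retry with compare_exchange_weak until no other thread has raced the update in between. A generic sketch of that loop on a plain atomic word (the flag position and memory orders are illustrative; the runtime loads with SWIFT_MEMORY_ORDER_CONSUME):

// Illustrative sketch only -- the shape of the CAS retry loop, not the
// runtime's actual RefCounts code.
#include <atomic>
#include <cstdint>

std::atomic<uint64_t> refCountBits{0};
constexpr uint64_t kImmortalFlag = uint64_t(1) << 0;  // assumed bit position

void setFlag(bool value) {
  auto oldbits = refCountBits.load(std::memory_order_acquire);
  if (bool(oldbits & kImmortalFlag) == value)
    return;  // already in the requested state; nothing to publish
  uint64_t newbits;
  do {
    // Recompute from the freshest oldbits; on CAS failure,
    // compare_exchange_weak reloads oldbits and we try again.
    newbits = value ? (oldbits | kImmortalFlag) : (oldbits & ~kImmortalFlag);
  } while (!refCountBits.compare_exchange_weak(oldbits, newbits,
                                               std::memory_order_relaxed));
}

int main() {
  setFlag(true);
  setFlag(false);
}
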
@@ -801,7 +740,7 @@ class RefCounts {
       newbits = oldbits;
       bool fast = newbits.incrementStrongExtraRefCount(inc);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false))
+        if (oldbits.isImmortal())
          return;
        return incrementSlow(oldbits, inc);
      }
@@ -814,7 +753,7 @@ class RefCounts {
     auto newbits = oldbits;
     bool fast = newbits.incrementStrongExtraRefCount(inc);
     if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal(false))
+      if (oldbits.isImmortal())
        return;
      return incrementNonAtomicSlow(oldbits, inc);
    }
@@ -832,7 +771,7 @@ class RefCounts {
       newbits = oldbits;
       bool fast = newbits.incrementStrongExtraRefCount(1);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false))
+        if (oldbits.isImmortal())
          return true;
        return tryIncrementSlow(oldbits);
      }
@@ -849,7 +788,7 @@ class RefCounts {
     auto newbits = oldbits;
     bool fast = newbits.incrementStrongExtraRefCount(1);
     if (SWIFT_UNLIKELY(!fast)) {
-      if (oldbits.isImmortal(false))
+      if (oldbits.isImmortal())
        return true;
      return tryIncrementNonAtomicSlow(oldbits);
    }
@@ -885,7 +824,7 @@ class RefCounts {
   // Precondition: the reference count must be 1
   void decrementFromOneNonAtomic() {
     auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (bits.isImmortal(true)) {
+    if (bits.isImmortal()) {
       return;
     }
     if (bits.hasSideTable())
@@ -983,7 +922,7 @@ class RefCounts {
         // Decrement completed normally. New refcount is not zero.
         deinitNow = false;
       }
-      else if (oldbits.isImmortal(false)) {
+      else if (oldbits.isImmortal()) {
        return false;
      } else if (oldbits.hasSideTable()) {
        // Decrement failed because we're on some other slow path.
@@ -1022,7 +961,7 @@ class RefCounts {
       // Decrement completed normally. New refcount is not zero.
       deinitNow = false;
     }
-    else if (oldbits.isImmortal(false)) {
+    else if (oldbits.isImmortal()) {
      return false;
    }
    else if (oldbits.hasSideTable()) {
@@ -1062,7 +1001,7 @@ class RefCounts {
       bool fast =
         newbits.decrementStrongExtraRefCount(dec);
       if (SWIFT_UNLIKELY(!fast)) {
-        if (oldbits.isImmortal(false)) {
+        if (oldbits.isImmortal()) {
          return false;
        }
        // Slow paths include side table; deinit; underflow
@@ -1086,7 +1025,7 @@ class RefCounts {
   // Increment the unowned reference count.
   void incrementUnowned(uint32_t inc) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return;
     RefCountBits newbits;
     do {
@@ -1098,7 +1037,7 @@ class RefCounts {
       uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
 
       // Check overflow and use the side table on overflow.
-      if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
+      if (newbits.getUnownedRefCount() != oldValue + inc)
         return incrementUnownedSlow(inc);
 
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
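
The restored overflow check relies on the unowned count being a narrow bit-field: incrementUnownedRefCount() returns the previous value, and if the stored field wrapped, reading it back no longer equals oldValue + inc, so the runtime falls back to incrementUnownedSlow() and the side table. (The removed isOverflowingUnownedRefCount() additionally treated the saturated all-ones value as an overflow.) A small sketch of the wrap detection on an assumed 7-bit field, matching the 32-bit inline layout above:

// Illustrative sketch only -- models the wrap check, not the real RefCountBits.
#include <cstdint>
#include <cstdio>

constexpr uint32_t kUnownedBits = 7;                    // 32-bit inline layout
constexpr uint32_t kUnownedMask = (1u << kUnownedBits) - 1;

int main() {
  uint32_t field = 126;                 // current unowned count in the field
  uint32_t oldValue = field;
  uint32_t inc = 3;

  field = (field + inc) & kUnownedMask; // storing truncates to the field width

  if (field != oldValue + inc)
    printf("overflowed the %u-bit field: take the side-table slow path\n",
           (unsigned)kUnownedBits);
  else
    printf("fast path: unowned count is now %u\n", (unsigned)field);
}
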
@@ -1107,7 +1046,7 @@ class RefCounts {
 
   void incrementUnownedNonAtomic(uint32_t inc) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return;
     if (oldbits.hasSideTable())
       return oldbits.getSideTable()->incrementUnownedNonAtomic(inc);
@@ -1117,7 +1056,7 @@ class RefCounts {
     uint32_t oldValue = newbits.incrementUnownedRefCount(inc);
 
     // Check overflow and use the side table on overflow.
-    if (newbits.isOverflowingUnownedRefCount(oldValue, inc))
+    if (newbits.getUnownedRefCount() != oldValue + inc)
       return incrementUnownedSlow(inc);
 
     refCounts.store(newbits, std::memory_order_relaxed);
@@ -1127,7 +1066,7 @@ class RefCounts {
   // Return true if the caller should free the object.
   bool decrementUnownedShouldFree(uint32_t dec) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return false;
     RefCountBits newbits;
 
@@ -1155,7 +1094,7 @@ class RefCounts {
 
   bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    if (oldbits.isImmortal(true))
+    if (oldbits.isImmortal())
       return false;
     if (oldbits.hasSideTable())
       return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec);
@@ -1444,7 +1383,7 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
   auto newbits = oldbits;
   bool fast = newbits.decrementStrongExtraRefCount(dec);
   if (!fast) {
-    if (oldbits.isImmortal(false)) {
+    if (oldbits.isImmortal()) {
      return false;
    }
    return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);