13
13
#ifndef SWIFT_BASIC_THREADSAFEREFCOUNTED_H
14
14
#define SWIFT_BASIC_THREADSAFEREFCOUNTED_H
15
15
16
- #include " llvm/Support/Atomic.h "
16
+ #include < atomic >
17
17
#include < cassert>
18
18
19
19
namespace swift {
@@ -28,18 +28,19 @@ namespace swift {
28
28
/// A thread-safe version of \c RefCountedBase.
///
/// Intrusive, CRTP-style reference counting: when the last reference is
/// dropped the object destroys itself with
/// \c delete \c static_cast<const \c Derived*>(this).
///
/// FIXME: This should eventually move to llvm.
template <class Derived>
class ThreadSafeRefCountedBase {
  /// Number of outstanding references. Starts at 0, so the creating code
  /// must call Retain() once to take the first reference.
  mutable std::atomic<unsigned> ref_cnt;

protected:
  ThreadSafeRefCountedBase() : ref_cnt(0) {}

public:
  /// Atomically add one reference.
  void Retain() const {
    ref_cnt.fetch_add(1, std::memory_order_acq_rel);
  }

  /// Atomically drop one reference, deleting the object when the count
  /// reaches zero.
  void Release() const {
    // fetch_sub returns the value held BEFORE the decrement, whereas the
    // old llvm::sys::AtomicDecrement returned the NEW value; subtract one
    // so the checks below see the post-decrement count. Without the "- 1"
    // the final Release() would leak the object and an over-release would
    // slip past the assert.
    int refCount =
        static_cast<int>(ref_cnt.fetch_sub(1, std::memory_order_acq_rel)) - 1;
    assert(refCount >= 0 && "Reference count was already zero.");
    if (refCount == 0) delete static_cast<const Derived*>(this);
  }
};
@@ -52,7 +53,7 @@ class ThreadSafeRefCountedBase {
52
53
// / already have virtual methods to enforce dynamic allocation via 'new'.
53
54
// / FIXME: This should eventually move to llvm.
54
55
class ThreadSafeRefCountedBaseVPTR {
55
- mutable llvm::sys::cas_flag ref_cnt;
56
+ mutable std::atomic< unsigned > ref_cnt;
56
57
virtual void anchor ();
57
58
58
59
protected:
@@ -61,11 +62,12 @@ class ThreadSafeRefCountedBaseVPTR {
61
62
62
63
public:
63
64
/// Atomically bump the reference count by one.
void Retain() const {
  ref_cnt.fetch_add(1u, std::memory_order_acq_rel);
}
66
67
67
68
void Release () const {
68
- int refCount = static_cast <int >(llvm::sys::AtomicDecrement (&ref_cnt));
69
+ int refCount =
70
+ static_cast <int >(ref_cnt.fetch_sub (1 , std::memory_order_acq_rel));
69
71
assert (refCount >= 0 && " Reference count was already zero." );
70
72
if (refCount == 0 ) delete this ;
71
73
}
0 commit comments