Skip to content

Commit a3efa16

Browse files
authored
Revert "Revert "[tsan] Don't use enum __tsan_memory_order in tsan interface headers""
This reverts commit b14c436.
1 parent c822135 commit a3efa16

File tree

4 files changed

+336
-336
lines changed

4 files changed

+336
-336
lines changed

compiler-rt/include/sanitizer/tsan_interface_atomic.h

Lines changed: 82 additions & 87 deletions
Original file line numberDiff line numberDiff line change
@@ -43,183 +43,178 @@ typedef enum {
4343
} __tsan_memory_order;
4444

4545
__tsan_atomic8 SANITIZER_CDECL
46-
__tsan_atomic8_load(const volatile __tsan_atomic8 *a, __tsan_memory_order mo);
46+
__tsan_atomic8_load(const volatile __tsan_atomic8 *a, int mo);
4747
__tsan_atomic16 SANITIZER_CDECL
48-
__tsan_atomic16_load(const volatile __tsan_atomic16 *a, __tsan_memory_order mo);
48+
__tsan_atomic16_load(const volatile __tsan_atomic16 *a, int mo);
4949
__tsan_atomic32 SANITIZER_CDECL
50-
__tsan_atomic32_load(const volatile __tsan_atomic32 *a, __tsan_memory_order mo);
50+
__tsan_atomic32_load(const volatile __tsan_atomic32 *a, int mo);
5151
__tsan_atomic64 SANITIZER_CDECL
52-
__tsan_atomic64_load(const volatile __tsan_atomic64 *a, __tsan_memory_order mo);
52+
__tsan_atomic64_load(const volatile __tsan_atomic64 *a, int mo);
5353
#if __TSAN_HAS_INT128
54-
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_load(
55-
const volatile __tsan_atomic128 *a, __tsan_memory_order mo);
54+
__tsan_atomic128 SANITIZER_CDECL
55+
__tsan_atomic128_load(const volatile __tsan_atomic128 *a, int mo);
5656
#endif
5757

5858
void SANITIZER_CDECL __tsan_atomic8_store(volatile __tsan_atomic8 *a,
59-
__tsan_atomic8 v,
60-
__tsan_memory_order mo);
59+
__tsan_atomic8 v, int mo);
6160
void SANITIZER_CDECL __tsan_atomic16_store(volatile __tsan_atomic16 *a,
62-
__tsan_atomic16 v,
63-
__tsan_memory_order mo);
61+
__tsan_atomic16 v, int mo);
6462
void SANITIZER_CDECL __tsan_atomic32_store(volatile __tsan_atomic32 *a,
65-
__tsan_atomic32 v,
66-
__tsan_memory_order mo);
63+
__tsan_atomic32 v, int mo);
6764
void SANITIZER_CDECL __tsan_atomic64_store(volatile __tsan_atomic64 *a,
68-
__tsan_atomic64 v,
69-
__tsan_memory_order mo);
65+
__tsan_atomic64 v, int mo);
7066
#if __TSAN_HAS_INT128
7167
void SANITIZER_CDECL __tsan_atomic128_store(volatile __tsan_atomic128 *a,
72-
__tsan_atomic128 v,
73-
__tsan_memory_order mo);
68+
__tsan_atomic128 v, int mo);
7469
#endif
7570

76-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_exchange(
77-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
71+
__tsan_atomic8 SANITIZER_CDECL
72+
__tsan_atomic8_exchange(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
7873
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_exchange(
79-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
74+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
8075
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_exchange(
81-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
76+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
8277
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_exchange(
83-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
78+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
8479
#if __TSAN_HAS_INT128
8580
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_exchange(
86-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
81+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
8782
#endif
8883

89-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_add(
90-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
84+
__tsan_atomic8 SANITIZER_CDECL
85+
__tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
9186
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_add(
92-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
87+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
9388
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_add(
94-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
89+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
9590
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_add(
96-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
91+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
9792
#if __TSAN_HAS_INT128
9893
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_add(
99-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
94+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
10095
#endif
10196

102-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_sub(
103-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
97+
__tsan_atomic8 SANITIZER_CDECL
98+
__tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
10499
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_sub(
105-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
100+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
106101
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_sub(
107-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
102+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
108103
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_sub(
109-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
104+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
110105
#if __TSAN_HAS_INT128
111106
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_sub(
112-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
107+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
113108
#endif
114109

115-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_and(
116-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
110+
__tsan_atomic8 SANITIZER_CDECL
111+
__tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
117112
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_and(
118-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
113+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
119114
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_and(
120-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
115+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
121116
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_and(
122-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
117+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
123118
#if __TSAN_HAS_INT128
124119
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_and(
125-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
120+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
126121
#endif
127122

128-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_or(
129-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
123+
__tsan_atomic8 SANITIZER_CDECL
124+
__tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
130125
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_or(
131-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
126+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
132127
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_or(
133-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
128+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
134129
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_or(
135-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
130+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
136131
#if __TSAN_HAS_INT128
137132
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_or(
138-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
133+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
139134
#endif
140135

141-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_xor(
142-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
136+
__tsan_atomic8 SANITIZER_CDECL
137+
__tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
143138
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_xor(
144-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
139+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
145140
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_xor(
146-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
141+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
147142
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_xor(
148-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
143+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
149144
#if __TSAN_HAS_INT128
150145
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_xor(
151-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
146+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
152147
#endif
153148

154-
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_nand(
155-
volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
149+
__tsan_atomic8 SANITIZER_CDECL
150+
__tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
156151
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_nand(
157-
volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
152+
volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
158153
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_nand(
159-
volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
154+
volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
160155
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_nand(
161-
volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
156+
volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
162157
#if __TSAN_HAS_INT128
163158
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_nand(
164-
volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
159+
volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
165160
#endif
166161

167162
int SANITIZER_CDECL __tsan_atomic8_compare_exchange_weak(
168-
volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
169-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
163+
volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
164+
int fail_mo);
170165
int SANITIZER_CDECL __tsan_atomic16_compare_exchange_weak(
171-
volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
172-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
166+
volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
167+
int fail_mo);
173168
int SANITIZER_CDECL __tsan_atomic32_compare_exchange_weak(
174-
volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
175-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
169+
volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
170+
int fail_mo);
176171
int SANITIZER_CDECL __tsan_atomic64_compare_exchange_weak(
177-
volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
178-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
172+
volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
173+
int fail_mo);
179174
#if __TSAN_HAS_INT128
180175
int SANITIZER_CDECL __tsan_atomic128_compare_exchange_weak(
181176
volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
182-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
177+
int mo, int fail_mo);
183178
#endif
184179

185180
int SANITIZER_CDECL __tsan_atomic8_compare_exchange_strong(
186-
volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
187-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
181+
volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
182+
int fail_mo);
188183
int SANITIZER_CDECL __tsan_atomic16_compare_exchange_strong(
189-
volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
190-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
184+
volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
185+
int fail_mo);
191186
int SANITIZER_CDECL __tsan_atomic32_compare_exchange_strong(
192-
volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
193-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
187+
volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
188+
int fail_mo);
194189
int SANITIZER_CDECL __tsan_atomic64_compare_exchange_strong(
195-
volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
196-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
190+
volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
191+
int fail_mo);
197192
#if __TSAN_HAS_INT128
198193
int SANITIZER_CDECL __tsan_atomic128_compare_exchange_strong(
199194
volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
200-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
195+
int mo, int fail_mo);
201196
#endif
202197

203198
__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_compare_exchange_val(
204-
volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
205-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
199+
volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v, int mo,
200+
int fail_mo);
206201
__tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_compare_exchange_val(
207-
volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
208-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
202+
volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v, int mo,
203+
int fail_mo);
209204
__tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_compare_exchange_val(
210-
volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
211-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
205+
volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v, int mo,
206+
int fail_mo);
212207
__tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_compare_exchange_val(
213-
volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
214-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
208+
volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v, int mo,
209+
int fail_mo);
215210
#if __TSAN_HAS_INT128
216211
__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_compare_exchange_val(
217212
volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
218-
__tsan_memory_order mo, __tsan_memory_order fail_mo);
213+
int mo, int fail_mo);
219214
#endif
220215

221-
void SANITIZER_CDECL __tsan_atomic_thread_fence(__tsan_memory_order mo);
222-
void SANITIZER_CDECL __tsan_atomic_signal_fence(__tsan_memory_order mo);
216+
void SANITIZER_CDECL __tsan_atomic_thread_fence(int mo);
217+
void SANITIZER_CDECL __tsan_atomic_signal_fence(int mo);
223218

224219
#ifdef __cplusplus
225220
} // extern "C"

compiler-rt/lib/tsan/rtl/tsan_interceptors_mac.cpp

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -40,14 +40,15 @@ int setcontext(const ucontext_t *ucp);
4040

4141
namespace __tsan {
4242

43-
// The non-barrier versions of OSAtomic* functions are semantically mo_relaxed,
44-
// but the two variants (e.g. OSAtomicAdd32 and OSAtomicAdd32Barrier) are
45-
// actually aliases of each other, and we cannot have different interceptors for
46-
// them, because they're actually the same function. Thus, we have to stay
47-
// conservative and treat the non-barrier versions as mo_acq_rel.
48-
static constexpr morder kMacOrderBarrier = mo_acq_rel;
49-
static constexpr morder kMacOrderNonBarrier = mo_acq_rel;
50-
static constexpr morder kMacFailureOrder = mo_relaxed;
43+
// The non-barrier versions of OSAtomic* functions are semantically
44+
// morder::relaxed, but the two variants (e.g. OSAtomicAdd32 and
45+
// OSAtomicAdd32Barrier) are actually aliases of each other, and we cannot have
46+
// different interceptors for them, because they're actually the same function.
47+
// Thus, we have to stay conservative and treat the non-barrier versions as
48+
// morder::acq_rel.
49+
static constexpr morder kMacOrderBarrier = morder::acq_rel;
50+
static constexpr morder kMacOrderNonBarrier = morder::acq_rel;
51+
static constexpr morder kMacFailureOrder = morder::relaxed;
5152

5253
# define OSATOMIC_INTERCEPTOR(return_t, t, tsan_t, f, tsan_atomic_f, mo) \
5354
TSAN_INTERCEPTOR(return_t, f, t x, volatile t *ptr) { \
@@ -464,7 +465,7 @@ struct fake_shared_weak_count {
464465
// Shared and weak pointers in C++ maintain reference counts via atomics in
465466
// libc++.dylib, which are TSan-invisible, and this leads to false positives in
466467
// destructor code. These interceptors re-implements the whole functions so that
467-
// the mo_acq_rel semantics of the atomic decrement are visible.
468+
// the morder::acq_rel semantics of the atomic decrement are visible.
468469
//
469470
// Unfortunately, the interceptors cannot simply Acquire/Release some sync
470471
// object and call the original function, because it would have a race between
@@ -479,11 +480,11 @@ STDCXX_INTERCEPTOR(void, _ZNSt3__119__shared_weak_count16__release_sharedEv,
479480

480481
SCOPED_TSAN_INTERCEPTOR(_ZNSt3__119__shared_weak_count16__release_sharedEv,
481482
o);
482-
if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, mo_release) == 0) {
483+
if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, morder::release) == 0) {
483484
Acquire(thr, pc, (uptr)&o->shared_owners);
484485
o->on_zero_shared();
485-
if (__tsan_atomic64_fetch_add(&o->shared_weak_owners, -1, mo_release) ==
486-
0) {
486+
if (__tsan_atomic64_fetch_add(&o->shared_weak_owners, -1,
487+
morder::release) == 0) {
487488
Acquire(thr, pc, (uptr)&o->shared_weak_owners);
488489
o->on_zero_shared_weak();
489490
}
@@ -496,7 +497,7 @@ STDCXX_INTERCEPTOR(bool, _ZNSt3__114__shared_count16__release_sharedEv,
496497
return REAL(_ZNSt3__114__shared_count16__release_sharedEv)(o);
497498

498499
SCOPED_TSAN_INTERCEPTOR(_ZNSt3__114__shared_count16__release_sharedEv, o);
499-
if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, mo_release) == 0) {
500+
if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, morder::release) == 0) {
500501
Acquire(thr, pc, (uptr)&o->shared_owners);
501502
o->on_zero_shared();
502503
return true;

0 commit comments

Comments
 (0)