
Commit e8cee12

tsan/asan: first try on msvc atomics
llvm-svn: 159443
1 parent: af031a9


6 files changed: +234 additions, -144 deletions


compiler-rt/lib/sanitizer_common/sanitizer_atomic.h

Lines changed: 7 additions & 99 deletions
@@ -52,106 +52,14 @@ struct atomic_uintptr_t {
   volatile Type val_dont_use;
 };
 
-INLINE void atomic_signal_fence(memory_order) {
-  __asm__ __volatile__("" ::: "memory");
-}
-
-INLINE void atomic_thread_fence(memory_order) {
-  __sync_synchronize();
-}
+}  // namespace __sanitizer
 
-INLINE void proc_yield(int cnt) {
-  __asm__ __volatile__("" ::: "memory");
-#if defined(__i386__) || defined(__x86_64__)
-  for (int i = 0; i < cnt; i++)
-    __asm__ __volatile__("pause");
+#if defined(__GNUC__)
+# include "sanitizer_atomic_clang.h"
+#elif defined(_MSC_VER)
+# include "sanitizer_atomic_msvc.h"
+#else
+# error "Unsupported compiler"
 #endif
-  __asm__ __volatile__("" ::: "memory");
-}
-
-template<typename T>
-INLINE typename T::Type atomic_load(
-    const volatile T *a, memory_order mo) {
-  DCHECK(mo & (memory_order_relaxed | memory_order_consume
-      | memory_order_acquire | memory_order_seq_cst));
-  DCHECK(!((uptr)a % sizeof(*a)));
-  typename T::Type v;
-  if (mo == memory_order_relaxed) {
-    v = a->val_dont_use;
-  } else {
-    atomic_signal_fence(memory_order_seq_cst);
-    v = a->val_dont_use;
-    atomic_signal_fence(memory_order_seq_cst);
-  }
-  return v;
-}
-
-template<typename T>
-INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
-  DCHECK(mo & (memory_order_relaxed | memory_order_release
-      | memory_order_seq_cst));
-  DCHECK(!((uptr)a % sizeof(*a)));
-  if (mo == memory_order_relaxed) {
-    a->val_dont_use = v;
-  } else {
-    atomic_signal_fence(memory_order_seq_cst);
-    a->val_dont_use = v;
-    atomic_signal_fence(memory_order_seq_cst);
-  }
-  if (mo == memory_order_seq_cst)
-    atomic_thread_fence(memory_order_seq_cst);
-}
-
-template<typename T>
-INLINE typename T::Type atomic_fetch_add(volatile T *a,
-    typename T::Type v, memory_order mo) {
-  (void)mo;
-  DCHECK(!((uptr)a % sizeof(*a)));
-  return __sync_fetch_and_add(&a->val_dont_use, v);
-}
-
-template<typename T>
-INLINE typename T::Type atomic_fetch_sub(volatile T *a,
-    typename T::Type v, memory_order mo) {
-  (void)mo;
-  DCHECK(!((uptr)a % sizeof(*a)));
-  return __sync_fetch_and_add(&a->val_dont_use, -v);
-}
-
-template<typename T>
-INLINE typename T::Type atomic_exchange(volatile T *a,
-    typename T::Type v, memory_order mo) {
-  DCHECK(!((uptr)a % sizeof(*a)));
-  if (mo & (memory_order_release | memory_order_acq_rel | memory_order_seq_cst))
-    __sync_synchronize();
-  v = __sync_lock_test_and_set(&a->val_dont_use, v);
-  if (mo == memory_order_seq_cst)
-    __sync_synchronize();
-  return v;
-}
-
-template<typename T>
-INLINE bool atomic_compare_exchange_strong(volatile T *a,
-                                           typename T::Type *cmp,
-                                           typename T::Type xchg,
-                                           memory_order mo) {
-  typedef typename T::Type Type;
-  Type cmpv = *cmp;
-  Type prev = __sync_val_compare_and_swap(&a->val_dont_use, cmpv, xchg);
-  if (prev == cmpv)
-    return true;
-  *cmp = prev;
-  return false;
-}
-
-template<typename T>
-INLINE bool atomic_compare_exchange_weak(volatile T *a,
-                                         typename T::Type *cmp,
-                                         typename T::Type xchg,
-                                         memory_order mo) {
-  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
-}
-
-}  // namespace __sanitizer
 
 #endif  // SANITIZER_ATOMIC_H
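
With this change sanitizer_atomic.h keeps only the memory_order constants and the atomic wrapper structs and forwards all operations to a per-compiler backend; callers continue to include just sanitizer_atomic.h. A minimal usage sketch under that assumption (the counter and helper names below are illustrative, not part of this commit):

// Illustrative only: a relaxed event counter built on the dispatched API.
// The same source compiles with either backend selected above.
#include "sanitizer_common/sanitizer_atomic.h"

namespace __sanitizer {

static atomic_uint32_t g_event_count;

INLINE void RegisterEvent() {
  // Counting only; no ordering is required, so relaxed is enough.
  atomic_fetch_add(&g_event_count, 1, memory_order_relaxed);
}

INLINE u32 GetEventCount() {
  // Acquire pairs with any release-side store done by the producer.
  return atomic_load(&g_event_count, memory_order_acquire);
}

}  // namespace __sanitizer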

compiler-rt/lib/sanitizer_common/sanitizer_atomic_clang.h

Lines changed: 122 additions & 0 deletions
@@ -0,0 +1,122 @@ (new file)
//===-- sanitizer_atomic_clang.h --------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_CLANG_H
#define SANITIZER_ATOMIC_CLANG_H

namespace __sanitizer {

INLINE void atomic_signal_fence(memory_order) {
  __asm__ __volatile__("" ::: "memory");
}

INLINE void atomic_thread_fence(memory_order) {
  __sync_synchronize();
}

INLINE void proc_yield(int cnt) {
  __asm__ __volatile__("" ::: "memory");
#if defined(__i386__) || defined(__x86_64__)
  for (int i = 0; i < cnt; i++)
    __asm__ __volatile__("pause");
#endif
  __asm__ __volatile__("" ::: "memory");
}

template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}

template<typename T>
INLINE typename T::Type atomic_fetch_add(volatile T *a,
    typename T::Type v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return __sync_fetch_and_add(&a->val_dont_use, v);
}

template<typename T>
INLINE typename T::Type atomic_fetch_sub(volatile T *a,
    typename T::Type v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return __sync_fetch_and_add(&a->val_dont_use, -v);
}

template<typename T>
INLINE typename T::Type atomic_exchange(volatile T *a,
    typename T::Type v, memory_order mo) {
  DCHECK(!((uptr)a % sizeof(*a)));
  if (mo & (memory_order_release | memory_order_acq_rel | memory_order_seq_cst))
    __sync_synchronize();
  v = __sync_lock_test_and_set(&a->val_dont_use, v);
  if (mo == memory_order_seq_cst)
    __sync_synchronize();
  return v;
}

template<typename T>
INLINE bool atomic_compare_exchange_strong(volatile T *a,
                                           typename T::Type *cmp,
                                           typename T::Type xchg,
                                           memory_order mo) {
  typedef typename T::Type Type;
  Type cmpv = *cmp;
  Type prev = __sync_val_compare_and_swap(&a->val_dont_use, cmpv, xchg);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_CLANG_H
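
In this backend atomic_compare_exchange_weak simply forwards to the strong variant, but callers should still be written as retry loops so they stay correct if a genuinely weak CAS is substituted later. A sketch of that pattern (atomic_update_max is a hypothetical helper, not part of this commit):

// Hypothetical helper: atomically raise *a to at least v using a CAS loop.
template<typename T>
INLINE void atomic_update_max(volatile T *a, typename T::Type v) {
  typename T::Type cmp = atomic_load(a, memory_order_relaxed);
  // On failure, atomic_compare_exchange_weak refreshes cmp with the value
  // actually observed, so the loop re-checks the condition and retries.
  while (cmp < v &&
         !atomic_compare_exchange_weak(a, &cmp, v, memory_order_relaxed))
    ;
}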

compiler-rt/lib/sanitizer_common/sanitizer_atomic_msvc.h

Lines changed: 104 additions & 0 deletions
@@ -0,0 +1,104 @@ (new file)
//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_MSVC_H
#define SANITIZER_ATOMIC_MSVC_H

#include <intrin.h>

namespace __sanitizer {

INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}

INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}

INLINE void proc_yield(int cnt) {
  for (int i = 0; i < cnt; i++)
    _mm_pause();
}

template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}

INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}

INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cx, v
    xchg [eax], cx  // NOLINT
    mov v, cx
  }
  return v;
}

INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cl, v
    xchg [eax], cl  // NOLINT
    mov v, cl
  }
  return v;
}

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_CLANG_H
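
Two details of this first MSVC backend are worth noting: the inline __asm blocks build only with the 32-bit MSVC toolchain (the x64 compiler does not accept inline assembly), and the 8-bit exchange moves the value through the 16-bit cx register while the 16-bit exchange uses the 8-bit cl register, which looks transposed. A sketch of an intrinsic-based alternative, assuming a toolchain that provides _InterlockedExchange8 and _InterlockedExchange16 (the _intrin suffix marks these as hypothetical helpers, not part of the commit):

// Hypothetical, intrinsic-based variants of the two exchanges above.
INLINE u8 atomic_exchange_intrin(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u8)_InterlockedExchange8((volatile char*)&a->val_dont_use, (char)v);
}

INLINE u16 atomic_exchange_intrin(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u16)_InterlockedExchange16(
      (volatile short*)&a->val_dont_use, (short)v);
}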

compiler-rt/lib/sanitizer_common/sanitizer_posix.cc

Lines changed: 0 additions & 20 deletions
@@ -27,10 +27,6 @@
 #include <sys/types.h>
 #include <unistd.h>
 
-#ifdef ANDROID
-#include <sys/atomics.h>
-#endif
-
 namespace __sanitizer {
 
 // ------------- sanitizer_common.h
@@ -147,22 +143,6 @@ int Atexit(void (*function)(void)) {
   return atexit(function);
 }
 
-int AtomicInc(int *a) {
-#ifdef ANDROID
-  return __atomic_inc(a) + 1;
-#else
-  return __sync_add_and_fetch(a, 1);
-#endif
-}
-
-u16 AtomicExchange(u16 *a, u16 new_val) {
-  return __sync_lock_test_and_set(a, new_val);
-}
-
-u8 AtomicExchange(u8 *a, u8 new_val) {
-  return __sync_lock_test_and_set(a, new_val);
-}
-
 }  // namespace __sanitizer
 
 #endif  // __linux__ || __APPLE_
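
The deleted AtomicInc/AtomicExchange helpers (and their Android <sys/atomics.h> fallback) are superseded by the portable atomic_* API introduced above. A migration sketch for a call site, assuming its counter and flag are redeclared with the atomic wrapper types (the names below are illustrative, not from this commit):

// Before this commit (POSIX-only helpers, now removed):
//   int n = AtomicInc(&raw_counter);        // returned the new value
//   u8 old = AtomicExchange(&raw_flag, 1);
//
// After, with atomic wrapper types from sanitizer_atomic.h:
static atomic_uint32_t counter;
static atomic_uint8_t flag;

INLINE u32 IncCounter() {
  // atomic_fetch_add returns the previous value; add 1 to preserve the
  // "new value" semantics of the old AtomicInc.
  return atomic_fetch_add(&counter, 1, memory_order_relaxed) + 1;
}

INLINE u8 GrabFlag() {
  return atomic_exchange(&flag, 1, memory_order_acq_rel);
}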
