Commit b65e704

address feedback

1 parent a4202f4 commit b65e704

File tree (3 files changed, 76 additions, 82 deletions):

  libcxx/include/__atomic/atomic_base.h
  libcxx/include/__atomic/atomic_flag.h
  libcxx/include/__atomic/atomic_sync.h

libcxx/include/__atomic/atomic_base.h

Lines changed: 26 additions & 20 deletions
@@ -102,26 +102,6 @@ struct __atomic_base // false
     return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
   }
 
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI _Tp
-  __tag_invoke(__atomic_load_cpo, const __atomic_base& __this, memory_order __order) {
-    return __this.load(__order);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI _Tp
-  __tag_invoke(__atomic_load_cpo, const volatile __atomic_base& __this, memory_order __order) {
-    return __this.load(__order);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl<_Tp> const*
-  __tag_invoke(__atomic_contention_address_cpo, const __atomic_base& __this) {
-    return std::addressof(__this.__a_);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl<_Tp> const volatile*
-  __tag_invoke(__atomic_contention_address_cpo, const volatile __atomic_base& __this) {
-    return std::addressof(__this.__a_);
-  }
-
   _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const
       volatile _NOEXCEPT {
     std::__atomic_wait(*this, __v, __m);
@@ -216,6 +196,32 @@ struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
   _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
 };
 
+// here we need _IsIntegral because the default template argument is not enough
+// e.g __atomic_base<int> is __atomic_base<int, true>, which inherits from
+// __atomic_base<int, false> and the caller of the wait function is
+// __atomic_base<int, false>. So specializing __atomic_base<_Tp> does not work
+template <class _Tp, bool _IsIntegral>
+struct __atomic_waitable_customisations<__atomic_base<_Tp, _IsIntegral> > {
+  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_base<_Tp, _IsIntegral>& __a, memory_order __order) {
+    return __a.load(__order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI _Tp
+  __atomic_load(const volatile __atomic_base<_Tp, _IsIntegral>& __this, memory_order __order) {
+    return __this.load(__order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
+  __atomic_contention_address(const __atomic_base<_Tp, _IsIntegral>& __a) {
+    return std::addressof(__a.__a_);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_Tp>*
+  __atomic_contention_address(const volatile __atomic_base<_Tp, _IsIntegral>& __this) {
+    return std::addressof(__this.__a_);
+  }
+};
+
 _LIBCPP_END_NAMESPACE_STD
 
 #endif // _LIBCPP___ATOMIC_ATOMIC_BASE_H
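
The comment in the new specialization is the key design point: the wait machinery is driven from the __atomic_base<_Tp, false> subobject, so the customisation has to be specialized on both template parameters rather than on __atomic_base<_Tp> alone. The following standalone sketch (hypothetical names Base, WaitCustomisations, wait_load; not libc++ code) illustrates why matching both parameters makes the lookup succeed from the base layer.

#include <iostream>
#include <type_traits>

// "false" layer: owns the value, analogous to __atomic_base<_Tp, false>.
template <class T, bool IsIntegral = std::is_integral<T>::value>
struct Base {
  T value{};
  T load() const { return value; }
};

// Integral layer on top, analogous to __atomic_base<_Tp, true>.
template <class T>
struct Base<T, true> : Base<T, false> {};

// Customisation surface, analogous to __atomic_waitable_customisations.
template <class, class = void>
struct WaitCustomisations;

// Specialized on both parameters so Base<T, false> and Base<T, true> both match.
template <class T, bool IsIntegral>
struct WaitCustomisations<Base<T, IsIntegral> > {
  static T load(const Base<T, IsIntegral>& b) { return b.load(); }
};

// Analogous to __atomic_wait: it only ever sees the Base<T, false> layer, so a
// specialisation written only for Base<T> (i.e. Base<T, true>) would never be found here.
template <class T>
T wait_load(const Base<T, false>& b) {
  return WaitCustomisations<Base<T, false> >::load(b);
}

int main() {
  Base<int> x; // Base<int, true>, which inherits from Base<int, false>
  x.value = 42;
  std::cout << wait_load(x) << '\n'; // prints 42
}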

libcxx/include/__atomic/atomic_flag.h

Lines changed: 22 additions & 20 deletions
@@ -48,26 +48,6 @@ struct atomic_flag {
     __cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);
   }
 
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE
-  __tag_invoke(__atomic_load_cpo, const atomic_flag& __this, memory_order __order) {
-    return std::__cxx_atomic_load(&__this.__a_, __order);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE
-  __tag_invoke(__atomic_load_cpo, const volatile atomic_flag& __this, memory_order __order) {
-    return std::__cxx_atomic_load(&__this.__a_, __order);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> const*
-  __tag_invoke(__atomic_contention_address_cpo, const atomic_flag& __this) {
-    return std::addressof(__this.__a_);
-  }
-
-  friend _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> const volatile*
-  __tag_invoke(__atomic_contention_address_cpo, const volatile atomic_flag& __this) {
-    return std::addressof(__this.__a_);
-  }
-
   _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait(bool __v, memory_order __m = memory_order_seq_cst) const
       volatile _NOEXCEPT {
     std::__atomic_wait(*this, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);
@@ -98,6 +78,28 @@ struct atomic_flag {
   atomic_flag& operator=(const atomic_flag&) volatile = delete;
 };
 
+template <>
+struct __atomic_waitable_customisations<atomic_flag> {
+  static _LIBCPP_HIDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE __atomic_load(const atomic_flag& __a, memory_order __order) {
+    return std::__cxx_atomic_load(&__a.__a_, __order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI _LIBCPP_ATOMIC_FLAG_TYPE
+  __atomic_load(const volatile atomic_flag& __a, memory_order __order) {
+    return std::__cxx_atomic_load(&__a.__a_, __order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
+  __atomic_contention_address(const atomic_flag& __a) {
+    return std::addressof(__a.__a_);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE>*
+  __atomic_contention_address(const volatile atomic_flag& __a) {
+    return std::addressof(__a.__a_);
+  }
+};
+
 inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT { return __o->test(); }
 
 inline _LIBCPP_HIDE_FROM_ABI bool atomic_flag_test(const atomic_flag* __o) _NOEXCEPT { return __o->test(); }
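
For context, this is the user-facing behaviour the atomic_flag specialisation feeds into: atomic_flag::wait polls the value through __atomic_load and blocks on the contention address, and a notify wakes the waiter. A minimal usage sketch of the plain C++20 API (not part of this commit):

#include <atomic>
#include <thread>

int main() {
  std::atomic_flag flag; // C++20: value-initialised to the clear state

  std::thread waiter([&] {
    flag.wait(false); // returns once the flag's value is no longer false
  });

  flag.test_and_set(); // change the value the waiter is polling
  flag.notify_one();   // wake the waiter; in libc++ this routes through the wait/notify machinery above
  waiter.join();
}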

libcxx/include/__atomic/atomic_sync.h

Lines changed: 28 additions & 42 deletions
@@ -21,6 +21,7 @@
 #include <__type_traits/conjunction.h>
 #include <__type_traits/decay.h>
 #include <__type_traits/invoke.h>
+#include <__type_traits/void_t.h>
 #include <__utility/declval.h>
 #include <cstring>
 
@@ -36,40 +37,30 @@ _LIBCPP_BEGIN_NAMESPACE_STD
 // - __atomic_notify_one
 // - __atomic_notify_all
 // Note that std::atomic<T>::wait was back-ported to C++03
-// there the below implementations look ugly to support C++03
-
-// NOLINTBEGIN(libcpp-robust-against-adl)
-struct __atomic_load_cpo {
-  template <class _Tp>
-  using _Ret = decltype(__tag_invoke(
-      std::declval<__atomic_load_cpo>(), std::declval<const _Tp&>(), std::declval<memory_order>()));
-
-  template <class _Tp>
-  _LIBCPP_HIDE_FROM_ABI _Ret<_Tp> operator()(const _Tp& __t, memory_order __order) const _NOEXCEPT {
-    return __tag_invoke(*this, __t, __order);
-  }
+// The below implementations look ugly to support C++03
+template <class _Tp, class = void>
+struct __atomic_waitable_customisations {
+  template <class _AtomicWaitable>
+  static void __atomic_load(_AtomicWaitable&&, memory_order) = delete;
+
+  template <class _AtomicWaitable>
+  static void __atomic_contention_address(_AtomicWaitable&&) = delete;
 };
-// TODO: if we can deprecate std::atomic<T>::wait before c++17, we could add
-// inline constexpr __atomic_load_cpo __atomic_load{};
-
-struct __atomic_contention_address_cpo {
-  template <class _Tp>
-  using _Ret = decltype(__tag_invoke(std::declval<__atomic_contention_address_cpo>(), std::declval<const _Tp&>()));
 
-  template <class _Tp>
-  _LIBCPP_HIDE_FROM_ABI _Ret<_Tp> operator()(const _Tp& __t) const _NOEXCEPT {
-    return __tag_invoke(*this, __t);
-  }
-};
-// TODO: if we can deprecate std::atomic<T>::wait before c++17, we could add
-// inline constexpr __atomic_contention_address_cpo __atomic_contention_address{};
+template <class _Tp>
+struct __atomic_waitable_customisations<_Tp, __enable_if_t<!__is_same(_Tp, __decay_t<_Tp>)> >
+    : __atomic_waitable_customisations<__decay_t<_Tp> > {};
 
-// NOLINTEND(libcpp-robust-against-adl)
+template <class _Tp, class = void>
+struct __atomic_waitable : false_type {};
 
 template <class _Tp>
-using __atomic_waitable =
-    _And<__invokable<__atomic_load_cpo, const _Tp&, memory_order>,
-         __invokable<__atomic_contention_address_cpo, const _Tp&> >;
+struct __atomic_waitable<
+    _Tp,
+    __void_t<decltype(__atomic_waitable_customisations<_Tp>::__atomic_load(
+                 std::declval<const _Tp&>(), std::declval<memory_order>())),
+             decltype(__atomic_waitable_customisations<_Tp>::__atomic_contention_address(std::declval<const _Tp&>()))> >
+    : true_type {};
 
 template <class _AtomicWaitable, class _Poll>
 struct __atomic_wait_poll_impl {
@@ -78,8 +69,7 @@ struct __atomic_wait_poll_impl {
   memory_order __order_;
 
   _LIBCPP_HIDE_FROM_ABI bool operator()() const {
-    __atomic_load_cpo __atomic_load = {};
-    auto __current_val = __atomic_load(__a_, __order_);
+    auto __current_val = __atomic_waitable_customisations<_AtomicWaitable>::__atomic_load(__a_, __order_);
     return __poll_(__current_val);
   }
 };
@@ -111,8 +101,7 @@ struct __atomic_wait_backoff_impl {
   __update_monitor_val_and_poll(__cxx_atomic_contention_t const volatile*, __cxx_contention_t& __monitor_val) const {
     // In case the contention type happens to be __cxx_atomic_contention_t, i.e. __cxx_atomic_impl<int64_t>,
     // the platform wait is directly monitoring the atomic value itself.
-    __atomic_load_cpo __atomic_load = {};
-    __monitor_val = __atomic_load(__a_, __order_);
+    __monitor_val = __atomic_waitable_customisations<_AtomicWaitable>::__atomic_load(__a_, __order_);
     return __poll_(__monitor_val);
   }
 
@@ -121,17 +110,15 @@ struct __atomic_wait_backoff_impl {
   __update_monitor_val_and_poll(void const volatile* __contention_address, __cxx_contention_t& __monitor_val) const {
     // In case the contention type is anything else, platform wait is monitoring a __cxx_atomic_contention_t
    // from the global pool, the monitor comes from __libcpp_atomic_monitor
-    __monitor_val = std::__libcpp_atomic_monitor(__contention_address);
-    __atomic_load_cpo __atomic_load = {};
-    auto __current_val = __atomic_load(__a_, __order_);
+    __monitor_val = std::__libcpp_atomic_monitor(__contention_address);
+    auto __current_val = __atomic_waitable_customisations<_AtomicWaitable>::__atomic_load(__a_, __order_);
     return __poll_(__current_val);
  }
 
   _LIBCPP_AVAILABILITY_SYNC
   _LIBCPP_HIDE_FROM_ABI bool operator()(chrono::nanoseconds __elapsed) const {
     if (__elapsed > chrono::microseconds(64)) {
-      __atomic_contention_address_cpo __atomic_contention_address = {};
-      auto __contention_address = __atomic_contention_address(__a_);
+      auto __contention_address = __atomic_waitable_customisations<_AtomicWaitable>::__atomic_contention_address(__a_);
       __cxx_contention_t __monitor_val;
       if (__update_monitor_val_and_poll(__contention_address, __monitor_val))
         return true;
@@ -156,15 +143,13 @@ __atomic_wait_unless(const _AtomicWaitable& __a, _Poll&& __poll, memory_order __
 template <class _AtomicWaitable>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_contention_address_cpo __atomic_contention_address = {};
-  std::__cxx_atomic_notify_one(__atomic_contention_address(__a));
+  std::__cxx_atomic_notify_one(__atomic_waitable_customisations<_AtomicWaitable>::__atomic_contention_address(__a));
 }
 
 template <class _AtomicWaitable>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
   static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
-  __atomic_contention_address_cpo __atomic_contention_address = {};
-  std::__cxx_atomic_notify_all(__atomic_contention_address(__a));
+  std::__cxx_atomic_notify_all(__atomic_waitable_customisations<_AtomicWaitable>::__atomic_contention_address(__a));
 }
 
 #else // _LIBCPP_HAS_NO_THREADS
@@ -199,6 +184,7 @@ struct __bind_nonatomic_equal {
 template <class _AtomicWaitable, class _Up>
 _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
 __atomic_wait(_AtomicWaitable& __a, _Up __val, memory_order __order) {
+  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
   __bind_nonatomic_equal<_Up> __nonatomic_equal = {__val};
   std::__atomic_wait_unless(__a, __nonatomic_equal, __order);
 }
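
The new __atomic_waitable trait replaces the __invokable-based check with the void_t detection idiom: the primary __atomic_waitable_customisations template only has deleted members, so the decltype expressions are well-formed only when a specialisation provides __atomic_load and __atomic_contention_address. A self-contained sketch of the same idiom with hypothetical names (std::void_t standing in for the internal __void_t; compile with -std=c++17 or later):

#include <type_traits>
#include <utility>

// Customisation surface: the primary template only offers a deleted member, so the
// detection expression below is ill-formed unless a specialisation provides load().
template <class T, class = void>
struct waitable_customisations {
  template <class U>
  static void load(U&&) = delete;
};

struct Plain {}; // no specialisation: should not be detected as waitable

struct Ticket {
  int value;
};
template <>
struct waitable_customisations<Ticket> {
  static int load(const Ticket& t) { return t.value; }
};

// Detection trait: false by default, true when the customisation call is well-formed.
template <class T, class = void>
struct is_waitable : std::false_type {};

template <class T>
struct is_waitable<T, std::void_t<decltype(waitable_customisations<T>::load(std::declval<const T&>()))> >
    : std::true_type {};

static_assert(!is_waitable<Plain>::value, "calling the deleted primary member is a substitution failure");
static_assert(is_waitable<Ticket>::value, "the specialisation makes the call well-formed");

int main() {}

The deleted members in the primary template document the required interface while still registering as substitution failures, which is what lets the trait degrade to false_type instead of producing a hard error.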
