@@ -9,9 +9,10 @@
 #ifndef _LIBCPP___ATOMIC_ATOMIC_H
 #define _LIBCPP___ATOMIC_ATOMIC_H
 
-#include <__atomic/atomic_base.h>
+#include <__atomic/atomic_sync.h>
 #include <__atomic/check_memory_order.h>
 #include <__atomic/cxx_atomic_impl.h>
+#include <__atomic/is_always_lock_free.h>
 #include <__atomic/memory_order.h>
 #include <__config>
 #include <__cstddef/ptrdiff_t.h>
@@ -21,6 +22,7 @@
 #include <__type_traits/is_floating_point.h>
 #include <__type_traits/is_function.h>
 #include <__type_traits/is_integral.h>
+#include <__type_traits/is_nothrow_constructible.h>
 #include <__type_traits/is_same.h>
 #include <__type_traits/remove_const.h>
 #include <__type_traits/remove_pointer.h>
@@ -34,6 +36,197 @@
 
 _LIBCPP_BEGIN_NAMESPACE_STD
 
+template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
+struct __atomic_base // false
+{
+  mutable __cxx_atomic_impl<_Tp> __a_;
+
+#if _LIBCPP_STD_VER >= 17
+  static constexpr bool is_always_lock_free = __libcpp_is_always_lock_free<__cxx_atomic_impl<_Tp> >::__value;
+#endif
+
+  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const volatile _NOEXCEPT {
+    return __cxx_atomic_is_lock_free(sizeof(__cxx_atomic_impl<_Tp>));
+  }
+  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const _NOEXCEPT {
+    return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
+  }
+  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
+      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
+    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
+      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
+    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
+      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
+    return std::__cxx_atomic_load(std::addressof(__a_), __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
+      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
+    return std::__cxx_atomic_load(std::addressof(__a_), __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
+  _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
+  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
+      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
+      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
+      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
+      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
+    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI bool
+  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
+  }
+
+#if _LIBCPP_STD_VER >= 20
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const
+      volatile _NOEXCEPT {
+    std::__atomic_wait(*this, __v, __m);
+  }
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
+  wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
+    std::__atomic_wait(*this, __v, __m);
+  }
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() volatile _NOEXCEPT {
+    std::__atomic_notify_one(*this);
+  }
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() _NOEXCEPT { std::__atomic_notify_one(*this); }
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() volatile _NOEXCEPT {
+    std::__atomic_notify_all(*this);
+  }
+  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() _NOEXCEPT { std::__atomic_notify_all(*this); }
+#endif // _LIBCPP_STD_VER >= 20
+
+#if _LIBCPP_STD_VER >= 20
+  _LIBCPP_HIDE_FROM_ABI constexpr __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
+#else
+  _LIBCPP_HIDE_FROM_ABI __atomic_base() _NOEXCEPT = default;
+#endif
+
+  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
+
+  __atomic_base(const __atomic_base&) = delete;
+};
+
+// atomic<Integral>
+
+template <class _Tp>
+struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
+  using __base = __atomic_base<_Tp, false>;
+
+  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20 __atomic_base() _NOEXCEPT = default;
+
+  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
+
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
+    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
+  }
+  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
+    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
+  }
+
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
+};
+
+// Here we need _IsIntegral because the default template argument is not enough
+// e.g __atomic_base<int> is __atomic_base<int, true>, which inherits from
+// __atomic_base<int, false> and the caller of the wait function is
+// __atomic_base<int, false>. So specializing __atomic_base<_Tp> does not work
+template <class _Tp, bool _IsIntegral>
+struct __atomic_waitable_traits<__atomic_base<_Tp, _IsIntegral> > {
+  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_base<_Tp, _IsIntegral>& __a, memory_order __order) {
+    return __a.load(__order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI _Tp
+  __atomic_load(const volatile __atomic_base<_Tp, _IsIntegral>& __this, memory_order __order) {
+    return __this.load(__order);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
+  __atomic_contention_address(const __atomic_base<_Tp, _IsIntegral>& __a) {
+    return std::addressof(__a.__a_);
+  }
+
+  static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_Tp>*
+  __atomic_contention_address(const volatile __atomic_base<_Tp, _IsIntegral>& __this) {
+    return std::addressof(__this.__a_);
+  }
+};
+
 template <class _Tp>
 struct atomic : public __atomic_base<_Tp> {
   using __base = __atomic_base<_Tp>;
@@ -123,6 +316,9 @@ struct atomic<_Tp*> : public __atomic_base<_Tp*> {
   atomic& operator=(const atomic&) volatile = delete;
 };
 
+template <class _Tp>
+struct __atomic_waitable_traits<atomic<_Tp> > : __atomic_waitable_traits<__atomic_base<_Tp> > {};
+
 #if _LIBCPP_STD_VER >= 20
 template <class _Tp>
   requires is_floating_point_v<_Tp>