 #include <stdio.h>
 #endif

-namespace swift {
-
 /// This is a node in a concurrent linked list.
 template <class ElemTy> struct ConcurrentListNode {
   ConcurrentListNode(ElemTy Elem) : Payload(Elem), Next(nullptr) {}
@@ -125,29 +123,6 @@ template <class ElemTy> struct ConcurrentList {
   std::atomic<ConcurrentListNode<ElemTy> *> First;
 };

-template <class T, bool Delete> class AtomicMaybeOwningPointer;
-
-template <class T>
-class AtomicMaybeOwningPointer<T, false> {
-public:
-  std::atomic<T*> Value;
-  constexpr AtomicMaybeOwningPointer(T *value) : Value(value) {}
-};
-
-template <class T>
-class AtomicMaybeOwningPointer<T, true> {
-public:
-  std::atomic<T*> Value;
-  constexpr AtomicMaybeOwningPointer(T *value) : Value(value) {}
-
-  ~AtomicMaybeOwningPointer() {
-    // This can use relaxed memory order because the client has to ensure
-    // that all accesses are safely completed and their effects fully
-    // visible before destruction occurs anyway.
-    ::delete Value.load(std::memory_order_relaxed);
-  }
-};
-
 /// A concurrent map that is implemented using a binary tree. It supports
 /// concurrent insertions but does not support removals or rebalancing of
 /// the tree.
@@ -165,8 +140,7 @@ class AtomicMaybeOwningPointer<T, true> {
 ///   /// where KeyTy is the type of the first argument to getOrInsert and
 ///   /// ArgTys is the type of the remaining arguments.
 ///   static size_t getExtraAllocationSize(KeyTy key, ArgTys...)
-template <class EntryTy, bool ProvideDestructor = true>
-class ConcurrentMap {
+template <class EntryTy> class ConcurrentMap {
   struct Node {
     std::atomic<Node*> Left;
     std::atomic<Node*> Right;
@@ -208,7 +182,7 @@ class ConcurrentMap {
   };

   /// The root of the tree.
-  AtomicMaybeOwningPointer<Node, ProvideDestructor> Root;
+  std::atomic<Node*> Root;

   /// This member stores the address of the last node that was found by the
   /// search procedure. We cache the last search to accelerate code that
@@ -221,14 +195,13 @@ class ConcurrentMap {
   ConcurrentMap(const ConcurrentMap &) = delete;
   ConcurrentMap &operator=(const ConcurrentMap &) = delete;

-  // ConcurrentMap<T, false> must have a trivial destructor.
-  ~ConcurrentMap() = default;
-
-public:
+  ~ConcurrentMap() {
+    ::delete Root.load(std::memory_order_relaxed);
+  }

 #ifndef NDEBUG
   void dump() const {
-    auto R = Root.Value.load(std::memory_order_acquire);
+    auto R = Root.load(std::memory_order_acquire);
     printf("digraph g {\n"
            "graph [ rankdir = \"TB\" ];\n"
            "node [ fontsize = \"16\" ];\n"
@@ -252,7 +225,7 @@ class ConcurrentMap {
     }

     // Search the tree, starting from the root.
-    Node *node = Root.Value.load(std::memory_order_acquire);
+    Node *node = Root.load(std::memory_order_acquire);
     while (node) {
       int comparisonResult = node->Payload.compareWithKey(key);
       if (comparisonResult == 0) {
@@ -285,7 +258,7 @@ class ConcurrentMap {
     Node *newNode = nullptr;

     // Start from the root.
-    auto edge = &Root.Value;
+    auto edge = &Root;

     while (true) {
       // Load the edge.
@@ -340,6 +313,4 @@ class ConcurrentMap {
   }
 };

-} // end namespace swift
-
 #endif // SWIFT_RUNTIME_CONCURRENTUTILS_H
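Note: the doc comment in the hunks above only sketches the contract an EntryTy must satisfy, namely a ternary compareWithKey and a static getExtraAllocationSize reporting how much trailing storage the entry needs. The snippet below is a minimal, hypothetical entry type written against that contract; the class name, the `const char *` key type, and the trailing-buffer layout are illustrative assumptions and are not taken from this change.

// Hypothetical entry type for the ConcurrentMap above; illustrative only.
#include <cstddef>
#include <cstring>

class HypotheticalStringEntry {
public:
  explicit HypotheticalStringEntry(const char *key) {
    // The map is assumed to allocate getExtraAllocationSize(key) extra bytes
    // immediately after this object; copy the key into that trailing space.
    std::strcpy(reinterpret_cast<char *>(this + 1), key);
  }

  const char *getKey() const {
    return reinterpret_cast<const char *>(this + 1);
  }

  // Ternary comparison used by the tree search: negative, zero, or positive
  // depending on how `key` orders relative to this entry.
  int compareWithKey(const char *key) const {
    return std::strcmp(key, getKey());
  }

  // Extra trailing bytes needed by an entry built from `key`
  // (the copied string plus its NUL terminator).
  static std::size_t getExtraAllocationSize(const char *key) {
    return std::strlen(key) + 1;
  }
};

With the simplified template in this diff, such a map is declared simply as ConcurrentMap<HypotheticalStringEntry>; the former ProvideDestructor parameter is gone and the map's destructor always deletes the root it owns.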
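The insertion path shown above ("Start from the root", `auto edge = &Root;`, then loading the edge) follows the usual lock-free publication pattern: construct the node privately, then presumably install it into an atomic edge with a compare-exchange, retrying or discarding the speculative node if another thread wins the race (the compare-exchange itself is outside the hunks shown here). Below is a standalone sketch of that pattern for the simpler linked-list case, mirroring the ConcurrentListNode layout from the first hunk; SketchNode, SketchList, and push are illustrative names, not part of the runtime.

#include <atomic>

// Minimal sketch of compare-exchange publication: Payload plus a Next link,
// with the head stored in an atomic pointer as in ConcurrentList::First.
template <class ElemTy> struct SketchNode {
  ElemTy Payload;
  SketchNode *Next;
  explicit SketchNode(ElemTy Elem) : Payload(Elem), Next(nullptr) {}
};

template <class ElemTy> struct SketchList {
  std::atomic<SketchNode<ElemTy> *> First{nullptr};

  void push(ElemTy Elem) {
    auto *N = new SketchNode<ElemTy>(Elem);
    // Point the new node at the currently visible head, then try to swing
    // First from that head to the new node. On failure, compare_exchange_weak
    // reloads the real head into N->Next, so the loop simply retries.
    N->Next = First.load(std::memory_order_relaxed);
    while (!First.compare_exchange_weak(N->Next, N,
                                        std::memory_order_release,
                                        std::memory_order_relaxed)) {
    }
  }
};

The release ordering on the successful exchange is what makes the fully constructed node visible to readers that acquire-load the head, which is the same reason the map's find() loads Root with std::memory_order_acquire.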