@@ -269,16 +269,45 @@ static constexpr size_t globalQueueCacheCount =
269
269
static_cast <size_t >(JobPriority::UserInteractive) + 1;
270
270
static std::atomic<dispatch_queue_t > globalQueueCache[globalQueueCacheCount];
271
271
272
+ #ifdef SWIFT_CONCURRENCY_BACK_DEPLOYMENT
273
+ extern " C" void dispatch_queue_set_width (dispatch_queue_t dq, long width);
274
+ #endif
275
+
272
276
/// Return the dispatch queue used to run jobs at the given priority,
/// populating the global queue cache on first use.
///
/// \param priority the job priority; must map to a slot within
///        globalQueueCache, otherwise the process is aborted via
///        swift_Concurrency_fatalError.
/// \returns a dispatch queue appropriate for \p priority; never null.
static dispatch_queue_t getGlobalQueue(JobPriority priority) {
  size_t numericPriority = static_cast<size_t>(priority);
  if (numericPriority >= globalQueueCacheCount)
    swift_Concurrency_fatalError(0, "invalid job priority %#zx");

#ifdef SWIFT_CONCURRENCY_BACK_DEPLOYMENT
  // The back-deployment path publishes a queue it constructed itself (see
  // below), so readers need an acquire load to observe the queue's
  // initialization, paired with the release CAS at the publish site.
  std::memory_order loadOrder = std::memory_order_acquire;
#else
  // The OS path caches a process-global queue obtained from dispatch, so no
  // publication ordering is needed; a relaxed load suffices.
  std::memory_order loadOrder = std::memory_order_relaxed;
#endif

  auto *ptr = &globalQueueCache[numericPriority];
  auto queue = ptr->load(loadOrder);
  if (SWIFT_LIKELY(queue))
    return queue;

#ifdef SWIFT_CONCURRENCY_BACK_DEPLOYMENT
  // Width -3 requests one worker per logical CPU; matches dispatch's
  // DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS, which our headers don't expose.
  const int DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS = -3;

  // Create a new cooperative concurrent queue and swap it in.
  dispatch_queue_attr_t newQueueAttr = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_CONCURRENT, (dispatch_qos_class_t)priority, 0);
  dispatch_queue_t newQueue = dispatch_queue_create(
      "Swift global concurrent queue", newQueueAttr);
  dispatch_queue_set_width(newQueue, DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS);

  // Publish with release so the acquire load above sees a fully initialized
  // queue. If the CAS fails, another thread won the race: use its queue
  // (reloaded into `queue` with acquire ordering) and drop ours.
  if (!ptr->compare_exchange_strong(queue, newQueue,
                                    /*success*/ std::memory_order_release,
                                    /*failure*/ std::memory_order_acquire)) {
    dispatch_release(newQueue);
    return queue;
  }

  return newQueue;
#else
  // If we don't have a queue cached for this priority, cache it now. This may
  // race with other threads doing this at the same time for this priority, but
  // that's OK, they'll all end up writing the same value.
  // NOTE(review): this statement fell in the omitted context between the two
  // diff hunks of the pasted source; reconstructed from the surrounding
  // comments -- confirm against the original file.
  queue = dispatch_get_global_queue((dispatch_qos_class_t)priority,
                                    /*flags*/ 0);

  // Unconditionally store it back in the cache. If we raced with another
  // thread, we'll just overwrite the entry with the same value.
  ptr->store(queue, std::memory_order_relaxed);
#endif

  return queue;
}
0 commit comments