🍒 5.5+ [Concurrency] set queue width on non apple platforms #39766

Merged

24 changes: 24 additions & 0 deletions stdlib/public/Concurrency/GlobalExecutor.cpp
@@ -265,6 +265,10 @@ static constexpr size_t globalQueueCacheCount =
    static_cast<size_t>(JobPriority::UserInteractive) + 1;
static std::atomic<dispatch_queue_t> globalQueueCache[globalQueueCacheCount];

#ifndef __APPLE__
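// dispatch_queue_set_width is part of libdispatch's private interface
// (declared in queue_private.h rather than the public headers), so it is
// forward-declared here for non-Apple builds.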
extern "C" void dispatch_queue_set_width(dispatch_queue_t dq, long width);
#endif

static dispatch_queue_t getGlobalQueue(JobPriority priority) {
  size_t numericPriority = static_cast<size_t>(priority);
  if (numericPriority >= globalQueueCacheCount)
@@ -275,6 +279,25 @@ static dispatch_queue_t getGlobalQueue(JobPriority priority) {
  if (SWIFT_LIKELY(queue))
    return queue;

#ifndef __APPLE__
  // -3 mirrors DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS from libdispatch's
  // private queue_private.h: it tells dispatch_queue_set_width to cap the
  // queue's width at the number of logical CPUs.
  const int DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS = -3;

  // Create a new cooperative concurrent queue and swap it in.
  dispatch_queue_attr_t newQueueAttr = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_CONCURRENT, (dispatch_qos_class_t)priority, 0);
  dispatch_queue_t newQueue = dispatch_queue_create(
      "Swift global concurrent queue", newQueueAttr);
  dispatch_queue_set_width(newQueue, DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS);

  if (!ptr->compare_exchange_strong(queue, newQueue,
                                    /*success*/ std::memory_order_release,
                                    /*failure*/ std::memory_order_acquire)) {
    dispatch_release(newQueue);
    return queue;
  }

  return newQueue;
#else
  // If we don't have a queue cached for this priority, cache it now. This may
  // race with other threads doing this at the same time for this priority, but
  // that's OK, they'll all end up writing the same value.
@@ -284,6 +307,7 @@ static dispatch_queue_t getGlobalQueue(JobPriority priority) {
  // Unconditionally store it back in the cache. If we raced with another
  // thread, we'll just overwrite the entry with the same value.
  ptr->store(queue, std::memory_order_relaxed);
#endif

  return queue;
}
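For reference, the non-Apple branch above boils down to the following standalone pattern. This is a minimal sketch, not code from the patch: it assumes swift-corelibs-libdispatch on a Linux-style platform, and the helper name makeWidthCappedQueue, the queue label, and the task count are illustrative only. It creates a QoS-classed concurrent queue and caps its width at the number of logical CPUs via the private dispatch_queue_set_width entry point, the same calls the patch adds, minus the runtime's atomic per-priority cache.

// Standalone sketch: a concurrent dispatch queue whose width is capped at the
// number of logical CPUs, approximating a cooperative pool.
#include <dispatch/dispatch.h>

// Private libdispatch entry point (queue_private.h), as in the patch above.
extern "C" void dispatch_queue_set_width(dispatch_queue_t dq, long width);

static dispatch_queue_t makeWidthCappedQueue(dispatch_qos_class_t qos) {
  // Sentinel understood by dispatch_queue_set_width: width = logical CPUs.
  const long DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS = -3;

  dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(
      DISPATCH_QUEUE_CONCURRENT, qos, /*relative_priority=*/0);
  dispatch_queue_t queue = dispatch_queue_create("width-capped queue", attr);
  dispatch_queue_set_width(queue, DISPATCH_QUEUE_WIDTH_MAX_LOGICAL_CPUS);
  return queue;
}

static void doWork(void *context) {
  // At most "logical CPU count" work items run concurrently on this queue.
  (void)context;
}

int main() {
  dispatch_queue_t queue = makeWidthCappedQueue(QOS_CLASS_DEFAULT);
  dispatch_group_t group = dispatch_group_create();

  for (int i = 0; i < 64; ++i)
    dispatch_group_async_f(group, queue, /*context=*/nullptr, doWork);

  dispatch_group_wait(group, DISPATCH_TIME_FOREVER);
  dispatch_release(group);
  dispatch_release(queue);
  return 0;
}

Under these assumptions the sketch builds against libdispatch (e.g. linking -ldispatch); the patch itself wraps the same queue setup in the lock-free compare-exchange cache shown in the diff so each priority level creates its queue at most once.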