Skip to content

Commit a5fff58

Browse files
committed
[ThreadPool] Do not return shared futures.
The only user of returned futures from ThreadPool is llvm-reduce after D113857. There should be no cases where multiple threads wait on the same future, so there should be no need to return std::shared_future<>. Instead, return a plain std::future<>. If users need to share a future between multiple threads, they can share the futures themselves. Reviewed By: Meinersbur, mehdi_amini Differential Revision: https://reviews.llvm.org/D114363
1 parent c7cc70c commit a5fff58

File tree

2 files changed

+6
-7
lines changed

2 files changed

+6
-7
lines changed

llvm/include/llvm/Support/ThreadPool.h

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -56,8 +56,7 @@ class ThreadPool {
5656

5757
/// Asynchronous submission of a task to the pool. The returned future can be
5858
/// used to wait for the task to finish and is *non-blocking* on destruction.
59-
template <typename Func>
60-
auto async(Func &&F) -> std::shared_future<decltype(F())> {
59+
template <typename Func> auto async(Func &&F) -> std::future<decltype(F())> {
6160
return asyncImpl(std::function<decltype(F())()>(std::forward<Func>(F)));
6261
}
6362

@@ -101,7 +100,7 @@ class ThreadPool {
101100
/// Asynchronous submission of a task to the pool. The returned future can be
102101
/// used to wait for the task to finish and is *non-blocking* on destruction.
103102
template <typename ResTy>
104-
std::shared_future<ResTy> asyncImpl(std::function<ResTy()> Task) {
103+
std::future<ResTy> asyncImpl(std::function<ResTy()> Task) {
105104

106105
#if LLVM_ENABLE_THREADS
107106
/// Wrap the Task in a std::function<void()> that sets the result of the
@@ -117,12 +116,12 @@ class ThreadPool {
117116
Tasks.push(std::move(R.first));
118117
}
119118
QueueCondition.notify_one();
120-
return R.second.share();
119+
return std::move(R.second);
121120

122121
#else // LLVM_ENABLE_THREADS Disabled
123122

124123
// Get a Future with launch::deferred execution using std::async
125-
auto Future = std::async(std::launch::deferred, std::move(Task)).share();
124+
auto Future = std::async(std::launch::deferred, std::move(Task));
126125
// Wrap the future so that both ThreadPool::wait() can operate and the
127126
// returned future can be sync'ed on.
128127
Tasks.push([Future]() { Future.get(); });

mlir/include/mlir/IR/Threading.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -71,14 +71,14 @@ LogicalResult failableParallelForEach(MLIRContext *context, IteratorT begin,
7171
// Otherwise, process the elements in parallel.
7272
llvm::ThreadPool &threadPool = context->getThreadPool();
7373
size_t numActions = std::min(numElements, threadPool.getThreadCount());
74-
SmallVector<std::shared_future<void>> threadFutures;
74+
SmallVector<std::future<void>> threadFutures;
7575
threadFutures.reserve(numActions - 1);
7676
for (unsigned i = 1; i < numActions; ++i)
7777
threadFutures.emplace_back(threadPool.async(processFn));
7878
processFn();
7979

8080
// Wait for all of the threads to finish.
81-
for (std::shared_future<void> &future : threadFutures)
81+
for (std::future<void> &future : threadFutures)
8282
future.wait();
8383
return failure(processingFailed);
8484
}

0 commit comments

Comments
 (0)