Skip to content

Commit a17c027

Browse files
authored
Update sync_dist warning for multiple processes (#6790)
1 parent 7f6154f commit a17c027

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

pytorch_lightning/trainer/connectors/logger_connector/epoch_result_store.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -133,8 +133,8 @@ def run_epoch_func(self, results, opt_metric, func_name, *args, **kwargs) -> Non
133133
warning_cache.warn(
134134
f"The value associated with the key {non_metric_key}: {metric.cpu().tolist()} "
135135
"doesn't appear to be the same across all processes. "
136-
"HINT: One could either do: `self.log(..., sync_dist=True, sync_fn=torch.mean)`"
137-
" to force mean reduction across processes which can be inaccurate or implement"
136+
"HINT: One could either do: `self.log(..., sync_dist=True)` to force mean"
137+
" reduction by default across processes which can be inaccurate or implement"
138138
" a `torchmetrics.Metric`"
139139
)
140140
warning_cache.warned_metrics.append(non_metric_key)

0 commit comments

Comments
 (0)