1 parent c8fb646 commit 0e8f4a8
pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -266,6 +266,10 @@ def use_deepspeed(self) -> bool:
     @property
     def is_distributed(self) -> bool:
+        # Used for custom plugins.
+        # Custom plugins should implement is_distributed property.
+        if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
+            return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:
             is_distributed |= self.training_type_plugin.is_distributed
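
For context (not part of the commit): with this change the accelerator connector duck-types on the plugin, so any custom training type plugin that exposes an is_distributed property is picked up before the built-in DDP/DDP2/Horovod checks. A minimal sketch of such a plugin, reduced to just that property and using a hypothetical class name, might look like this (a real plugin would subclass the TrainingTypePlugin base class):

class MyClusterPlugin:  # hypothetical name for illustration only
    """Sketch of a custom training type plugin exposing ``is_distributed``."""

    @property
    def is_distributed(self) -> bool:
        # Report that this plugin coordinates multiple processes, so the
        # accelerator connector treats the run as distributed.
        return True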