1 parent 6453091 commit 6a14146
pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -273,6 +273,10 @@ def use_deepspeed(self) -> bool:
 
     @property
     def is_distributed(self) -> bool:
+        # Used for custom plugins.
+        # Custom plugins should implement is_distributed property.
+        if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
+            return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:
             is_distributed |= self.training_type_plugin.is_distributed
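
For context (not part of the commit itself): a minimal sketch of a custom plugin that would satisfy the new `hasattr` check. The class name `MyCustomPlugin`, the `SingleDevicePlugin` base class, and the constant return value are illustrative assumptions from the same era of the plugin API, not taken from this commit; the only contract the connector relies on is that the plugin object exposes an `is_distributed` property.

```python
# Illustrative sketch only -- MyCustomPlugin is hypothetical and not part
# of this commit; it assumes the TrainingTypePlugin API of this PL version.
from pytorch_lightning.plugins import SingleDevicePlugin


class MyCustomPlugin(SingleDevicePlugin):
    @property
    def is_distributed(self) -> bool:
        # With this commit, the accelerator connector defers to this value
        # (except on TPU) instead of deriving it from its own
        # use_ddp / use_ddp2 / use_horovod flags.
        return False
```

Passing such a plugin to the Trainer (e.g. `Trainer(plugins=[MyCustomPlugin(torch.device("cpu"))])`) would then route the connector's `is_distributed` query to the plugin rather than the built-in DDP/Horovod checks.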