Skip to content

Commit 5d6a091

Browse files
authored
default sched (Lightning-AI#6062)
1 parent 8f82823 commit 5d6a091

File tree

1 file changed

+3
-10
lines changed

1 file changed

+3
-10
lines changed

pytorch_lightning/plugins/training_type/deepspeed.py

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
2727
from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
2828
from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
29+
from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
2930
from pytorch_lightning.utilities import AMPType
3031
from pytorch_lightning.utilities.apply_func import apply_to_collection
3132
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
@@ -240,16 +241,8 @@ def _initialize_deepspeed_inference(self, model):
240241
)
241242

242243
def configure_scheduler(self, lr_scheduler):
243-
# this duplicates the defaults from init_optimizers
244-
scheduler = {
245-
'scheduler': lr_scheduler,
246-
'name': None, # no custom name
247-
'interval': 'epoch', # after epoch is over
248-
'frequency': 1, # every epoch/batch
249-
'reduce_on_plateau': False, # most often not ReduceLROnPlateau scheduler
250-
'monitor': None, # value to monitor for ReduceLROnPlateau
251-
'strict': True, # enforce that the monitor exists for ReduceLROnPlateau
252-
}
244+
scheduler = _get_default_scheduler_config()
245+
scheduler["scheduler"] = lr_scheduler
253246
return [scheduler]
254247

255248
@property

0 commit comments

Comments (0)