pytorch_lightning/plugins/training_type: 1 file changed, +3 -10 lines
@@ -26,6 +26,7 @@
 from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
 from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
 from pytorch_lightning.plugins.training_type.ddp import DDPPlugin
+from pytorch_lightning.trainer.optimizers import _get_default_scheduler_config
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only
@@ -240,16 +241,8 @@ def _initialize_deepspeed_inference(self, model):
         )
 
     def configure_scheduler(self, lr_scheduler):
-        # this duplicates the defaults from init_optimizers
-        scheduler = {
-            'scheduler': lr_scheduler,
-            'name': None,  # no custom name
-            'interval': 'epoch',  # after epoch is over
-            'frequency': 1,  # every epoch/batch
-            'reduce_on_plateau': False,  # most often not ReduceLROnPlateau scheduler
-            'monitor': None,  # value to monitor for ReduceLROnPlateau
-            'strict': True,  # enforce that the monitor exists for ReduceLROnPlateau
-        }
+        scheduler = _get_default_scheduler_config()
+        scheduler["scheduler"] = lr_scheduler
         return [scheduler]
 
     @property
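
In effect, the change swaps a hand-copied defaults dict for the shared helper from pytorch_lightning.trainer.optimizers, so the scheduler defaults live in one place. Below is a minimal standalone sketch of the resulting behavior, not the library's actual code: the helper body is inferred from the dict removed above, and the usage lines are hypothetical.

# Sketch only: _get_default_scheduler_config() is assumed to return the same
# defaults that the removed dict hard-coded.
def _get_default_scheduler_config() -> dict:
    return {
        'scheduler': None,           # attached by the caller
        'name': None,                # no custom name
        'interval': 'epoch',         # step the scheduler once the epoch is over
        'frequency': 1,              # every epoch/batch
        'reduce_on_plateau': False,  # most often not a ReduceLROnPlateau scheduler
        'monitor': None,             # value to monitor for ReduceLROnPlateau
        'strict': True,              # enforce that the monitor exists for ReduceLROnPlateau
    }

def configure_scheduler(lr_scheduler) -> list:
    # Mirrors the new method body: take the shared defaults, attach the scheduler.
    scheduler = _get_default_scheduler_config()
    scheduler["scheduler"] = lr_scheduler
    return [scheduler]

# Hypothetical usage with a standard torch scheduler:
import torch
opt = torch.optim.SGD([torch.zeros(1, requires_grad=True)], lr=0.1)
[cfg] = configure_scheduler(torch.optim.lr_scheduler.StepLR(opt, step_size=1))
assert cfg['interval'] == 'epoch' and cfg['frequency'] == 1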