@@ -53,42 +53,6 @@ def test_optimizer_with_scheduling(tmpdir):
         'Lr not adjusted correctly, expected %f but got %f' % (init_lr * 0.1, adjusted_lr)


-def test_multi_optimizer_with_scheduling(tmpdir):
-    """ Verify that learning rate scheduling is working """
-
-    hparams = EvalModelTemplate.get_default_hparams()
-    model = EvalModelTemplate(**hparams)
-    model.configure_optimizers = model.configure_optimizers__multiple_schedulers
-
-    # fit model
-    trainer = Trainer(
-        default_root_dir=tmpdir,
-        max_epochs=1,
-        limit_val_batches=0.1,
-        limit_train_batches=0.2,
-    )
-    trainer.fit(model)
-    assert trainer.state == TrainerState.FINISHED, f"Training failed with {trainer.state}"
-
-    init_lr = hparams.get('learning_rate')
-    adjusted_lr1 = [pg['lr'] for pg in trainer.optimizers[0].param_groups]
-    adjusted_lr2 = [pg['lr'] for pg in trainer.optimizers[1].param_groups]
-
-    assert len(trainer.lr_schedulers) == 2, \
-        'all lr scheduler not initialized properly, it has %i elements instread of 1' % len(trainer.lr_schedulers)
-
-    assert all(a == adjusted_lr1[0] for a in adjusted_lr1), \
-        'Lr not equally adjusted for all param groups for optimizer 1'
-    adjusted_lr1 = adjusted_lr1[0]
-
-    assert all(a == adjusted_lr2[0] for a in adjusted_lr2), \
-        'Lr not equally adjusted for all param groups for optimizer 2'
-    adjusted_lr2 = adjusted_lr2[0]
-
-    assert init_lr * 0.1 == adjusted_lr1 and init_lr * 0.1 == adjusted_lr2, \
-        'Lr not adjusted correctly, expected %f but got %f' % (init_lr * 0.1, adjusted_lr1)
-
-
 def test_multi_optimizer_with_scheduling_stepping(tmpdir):

     hparams = EvalModelTemplate.get_default_hparams()
@@ -109,8 +73,7 @@ def test_multi_optimizer_with_scheduling_stepping(tmpdir):
     adjusted_lr1 = [pg['lr'] for pg in trainer.optimizers[0].param_groups]
     adjusted_lr2 = [pg['lr'] for pg in trainer.optimizers[1].param_groups]

-    assert len(trainer.lr_schedulers) == 2, \
-        'all lr scheduler not initialized properly'
+    assert len(trainer.lr_schedulers) == 2, 'all lr scheduler not initialized properly'

     assert all(a == adjusted_lr1[0] for a in adjusted_lr1), \
         'lr not equally adjusted for all param groups for optimizer 1'
@@ -121,11 +84,9 @@ def test_multi_optimizer_with_scheduling_stepping(tmpdir):
     adjusted_lr2 = adjusted_lr2[0]

     # Called ones after end of epoch
-    assert init_lr * 0.1 ** 1 == adjusted_lr1, \
-        'lr for optimizer 1 not adjusted correctly'
+    assert init_lr * 0.1 == adjusted_lr1, 'lr for optimizer 1 not adjusted correctly'
     # Called every 3 steps, meaning for 1 epoch of 11 batches, it is called 3 times
-    assert init_lr * 0.1 == adjusted_lr2, \
-        'lr for optimizer 2 not adjusted correctly'
+    assert init_lr * 0.1 == adjusted_lr2, 'lr for optimizer 2 not adjusted correctly'


 def test_reducelronplateau_with_no_monitor_raises(tmpdir):
@@ -209,13 +170,7 @@ def test_optimizer_return_options():
     assert optim == [opt_a, opt_b]
     assert len(lr_sched) == len(freq) == 0

-    # opt tuple of 2 lists
-    model.configure_optimizers = lambda: ([opt_a], [scheduler_a])
-    optim, lr_sched, freq = trainer.init_optimizers(model)
-    assert len(optim) == len(lr_sched) == 1
-    assert len(freq) == 0
-    assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
+    ref_lr_sched = dict(
         scheduler=scheduler_a,
         interval='epoch',
         frequency=1,
@@ -225,37 +180,29 @@ def test_optimizer_return_options():
         name=None,
     )

+    # opt tuple of 2 lists
+    model.configure_optimizers = lambda: ([opt_a], [scheduler_a])
+    optim, lr_sched, freq = trainer.init_optimizers(model)
+    assert len(optim) == len(lr_sched) == 1
+    assert len(freq) == 0
+    assert optim[0] == opt_a
+    assert lr_sched[0] == ref_lr_sched
+
     # opt tuple of 1 list
     model.configure_optimizers = lambda: ([opt_a], scheduler_a)
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == 1
     assert len(freq) == 0
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched

     # opt single dictionary
     model.configure_optimizers = lambda: {"optimizer": opt_a, "lr_scheduler": scheduler_a}
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == 1
     assert len(freq) == 0
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched

     # opt multiple dictionaries with frequencies
     model.configure_optimizers = lambda: (
@@ -273,15 +220,7 @@ def test_optimizer_return_options():
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == len(freq) == 2
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched
     assert freq == [1, 5]

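The central change in test_optimizer_return_options above is deduplication: four identical expected-dictionary literals are replaced by one shared ref_lr_sched that every `assert lr_sched[0] == ...` compares against. Below is a minimal standalone sketch of that pattern, not part of the diff; it assumes a pytorch-lightning version matching this change, and the nn.Linear module plus StepLR scheduler are hypothetical stand-ins for the test's own opt_a / scheduler_a. The dictionary keys are copied from the diff itself.

from torch import nn, optim

# Hypothetical stand-ins for the optimizer/scheduler pair used by the real test.
net = nn.Linear(4, 2)
opt_a = optim.Adam(net.parameters(), lr=0.01)
scheduler_a = optim.lr_scheduler.StepLR(opt_a, step_size=1)

# One shared expected entry, mirroring the ref_lr_sched introduced in the diff.
# This is the normalized form that trainer.init_optimizers(model) is expected to
# produce for each scheduler spec returned by configure_optimizers.
ref_lr_sched = dict(
    scheduler=scheduler_a,
    interval='epoch',
    frequency=1,
    reduce_on_plateau=False,
    monitor=None,
    strict=True,
    name=None,
)

# Each configure_optimizers return style is then checked against the same reference:
#     optimizers, lr_sched, freq = trainer.init_optimizers(model)
#     assert lr_sched[0] == ref_lr_sched

Keeping a single reference dict means that if a new key is added to the normalized scheduler config, only one literal in the test needs updating.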