
Commit bf6ba83

prune duplicate test in optim (Lightning-AI#6312)
1 parent 1aac481 commit bf6ba83

2 files changed: +15 additions, -86 deletions

tests/trainer/logging_/test_eval_loop_logging_1_0.py

Lines changed: 0 additions & 10 deletions
@@ -509,16 +509,6 @@ def on_validation_epoch_start(self, trainer, pl_module):
             prob_bars=self.choices
         )
 
-    """
-    def on_batch_start(self, trainer, pl_module):
-        self.make_logging(pl_module, 'on_batch_start', 4, on_steps=self.choices,
-                          on_epochs=self.choices, prob_bars=self.choices)
-
-    def on_validation_batch_start(self, trainer, pl_module, batch, batch_idx, dataloader_idx):
-        self.make_logging(pl_module, 'on_validation_batch_start', 5, on_steps=self.choices,
-                          on_epochs=self.choices, prob_bars=self.choices)
-
-    """
     def on_batch_end(self, trainer, pl_module):
         self.make_logging(
             pl_module, 'on_batch_end', 6, on_steps=self.choices, on_epochs=self.choices, prob_bars=self.choices
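The block deleted here was already wrapped in a bare triple-quoted string, so the two hooks inside it were never defined on the class; removing it changes nothing at runtime. A minimal standalone sketch (illustrative names, not taken from the test file) of why such a block is inert:

class Example:

    def kept_hook(self):
        # a real method, analogous to on_batch_end above
        return "logged"

    """
    def disabled_hook(self):  # lives inside a string literal, never part of the class
        return "never logged"
    """

assert hasattr(Example, "kept_hook")
assert not hasattr(Example, "disabled_hook")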

tests/trainer/optimization/test_optimizers.py

Lines changed: 15 additions & 76 deletions
@@ -53,42 +53,6 @@ def test_optimizer_with_scheduling(tmpdir):
         'Lr not adjusted correctly, expected %f but got %f' % (init_lr * 0.1, adjusted_lr)
 
 
-def test_multi_optimizer_with_scheduling(tmpdir):
-    """ Verify that learning rate scheduling is working """
-
-    hparams = EvalModelTemplate.get_default_hparams()
-    model = EvalModelTemplate(**hparams)
-    model.configure_optimizers = model.configure_optimizers__multiple_schedulers
-
-    # fit model
-    trainer = Trainer(
-        default_root_dir=tmpdir,
-        max_epochs=1,
-        limit_val_batches=0.1,
-        limit_train_batches=0.2,
-    )
-    trainer.fit(model)
-    assert trainer.state == TrainerState.FINISHED, f"Training failed with {trainer.state}"
-
-    init_lr = hparams.get('learning_rate')
-    adjusted_lr1 = [pg['lr'] for pg in trainer.optimizers[0].param_groups]
-    adjusted_lr2 = [pg['lr'] for pg in trainer.optimizers[1].param_groups]
-
-    assert len(trainer.lr_schedulers) == 2, \
-        'all lr scheduler not initialized properly, it has %i elements instread of 1' % len(trainer.lr_schedulers)
-
-    assert all(a == adjusted_lr1[0] for a in adjusted_lr1), \
-        'Lr not equally adjusted for all param groups for optimizer 1'
-    adjusted_lr1 = adjusted_lr1[0]
-
-    assert all(a == adjusted_lr2[0] for a in adjusted_lr2), \
-        'Lr not equally adjusted for all param groups for optimizer 2'
-    adjusted_lr2 = adjusted_lr2[0]
-
-    assert init_lr * 0.1 == adjusted_lr1 and init_lr * 0.1 == adjusted_lr2, \
-        'Lr not adjusted correctly, expected %f but got %f' % (init_lr * 0.1, adjusted_lr1)
-
-
 def test_multi_optimizer_with_scheduling_stepping(tmpdir):
 
     hparams = EvalModelTemplate.get_default_hparams()
@@ -109,8 +73,7 @@ def test_multi_optimizer_with_scheduling_stepping(tmpdir):
     adjusted_lr1 = [pg['lr'] for pg in trainer.optimizers[0].param_groups]
     adjusted_lr2 = [pg['lr'] for pg in trainer.optimizers[1].param_groups]
 
-    assert len(trainer.lr_schedulers) == 2, \
-        'all lr scheduler not initialized properly'
+    assert len(trainer.lr_schedulers) == 2, 'all lr scheduler not initialized properly'
 
     assert all(a == adjusted_lr1[0] for a in adjusted_lr1), \
         'lr not equally adjusted for all param groups for optimizer 1'
@@ -121,11 +84,9 @@ def test_multi_optimizer_with_scheduling_stepping(tmpdir):
     adjusted_lr2 = adjusted_lr2[0]
 
     # Called ones after end of epoch
-    assert init_lr * 0.1 ** 1 == adjusted_lr1, \
-        'lr for optimizer 1 not adjusted correctly'
+    assert init_lr * 0.1 == adjusted_lr1, 'lr for optimizer 1 not adjusted correctly'
     # Called every 3 steps, meaning for 1 epoch of 11 batches, it is called 3 times
-    assert init_lr * 0.1 == adjusted_lr2, \
-        'lr for optimizer 2 not adjusted correctly'
+    assert init_lr * 0.1 == adjusted_lr2, 'lr for optimizer 2 not adjusted correctly'
 
 
 def test_reducelronplateau_with_no_monitor_raises(tmpdir):
@@ -209,13 +170,7 @@ def test_optimizer_return_options():
     assert optim == [opt_a, opt_b]
     assert len(lr_sched) == len(freq) == 0
 
-    # opt tuple of 2 lists
-    model.configure_optimizers = lambda: ([opt_a], [scheduler_a])
-    optim, lr_sched, freq = trainer.init_optimizers(model)
-    assert len(optim) == len(lr_sched) == 1
-    assert len(freq) == 0
-    assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
+    ref_lr_sched = dict(
         scheduler=scheduler_a,
         interval='epoch',
         frequency=1,
@@ -225,37 +180,29 @@ def test_optimizer_return_options():
         name=None,
     )
 
+    # opt tuple of 2 lists
+    model.configure_optimizers = lambda: ([opt_a], [scheduler_a])
+    optim, lr_sched, freq = trainer.init_optimizers(model)
+    assert len(optim) == len(lr_sched) == 1
+    assert len(freq) == 0
+    assert optim[0] == opt_a
+    assert lr_sched[0] == ref_lr_sched
+
     # opt tuple of 1 list
     model.configure_optimizers = lambda: ([opt_a], scheduler_a)
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == 1
     assert len(freq) == 0
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched
 
     # opt single dictionary
     model.configure_optimizers = lambda: {"optimizer": opt_a, "lr_scheduler": scheduler_a}
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == 1
     assert len(freq) == 0
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched
 
     # opt multiple dictionaries with frequencies
     model.configure_optimizers = lambda: (
@@ -273,15 +220,7 @@ def test_optimizer_return_options():
     optim, lr_sched, freq = trainer.init_optimizers(model)
     assert len(optim) == len(lr_sched) == len(freq) == 2
     assert optim[0] == opt_a
-    assert lr_sched[0] == dict(
-        scheduler=scheduler_a,
-        interval='epoch',
-        frequency=1,
-        reduce_on_plateau=False,
-        monitor=None,
-        strict=True,
-        name=None,
-    )
+    assert lr_sched[0] == ref_lr_sched
     assert freq == [1, 5]
 
 
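The test_optimizer_return_options change above hoists the expected scheduler configuration into a single ref_lr_sched dict that each case compares against, instead of repeating the same dict() literal four times. A standalone sketch of that pattern (scheduler_a is a stand-in object here, not a real torch scheduler, and case_outputs is purely illustrative):

scheduler_a = object()

ref_lr_sched = dict(
    scheduler=scheduler_a,
    interval='epoch',
    frequency=1,
    reduce_on_plateau=False,
    monitor=None,
    strict=True,
    name=None,
)

# every configuration case reuses the one reference dict in its assertion
case_outputs = [
    [dict(ref_lr_sched)],  # e.g. result of one configure_optimizers variant
    [dict(ref_lr_sched)],  # e.g. result of another variant
]
for lr_sched in case_outputs:
    assert lr_sched[0] == ref_lr_sched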