
Commit ed67490

cleaning SWA (#6259)
* rename
* if
* test
* chlog
1 parent 352e8f0 commit ed67490

File tree

5 files changed: +9 -8 lines


CHANGELOG.md

Lines changed: 3 additions & 0 deletions
@@ -20,6 +20,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Changed the order of `backward`, `step`, `zero_grad` to `zero_grad`, `backward`, `step` ([#6147](https://github.com/PyTorchLightning/pytorch-lightning/pull/6147))
 
 
+- Renamed `pytorch_lightning.callbacks.swa` to `pytorch_lightning.callbacks.stochastic_weight_avg` ([#6259](https://github.com/PyTorchLightning/pytorch-lightning/pull/6259))
+
+
 ### Deprecated
 
 
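
If a project imported the callback from the old module path directly, the changelog entry above implies that import has to move to the new module name, since this commit renames the file outright and leaves no shim at the old path. A minimal before/after sketch:

```python
# Before this commit (module removed by the rename):
# from pytorch_lightning.callbacks.swa import StochasticWeightAveraging

# After this commit:
from pytorch_lightning.callbacks.stochastic_weight_avg import StochasticWeightAveraging

swa = StochasticWeightAveraging()  # default arguments, sketch only
```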

pytorch_lightning/callbacks/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
 from pytorch_lightning.callbacks.progress import ProgressBar, ProgressBarBase
 from pytorch_lightning.callbacks.pruning import ModelPruning
 from pytorch_lightning.callbacks.quantization import QuantizationAwareTraining
-from pytorch_lightning.callbacks.swa import StochasticWeightAveraging
+from pytorch_lightning.callbacks.stochastic_weight_avg import StochasticWeightAveraging
 
 __all__ = [
     'BackboneFinetuning',
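
Because `__init__.py` still re-exports the class under the same public name, code using the package-level import should be unaffected by the rename. A small sketch, assuming the name stays listed in `__all__` as shown above:

```python
# The package-level import path does not change; only the internal module does.
from pytorch_lightning.callbacks import StochasticWeightAveraging

print(StochasticWeightAveraging.__module__)
# expected after this commit: pytorch_lightning.callbacks.stochastic_weight_avg
```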

pytorch_lightning/callbacks/swa.py renamed to pytorch_lightning/callbacks/stochastic_weight_avg.py

Lines changed: 4 additions & 6 deletions
@@ -102,12 +102,10 @@ def __init__(
         if isinstance(swa_epoch_start, float) and not (0 <= swa_epoch_start <= 1):
             raise MisconfigurationException(err_msg)
 
-        if (
-            swa_lrs is not None and (
-                not isinstance(swa_lrs, (float, list)) or isinstance(swa_lrs, float) and swa_lrs <= 0
-                or isinstance(swa_lrs, list) and not all(lr > 0 and isinstance(lr, float) for lr in swa_lrs)
-            )
-        ):
+        wrong_type = not isinstance(swa_lrs, (float, list))
+        wrong_float = isinstance(swa_lrs, float) and swa_lrs <= 0
+        wrong_list = isinstance(swa_lrs, list) and not all(lr > 0 and isinstance(lr, float) for lr in swa_lrs)
+        if (swa_lrs is not None and (wrong_type or wrong_float or wrong_list)):
             raise MisconfigurationException("The `swa_lrs` should be a positive float or a list of positive float.")
 
         if avg_fn is not None and not isinstance(avg_fn, Callable):
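
The refactor above splits the `swa_lrs` check into three named conditions, which makes the accepted values easier to read: `None`, a positive float, or a list of positive floats. A standalone sketch of the same logic, using a hypothetical `validate_swa_lrs` helper and a plain `ValueError` in place of `MisconfigurationException` for illustration only:

```python
from typing import List, Optional, Union


def validate_swa_lrs(swa_lrs: Optional[Union[float, List[float]]]) -> None:
    """Mirror of the refactored check: None, a positive float, or a list of positive floats."""
    wrong_type = not isinstance(swa_lrs, (float, list))
    wrong_float = isinstance(swa_lrs, float) and swa_lrs <= 0
    wrong_list = isinstance(swa_lrs, list) and not all(lr > 0 and isinstance(lr, float) for lr in swa_lrs)
    if swa_lrs is not None and (wrong_type or wrong_float or wrong_list):
        raise ValueError("The `swa_lrs` should be a positive float or a list of positive float.")


validate_swa_lrs(None)            # ok: None is explicitly allowed
validate_swa_lrs(0.05)            # ok: single positive float
validate_swa_lrs([0.05, 0.01])    # ok: list of positive floats
# validate_swa_lrs(-1.0)          # would raise: non-positive float
# validate_swa_lrs([0.05, 0])     # would raise: 0 is not a positive float (and is an int)
```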

pytorch_lightning/trainer/connectors/callback_connector.py

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ def _configure_swa_callbacks(self):
         if not self.trainer._stochastic_weight_avg:
             return
 
-        from pytorch_lightning.callbacks.swa import StochasticWeightAveraging
+        from pytorch_lightning.callbacks.stochastic_weight_avg import StochasticWeightAveraging
         existing_swa = [cb for cb in self.trainer.callbacks if isinstance(cb, StochasticWeightAveraging)]
         if not existing_swa:
             self.trainer.callbacks = [StochasticWeightAveraging()] + self.trainer.callbacks
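
The connector only injects a default `StochasticWeightAveraging()` callback when the trainer's SWA flag is set and no SWA callback was passed explicitly. A hedged usage sketch, assuming the flag is exposed as `Trainer(stochastic_weight_avg=True)` in this version:

```python
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import StochasticWeightAveraging

# Option 1: let the connector append a default StochasticWeightAveraging() callback.
trainer = Trainer(stochastic_weight_avg=True)

# Option 2: pass a configured callback yourself; the connector then skips the default one.
trainer = Trainer(callbacks=[StochasticWeightAveraging(swa_lrs=0.05)])
```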
