Skip to content

Commit 412a7d8

Browse files
authored
Remove opt from manual_backward in docs (#6267)
1 parent: ed67490 · commit: 412a7d8

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

docs/source/common/lightning_module.rst

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -946,7 +946,7 @@ When set to ``False``, Lightning does not automate the optimization process. Thi
946946
opt = self.optimizers(use_pl_optimizer=True)
947947
948948
loss = ...
949-
self.manual_backward(loss, opt)
949+
self.manual_backward(loss)
950950
opt.step()
951951
opt.zero_grad()
952952
@@ -961,16 +961,16 @@ In the multi-optimizer case, ignore the ``optimizer_idx`` argument and use the o
961961
962962
def training_step(self, batch, batch_idx, optimizer_idx):
963963
# access your optimizers with use_pl_optimizer=False. Default is True
964-
(opt_a, opt_b) = self.optimizers(use_pl_optimizer=True)
964+
opt_a, opt_b = self.optimizers(use_pl_optimizer=True)
965965
966966
gen_loss = ...
967967
opt_a.zero_grad()
968-
self.manual_backward(gen_loss, opt_a)
968+
self.manual_backward(gen_loss)
969969
opt_a.step()
970970
971971
disc_loss = ...
972972
opt_b.zero_grad()
973-
self.manual_backward(disc_loss, opt_b)
973+
self.manual_backward(disc_loss)
974974
opt_b.step()
975975
976976
--------------

pytorch_lightning/core/lightning.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -1211,10 +1211,10 @@ def manual_backward(self, loss: Tensor, optimizer: Optional[Optimizer] = None, *
12111211
Example::
12121212
12131213
def training_step(...):
1214-
(opt_a, opt_b) = self.optimizers()
1214+
opt_a, opt_b = self.optimizers()
12151215
loss = ...
12161216
# automatically applies scaling, etc...
1217-
self.manual_backward(loss, opt_a)
1217+
self.manual_backward(loss)
12181218
opt_a.step()
12191219
"""
12201220
if optimizer is not None:

0 commit comments

Comments (0)