Commit 46db252

awaelchli authored and Borda committed
Fix/mismatched toggle optimizer (#7563)
* fix: avoid potential mismatched toggling of optimizer

  Refs #7405

  chore: update CHANGELOG

  [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci

  fix: resolve a conflict

  chore: update changelog

* feat: add a test that fails in master

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

* fix typo in tests/trainer/optimization/test_multiple_optimizers.py

  Co-authored-by: ananthsub <[email protected]>

* Polish tests/trainer/optimization/test_multiple_optimizers.py

  Co-authored-by: Carlos Mocholí <[email protected]>

* Polish tests/trainer/optimization/test_multiple_optimizers.py

  Co-authored-by: Carlos Mocholí <[email protected]>

* fix: change placeholder in optimizer_step from positional args to keyword args

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: ananthsub <[email protected]>
Co-authored-by: Carlos Mocholí <[email protected]>

fix whitespace

fix parentheses
1 parent 29960c0 commit 46db252

File tree

3 files changed: 72 additions & 5 deletions

CHANGELOG.md
pytorch_lightning/trainer/training_loop.py
tests/trainer/optimization/test_multiple_optimizers.py

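For context on what "toggling" means here: `LightningModule.toggle_optimizer` freezes the parameters that do not belong to the optimizer currently being stepped, and `untoggle_optimizer` reverts that change. The sketch below uses simplified, hypothetical stand-ins for those two hooks (not Lightning's actual implementation, which also restores the exact previous `requires_grad` state); it only illustrates why an `untoggle_optimizer` call that lives inside the step closure can be skipped and leave parameters frozen.

```python
import torch

# Simplified, hypothetical stand-ins for LightningModule.toggle_optimizer /
# untoggle_optimizer, only to illustrate the requires_grad bookkeeping.
class TwoOptimizerModule:
    def __init__(self):
        self.layer_a = torch.nn.Linear(32, 2)
        self.layer_b = torch.nn.Linear(32, 2)
        # parameters owned by optimizer 0 and optimizer 1, respectively
        self._params_per_opt = [
            list(self.layer_a.parameters()),
            list(self.layer_b.parameters()),
        ]

    def toggle_optimizer(self, opt_idx: int) -> None:
        # freeze every parameter that is NOT trained by optimizer `opt_idx`
        for idx, params in enumerate(self._params_per_opt):
            for p in params:
                p.requires_grad = (idx == opt_idx)

    def untoggle_optimizer(self, opt_idx: int) -> None:
        # revert: make all parameters trainable again (the real hook restores
        # the exact previous state rather than blindly setting True)
        for params in self._params_per_opt:
            for p in params:
                p.requires_grad = True


module = TwoOptimizerModule()
module.toggle_optimizer(0)
assert not any(p.requires_grad for p in module.layer_b.parameters())
# If untoggle_optimizer(0) is never called (e.g. because the closure containing
# it was skipped), layer_b stays frozen when optimizer 1 runs next -- the
# mismatch this PR fixes (refs #7405).
module.untoggle_optimizer(0)
assert all(p.requires_grad for p in module.layer_b.parameters())
```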

CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -7,6 +7,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ## [1.3.3] - 2021-05-27
 
+### Changed
+
+- Changed calling of `untoggle_optimizer(opt_idx)` out of the closure function ([#7563](https://github.com/PyTorchLightning/pytorch-lightning/pull/7563))
+
 ### Fixed
 
 - Fixed `ProgressBar` pickling after calling `trainer.predict` ([#7608](https://github.com/PyTorchLightning/pytorch-lightning/pull/7608))

pytorch_lightning/trainer/training_loop.py

Lines changed: 3 additions & 5 deletions
@@ -727,7 +727,9 @@ def train_step_and_backward_closure():
 
                 # optimizer step
                 self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure)
-
+                if len(self.trainer.optimizers) > 1:
+                    # revert back to previous state
+                    self.trainer.lightning_module.untoggle_optimizer(opt_idx)
             else:
                 self._curr_step_result = self.training_step(
                     split_batch, batch_idx, opt_idx, self.trainer.hiddens

@@ -838,10 +840,6 @@ def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer,
                     "training_step returned None. If this was on purpose, ignore this warning..."
                 )
 
-            if len(self.trainer.optimizers) > 1:
-                # revert back to previous state
-                self.trainer.lightning_module.untoggle_optimizer(opt_idx)
-
         return result
 
     def _check_finite(self, loss: torch.Tensor) -> None:
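Taken together, the two hunks move the `untoggle_optimizer` call out of `training_step_and_backward` (which only runs if the optimizer actually invokes the closure) to just after `optimizer_step`, so the toggle is always reverted. Below is a condensed sketch of the resulting control flow; the wrapper name `_run_one_optimizer` and the exact surrounding code are approximations for illustration, not the verbatim training loop.

```python
# Condensed sketch of the flow after this change (approximate, not the
# verbatim TrainLoop code): untoggle now happens unconditionally after
# optimizer_step instead of inside the closure.

def _run_one_optimizer(self, split_batch, batch_idx, optimizer, opt_idx):
    if len(self.trainer.optimizers) > 1:
        self.trainer.lightning_module.toggle_optimizer(optimizer, opt_idx)

    def train_step_and_backward_closure():
        # runs training_step + backward; before #7563 untoggle_optimizer was
        # called on this path, so skipping the closure also skipped the untoggle
        result = self.training_step_and_backward(
            split_batch, batch_idx, opt_idx, optimizer, self.trainer.hiddens
        )
        return None if result is None else result.loss

    # the optimizer (or a user's optimizer_step override) may or may not
    # execute the closure, e.g. LBFGS or the custom step in the test below
    self.optimizer_step(optimizer, opt_idx, batch_idx, train_step_and_backward_closure)

    if len(self.trainer.optimizers) > 1:
        # revert back to previous state, whether or not the closure ran
        self.trainer.lightning_module.untoggle_optimizer(opt_idx)
```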

tests/trainer/optimization/test_multiple_optimizers.py

Lines changed: 65 additions & 0 deletions
@@ -168,3 +168,68 @@ def training_step(self, batch, batch_idx):
 
     with pytest.raises(ValueError, match='`training_step` is missing the `optimizer_idx`'):
         trainer.fit(TestModel())
+
+
+def test_custom_optimizer_step_with_multiple_optimizers(tmpdir):
+    """
+    This tests ensures custom optimizer_step works,
+    even when optimizer.step is not called for a particular optimizer
+    """
+
+    class TestModel(BoringModel):
+        training_step_called = [0, 0]
+        optimizer_step_called = [0, 0]
+
+        def __init__(self):
+            super().__init__()
+            self.layer_a = torch.nn.Linear(32, 2)
+            self.layer_b = torch.nn.Linear(32, 2)
+
+        def configure_optimizers(self):
+            opt_a = torch.optim.SGD(self.layer_a.parameters(), lr=0.001)
+            opt_b = torch.optim.SGD(self.layer_b.parameters(), lr=0.001)
+            return opt_a, opt_b
+
+        def training_step(self, batch, batch_idx, optimizer_idx):
+            self.training_step_called[optimizer_idx] += 1
+            x = self.layer_a(batch[0]) if (optimizer_idx == 0) else self.layer_b(batch[0])
+            loss = torch.nn.functional.mse_loss(x, torch.ones_like(x))
+            return loss
+
+        def training_epoch_end(self, outputs) -> None:
+            # outputs should be an array with an entry per optimizer
+            assert len(outputs) == 2
+
+        def optimizer_step(
+            self,
+            epoch,
+            batch_idx,
+            optimizer,
+            optimizer_idx,
+            optimizer_closure,
+            **_,
+        ):
+            # update first optimizer every step
+            if optimizer_idx == 0:
+                self.optimizer_step_called[optimizer_idx] += 1
+                optimizer.step(closure=optimizer_closure)
+
+            # update second optimizer every 2 steps
+            if optimizer_idx == 1:
+                if batch_idx % 2 == 0:
+                    self.optimizer_step_called[optimizer_idx] += 1
+                    optimizer.step(closure=optimizer_closure)
+
+    model = TestModel()
+    model.val_dataloader = None
+
+    trainer = pl.Trainer(
+        default_root_dir=tmpdir,
+        limit_train_batches=4,
+        max_epochs=1,
+        log_every_n_steps=1,
+        weights_summary=None,
+    )
+    trainer.fit(model)
+    assert model.training_step_called == [4, 2]
+    assert model.optimizer_step_called == [4, 2]
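Why the test expects `[4, 2]` for both counters: `training_step` for a given optimizer only runs when that optimizer's closure is invoked, so skipping `optimizer.step(closure=...)` for optimizer 1 on odd batches also skips its `training_step`. A small sanity check of that arithmetic, assuming `limit_train_batches=4` as in the test:

```python
# Sanity check of the expected counts in the test above:
# batch_idx runs over 0, 1, 2, 3 because limit_train_batches=4.
batches = range(4)

opt0_calls = sum(1 for _ in batches)                # closure invoked every batch
opt1_calls = sum(1 for b in batches if b % 2 == 0)  # closure invoked on batches 0 and 2

assert [opt0_calls, opt1_calls] == [4, 2]
```

These skipped-closure batches are exactly the case where the old in-closure `untoggle_optimizer` call never ran, leaving the wrong parameters frozen for the next optimizer.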
