fix: avoid potential mismatched toggling of optimizer
Refs #7405

chore: update CHANGELOG

[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

fix: resolve a conflict
Lucklyric committed May 19, 2021
1 parent 20f6337 commit 42b931d
Showing 2 changed files with 4 additions and 5 deletions.
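
Context for the fix: per the CHANGELOG entry below, `training_step_and_backward` is the closure function passed to `optimizer_step`, and a closure may be evaluated more than once within a single optimizer step. Any once-per-step bookkeeping placed inside it, such as `untoggle_optimizer`, can therefore run a mismatched number of times. Below is a minimal, self-contained illustration of closure re-evaluation with `torch.optim.LBFGS` (plain PyTorch, not Lightning code; the counter merely stands in for a side effect like untoggling):

```python
import torch

# A single parameter and a closure-based optimizer. LBFGS may evaluate the
# closure several times within one call to step().
param = torch.nn.Parameter(torch.ones(1))
optimizer = torch.optim.LBFGS([param], max_iter=3)

calls = 0


def closure():
    global calls
    calls += 1  # stand-in for per-step bookkeeping such as untoggle_optimizer
    optimizer.zero_grad()
    loss = (param ** 2).sum()
    loss.backward()
    return loss


optimizer.step(closure)
print(calls)  # typically greater than 1: whatever the closure does repeats within one step
```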
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Changed
 
+- Moved the call to `untoggle_optimizer(opt_idx)` out of the closure function
 
 - Log epoch metrics before the `on_evaluation_end` hook ([#7272](https://github.com/PyTorchLightning/pytorch-lightning/pull/7272))
 
8 changes: 3 additions & 5 deletions pytorch_lightning/trainer/training_loop.py
@@ -726,7 +726,6 @@ def _run_optimization(self, batch_idx, split_idx, split_batch, opt_idx=0, optimi
             # -------------------
             # calculate loss (train step + train step end)
             # -------------------
-
             # automatic_optimization=True: perform ddp sync only when performing optimizer_step
             # automatic_optimization=False: don't block synchronization here
             with self.block_ddp_sync_behaviour():
@@ -739,6 +738,9 @@ def _run_optimization(self, batch_idx, split_idx, split_batch, opt_idx=0, optimi
         else:
             if self.trainer.lightning_module.automatic_optimization:
                 self.optimizer_step(optimizer, opt_idx, batch_idx, closure)
+                if len(self.trainer.optimizers) > 1:
+                    # revert back to previous state
+                    self.trainer.lightning_module.untoggle_optimizer(opt_idx)
             else:
                 result = self.training_step(split_batch, batch_idx, opt_idx, self._hiddens)
 
@@ -839,10 +841,6 @@ def training_step_and_backward(self, split_batch, batch_idx, opt_idx, optimizer,
                         "training_step returned None. If this was on purpose, ignore this warning..."
                     )
 
-                if len(self.trainer.optimizers) > 1:
-                    # revert back to previous state
-                    self.trainer.lightning_module.untoggle_optimizer(opt_idx)
-
         return result
 
     def _check_finite(self, loss: torch.Tensor) -> None:
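Taken together, the two hunks move `untoggle_optimizer(opt_idx)` out of the closure (`training_step_and_backward`) and place it directly after `optimizer_step` in `_run_optimization`, so untoggling happens exactly once per optimizer step, paired with the earlier toggle. A simplified sketch of the resulting control flow, assuming a generic `model` object exposing `training_step`, `toggle_optimizer`, and `untoggle_optimizer`; this illustrates the pattern and is not Lightning's actual implementation:

```python
def run_optimization(model, optimizers, opt_idx, batch, batch_idx):
    """Illustrative sketch: toggle before the step, untoggle once after it."""
    optimizer = optimizers[opt_idx]

    def closure():
        # Forward + backward. A closure-based optimizer may call this repeatedly,
        # so it must not contain once-per-step bookkeeping such as untoggling.
        optimizer.zero_grad()
        loss = model.training_step(batch, batch_idx, opt_idx)
        loss.backward()
        return loss

    if len(optimizers) > 1:
        # freeze the parameters owned by the other optimizers
        model.toggle_optimizer(optimizer, opt_idx)

    optimizer.step(closure)

    if len(optimizers) > 1:
        # revert back to the previous state, exactly once, after the step
        model.untoggle_optimizer(opt_idx)
```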
