diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index bc1b5b9547771..b62fd57e8debf 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -111,6 +111,7 @@ def __init__(self, *args, **kwargs):
         self._running_manual_backward = False
         self._current_hook_fx_name = None
         self._current_dataloader_idx = None
+        self._automatic_optimization: bool = True

     def optimizers(self, use_pl_optimizer: bool = True) -> Union[Optimizer, List[Optimizer], List[LightningOptimizer]]:
         if use_pl_optimizer:
@@ -163,7 +164,12 @@ def automatic_optimization(self) -> bool:
         """
         If False you are responsible for calling .backward, .step, zero_grad.
         """
-        return True
+        return self._automatic_optimization
+
+    @automatic_optimization.setter
+    def automatic_optimization(self, automatic_optimization: bool) -> None:
+        self._automatic_optimization = automatic_optimization
+

     def print(self, *args, **kwargs) -> None:
         r"""
diff --git a/tests/trainer/optimization/test_manual_optimization.py b/tests/trainer/optimization/test_manual_optimization.py
index f0d7c6d96914e..338769f2ee40f 100644
--- a/tests/trainer/optimization/test_manual_optimization.py
+++ b/tests/trainer/optimization/test_manual_optimization.py
@@ -33,6 +33,11 @@ def test_multiple_optimizers_manual(tmpdir):
     Tests that only training_step can be used
     """
     class TestModel(BoringModel):
+
+        def __init__(self):
+            super().__init__()
+            self.automatic_optimization = False
+
         def training_step(self, batch, batch_idx, optimizer_idx):
             # manual
             (opt_a, opt_b) = self.optimizers()
@@ -69,10 +74,6 @@ def configure_optimizers(self):
             optimizer_2 = torch.optim.SGD(self.layer.parameters(), lr=0.1)
             return optimizer, optimizer_2

-        @property
-        def automatic_optimization(self) -> bool:
-            return False
-
     model = TestModel()
     model.val_dataloader = None
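
With this change, a `LightningModule` opts into manual optimization by assigning `self.automatic_optimization = False` in `__init__`, as the updated test does, rather than overriding the read-only property. Below is a minimal sketch of the new pattern outside the test suite; the module name, layer shape, loss, dataloader, and Trainer flags are illustrative and not part of the diff, and `manual_backward(loss, optimizer)` follows the manual-optimization API of this Lightning version as used in the existing test.

```python
import torch
from torch.utils.data import DataLoader
from pytorch_lightning import LightningModule, Trainer


class ManualOptModel(LightningModule):  # illustrative module, not from the diff
    def __init__(self):
        super().__init__()
        # New style: plain attribute assignment instead of overriding the property.
        self.automatic_optimization = False
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        opt = self.optimizers()
        loss = self.layer(batch).sum()
        # Automatic optimization is off, so backward/step/zero_grad are driven here.
        self.manual_backward(loss, opt)
        opt.step()
        opt.zero_grad()

    def configure_optimizers(self):
        return torch.optim.SGD(self.layer.parameters(), lr=0.1)

    def train_dataloader(self):
        # Random data just to make the sketch runnable.
        return DataLoader(torch.randn(64, 32), batch_size=2)


trainer = Trainer(max_epochs=1, limit_train_batches=4)
trainer.fit(ManualOptModel())
```

Because the flag now lives in a plain instance attribute backed by `_automatic_optimization`, existing subclasses that still override the `automatic_optimization` property keep working, while new code can toggle it per instance at construction time.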