diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index eac8349004ad98..073b437b4cf89e 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -289,7 +289,8 @@ def __validate_init_configuration(self):
             )
         if self.every_n_train_steps > 0 and self.every_n_val_epochs > 0:
             raise MisconfigurationException(
-                f'Invalid values for every_n_train_steps={self.every_n_train_steps} and every_n_val_epochs={self.every_n_val_epochs}.'
+                f'Invalid values for every_n_train_steps={self.every_n_train_steps}'
+                f' and every_n_val_epochs={self.every_n_val_epochs}.'
                 'Both cannot be enabled at the same time.'
             )
         if self.monitor is None:
diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py
index 73fff72a8362a2..ae7d5b772652f3 100644
--- a/tests/checkpointing/test_model_checkpoint.py
+++ b/tests/checkpointing/test_model_checkpoint.py
@@ -536,7 +536,10 @@ def test_invalid_every_n_train_steps(tmpdir):
 
 
 def test_invalid_every_n_train_steps_val_epochs_combination(tmpdir):
-    """ Make sure that a MisconfigurationException is raised if both every_n_val_epochs and every_n_train_steps are enabled together. """
+    """
+    Test that a MisconfigurationException is raised if both
+    every_n_val_epochs and every_n_train_steps are enabled together.
+    """
     with pytest.raises(MisconfigurationException, match=r'.*Both cannot be enabled at the same time'):
         ModelCheckpoint(dirpath=tmpdir, every_n_train_steps=1, every_n_val_epochs=2)
     # These should not fail
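
For context (not part of the patch), a minimal sketch of the behaviour the new validation enforces, assuming the ModelCheckpoint signature shown in the diff and that leaving one trigger at its default keeps it disabled; the dirpath value is purely illustrative:

    from pytorch_lightning.callbacks import ModelCheckpoint
    from pytorch_lightning.utilities.exceptions import MisconfigurationException

    # Either trigger on its own is a valid configuration.
    ModelCheckpoint(dirpath="checkpoints/", every_n_train_steps=100)  # save every 100 training steps
    ModelCheckpoint(dirpath="checkpoints/", every_n_val_epochs=2)     # save every 2 validation epochs

    # Enabling both at once hits the check added in __validate_init_configuration
    # and raises a MisconfigurationException.
    try:
        ModelCheckpoint(dirpath="checkpoints/", every_n_train_steps=100, every_n_val_epochs=2)
    except MisconfigurationException as err:
        print(err)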