From 1e7f64087d0bf20afd09673da73ad7ba839851c0 Mon Sep 17 00:00:00 2001
From: ananthsub
Date: Wed, 10 Mar 2021 16:56:59 -0800
Subject: [PATCH] formatting

---
 pytorch_lightning/callbacks/model_checkpoint.py | 3 ++-
 tests/checkpointing/test_model_checkpoint.py    | 5 ++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index 211b20036e28a..731232612cd0c 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -301,7 +301,8 @@ def __validate_init_configuration(self):
             )
         if self.every_n_train_steps > 0 and self.every_n_val_epochs > 0:
             raise MisconfigurationException(
-                f'Invalid values for every_n_train_steps={self.every_n_train_steps} and every_n_val_epochs={self.every_n_val_epochs}.'
+                f'Invalid values for every_n_train_steps={self.every_n_train_steps}'
+                f' and every_n_val_epochs={self.every_n_val_epochs}. '
                 'Both cannot be enabled at the same time.'
             )
         if self.monitor is None:
diff --git a/tests/checkpointing/test_model_checkpoint.py b/tests/checkpointing/test_model_checkpoint.py
index a2e7e89cab97b..e9d5e2daa85a7 100644
--- a/tests/checkpointing/test_model_checkpoint.py
+++ b/tests/checkpointing/test_model_checkpoint.py
@@ -545,7 +545,10 @@ def test_invalid_every_n_train_steps(tmpdir):
 
 
 def test_invalid_every_n_train_steps_val_epochs_combination(tmpdir):
-    """ Make sure that a MisconfigurationException is raised if both every_n_val_epochs and every_n_train_steps are enabled together. """
+    """
+    Test that a MisconfigurationException is raised if both
+    every_n_val_epochs and every_n_train_steps are enabled together.
+    """
     with pytest.raises(MisconfigurationException, match=r'.*Both cannot be enabled at the same time'):
         ModelCheckpoint(dirpath=tmpdir, every_n_train_steps=1, every_n_val_epochs=2)
     # These should not fail