From 04b124557b7197c3b20b3598237d7ce5287761b1 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Tue, 12 Jan 2021 01:40:35 +0530 Subject: [PATCH 1/3] add missing val/test hooks --- pytorch_lightning/core/hooks.py | 29 +++++++++++++++++++++++++++-- tests/models/test_hooks.py | 25 +++++++++++++++++++------ 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index f24a4ce8beb8a..1bc6e53228591 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -17,10 +17,11 @@ from typing import Any, Dict, List, Optional, Union import torch -from pytorch_lightning.utilities import move_data_to_device, rank_zero_warn from torch.optim.optimizer import Optimizer from torch.utils.data import DataLoader +from pytorch_lightning.utilities import move_data_to_device, rank_zero_warn + class ModelHooks: """Hooks to be used in LightningModule.""" @@ -74,7 +75,7 @@ def on_fit_end(self): def on_train_start(self) -> None: """ - Called at the beginning of training before sanity check. + Called at the beginning of training after sanity check. """ # do something at the start of training @@ -84,6 +85,18 @@ def on_train_end(self) -> None: """ # do something at the end of training + def on_validation_start(self): + """ + Called at the beginning of validation. + """ + # do something at the start of validation + + def on_validation_end(self): + """ + Called at the end of validation. + """ + # do something at the end of validation + def on_pretrain_routine_start(self) -> None: """ Called at the beginning of the pretrain routine (between fit and train start). @@ -253,6 +266,18 @@ def on_test_epoch_end(self) -> None: """ # do something when the epoch ends + def on_test_start(self): + """ + Called at the beginning of testing. + """ + # do something at the start of testing + + def on_test_end(self): + """ + Called at the end of testing. + """ + # do something at the end of testing + def on_before_zero_grad(self, optimizer: Optimizer) -> None: """ Called after optimizer.step() and before optimizer.zero_grad(). diff --git a/tests/models/test_hooks.py b/tests/models/test_hooks.py index f3af5b745a380..24fb7a97ca37c 100644 --- a/tests/models/test_hooks.py +++ b/tests/models/test_hooks.py @@ -12,14 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import inspect +from unittest.mock import MagicMock import pytest import torch -from unittest.mock import MagicMock from pytorch_lightning import Trainer from pytorch_lightning.accelerators.gpu_accelerator import GPUAccelerator -from tests.base import EvalModelTemplate, BoringModel +from tests.base import BoringModel, EvalModelTemplate @pytest.mark.parametrize('max_steps', [1, 2, 3]) @@ -253,10 +253,6 @@ def on_test_start(self): self.called.append(inspect.currentframe().f_code.co_name) super().on_test_start() - def on_test_end(self): - self.called.append(inspect.currentframe().f_code.co_name) - super().on_test_end() - def on_test_batch_start(self, batch, batch_idx, dataloader_idx): self.called.append(inspect.currentframe().f_code.co_name) super().on_test_batch_start(batch, batch_idx, dataloader_idx) @@ -289,6 +285,14 @@ def on_test_model_train(self): self.called.append(inspect.currentframe().f_code.co_name) super().on_test_model_train() + def on_test_end(self): + self.called.append(inspect.currentframe().f_code.co_name) + super().on_test_end() + + def teardown(self, stage: str): + self.called.append(inspect.currentframe().f_code.co_name) + super().teardown(stage) + model = HookedModel() assert model.called == [] @@ -312,10 +316,12 @@ def on_test_model_train(self): 'on_pretrain_routine_start', 'on_pretrain_routine_end', 'on_validation_model_eval', + 'on_validation_start', 'on_validation_epoch_start', 'on_validation_batch_start', 'on_validation_batch_end', 'on_validation_epoch_end', + 'on_validation_end', 'on_validation_model_train', 'on_train_start', 'on_epoch_start', @@ -329,16 +335,19 @@ def on_test_model_train(self): 'on_before_zero_grad', 'on_train_batch_end', 'on_validation_model_eval', + 'on_validation_start', 'on_validation_epoch_start', 'on_validation_batch_start', 'on_validation_batch_end', 'on_validation_epoch_end', 'on_save_checkpoint', + 'on_validation_end', 'on_validation_model_train', 'on_epoch_end', 'on_train_epoch_end', 'on_train_end', 'on_fit_end', + 'teardown', ] assert model.called == expected @@ -351,12 +360,16 @@ def on_test_model_train(self): 'on_pretrain_routine_start', 'on_pretrain_routine_end', 'on_test_model_eval', + 'on_test_start', 'on_test_epoch_start', 'on_test_batch_start', 'on_test_batch_end', 'on_test_epoch_end', + 'on_test_end', 'on_test_model_train', 'on_fit_end', + 'teardown', # for 'fit' + 'teardown', # for 'test' ] assert model2.called == expected From b0f3ef08707de1351016e8d387f9b81a936aa199 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Tue, 12 Jan 2021 01:45:20 +0530 Subject: [PATCH 2/3] chlog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index eb1963d64d954..c1857a23c936f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
- Added `IoU` class interface ([#4704](https://github.com/PyTorchLightning/pytorch-lightning/pull/4704)) +- Added missing val/test hooks in `LightningModule` ([#5467](https://github.com/PyTorchLightning/pytorch-lightning/pull/5467)) + + ### Changed - Changed `stat_scores` metric now calculates stat scores over all classes and gains new parameters, in line with the new `StatScores` metric ([#4839](https://github.com/PyTorchLightning/pytorch-lightning/pull/4839)) From 279be47a8d32668662ff198390175edecfc513f5 Mon Sep 17 00:00:00 2001 From: rohitgr7 Date: Wed, 13 Jan 2021 00:27:47 +0530 Subject: [PATCH 3/3] None --- pytorch_lightning/core/hooks.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/pytorch_lightning/core/hooks.py b/pytorch_lightning/core/hooks.py index 1bc6e53228591..0b97d328904ac 100644 --- a/pytorch_lightning/core/hooks.py +++ b/pytorch_lightning/core/hooks.py @@ -25,7 +25,7 @@ class ModelHooks: """Hooks to be used in LightningModule.""" - def setup(self, stage: str): + def setup(self, stage: str) -> None: """ Called at the beginning of fit and test. This is a good hook when you need to build models dynamically or adjust something about them. @@ -53,7 +53,7 @@ def setup(stage): """ - def teardown(self, stage: str): + def teardown(self, stage: str) -> None: """ Called at the end of fit and test. @@ -61,13 +61,13 @@ def teardown(self, stage: str): stage: either 'fit' or 'test' """ - def on_fit_start(self): + def on_fit_start(self) -> None: """ Called at the very beginning of fit. If on DDP it is called on every process """ - def on_fit_end(self): + def on_fit_end(self) -> None: """ Called at the very end of fit. If on DDP it is called on every process @@ -85,13 +85,13 @@ def on_train_end(self) -> None: """ # do something at the end of training - def on_validation_start(self): + def on_validation_start(self) -> None: """ Called at the beginning of validation. """ # do something at the start of validation - def on_validation_end(self): + def on_validation_end(self) -> None: """ Called at the end of validation. """ @@ -121,9 +121,7 @@ def on_pretrain_routine_end(self) -> None: """ # do something at the end of the pretrain routine - def on_train_batch_start( - self, batch: Any, batch_idx: int, dataloader_idx: int - ) -> None: + def on_train_batch_start(self, batch: Any, batch_idx: int, dataloader_idx: int) -> None: """ Called in the training loop before anything happens for that batch. @@ -266,13 +264,13 @@ def on_test_epoch_end(self) -> None: """ # do something when the epoch ends - def on_test_start(self): + def on_test_start(self) -> None: """ Called at the beginning of testing. """ # do something at the start of testing - def on_test_end(self): + def on_test_end(self) -> None: """ Called at the end of testing. """
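
For illustration, a minimal sketch in Python of how a user-defined LightningModule could override the val/test hooks introduced by this series (`on_validation_start`, `on_validation_end`, `on_test_start`, `on_test_end`). The model, layer sizes, and timing logic below are illustrative assumptions, not part of the PR itself.

# Sketch only: the hook names come from this patch series; everything else
# (model, data shapes, timing) is an assumed example, not code from the PR.
import time

import torch
from torch import nn
from pytorch_lightning import LightningModule


class TimedEvalModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = nn.Linear(32, 2)
        self._eval_start = None  # reset at the start of each val/test run

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return nn.functional.cross_entropy(self(x), y)

    def validation_step(self, batch, batch_idx):
        x, y = batch
        self.log("val_loss", nn.functional.cross_entropy(self(x), y))

    def test_step(self, batch, batch_idx):
        x, y = batch
        self.log("test_loss", nn.functional.cross_entropy(self(x), y))

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)

    # --- hooks added in this patch series ---
    def on_validation_start(self):
        # called once at the beginning of validation, before the val loop runs
        self._eval_start = time.monotonic()

    def on_validation_end(self):
        # called once at the end of validation, after the val loop finishes
        print(f"validation took {time.monotonic() - self._eval_start:.2f}s")

    def on_test_start(self):
        # called once at the beginning of testing
        self._eval_start = time.monotonic()

    def on_test_end(self):
        # called once at the end of testing
        print(f"test took {time.monotonic() - self._eval_start:.2f}s")

Per the expected call order asserted in the updated tests/models/test_hooks.py, these hooks wrap every validation run, including the pre-training sanity check, so any per-run state should be (re)initialized in on_validation_start rather than only in __init__.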