From 76bdf1a231bfac6f711f45d0dd423e7d685ad686 Mon Sep 17 00:00:00 2001
From: ananthsub
Date: Thu, 6 May 2021 22:46:58 -0700
Subject: [PATCH 1/4] move-dp-warning-to-relevant-spot

---
 pytorch_lightning/plugins/training_type/dp.py | 13 +++++++++
 pytorch_lightning/trainer/ignored_warnings.py | 27 -------------------
 2 files changed, 13 insertions(+), 27 deletions(-)
 delete mode 100644 pytorch_lightning/trainer/ignored_warnings.py

diff --git a/pytorch_lightning/plugins/training_type/dp.py b/pytorch_lightning/plugins/training_type/dp.py
index 08caa7398ab8c..9f0d50e9edf92 100644
--- a/pytorch_lightning/plugins/training_type/dp.py
+++ b/pytorch_lightning/plugins/training_type/dp.py
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import warnings
 from typing import List, Optional
 
 import torch
@@ -22,6 +23,18 @@
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 
 
+def _ignore_scalar_return_in_dp():
+    # Users get confused by this warning so we silence it
+    warnings.filterwarnings(
+        'ignore',
+        message='Was asked to gather along dimension 0, but all input tensors were scalars;'
+        ' will instead unsqueeze and return a vector.'
+    )
+
+
+_ignore_scalar_return_in_dp()
+
+
 class DataParallelPlugin(ParallelPlugin):
     """
     Implements data-parallel training in a single process, i.e., the model gets replicated to each
diff --git a/pytorch_lightning/trainer/ignored_warnings.py b/pytorch_lightning/trainer/ignored_warnings.py
deleted file mode 100644
index 894416d607a3e..0000000000000
--- a/pytorch_lightning/trainer/ignored_warnings.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright The PyTorch Lightning team.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import warnings
-
-
-def ignore_scalar_return_in_dp():
-    # Users get confused by this warning so we silence it
-    warnings.filterwarnings(
-        'ignore',
-        message='Was asked to gather along dimension 0, but all input tensors were scalars;'
-        ' will instead unsqueeze and return a vector.'
-    )
-
-
-ignore_scalar_return_in_dp()

From 1a3c0938ef4e2bdfd9a01fae593aa36f0b75b5ab Mon Sep 17 00:00:00 2001
From: ananthsub
Date: Fri, 7 May 2021 13:27:23 -0700
Subject: [PATCH 2/4] mv-warning-to-override

---
 CHANGELOG.md                                  |  3 +++
 pytorch_lightning/overrides/data_parallel.py  | 10 ++++++++++
 pytorch_lightning/plugins/training_type/dp.py | 12 ------------
 3 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 899f9f74b9fb5..adac6ae8a4e71 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,6 +19,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Changed `clip_grad_norm` to use `torch.nn.utils.clip_grad_norm_` ([#7025](https://github.com/PyTorchLightning/pytorch-lightning/pull/7025))
 
+- Moved `ignore_scalar_return_in_dp` warning suppression to the DataParallelPlugin class ([#7421](https://github.com/PyTorchLightning/pytorch-lightning/pull/7421/))
+
+
 ### Deprecated
 
 
diff --git a/pytorch_lightning/overrides/data_parallel.py b/pytorch_lightning/overrides/data_parallel.py
index a9d312e9f6417..272f4c67502c7 100644
--- a/pytorch_lightning/overrides/data_parallel.py
+++ b/pytorch_lightning/overrides/data_parallel.py
@@ -26,6 +26,15 @@
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 
 
+def _ignore_scalar_return_in_dp():
+    # Users get confused by this warning so we silence it
+    warnings.filterwarnings(
+        'ignore',
+        message='Was asked to gather along dimension 0, but all input tensors were scalars;'
+        ' will instead unsqueeze and return a vector.'
+    )
+
+
 class LightningDataParallel(DataParallel):
 
     def __init__(self, module: LightningModule, *args, **kwargs):
@@ -70,6 +79,7 @@ class LightningParallelModule(_LightningModuleWrapperBase):
 
     def __init__(self, pl_module: LightningModule):
         super().__init__(pl_module)
+        _ignore_scalar_return_in_dp()
 
     def forward(self, *inputs, **kwargs):
         self.update_replica_device_attributes(inputs)
diff --git a/pytorch_lightning/plugins/training_type/dp.py b/pytorch_lightning/plugins/training_type/dp.py
index 9f0d50e9edf92..a69dfd28ea6e9 100644
--- a/pytorch_lightning/plugins/training_type/dp.py
+++ b/pytorch_lightning/plugins/training_type/dp.py
@@ -23,18 +23,6 @@
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 
 
-def _ignore_scalar_return_in_dp():
-    # Users get confused by this warning so we silence it
-    warnings.filterwarnings(
-        'ignore',
-        message='Was asked to gather along dimension 0, but all input tensors were scalars;'
-        ' will instead unsqueeze and return a vector.'
-    )
-
-
-_ignore_scalar_return_in_dp()
-
-
 class DataParallelPlugin(ParallelPlugin):
     """
     Implements data-parallel training in a single process, i.e., the model gets replicated to each

From a9569462af0961c71f434df32ab08e2040154342 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 7 May 2021 18:22:20 +0000
Subject: [PATCH 3/4] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 docs/source/governance.rst | 2 --
 1 file changed, 2 deletions(-)

diff --git a/docs/source/governance.rst b/docs/source/governance.rst
index fac8b68e1df53..5b1f9bd1916c1 100644
--- a/docs/source/governance.rst
+++ b/docs/source/governance.rst
@@ -38,5 +38,3 @@ Alumni
 - Jeff Ling (`jeffling <https://github.com/jeffling>`_)
 - Teddy Koker (`teddykoker <https://github.com/teddykoker>`_)
 - Nate Raw (`nateraw <https://github.com/nateraw>`_)
-
-

From ace2fca8a399e92c4153e6ef4035e63124a61b78 Mon Sep 17 00:00:00 2001
From: ananthsub
Date: Fri, 7 May 2021 13:28:36 -0700
Subject: [PATCH 4/4] Update dp.py

---
 pytorch_lightning/plugins/training_type/dp.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pytorch_lightning/plugins/training_type/dp.py b/pytorch_lightning/plugins/training_type/dp.py
index a69dfd28ea6e9..08caa7398ab8c 100644
--- a/pytorch_lightning/plugins/training_type/dp.py
+++ b/pytorch_lightning/plugins/training_type/dp.py
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import warnings
 from typing import List, Optional
 
 import torch
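
Note on the mechanism these patches relocate: `warnings.filterwarnings('ignore', message=...)` installs a process-wide filter whose `message` argument is a regex matched against the start of each warning's text. Patch 1 still ran the filter at import time of `dp.py`; patch 2 defers it to `LightningParallelModule.__init__`, so the filter is only installed once a model is actually wrapped for DataParallel. A minimal self-contained sketch of that behavior — `fake_gather` below is a stand-in for the PyTorch gather call that emits this warning, not part of the patch:

```python
import warnings


def _ignore_scalar_return_in_dp():
    # Filter body copied from the patch: silences the scalar-gather warning.
    warnings.filterwarnings(
        'ignore',
        message='Was asked to gather along dimension 0, but all input tensors were scalars;'
        ' will instead unsqueeze and return a vector.'
    )


def fake_gather():
    # Stand-in for the warning torch.nn.parallel gather emits on scalar outputs.
    warnings.warn(
        'Was asked to gather along dimension 0, but all input tensors were scalars;'
        ' will instead unsqueeze and return a vector.'
    )


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    fake_gather()                    # emitted: no filter installed yet
    _ignore_scalar_return_in_dp()    # as called from LightningParallelModule.__init__
    fake_gather()                    # suppressed by the message filter
print(len(caught))  # 1 -- only the pre-filter call got through
```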