From 9aee3ac2d014bfe7b63430264033483326dfcac4 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 10 Jun 2021 10:40:29 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 pytorch_lightning/core/lightning.py                      | 2 +-
 pytorch_lightning/plugins/training_type/ddp_spawn.py     | 8 +++++---
 .../connectors/logger_connector/logger_connector_new.py  | 2 +-
 pytorch_lightning/trainer/properties.py                  | 2 +-
 4 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 7cd5e9a1d6c765..3517d387bb68d8 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -137,7 +137,7 @@ def lr_schedulers(self) -> Optional[Union[Any, List[Any]]]:
 
         # multiple schedulers
         return lr_schedulers
-    
+
     @property
     def spawn_callback_metrics(self) -> Dict:
         return self.trainer.spawn_callback_metrics
diff --git a/pytorch_lightning/plugins/training_type/ddp_spawn.py b/pytorch_lightning/plugins/training_type/ddp_spawn.py
index f1170db5e885ef..93c25577aee973 100644
--- a/pytorch_lightning/plugins/training_type/ddp_spawn.py
+++ b/pytorch_lightning/plugins/training_type/ddp_spawn.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 import logging
 import os
-from pytorch_lightning.utilities.apply_func import apply_to_collection
 import re
 from typing import Any, List, Optional, Union
 
@@ -293,8 +292,11 @@ def transfer_distrib_spawn_state_on_fit_end(self, results):
             self.mp_queue.put(best_model_path)
             self.mp_queue.put(last_path)
             self.mp_queue.put(results)
-            self.mp_queue.put(apply_to_collection(self.lightning_module.trainer.logger_connector.callback_metrics,
-                                                  torch.Tensor, lambda x: x.item()))
+            self.mp_queue.put(
+                apply_to_collection(
+                    self.lightning_module.trainer.logger_connector.callback_metrics, torch.Tensor, lambda x: x.item()
+                )
+            )
 
     def __recover_child_process_weights(self, best_path, last_path):
         # transfer back the best path to the trainer
diff --git a/pytorch_lightning/trainer/connectors/logger_connector/logger_connector_new.py b/pytorch_lightning/trainer/connectors/logger_connector/logger_connector_new.py
index c4f74bff89b1d0..073d78b4880d3d 100644
--- a/pytorch_lightning/trainer/connectors/logger_connector/logger_connector_new.py
+++ b/pytorch_lightning/trainer/connectors/logger_connector/logger_connector_new.py
@@ -278,7 +278,7 @@ def reset(self, metrics: Optional[bool] = None) -> None:
         self._batch_idx = None
         self._split_idx = None
         self._current_fx = None
-    
+
     @property
     def spawn_callback_metrics(self) -> Dict[MetricSource, Dict[str, _METRIC]]:
         return self._spawn_callback_metrics
diff --git a/pytorch_lightning/trainer/properties.py b/pytorch_lightning/trainer/properties.py
index 49877923390f1e..bee2f7a9ea5b2d 100644
--- a/pytorch_lightning/trainer/properties.py
+++ b/pytorch_lightning/trainer/properties.py
@@ -521,7 +521,7 @@ def _active_loop(self) -> Optional[Union[TrainLoop, EvaluationLoop]]:
     @property
     def spawn_callback_metrics(self) -> dict:
         return self.logger_connector.spawn_callback_metrics
-    
+
     @spawn_callback_metrics.setter
     def spawn_callback_metrics(self, val: dict) -> None:
         self.logger_connector.spawn_callback_metrics = val
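
Note (not part of the patch; the metric values below are hypothetical): apply_to_collection walks a
possibly nested collection and applies the given function to every element of the given dtype. The
reformatted mp_queue.put call above uses it to turn scalar tensors in callback_metrics into plain
Python numbers, presumably so they can be sent back through the multiprocessing queue without
carrying device tensors. A minimal sketch of that conversion:

    import torch
    from pytorch_lightning.utilities.apply_func import apply_to_collection

    # hypothetical callback_metrics; only torch.Tensor entries are converted
    callback_metrics = {"val_loss": torch.tensor(0.25), "epoch": 3}
    plain_metrics = apply_to_collection(callback_metrics, torch.Tensor, lambda x: x.item())
    # -> {"val_loss": 0.25, "epoch": 3}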