Commit
temp change to run tpu test
four4fish committed Nov 30, 2021
1 parent fdbbc08 commit 8f85576
Showing 1 changed file with 9 additions and 1 deletion.
pytorch_lightning/plugins/training_type/tpu_spawn.py (+9, -1)
@@ -32,7 +32,7 @@
 from pytorch_lightning.trainer.connectors.data_connector import DataConnector
 from pytorch_lightning.trainer.states import TrainerFn
 from pytorch_lightning.utilities import _TPU_AVAILABLE, find_shared_parameters, rank_zero_warn, set_shared_parameters
-from pytorch_lightning.utilities.apply_func import move_data_to_device
+from pytorch_lightning.utilities.apply_func import apply_to_collection, move_data_to_device
 from pytorch_lightning.utilities.data import has_len
 from pytorch_lightning.utilities.distributed import rank_zero_only, ReduceOp
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -127,6 +127,14 @@ def setup(self, trainer: "pl.Trainer") -> None:
         self.setup_optimizers(trainer)
         self.setup_precision_plugin()
 
+    def _move_optimizer_state(self, device: Optional[torch.device] = None) -> None:
+        """Moves the state of the optimizers to the TPU if needed."""
+        # TODO: `self.root_device` would raise an error if called outside the spawn process
+        # while training on 8 or more cores.
+        for opt in self.optimizers:
+            for p, v in opt.state.items():
+                opt.state[p] = apply_to_collection(v, torch.Tensor, move_data_to_device, self.root_device)
+
     def _setup_model(self, model: Module) -> Module:
         return model
 
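For context on the new method: `_move_optimizer_state` walks each optimizer's `state` dict and uses `apply_to_collection` to move every `torch.Tensor` it contains onto `self.root_device` (the XLA device). Below is a minimal standalone sketch of the same pattern; the `move_tensors` and `demo_move_optimizer_state` helpers are hypothetical stand-ins written for illustration (a hand-rolled recursion replaces `apply_to_collection` so the snippet runs without Lightning), and a CPU device stands in for the TPU:

import torch
from torch import nn

def move_tensors(value, device):
    # Hypothetical stand-in for apply_to_collection(value, torch.Tensor,
    # move_data_to_device, device): recursively visit dicts/lists/tuples
    # and move any torch.Tensor found onto `device`.
    if isinstance(value, torch.Tensor):
        return value.to(device)
    if isinstance(value, dict):
        return {k: move_tensors(v, device) for k, v in value.items()}
    if isinstance(value, (list, tuple)):
        return type(value)(move_tensors(v, device) for v in value)
    return value

def demo_move_optimizer_state(optimizer, device):
    # Same loop shape as the _move_optimizer_state added in this commit.
    for p, v in optimizer.state.items():
        optimizer.state[p] = move_tensors(v, device)

model = nn.Linear(4, 2)
opt = torch.optim.Adam(model.parameters())
model(torch.randn(8, 4)).sum().backward()
opt.step()  # populates opt.state with exp_avg / exp_avg_sq tensors

demo_move_optimizer_state(opt, torch.device("cpu"))  # a TPU setup would pass the XLA device

The recursion matters because each optimizer state value can itself be a container (e.g. Adam keeps a dict of `exp_avg`/`exp_avg_sq` tensors per parameter), so a flat `.to(device)` over the values of `opt.state` would not reach every tensor.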
