[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 25, 2023
1 parent 5cb35b2 commit 36424ec
Showing 4 changed files with 6 additions and 13 deletions.
2 changes: 1 addition & 1 deletion examples/nlp/language_modeling/megatron_lm_ckpt_to_nemo.py
@@ -42,11 +42,11 @@
from typing import Any, Optional

import torch
+from lightning_fabric.utilities.cloud_io import _load as pl_load
from megatron.core import parallel_state
from pytorch_lightning.core.saving import _load_state as ptl_load_state
from pytorch_lightning.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
from pytorch_lightning.trainer.trainer import Trainer
-from lightning_fabric.utilities.cloud_io import _load as pl_load
from pytorch_lightning.utilities.migration import pl_legacy_patch

from nemo.collections.nlp.models.language_modeling.megatron_bert_model import MegatronBertModel
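The change above, like the matching moves in nlp_model.py and nlp_overrides.py below, is the kind of reorder an import-sorting hook produces. A minimal sketch of reproducing it, assuming the hook is isort (the commit does not say which hooks ran, and the line_length value below is likewise an assumption):

```python
# Hypothetical reproduction of the import move shown above via isort's public
# Python API (isort.code). Treating the hook as isort, and the line_length
# value, are assumptions; the commit does not name its hooks or settings.
import isort

unsorted = """\
import torch
from megatron.core import parallel_state
from pytorch_lightning.trainer.trainer import Trainer
from lightning_fabric.utilities.cloud_io import _load as pl_load
from pytorch_lightning.utilities.migration import pl_legacy_patch
"""

# With isort's defaults, plain "import" statements sort ahead of "from"
# imports within a section, so the lightning_fabric import should land
# directly after "import torch", matching the added line in the diff.
print(isort.code(unsorted, line_length=119))
```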
2 changes: 1 addition & 1 deletion nemo/collections/nlp/models/nlp_model.py
@@ -18,12 +18,12 @@
import os
from typing import Any, Optional

+from lightning_fabric.utilities.cloud_io import _load as pl_load
from omegaconf import DictConfig, OmegaConf
from pytorch_lightning import Trainer
from pytorch_lightning.core.saving import _load_state as ptl_load_state
from pytorch_lightning.core.saving import load_hparams_from_tags_csv, load_hparams_from_yaml
from pytorch_lightning.utilities import rank_zero_only
-from lightning_fabric.utilities.cloud_io import _load as pl_load
from pytorch_lightning.utilities.migration import pl_legacy_patch
from transformers import TRANSFORMERS_CACHE

2 changes: 1 addition & 1 deletion nemo/collections/nlp/parts/nlp_overrides.py
@@ -24,14 +24,14 @@
import pytorch_lightning as pl
import torch
from omegaconf import OmegaConf
+from pytorch_lightning.loops.fetchers import _DataFetcher
from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
from pytorch_lightning.plugins import ClusterEnvironment
from pytorch_lightning.plugins.io.checkpoint_plugin import CheckpointIO
from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
from pytorch_lightning.strategies.ddp import DDPStrategy
from pytorch_lightning.trainer.trainer import Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.loops.fetchers import _DataFetcher
from torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks import noop_hook
from torch.nn.parallel import DistributedDataParallel

13 changes: 3 additions & 10 deletions tests/core/test_exp_manager.py
@@ -329,17 +329,13 @@ def test_resume(self, tmp_path):
{"resume_if_exists": True, "explicit_log_dir": str(tmp_path / "test_resume" / "default" / "version_0")},
)
checkpoint = Path(tmp_path / "test_resume" / "default" / "version_0" / "checkpoints" / "mymodel--last.ckpt")
-assert (
-    Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == checkpoint.resolve()
-)
+assert Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == checkpoint.resolve()

# Succeed again and make sure that run_0 exists and previous log files were moved
test_trainer = pl.Trainer(accelerator='cpu', enable_checkpointing=False, logger=False)
exp_manager(test_trainer, {"resume_if_exists": True, "explicit_log_dir": str(log_dir)})
checkpoint = Path(tmp_path / "test_resume" / "default" / "version_0" / "checkpoints" / "mymodel--last.ckpt")
-assert (
-    Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == checkpoint.resolve()
-)
+assert Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == checkpoint.resolve()
prev_run_dir = Path(tmp_path / "test_resume" / "default" / "version_0" / "run_0")
assert prev_run_dir.exists()
prev_log = Path(tmp_path / "test_resume" / "default" / "version_0" / "run_0" / "lightning_logs.txt")
@@ -372,10 +368,7 @@ def test_resume(self, tmp_path):
"explicit_log_dir": str(dirpath_log_dir),
},
)
-assert (
-    Path(test_trainer._checkpoint_connector._ckpt_path).resolve()
-    == dirpath_checkpoint.resolve()
-)
+assert Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == dirpath_checkpoint.resolve()

@pytest.mark.unit
def test_nemo_checkpoint_save_best_model_1(self, tmp_path):
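The test_exp_manager.py changes collapse three parenthesized assert statements onto single lines. A minimal sketch of the same reflow, assuming the formatting hook is black configured with a 119-character line length (both are assumptions; the commit does not name its hooks or settings):

```python
# Hypothetical reproduction of the assert reformatting via black's Python API.
# Treating the hook as black, and the 119-character line length, are
# assumptions; the commit does not name its hooks or settings.
import black

wrapped = (
    "assert (\n"
    "    Path(test_trainer._checkpoint_connector._ckpt_path).resolve() == checkpoint.resolve()\n"
    ")\n"
)

# Once the whole statement fits within the configured line length, black
# drops the now-redundant parentheses and joins it onto a single line;
# a smaller limit would leave it wrapped.
print(black.format_str(wrapped, mode=black.Mode(line_length=119)))
```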
