diff --git a/requirements/pytorch/extra.txt b/requirements/pytorch/extra.txt
index 62cd1363c8bc0..f92f282518abe 100644
--- a/requirements/pytorch/extra.txt
+++ b/requirements/pytorch/extra.txt
@@ -5,7 +5,7 @@
 matplotlib>3.1, <3.9.0
 omegaconf >=2.0.5, <2.4.0
 hydra-core >=1.0.5, <1.4.0
-jsonargparse[signatures] >=4.18.0, <4.28.0
+jsonargparse[signatures] >=4.26.1, <4.28.0
 rich >=12.3.0, <13.6.0
 tensorboardX >=2.2, <2.7.0  # min version is set by torch.onnx missing attribute
 bitsandbytes <=0.41.1
diff --git a/src/lightning/pytorch/CHANGELOG.md b/src/lightning/pytorch/CHANGELOG.md
index c7b95c3d700a0..5c44c845b85da 100644
--- a/src/lightning/pytorch/CHANGELOG.md
+++ b/src/lightning/pytorch/CHANGELOG.md
@@ -7,6 +7,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ## [2.1.3] - 2023-12-21
 
+### Changed
+
+- `LightningCLI` no longer allows setting a normal class instance as default. A `lazy_instance` can be used instead ([#18822](https://github.com/Lightning-AI/lightning/pull/18822))
+
 ### Fixed
 
 - Fixed checks for local file protocol due to fsspec changes in 2023.10.0 ([#19023](https://github.com/Lightning-AI/lightning/pull/19023))
diff --git a/src/lightning/pytorch/cli.py b/src/lightning/pytorch/cli.py
index e2156b493983a..f41889d94a825 100644
--- a/src/lightning/pytorch/cli.py
+++ b/src/lightning/pytorch/cli.py
@@ -30,7 +30,7 @@
 from lightning.pytorch.utilities.model_helpers import is_overridden
 from lightning.pytorch.utilities.rank_zero import rank_zero_warn
 
-_JSONARGPARSE_SIGNATURES_AVAILABLE = RequirementCache("jsonargparse[signatures]>=4.18.0")
+_JSONARGPARSE_SIGNATURES_AVAILABLE = RequirementCache("jsonargparse[signatures]>=4.26.1")
 
 if _JSONARGPARSE_SIGNATURES_AVAILABLE:
     import docstring_parser
diff --git a/tests/tests_pytorch/test_cli.py b/tests/tests_pytorch/test_cli.py
index 41fb14bae215f..dd0a43d7e046c 100644
--- a/tests/tests_pytorch/test_cli.py
+++ b/tests/tests_pytorch/test_cli.py
@@ -60,6 +60,9 @@
 else:
     from argparse import Namespace
 
+    def lazy_instance(*args, **kwargs):
+        return None
+
 
 @contextmanager
 def mock_subclasses(baseclass, *subclasses):
@@ -173,7 +176,9 @@ def on_fit_start(self):
             self.trainer.ran_asserts = True
 
     with mock.patch("sys.argv", ["any.py", "fit", f"--trainer.callbacks={json.dumps(callbacks)}"]):
-        cli = LightningCLI(TestModel, trainer_defaults={"fast_dev_run": True, "logger": CSVLogger(".")})
+        cli = LightningCLI(
+            TestModel, trainer_defaults={"fast_dev_run": True, "logger": lazy_instance(CSVLogger, save_dir=".")}
+        )
 
     assert cli.trainer.ran_asserts
 
@@ -589,7 +594,7 @@ def on_fit_start(self):
 
 # mps not yet supported by distributed
 @RunIf(skip_windows=True, mps=False)
-@pytest.mark.parametrize("logger", [False, TensorBoardLogger(".")])
+@pytest.mark.parametrize("logger", [False, lazy_instance(TensorBoardLogger, save_dir=".")])
 @pytest.mark.parametrize("strategy", ["ddp_spawn", "ddp"])
 def test_cli_distributed_save_config_callback(cleandir, logger, strategy):
     from torch.multiprocessing import ProcessRaisedException