Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add testing for PyTorch 2.4 (Fabric) #20028

Merged
merged 19 commits into from
Jul 2, 2024
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
delete obsolete test
awaelchli committed Jun 30, 2024

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
commit 2f8402f589d25160095b4da7f44a24d21c88be29
21 changes: 0 additions & 21 deletions tests/tests_pytorch/strategies/test_model_parallel.py
Original file line number Diff line number Diff line change
@@ -174,27 +174,6 @@ def test_save_checkpoint_path_exists(shutil_mock, torch_save_mock, tmp_path):
assert path.is_dir()


@RunIf(min_torch="2.3")
@mock.patch("lightning.fabric.strategies.model_parallel._TORCH_GREATER_EQUAL_2_4", False)
def test_load_full_checkpoint_support(tmp_path):
    """Test that loading non-distributed checkpoints into distributed models requires PyTorch >= 2.4.

    The `_TORCH_GREATER_EQUAL_2_4` flag is patched to False so the strategy believes it runs on an
    older PyTorch, and `_has_dtensor_modules` is patched to True so the model is reported as
    distributed; under those conditions `load_checkpoint` must raise an ImportError.
    """
    strategy = ModelParallelStrategy()
    strategy.model = Mock()
    strategy._lightning_module = Mock(strict_loading=True)
    # load_checkpoint requires the checkpoint path to exist on disk
    path = tmp_path / "full.ckpt"
    path.touch()

    # NOTE(review): the original test repeated this identical context block twice in a row;
    # the duplicate asserted the exact same condition and has been removed.
    with pytest.raises(ImportError, match="Loading .* into a distributed model requires PyTorch >= 2.4"), mock.patch(
        "lightning.fabric.strategies.model_parallel._has_dtensor_modules", return_value=True
    ):
        strategy.load_checkpoint(checkpoint_path=path)


@RunIf(min_torch="2.3")
@mock.patch("lightning.fabric.strategies.model_parallel._has_dtensor_modules", return_value=True)
def test_load_unknown_checkpoint_type(_, tmp_path):