Skip to content

Commit

Permalink
release 2.1.4
Browse files Browse the repository at this point in the history
  • Loading branch information
awaelchli authored and lexierule committed Jan 31, 2024
1 parent 80c02ff commit 8623143
Show file tree
Hide file tree
Showing 19 changed files with 76 additions and 19 deletions.
4 changes: 2 additions & 2 deletions .actions/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def _download_frontend(pkg_path: str, version: str = "v0.0.0"):
response = urllib.request.urlopen(frontend_release_url)

file = tarfile.open(fileobj=response, mode="r|gz")
file.extractall(path=download_dir)
file.extractall(path=download_dir) # noqa: S202

shutil.move(download_dir, frontend_dir)
print("The Lightning UI has successfully been downloaded!")
Expand Down Expand Up @@ -457,7 +457,7 @@ def pull_docs_files(
raise RuntimeError(f"Requesting file '{zip_url}' does not exist or it is just unavailable.")

with zipfile.ZipFile(zip_file, "r") as zip_ref:
zip_ref.extractall(tmp)
zip_ref.extractall(tmp) # noqa: S202

zip_dirs = [d for d in glob.glob(os.path.join(tmp, "*")) if os.path.isdir(d)]
# check that the extracted archive has only repo folder
Expand Down
2 changes: 1 addition & 1 deletion docs/source-pytorch/extensions/strategy.rst
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ The below table lists all relevant strategies available in Lightning with their
- Strategy for multi-process single-device training on one or multiple nodes. :ref:`Learn more. <accelerators/gpu_intermediate:Distributed Data Parallel>`
* - ddp_spawn
- :class:`~lightning.pytorch.strategies.DDPStrategy`
- Same as "ddp" but launches processes using :func:`torch.multiprocessing.spawn` method and joins processes after training finishes. :ref:`Learn more. <accelerators/gpu_intermediate:Distributed Data Parallel Spawn>`
- Same as "ddp" but launches processes using ``torch.multiprocessing.spawn`` method and joins processes after training finishes. :ref:`Learn more. <accelerators/gpu_intermediate:Distributed Data Parallel Spawn>`
* - deepspeed
- :class:`~lightning.pytorch.strategies.DeepSpeedStrategy`
- Provides capabilities to run training using the DeepSpeed library, with training optimizations for large billion parameter models. :doc:`Learn more. <../advanced/model_parallel/deepspeed>`
Expand Down
2 changes: 1 addition & 1 deletion examples/app/hpo/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def extract_tarfile(file_path: str, extract_path: str, mode: str):
if ".zip" in local_filename:
if os.path.exists(local_filename):
with zipfile.ZipFile(local_filename, "r") as zip_ref:
zip_ref.extractall(path)
zip_ref.extractall(path) # noqa: S202
elif local_filename.endswith(".tar.gz") or local_filename.endswith(".tgz"):
extract_tarfile(local_filename, path, "r:gz")
elif local_filename.endswith(".tar.bz2") or local_filename.endswith(".tbz"):
Expand Down
4 changes: 4 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ ignore-init-module-imports = true
".actions/*" = ["S101", "S310"]
"setup.py" = ["S101"]
"examples/**" = [
"F841", # Local variable is assigned to but never used
"S101", # Use of `assert` detected
"S113", # todo: Probable use of requests call without
"S104", # Possible binding to all interface
Expand All @@ -88,6 +89,7 @@ ignore-init-module-imports = true
"S108", # Probable insecure usage of temporary file or directory: "/tmp/data/MNIST"
]
"src/**" = [
"F841", # Local variable is assigned to but never used
"S101", # todo: Use of `assert` detected
"S105", "S106", "S107", # todo: Possible hardcoded password: ...
"S113", # todo: Probable use of requests call without timeout
Expand All @@ -103,12 +105,14 @@ ignore-init-module-imports = true
"RET503",
]
"tests/**" = [
"F841", # Local variable is assigned to but never used
"S101", # Use of `assert` detected
"S105", "S106", # todo: Possible hardcoded password: ...
"S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
"S113", # todo: Probable use of requests call without timeout
"S311", # todo: Standard pseudo-random generators are not suitable for cryptographic purposes
"S108", # todo: Probable insecure usage of temporary file or directory: "/tmp/sys-customizations-sync"
"S202", # Uses of `tarfile.extractall()`
"S403", # `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
"S404", # `subprocess` module is possibly insecure
"S602", # todo: `subprocess` call with `shell=True` identified, security issue
Expand Down
7 changes: 7 additions & 0 deletions src/lightning/app/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).


## [2.1.4] - 2024-01-31

### Changed

- Removed torch distributed for the Dataset Optimizer ([#19182](https://github.com/Lightning-AI/lightning/pull/19182))


## [2.1.3] - 2023-12-21

### Changed
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/app/cli/cmd_pl_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ def download_frontend(destination: Path) -> None:
with TemporaryDirectory() as download_dir:
response = urllib.request.urlopen(url) # noqa: S310
file = tarfile.open(fileobj=response, mode="r|gz")
file.extractall(path=download_dir)
file.extractall(path=download_dir) # noqa: S202
shutil.move(str(Path(download_dir, build_dir_name)), destination)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/app/plugin/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ def _run_plugin(run: _Run) -> Dict[str, Any]:
logger.info("Extracting plugin source.")

with tarfile.open(download_path, "r:gz") as tf:
tf.extractall(source_path)
tf.extractall(source_path) # noqa: S202
except Exception as ex:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/app/storage/drive.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ def __str__(self) -> str:


def _maybe_create_drive(component_name: str, state: Dict) -> Union[Dict, Drive]:
if state.get("type", None) == Drive.__IDENTIFIER__:
if state.get("type") == Drive.__IDENTIFIER__:
drive = Drive.from_dict(state)
drive.component_name = component_name
return drive
Expand Down
4 changes: 2 additions & 2 deletions src/lightning/app/utilities/packaging/cloud_compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def from_dict(cls, d: dict) -> "CloudCompute":
f"mounts argument must be one of [None, Mount, List[Mount]], "
f"received {mounts} of type {type(mounts)}"
)
_verify_mount_root_dirs_are_unique(d.get("mounts", None))
_verify_mount_root_dirs_are_unique(d.get("mounts"))
return cls(**d)

@property
Expand All @@ -183,6 +183,6 @@ def _verify_mount_root_dirs_are_unique(mounts: Union[None, Mount, List[Mount], T


def _maybe_create_cloud_compute(state: Dict) -> Union[CloudCompute, Dict]:
if state and state.get("type", None) == __CLOUD_COMPUTE_IDENTIFIER__:
if state and state.get("type") == __CLOUD_COMPUTE_IDENTIFIER__:
return CloudCompute.from_dict(state)
return state
2 changes: 1 addition & 1 deletion src/lightning/app/utilities/packaging/lightning_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def download_frontend(root: str = _PROJECT_ROOT):
response = urllib.request.urlopen(LIGHTNING_FRONTEND_RELEASE_URL) # noqa: S310

file = tarfile.open(fileobj=response, mode="r|gz")
file.extractall(path=download_dir)
file.extractall(path=download_dir) # noqa: S202

shutil.move(os.path.join(download_dir, build_dir), frontend_dir)
print("The Lightning UI has successfully been downloaded!")
Expand Down
28 changes: 28 additions & 0 deletions src/lightning/data/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,34 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).


## [2.1.4] - 2024-01-31

### Added

- Added support for nested folders in map operator ([#19366](https://github.com/Lightning-AI/lightning/pull/19366))
- Added support for weights to evenly distribute work among workers for the map operator ([#19365](https://github.com/Lightning-AI/lightning/pull/19365))
- Added profiling support to StreamingDataloader ([#19338](https://github.com/Lightning-AI/lightning/pull/19338))
- Allow any AWS authentication method in studios ([#19336](https://github.com/Lightning-AI/lightning/pull/19336))
- Added walk operator ([#19333](https://github.com/Lightning-AI/lightning/pull/19333))
- Added intra node shuffling to accelerate second epoch in StreamingDataset ([#19296](https://github.com/Lightning-AI/lightning/pull/19296))
- Enabled map over inputs without files input ([#19285](https://github.com/Lightning-AI/lightning/pull/19285))
- Added Fault Tolerance v2 ([#19196](https://github.com/Lightning-AI/lightning/pull/19196), [#19201](https://github.com/Lightning-AI/lightning/pull/19201))

### Changed

- Switched map operator arguments order ([#19345](https://github.com/Lightning-AI/lightning/pull/19345))
- Removed torch distributed for the Dataset Optimizer ([#19182](https://github.com/Lightning-AI/lightning/pull/19182))
- Removed `__len__` from CombinedStreamingDataset ([#19321](https://github.com/Lightning-AI/lightning/pull/19321))

### Fixed

- Fixed race condition in downloader ([#19348](https://github.com/Lightning-AI/lightning/pull/19348))
- Fixed serializer `io.bytes` image in JPEGSerializer ([#19369](https://github.com/Lightning-AI/lightning/pull/19369))
- Fixed several bugs found in Studio Data Processor ([#19309](https://github.com/Lightning-AI/lightning/pull/19309))
- Fixed handling queue errors in streaming dataset reader ([#19167](https://github.com/Lightning-AI/lightning/pull/19167))
- Fixed chunks eviction in StreamingDataset ([#19214](https://github.com/Lightning-AI/lightning/pull/19214))


## [2.1.3] - 2023-12-21

### Added
Expand Down
8 changes: 8 additions & 0 deletions src/lightning/fabric/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).


## [2.1.4] - 2024-01-31

### Fixed

- Fixed an issue preventing Fabric to run on CPU when the system's CUDA driver is outdated or broken ([#19234](https://github.com/Lightning-AI/lightning/pull/19234))
- Fixed typo in kwarg in SpikeDetection ([#19282](https://github.com/Lightning-AI/lightning/pull/19282))


## [2.1.3] - 2023-12-21

### Fixed
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,10 +160,11 @@ def _basic_subprocess_cmd() -> Sequence[str]:


def _hydra_subprocess_cmd(local_rank: int) -> Tuple[Sequence[str], str]:
import __main__ # local import to avoid https://github.com/Lightning-AI/lightning/issues/15218
from hydra.core.hydra_config import HydraConfig
from hydra.utils import get_original_cwd, to_absolute_path

import __main__ # local import to avoid https://github.com/Lightning-AI/lightning/issues/15218

# when user is using hydra find the absolute path
if __main__.__spec__ is None: # pragma: no-cover
command = [sys.executable, to_absolute_path(sys.argv[0])]
Expand Down
16 changes: 13 additions & 3 deletions src/lightning/pytorch/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,19 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).


## [2.1.4] - 2024-01-31

### Fixed

- Fixed `Trainer` not expanding the `default_root_dir` if it has the `~` (home) prefix ([#19179](https://github.com/Lightning-AI/lightning/pull/19179))
- Fixed warning for Dataloader if `num_workers=1` and CPU count is 1 ([#19224](https://github.com/Lightning-AI/lightning/pull/19224))
- Fixed `WandbLogger.watch()` method annotation to accept `None` for the log parameter ([#19237](https://github.com/Lightning-AI/lightning/pull/19237))
- Fixed an issue preventing the Trainer to run on CPU when the system's CUDA driver is outdated or broken ([#19234](https://github.com/Lightning-AI/lightning/pull/19234))
- Fixed an issue with the ModelCheckpoint callback not saving relative symlinks with `ModelCheckpoint(save_last="link")` ([#19303](https://github.com/Lightning-AI/lightning/pull/19303))
- Fixed issue where the `_restricted_classmethod_impl` would incorrectly raise a TypeError on inspection rather than on call ([#19332](https://github.com/Lightning-AI/lightning/pull/19332))
- Fixed exporting `__version__` in `__init__` ([#19221](https://github.com/Lightning-AI/lightning/pull/19221))


## [2.1.3] - 2023-12-21

### Changed
Expand All @@ -23,9 +36,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Fixed the tensor conversion in `self.log` to respect the default dtype ([#19046](https://github.com/Lightning-AI/lightning/issues/19046))


- Fixed `Trainer` not expanding the `default_root_dir` if it has the `~` (home) prefix ([#19179](https://github.com/Lightning-AI/lightning/pull/19179))


## [2.1.2] - 2023-11-15

### Fixed
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/utilities/model_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import functools
import inspect
import os
from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, Type, TypeVar
from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, Type, TypeVar

from lightning_utilities.core.imports import RequirementCache
from torch import nn
Expand Down
2 changes: 1 addition & 1 deletion src/version.info
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2.1.3
2.1.4
2 changes: 1 addition & 1 deletion tests/tests_app/core/test_lightning_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -1115,7 +1115,7 @@ def __init__(self, flow):
def test_cloud_compute_binding():
cloud_compute.ENABLE_MULTIPLE_WORKS_IN_NON_DEFAULT_CONTAINER = True

assert {} == cloud_compute._CLOUD_COMPUTE_STORE
assert cloud_compute._CLOUD_COMPUTE_STORE == {}
flow = FlowCC()
assert len(cloud_compute._CLOUD_COMPUTE_STORE) == 2
assert cloud_compute._CLOUD_COMPUTE_STORE["default"].component_names == ["root.work_c"]
Expand Down
2 changes: 1 addition & 1 deletion tests/tests_pytorch/strategies/test_fsdp.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ def __init__(self, wrap_min_params: int = 2):

self.save_hyperparameters()
self.layer = torch.nn.Sequential(torch.nn.Linear(32, 32), torch.nn.ReLU(), torch.nn.Linear(32, 2))
self.should_be_wrapped = [(32 * 32 + 32) > wrap_min_params, None, (32 * 2 + 2) > wrap_min_params]
self.should_be_wrapped = [wrap_min_params < (32 * 32 + 32), None, wrap_min_params < (32 * 2 + 2)]

def configure_optimizers(self):
parameters = self.parameters() if _TORCH_GREATER_EQUAL_2_0 else self.trainer.model.parameters()
Expand Down
1 change: 0 additions & 1 deletion tests/tests_pytorch/utilities/test_model_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging

import pytest
from lightning.pytorch import LightningDataModule
Expand Down

0 comments on commit 8623143

Please sign in to comment.