
Commit 4f37cc5

Merge branch 'master' into ci/clear-cache
2 parents: ed9a1ae + a45c047

9 files changed: +23 −40 lines

.azure/hpu-tests.yml (+1 −1)

@@ -39,7 +39,7 @@ jobs:
   cancelTimeoutInMinutes: "2"
   pool: intel-hpus
   container:
-    image: "vault.habana.ai/gaudi-docker/1.6.0/ubuntu20.04/habanalabs/pytorch-installer-1.12.0:latest"
+    image: "vault.habana.ai/gaudi-docker/1.6.1/ubuntu20.04/habanalabs/pytorch-installer-1.12.0:latest"
     options: "--runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --ipc=host --shm-size=4g --name cd-container -v /usr/bin/docker:/tmp/docker:ro"
   workspace:
     clean: all

dockers/ci-runner-hpu/Dockerfile (+1 −1)

@@ -16,7 +16,7 @@
 # gaudi-docker-agent:latest

 ARG DIST="latest"
-ARG GAUDI_VERSION="1.6.0"
+ARG GAUDI_VERSION="1.6.1"
 ARG PYTORCH_INSTALLER_VERSION="1.12.0"
 FROM vault.habana.ai/gaudi-docker/${GAUDI_VERSION}/ubuntu20.04/habanalabs/pytorch-installer-${PYTORCH_INSTALLER_VERSION}:${DIST}

pyproject.toml (−1)

@@ -59,7 +59,6 @@ module = [
     "pytorch_lightning.callbacks.progress.rich_progress",
     "pytorch_lightning.trainer.trainer",
     "pytorch_lightning.trainer.connectors.checkpoint_connector",
-    "pytorch_lightning.tuner.batch_size_scaling",
     "lightning_app.api.http_methods",
     "lightning_app.api.request_types",
     "lightning_app.cli.app-template.app",

src/pytorch_lightning/CHANGELOG.md (+3)

@@ -222,6 +222,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed the deprecated device attributes `Trainer.{devices,gpus,num_gpus,ipus,tpu_cores}` in favor of the accelerator-agnostic `Trainer.num_devices` ([#14829](https://github.com/Lightning-AI/lightning/pull/14829))


+- Removed the deprecated `LightningIPUModule` ([#14830](https://github.com/Lightning-AI/lightning/pull/14830))
+
+
 - Removed the deprecated `Logger.agg_and_log_metrics` hook in favour of `Logger.log_metrics` and the `agg_key_funcs` and `agg_default_func` arguments. ([#14840](https://github.com/Lightning-AI/lightning/pull/14840))

src/pytorch_lightning/callbacks/batch_size_finder.py (+2)

@@ -31,6 +31,8 @@
 class BatchSizeFinder(Callback):
     SUPPORTED_MODES = ("power", "binsearch")

+    optimal_batch_size: Optional[int]
+
     def __init__(
         self,
         mode: str = "power",
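
The class-level annotation gives type checkers a declared type for an attribute that is only assigned once a scan completes. A minimal usage sketch, not from this commit: `DemoModel` below is an assumed module exposing the `batch_size` attribute the finder scales.

from torch.utils.data import DataLoader

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import BatchSizeFinder
from pytorch_lightning.demos.boring_classes import BoringModel, RandomDataset


class DemoModel(BoringModel):
    def __init__(self):
        super().__init__()
        self.batch_size = 2  # attribute that BatchSizeFinder tunes in place

    def train_dataloader(self):
        # rebuilt on each trial with the currently suggested batch size
        return DataLoader(RandomDataset(32, 64), batch_size=self.batch_size)


finder = BatchSizeFinder(mode="power", max_trials=3)
trainer = Trainer(max_epochs=1, callbacks=[finder])
trainer.fit(DemoModel())

# with the annotation above, type checkers see this as Optional[int]
if finder.optimal_batch_size is not None:
    print(f"largest batch size that fit: {finder.optimal_batch_size}")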

src/pytorch_lightning/strategies/ipu.py (+1 −24)

@@ -24,7 +24,7 @@
 from lightning_lite.plugins import CheckpointIO, ClusterEnvironment
 from lightning_lite.plugins.precision.utils import _fp_to_half
 from lightning_lite.utilities.cloud_io import get_filesystem
-from pytorch_lightning.overrides.base import _LightningModuleWrapperBase, _LightningPrecisionModuleWrapperBase
+from pytorch_lightning.overrides.base import _LightningModuleWrapperBase
 from pytorch_lightning.plugins.precision import PrecisionPlugin
 from pytorch_lightning.strategies.parallel import ParallelStrategy
 from pytorch_lightning.strategies.strategy import TBroadcast
@@ -33,7 +33,6 @@
 from pytorch_lightning.utilities.data import _get_dataloader_init_args_and_kwargs, _reinstantiate_wrapped_cls
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
-from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation
 from pytorch_lightning.utilities.types import STEP_OUTPUT

 if _POPTORCH_AVAILABLE:
@@ -42,28 +41,6 @@
     poptorch = None


-class LightningIPUModule(_LightningModuleWrapperBase):
-    """
-    .. deprecated:: v1.7.0
-        ``LightningIPUModule`` has been deprecated in v1.7.0 and will be removed in v1.9.0.
-    """
-
-    def __init__(
-        self,
-        forward_module: Optional[Union["pl.LightningModule", _LightningPrecisionModuleWrapperBase]] = None,
-        precision: Union[str, int] = 32,
-        pl_module: Optional[Union["pl.LightningModule", _LightningPrecisionModuleWrapperBase]] = None,
-    ) -> None:
-        rank_zero_deprecation("`LightningIPUModule` has been deprecated in v1.7.0 and will be removed in v1.8.0")
-        self._validate_init_arguments(pl_module, forward_module)
-        super().__init__(forward_module=(pl_module or forward_module))
-        self.precision = precision
-
-    def forward(self, *inputs: Any, **kwargs: Any) -> Any:
-        inputs = apply_to_collection(inputs, Tensor, function=_fp_to_half, precision=self.precision)
-        return super().forward(*inputs, **kwargs)
-
-
 class IPUStrategy(ParallelStrategy):
     """Plugin for training on IPU devices."""

src/pytorch_lightning/tuner/batch_size_scaling.py (+15 −5)

@@ -35,10 +35,10 @@ def scale_batch_size(
     init_val: int = 2,
     max_trials: int = 25,
     batch_arg_name: str = "batch_size",
-):
+) -> Optional[int]:
     if trainer.fast_dev_run:
         rank_zero_warn("Skipping batch size scaler since `fast_dev_run` is enabled.")
-        return
+        return None

     # Save initial model, that is loaded after batch size is found
     ckpt_path = os.path.join(trainer.default_root_dir, f".scale_batch_size_{uuid.uuid4()}.ckpt")
@@ -141,7 +141,12 @@ def __scale_batch_restore_params(trainer: "pl.Trainer", params: Dict[str, Any])


 def _run_power_scaling(
-    trainer: "pl.Trainer", pl_module: "pl.LightningModule", new_size: int, batch_arg_name: str, max_trials: int, params
+    trainer: "pl.Trainer",
+    pl_module: "pl.LightningModule",
+    new_size: int,
+    batch_arg_name: str,
+    max_trials: int,
+    params: Dict[str, Any],
 ) -> int:
     """Batch scaling mode where the size is doubled at each iteration until an OOM error is encountered."""
     # this flag is used to determine whether the previously scaled batch size, right before OOM, was a success or not
@@ -179,7 +184,12 @@ def _run_power_scaling(


 def _run_binary_scaling(
-    trainer: "pl.Trainer", pl_module: "pl.LightningModule", new_size: int, batch_arg_name: str, max_trials: int, params
+    trainer: "pl.Trainer",
+    pl_module: "pl.LightningModule",
+    new_size: int,
+    batch_arg_name: str,
+    max_trials: int,
+    params: Dict[str, Any],
 ) -> int:
     """Batch scaling mode where the size is initially is doubled at each iteration until an OOM error is
     encountered.
@@ -309,7 +319,7 @@ def _reset_dataloaders(trainer: "pl.Trainer", pl_module: "pl.LightningModule") -
     reset_fn(pl_module)


-def _try_loop_run(trainer: "pl.Trainer", params) -> None:
+def _try_loop_run(trainer: "pl.Trainer", params: Dict[str, Any]) -> None:
     if trainer.state.fn == "fit":
         loop = trainer.fit_loop
     else:
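
With the `Optional[int]` return annotation, the skipped path under `fast_dev_run` is now explicit to callers. A minimal sketch through the public tuner entry point, reusing the illustrative `DemoModel` from the `BatchSizeFinder` example above; this assumes `Trainer.tuner.scale_batch_size`, which forwards to the function typed here.

from pytorch_lightning import Trainer

trainer = Trainer(max_epochs=1)
new_size = trainer.tuner.scale_batch_size(DemoModel(), mode="binsearch", max_trials=3)

# the annotation makes the None case explicit: no scan was run
if new_size is None:
    print("batch size scan was skipped")
else:
    print(f"suggested batch size: {new_size}")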

tests/tests_pytorch/deprecated_api/test_remove_1-10.py (−2)

@@ -33,7 +33,6 @@
 from pytorch_lightning.plugins.environments import LightningEnvironment
 from pytorch_lightning.strategies.bagua import LightningBaguaModule
 from pytorch_lightning.strategies.deepspeed import LightningDeepSpeedModule
-from pytorch_lightning.strategies.ipu import LightningIPUModule
 from pytorch_lightning.strategies.utils import on_colab_kaggle
 from pytorch_lightning.utilities.apply_func import (
     apply_to_collection,
@@ -85,7 +84,6 @@ def test_deprecated_amp_level():
         LightningBaguaModule,
         LightningDeepSpeedModule,
         pytest.param(LightningShardedDataParallel, marks=RunIf(fairscale=True)),
-        LightningIPUModule,
     ],
 )
 def test_v1_10_deprecated_pl_module_init_parameter(wrapper_class):

tests/tests_pytorch/deprecated_api/test_remove_1-8.py (−6)

@@ -20,7 +20,6 @@
 from pytorch_lightning import Callback, Trainer
 from pytorch_lightning.callbacks import ModelCheckpoint
 from pytorch_lightning.demos.boring_classes import BoringDataModule, BoringModel
-from pytorch_lightning.strategies.ipu import LightningIPUModule
 from pytorch_lightning.trainer.configuration_validator import _check_datamodule_checkpoint_hooks


@@ -257,11 +256,6 @@ def on_load_checkpoint(self, checkpoint):
     _check_datamodule_checkpoint_hooks(trainer)


-def test_v1_8_0_deprecated_lightning_ipu_module():
-    with pytest.deprecated_call(match=r"has been deprecated in v1.7.0 and will be removed in v1.8."):
-        _ = LightningIPUModule(BoringModel(), 32)
-
-
 def test_deprecated_mc_save_checkpoint():
     mc = ModelCheckpoint()
     trainer = Trainer()
