Delete deprecated save function (#8680)
carmocca authored Aug 2, 2021
1 parent f67892e · commit ca96b2d
Showing 3 changed files with 4 additions and 27 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -69,6 +69,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed the deprecated `optimizer_idx` from `training_step` as an accepted argument in manual optimization ([#8576](https://github.com/PyTorchLightning/pytorch-lightning/pull/8576))
 
+
+- Removed the deprecated `save_function` property in `ModelCheckpoint` ([#8680](https://github.com/PyTorchLightning/pytorch-lightning/pull/8680))
+
 ### Fixed
 
 - Fixed `trainer.fit_loop.split_idx` always returning `None` ([#8601](https://github.com/PyTorchLightning/pytorch-lightning/pull/8601))
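What replaces the removed property: as the deprecation message deleted further down states, checkpoints should now be written through `trainer.save_checkpoint`. A minimal migration sketch, assuming a release in which `Trainer.save_checkpoint` accepts a file path; the `TinyModel` module, directory, and hyperparameters below are illustrative and not part of this commit:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

from pytorch_lightning import LightningModule, Trainer
from pytorch_lightning.callbacks import ModelCheckpoint


class TinyModel(LightningModule):
    """Minimal module used only to demonstrate checkpointing."""

    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        (x,) = batch
        return self.layer(x).sum()

    def train_dataloader(self):
        return DataLoader(TensorDataset(torch.randn(16, 4)), batch_size=4)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


checkpoint_callback = ModelCheckpoint(dirpath="checkpoints/")
trainer = Trainer(max_epochs=1, callbacks=[checkpoint_callback])
trainer.fit(TinyModel())

# Before this commit (deprecated since v1.3):
#     checkpoint_callback.save_function("example.ckpt")
# After: call the trainer directly, as the deprecation message advised.
trainer.save_checkpoint("example.ckpt")
```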
20 changes: 1 addition & 19 deletions pytorch_lightning/callbacks/model_checkpoint.py
@@ -25,7 +25,7 @@
 from copy import deepcopy
 from datetime import timedelta
 from pathlib import Path
-from typing import Any, Callable, Dict, Optional, Union
+from typing import Any, Dict, Optional, Union
 from weakref import proxy
 
 import numpy as np
@@ -247,14 +247,12 @@ def __init__(
         self.__init_ckpt_dir(dirpath, filename)
         self.__init_triggers(every_n_train_steps, every_n_epochs, train_time_interval, period)
         self.__validate_init_configuration()
-        self._save_function = None
 
     def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """
         When pretrain routine starts we build the ckpt dir on the fly
         """
         self.__resolve_ckpt_dir(trainer)
-        self._save_function = trainer.save_checkpoint
         if self._save_on_train_epoch_end is None:
             # if the user runs validation multiple times per training epoch, we try to save checkpoint after
             # validation instead of on train epoch end
@@ -507,22 +505,6 @@ def period(self, value: Optional[int]) -> None:
         )
         self._period = value
 
-    @property
-    def save_function(self) -> Optional[Callable]:
-        rank_zero_deprecation(
-            "Property `save_function` in `ModelCheckpoint` is deprecated in v1.3 and will be removed in v1.5."
-            " Please use `trainer.save_checkpoint` instead."
-        )
-        return self._save_function
-
-    @save_function.setter
-    def save_function(self, value: Optional[Callable]) -> None:
-        rank_zero_deprecation(
-            "Property `save_function` in `ModelCheckpoint` is deprecated in v1.3 and will be removed in v1.5."
-            " Please use `trainer.save_checkpoint` instead."
-        )
-        self._save_function = value
-
     def _del_model(self, trainer: "pl.Trainer", filepath: str) -> None:
         if trainer.should_rank_save_checkpoint and self._fs.exists(filepath):
             self._fs.rm(filepath)
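For context on the mechanism deleted above: the getter/setter was a plain deprecation shim, a Python `property` that emits a rank-zero deprecation warning and forwards to a private attribute, the same pattern the `period` setter visible in the hunk still follows. A stripped-down sketch of that pattern; the class name is illustrative, and the `rank_zero_deprecation` import path is assumed to match this release:

```python
from typing import Callable, Optional

from pytorch_lightning.utilities import rank_zero_deprecation


class _LegacyCheckpointShim:
    """Illustrative stand-in for the shim removed from ModelCheckpoint."""

    def __init__(self) -> None:
        self._save_function: Optional[Callable] = None

    @property
    def save_function(self) -> Optional[Callable]:
        # Warn (on rank zero only) each time the deprecated accessor is read.
        rank_zero_deprecation("`save_function` is deprecated, use `trainer.save_checkpoint` instead.")
        return self._save_function

    @save_function.setter
    def save_function(self, value: Optional[Callable]) -> None:
        # Warn on writes too, then keep the old behaviour of storing the callable.
        rank_zero_deprecation("`save_function` is deprecated, use `trainer.save_checkpoint` instead.")
        self._save_function = value
```

Because the shim only warned and forwarded, deleting it together with the `_save_function` bookkeeping in `__init__` and `on_pretrain_routine_start` removes no behaviour that `trainer.save_checkpoint` does not already provide.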
8 changes: 0 additions & 8 deletions tests/deprecated_api/test_remove_1-5.py
@@ -42,14 +42,6 @@ def test_v1_5_0_model_checkpoint_save_checkpoint():
         model_ckpt.save_checkpoint(trainer, object())
 
 
-def test_v1_5_0_model_checkpoint_save_function():
-    model_ckpt = ModelCheckpoint()
-    with pytest.deprecated_call(match="Property `save_function` in `ModelCheckpoint` is deprecated in v1.3"):
-        model_ckpt.save_function = lambda *_, **__: None
-    with pytest.deprecated_call(match="Property `save_function` in `ModelCheckpoint` is deprecated in v1.3"):
-        _ = model_ckpt.save_function
-
-
 @mock.patch("pytorch_lightning.loggers.wandb.wandb")
 def test_v1_5_0_wandb_unused_sync_step(_):
     with pytest.deprecated_call(match=r"v1.2.1 and will be removed in v1.5"):
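The deleted test relied on `pytest.deprecated_call`, pytest's context manager that fails unless the enclosed block emits a `DeprecationWarning` (or `PendingDeprecationWarning`) whose message matches the given regex. A self-contained sketch of the same pattern against a toy function; nothing below is Lightning-specific:

```python
import warnings

import pytest


def legacy_api() -> None:
    # Stand-in for a deprecated accessor such as the removed `ModelCheckpoint.save_function`.
    warnings.warn("`legacy_api` is deprecated and will be removed in v1.5.", DeprecationWarning)


def test_legacy_api_warns() -> None:
    # Fails if no DeprecationWarning is raised, or if the message does not match the regex.
    with pytest.deprecated_call(match="deprecated and will be removed in v1.5"):
        legacy_api()
```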
