Remove profile("training_step_and_backward") #11222

Merged · 11 commits · Jan 4, 2022
3 changes: 3 additions & 0 deletions CHANGELOG.md
```diff
@@ -346,6 +346,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed `Strategy.init_optimizers` in favor of `Strategy.setup_optimizers` ([#11236](https://github.com/PyTorchLightning/pytorch-lightning/pull/11236))
 
+
+- Removed `profile("training_step_and_backward")` ([#11222](https://github.com/PyTorchLightning/pytorch-lightning/pull/11222))
+
 ### Fixed
 
 - Fixed security vulnerabilities CVE-2020-1747 and CVE-2020-14343 caused by the `PyYAML` dependency ([#11099](https://github.com/PyTorchLightning/pytorch-lightning/pull/11099))
```

26 changes: 8 additions & 18 deletions pytorch_lightning/loops/optimization/optimizer_loop.py
```diff
@@ -28,7 +28,6 @@
     _extract_hiddens,
     check_finite_loss,
 )
-from pytorch_lightning.profiler import BaseProfiler, PassThroughProfiler
 from pytorch_lightning.trainer.progress import OptimizationProgress
 from pytorch_lightning.utilities import _AcceleratorType, AMPType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
@@ -110,7 +109,6 @@ class Closure(AbstractClosure[ClosureResult]):
             Can be set to ``None`` to skip the backward operation.
         zero_grad_fn: A function that zeroes the gradients. Can be set to ``None`` to skip zero_grad, for example
             when accumulating gradients.
-        profiler: A profiler for profiling the actions of the passed in closure functions.
 
     Example:
 
@@ -126,28 +124,23 @@ def __init__(
         step_fn: Callable[[], ClosureResult],
         backward_fn: Optional[Callable[[Tensor], None]] = None,
         zero_grad_fn: Optional[Callable[[], None]] = None,
-        profiler: Optional[BaseProfiler] = None,
     ):
         super().__init__()
         self._step_fn = step_fn
         self._backward_fn = backward_fn
         self._zero_grad_fn = zero_grad_fn
-        self._profiler = PassThroughProfiler() if profiler is None else profiler
 
     def closure(self, *args: Any, **kwargs: Any) -> ClosureResult:
-        with self._profiler.profile("training_step_and_backward"):
-            step_output = self._step_fn()
+        step_output = self._step_fn()
 
-            if step_output.closure_loss is None:
-                self.warning_cache.warn(
-                    "`training_step` returned `None`. If this was on purpose, ignore this warning..."
-                )
+        if step_output.closure_loss is None:
+            self.warning_cache.warn("`training_step` returned `None`. If this was on purpose, ignore this warning...")
 
-            if self._zero_grad_fn is not None:
-                self._zero_grad_fn()
+        if self._zero_grad_fn is not None:
+            self._zero_grad_fn()
 
-            if self._backward_fn is not None and step_output.closure_loss is not None:
-                self._backward_fn(step_output.closure_loss)
+        if self._backward_fn is not None and step_output.closure_loss is not None:
+            self._backward_fn(step_output.closure_loss)
 
         return step_output
 
@@ -280,10 +273,7 @@ def _make_closure(self, split_batch: Any, batch_idx: int, opt_idx: int, optimize
         step_fn = self._make_step_fn(split_batch, batch_idx, opt_idx)
         backward_fn = self._make_backward_fn(optimizer, opt_idx)
         zero_grad_fn = self._make_zero_grad_fn(batch_idx, opt_idx, optimizer)
-
-        return Closure(
-            step_fn=step_fn, backward_fn=backward_fn, zero_grad_fn=zero_grad_fn, profiler=self.trainer.profiler
-        )
+        return Closure(step_fn=step_fn, backward_fn=backward_fn, zero_grad_fn=zero_grad_fn)
 
     def _make_step_fn(self, split_batch: Any, batch_idx: int, opt_idx: int) -> Callable[[], ClosureResult]:
         """Build the step function that runs the `training_step` and processes its output."""
```

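With the wrapper gone, `training_step` and `backward` are still profiled as separate actions. Anyone who relied on the combined timing can reproduce it with the profiler's `profile` context manager, the same API the removed `Closure` code used internally. A minimal sketch, assuming a toy model and batch; the action name `my_step_and_backward` is made up for illustration, not a default Lightning action:

```python
import torch
from pytorch_lightning.profiler import SimpleProfiler

# Stand-ins for a real LightningModule's forward/loss and a real batch.
model = torch.nn.Linear(4, 1)
batch = torch.randn(8, 4)

profiler = SimpleProfiler()

# Same context-manager API the removed code used; the action name is illustrative.
with profiler.profile("my_step_and_backward"):
    loss = model(batch).sum()  # stands in for `training_step`
    loss.backward()

profiler.describe()  # prints the recorded duration for each action
```
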
6 changes: 3 additions & 3 deletions pytorch_lightning/profiler/__init__.py
```diff
@@ -141,9 +141,9 @@ def custom_processing_step(self, data):
 
 The profiler's results will be printed on the completion of ``{fit,validate,test,predict}``.
 
-This profiler will record ``training_step_and_backward``, ``training_step``, ``backward``,
+This profiler will record ``training_step``, ``backward``,
 ``validation_step``, ``test_step``, and ``predict_step`` by default.
-The output below shows the profiling for the action ``training_step_and_backward``.
+The output below shows the profiling for the action ``training_step``.
 The user can provide ``PyTorchProfiler(record_functions={...})`` to extend the scope of profiled functions.
 
 .. note::
@@ -156,7 +156,7 @@
 
     Profiler Report
 
-    Profile stats for: training_step_and_backward
+    Profile stats for: training_step
     ---------------------  ----------------  ---------------  ------------  ----------  ------------
     Name                   Self CPU total %  Self CPU total   CPU total %   CPU total   CPU time avg
     ---------------------  ----------------  ---------------  ------------  ----------  ------------
```

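As the docs above note, `record_functions` widens the set of actions the `PyTorchProfiler` will record. A hedged sketch of how a user might pair it with a manual `profile` call inside a `LightningModule`; the module body and the action name `my_custom_block` are illustrative, not part of this PR:

```python
import torch
from pytorch_lightning import LightningModule, Trainer
from pytorch_lightning.profiler import PyTorchProfiler


class MyModule(LightningModule):  # illustrative module, not from this PR
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        # The profiler records this custom action because its name is
        # listed in `record_functions` below.
        with self.trainer.profiler.profile("my_custom_block"):
            loss = self.layer(batch).sum()
        return loss

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


profiler = PyTorchProfiler(record_functions={"my_custom_block"})
trainer = Trainer(profiler=profiler, max_steps=1)
```
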
1 change: 0 additions & 1 deletion pytorch_lightning/profiler/pytorch.py
```diff
@@ -195,7 +195,6 @@ def __call__(self, num_step: int) -> "ProfilerAction":
 class PyTorchProfiler(BaseProfiler):
 
     RECORD_FUNCTIONS = {
-        "training_step_and_backward",
         "training_step",
         "backward",
         "validation_step",
```

3 changes: 1 addition & 2 deletions pytorch_lightning/profiler/xla.py
```diff
@@ -53,9 +53,8 @@
 
 class XLAProfiler(BaseProfiler):
 
-    STEP_FUNCTIONS = {"training_step_and_backward", "validation_step", "test_step", "predict_step"}
+    STEP_FUNCTIONS = {"validation_step", "test_step", "predict_step"}
     RECORD_FUNCTIONS = {
-        "training_step_and_backward",
         "training_step",
         "backward",
         "validation_step",
```

1 change: 0 additions & 1 deletion tests/profiler/test_profiler.py
```diff
@@ -313,7 +313,6 @@ def test_pytorch_profiler_trainer_ddp(tmpdir, pytorch_profiler):
     expected = {"[Strategy]DDPStrategy.validation_step"}
     if not _KINETO_AVAILABLE:
         expected |= {
-            "training_step_and_backward",
             "[Strategy]DDPStrategy.training_step",
             "[Strategy]DDPStrategy.backward",
         }
```

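For completeness, a small end-to-end check in the spirit of the updated test: after this change, a short `fit` run records `training_step` and `backward` as separate actions and no combined entry. A sketch with a throwaway dataset and module; `RandomDataset` and `TinyModel` are illustrative stand-ins, not test fixtures from this PR:

```python
import torch
from torch.utils.data import DataLoader, Dataset
from pytorch_lightning import LightningModule, Trainer


class RandomDataset(Dataset):  # throwaway data, illustrative only
    def __len__(self):
        return 8

    def __getitem__(self, idx):
        return torch.randn(4)


class TinyModel(LightningModule):  # illustrative stand-in for a boring model
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        return self.layer(batch).sum()

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


trainer = Trainer(max_steps=2, profiler="simple", logger=False, enable_checkpointing=False)
trainer.fit(TinyModel(), DataLoader(RandomDataset(), batch_size=4))

# The combined action is gone; the step and the backward are timed separately.
assert "training_step_and_backward" not in trainer.profiler.recorded_durations
```
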