Revert "refactor and add types"
This reverts commit b4c552f
Borda committed Mar 27, 2020
1 parent c59a1b8 commit 96a2825
Showing 21 changed files with 21 additions and 38 deletions.
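Almost every hunk below reverts the same one-line change: imports from the short-lived pytorch_lightning.debugging package go back to their previous locations. As a quick reference, a sketch summarizing the restored import paths (a summary of the hunks below, not itself part of the commit):

# restored import locations after this revert
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.profiling import BaseProfiler, Profiler, AdvancedProfiler, PassThroughProfiler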
2 changes: 1 addition & 1 deletion pytorch_lightning/core/lightning.py
@@ -20,7 +20,7 @@
from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
import torch_xla.core.xla_model as xm
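For context, a minimal sketch of how the restored exception is raised in practice; the helper name and message below are illustrative, not taken from this commit:

import torch

from pytorch_lightning.utilities.exceptions import MisconfigurationException


def _check_gpus(gpus):
    # hypothetical validation helper: reject a GPU request when CUDA is absent
    if gpus and not torch.cuda.is_available():
        raise MisconfigurationException('GPUs were requested but CUDA is not available')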
2 changes: 1 addition & 1 deletion pytorch_lightning/loggers/comet.py
@@ -28,7 +28,7 @@

from pytorch_lightning import _logger as log
from pytorch_lightning.loggers.base import LightningLoggerBase, rank_zero_only
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException


class CometLogger(LightningLoggerBase):
13 changes: 0 additions & 13 deletions pytorch_lightning/profiler/__init__.py

This file was deleted.

pytorch_lightning/debugging/__init__.py → pytorch_lightning/profiling/__init__.py
@@ -113,11 +113,9 @@ def custom_processing_step(self, data):
"""

-from pytorch_lightning.debugging.exceptions import MisconfigurationException
-from pytorch_lightning.debugging.profilers import BaseProfiler, Profiler, AdvancedProfiler, PassThroughProfiler
+from pytorch_lightning.profiling.profilers import Profiler, AdvancedProfiler, PassThroughProfiler, BaseProfiler

__all__ = [
-    'MisconfigurationException',
    'BaseProfiler',
    'Profiler',
    'AdvancedProfiler',
File renamed without changes.
6 changes: 3 additions & 3 deletions pytorch_lightning/trainer/__init__.py
@@ -612,15 +612,15 @@ def on_train_end(self):
    # default used by the Trainer
    trainer = Trainer(process_position=0)

-profiler
-^^^^^^^^
+profiling
+^^^^^^^^^
To profile individual steps during training and assist in identifying bottlenecks.
See the `profiler documentation <profiler.rst>`_. for more details.
Example::
-    from pytorch_lightning.profiler import Profiler, AdvancedProfiler
+    from pytorch_lightning.profiling import Profiler, AdvancedProfiler

    # default used by the Trainer
    trainer = Trainer(profiler=None)
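Following the docstring above, a minimal sketch of turning profiling on, assuming the Trainer at this commit accepts a profiler instance via the profiler argument (as the surrounding docs suggest):

from pytorch_lightning import Trainer
from pytorch_lightning.profiling import Profiler, AdvancedProfiler

# records wall-clock durations of the standard Trainer actions
trainer = Trainer(profiler=Profiler())

# or use cProfile under the hood for per-function detail
trainer = Trainer(profiler=AdvancedProfiler())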
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/data_loading.py
@@ -6,7 +6,7 @@
from torch.utils.data.distributed import DistributedSampler

from pytorch_lightning.core import LightningModule
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
from apex import amp
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/distrib_data_parallel.py
@@ -122,7 +122,7 @@ def train_fx(trial_hparams, cluster_manager, _):
import torch
from pytorch_lightning import _logger as log
from pytorch_lightning.loggers import LightningLoggerBase
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
from apex import amp
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/distrib_parts.py
@@ -344,7 +344,7 @@
    LightningDistributedDataParallel,
    LightningDataParallel,
)
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
from apex import amp
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/evaluation_loop.py
@@ -135,7 +135,7 @@

from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel, LightningDataParallel
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
import torch_xla.distributed.parallel_loader as xla_pl
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/trainer.py
@@ -17,8 +17,8 @@
from pytorch_lightning import _logger as log
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping, Callback
from pytorch_lightning.core.lightning import LightningModule
-from pytorch_lightning.debugging import BaseProfiler, Profiler, PassThroughProfiler
from pytorch_lightning.loggers import LightningLoggerBase
+from pytorch_lightning.profiling import Profiler, PassThroughProfiler, BaseProfiler
from pytorch_lightning.trainer.auto_mix_precision import TrainerAMPMixin
from pytorch_lightning.trainer.callback_config import TrainerCallbackConfigMixin
from pytorch_lightning.trainer.callback_hook import TrainerCallbackHookMixin
@@ -32,7 +32,7 @@
from pytorch_lightning.trainer.training_io import TrainerIOMixin
from pytorch_lightning.trainer.training_loop import TrainerTrainLoopMixin
from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
from apex import amp
2 changes: 1 addition & 1 deletion pytorch_lightning/trainer/training_loop.py
@@ -145,7 +145,7 @@ def training_step(self, batch, batch_idx):
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.loggers import LightningLoggerBase
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
from apex import amp
File renamed without changes.
2 changes: 1 addition & 1 deletion tests/loggers/test_comet.py
@@ -8,7 +8,7 @@
import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import CometLogger
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import LightningTestModel


2 changes: 1 addition & 1 deletion tests/models/test_amp.py
@@ -4,7 +4,7 @@

import tests.base.utils as tutils
from pytorch_lightning import Trainer
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import (
    LightningTestModel,
)
2 changes: 1 addition & 1 deletion tests/models/test_gpu.py
@@ -11,7 +11,7 @@
    parse_gpu_ids,
    determine_root_gpu_device,
)
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import LightningTestModel

PRETEND_N_OF_GPUS = 16
2 changes: 1 addition & 1 deletion tests/models/test_restore.py
@@ -8,7 +8,7 @@
import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import (
    LightningTestModel,
    LightningTestModelWithoutHyperparametersArg,
2 changes: 0 additions & 2 deletions tests/test_deprecated.py
@@ -57,8 +57,6 @@ def test_tbd_remove_in_v0_9_0_module_imports():
    from pytorch_lightning.logging.test_tube import TestTubeLogger  # noqa: F402
    from pytorch_lightning.logging.wandb import WandbLogger  # noqa: F402

-    from pytorch_lightning.profiler import Profiler, AdvancedProfiler  # noqa: F402


class ModelVer0_6(LightTrainDataloader, LightEmptyTestStep, TestModelBase):

4 changes: 2 additions & 2 deletions tests/test_profiler.py
@@ -1,10 +1,10 @@
import tempfile
import time
from pathlib import Path

import numpy as np
import pytest

-from pytorch_lightning.debugging import AdvancedProfiler, Profiler
+from pytorch_lightning.profiling import AdvancedProfiler, Profiler

PROFILER_OVERHEAD_MAX_TOLERANCE = 0.0001

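For context on what this tolerance bounds: a sketch of timing a named action with the profiler, assuming the profile() context manager and describe() report that these profilers expose at this commit; the action name is illustrative:

import time

from pytorch_lightning.profiling import Profiler

profiler = Profiler()
# each profile() block records the wall-clock duration of the named action
with profiler.profile('my_action'):
    time.sleep(0.01)
profiler.describe()  # prints a summary of the recorded durations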
2 changes: 1 addition & 1 deletion tests/trainer/test_dataloaders.py
@@ -2,7 +2,7 @@

import tests.base.utils as tutils
from pytorch_lightning import Trainer
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import (
    TestModelBase,
    LightningTestModel,
2 changes: 1 addition & 1 deletion tests/trainer/test_trainer.py
@@ -14,7 +14,7 @@
)
from pytorch_lightning.core.lightning import load_hparams_from_tags_csv
from pytorch_lightning.trainer.logging import TrainerLoggingMixin
-from pytorch_lightning.debugging import MisconfigurationException
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import (
    TestModelBase,
    DictHparamsModel,
