removed decorators (#1079)
williamFalcon authored Mar 6, 2020
1 parent 2bc01a0 commit 3d18099
Showing 5 changed files with 0 additions and 12 deletions.
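
The change itself is mechanical: train_dataloader, val_dataloader, and test_dataloader become plain LightningModule methods, with no @pl.data_loader (or bare @data_loader) line above them. A minimal sketch of the new style, assuming an MNIST dataset; the path and batch size are illustrative, not taken from the diff:

    import pytorch_lightning as pl
    from torch.utils.data import DataLoader
    from torchvision import transforms
    from torchvision.datasets import MNIST

    class LitModel(pl.LightningModule):
        # before this commit the method carried @pl.data_loader;
        # now it is a plain hook that returns a DataLoader
        def train_dataloader(self):
            dataset = MNIST('path/to/save', train=True, download=True,
                            transform=transforms.ToTensor())
            return DataLoader(dataset, batch_size=32)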
1 change: 0 additions & 1 deletion docs/source/tpu.rst
@@ -101,7 +101,6 @@ train_dataloader (and val, train) code as follows.
 
     import torch_xla.core.xla_model as xm
 
-    @pl.data_loader
     def train_dataloader(self):
         dataset = MNIST(
             os.getcwd(),
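For context, the tpu.rst example around this hunk shards the dataset across TPU cores with a DistributedSampler driven by torch_xla. A sketch of that method body, assuming the standard torch_xla calls; the download flag and batch size are assumptions:

    import os
    import torch
    import torch_xla.core.xla_model as xm
    from torch.utils.data import DataLoader
    from torchvision import transforms
    from torchvision.datasets import MNIST

    def train_dataloader(self):
        dataset = MNIST(os.getcwd(), train=True, download=True,
                        transform=transforms.ToTensor())
        # give each TPU core its own shard of the data
        sampler = torch.utils.data.distributed.DistributedSampler(
            dataset,
            num_replicas=xm.xrt_world_size(),  # total TPU cores
            rank=xm.get_ordinal(),             # this core's index
            shuffle=True,
        )
        return DataLoader(dataset, sampler=sampler, batch_size=32)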
1 change: 0 additions & 1 deletion pl_examples/domain_templates/gan.py
@@ -167,7 +167,6 @@ def configure_optimizers(self):
         opt_d = torch.optim.Adam(self.discriminator.parameters(), lr=lr, betas=(b1, b2))
         return [opt_g, opt_d], []
 
-    @data_loader
     def train_dataloader(self):
         transform = transforms.Compose([transforms.ToTensor(),
                                         transforms.Normalize([0.5], [0.5])])
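The hunk is cut off at the fold; completed from the visible lines, the undecorated method would plausibly finish like this (dataset root, download flag, and batch size are assumptions):

    import os
    from torch.utils.data import DataLoader
    from torchvision import transforms
    from torchvision.datasets import MNIST

    def train_dataloader(self):
        transform = transforms.Compose([transforms.ToTensor(),
                                        transforms.Normalize([0.5], [0.5])])
        dataset = MNIST(os.getcwd(), train=True, download=True, transform=transform)
        return DataLoader(dataset, batch_size=64)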
3 changes: 0 additions & 3 deletions pl_examples/full_examples/imagenet/imagenet_example.py
@@ -20,7 +20,6 @@
 
 import pytorch_lightning as pl
 from pytorch_lightning.core import LightningModule
-from pytorch_lightning.core import data_loader
 
 # pull out resnet names from torchvision models
 MODEL_NAMES = sorted(
@@ -132,7 +131,6 @@ def configure_optimizers(self):
         scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.1)
         return [optimizer], [scheduler]
 
-    @data_loader
     def train_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
@@ -163,7 +161,6 @@ def train_dataloader(self):
         )
         return train_loader
 
-    @data_loader
     def val_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
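Both ImageNet hooks open with the standard torchvision normalization constants; a sketch of where the truncated train_dataloader is headed, assuming an ImageFolder layout (the data-path attribute, augmentations, and batch size are assumptions, not part of the diff):

    import os
    import torch.utils.data
    import torchvision.datasets as datasets
    import torchvision.transforms as transforms

    def train_dataloader(self):
        normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                         std=[0.229, 0.224, 0.225])
        train_dir = os.path.join(self.data_path, 'train')  # attribute name assumed
        dataset = datasets.ImageFolder(
            train_dir,
            transforms.Compose([
                transforms.RandomResizedCrop(224),
                transforms.RandomHorizontalFlip(),
                transforms.ToTensor(),
                normalize,
            ]))
        return torch.utils.data.DataLoader(dataset, batch_size=64, shuffle=True)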
4 changes: 0 additions & 4 deletions pytorch_lightning/core/lightning.py
@@ -11,7 +11,6 @@
 import torch.distributed as dist
 from torch.optim import Adam
 
-from pytorch_lightning.core.decorators import data_loader
 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import ModelHooks
 from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
@@ -1139,7 +1138,6 @@ def train_dataloader(self):
         """
         return None
 
-    @data_loader
     def tng_dataloader(self):  # todo: remove in v1.0.0
         """Implement a PyTorch DataLoader.
@@ -1239,7 +1237,6 @@ def val_dataloader(self):
         .. code-block:: python
 
-            @pl.data_loader
             def val_dataloader(self):
                 transform = transforms.Compose([transforms.ToTensor(),
                                                 transforms.Normalize((0.5,), (1.0,))])
@@ -1254,7 +1251,6 @@ def val_dataloader(self):
                 return loader
 
             # can also return multiple dataloaders
-            @pl.data_loader
             def val_dataloader(self):
                 return [loader_a, loader_b, ..., loader_n]
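Once the decorator line goes, the docstring example is just an ordinary method. Spelled out as a runnable sketch (dataset path and batch size are illustrative):

    from torch.utils.data import DataLoader
    from torchvision import transforms
    from torchvision.datasets import MNIST

    def val_dataloader(self):
        transform = transforms.Compose([transforms.ToTensor(),
                                        transforms.Normalize((0.5,), (1.0,))])
        dataset = MNIST('path/to/save', train=False, transform=transform)
        loader = DataLoader(dataset, batch_size=32)
        return loader
        # a list is also valid: return [loader_a, loader_b, loader_n]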
3 changes: 0 additions & 3 deletions tests/models/debug.py
@@ -41,14 +41,11 @@ def validation_epoch_end(self, outputs):
     def configure_optimizers(self):
         return [torch.optim.Adam(self.parameters(), lr=0.02)]
 
-    @pl.data_loader
     def train_dataloader(self):
         return DataLoader(MNIST('path/to/save', train=True), batch_size=32)
 
-    @pl.data_loader
     def val_dataloader(self):
         return DataLoader(MNIST('path/to/save', train=False), batch_size=32)
 
-    @pl.data_loader
     def test_dataloader(self):
         return DataLoader(MNIST('path/to/save', train=False), batch_size=32)
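
Nothing else in the test model changes: the Trainer looks these hooks up by name, decorator or not. A usage sketch; the class name and epoch count are assumptions:

    import pytorch_lightning as pl

    model = LightningTestModel()        # the test module above; name assumed
    trainer = pl.Trainer(max_epochs=1)  # arbitrary, for illustration
    trainer.fit(model)                  # resolves train/val/test_dataloader by name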
