Skip to content

Commit

Permalink
Use `raise ... from ...` to explicitly chain exceptions (#3750)
Browse files Browse the repository at this point in the history
* Fix exception chaining

* Rename caught-exception variables (e.g. `e` -> `exp`)

* Change exception names for consistency

Co-authored-by: Nicki Skafte <[email protected]>

* Change exception names for consistency

Co-authored-by: Nicki Skafte <[email protected]>

Co-authored-by: Jirka Borovec <[email protected]>
Co-authored-by: Nicki Skafte <[email protected]>
  • Loading branch information
3 people authored Oct 1, 2020
1 parent e17712e commit ebc1b23
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions pytorch_lightning/accelerators/ddp2_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,9 @@ def _resolve_task_idx(self):
# torchelastic or general non_slurm ddp2
try:
self.task_idx = int(os.environ['LOCAL_RANK'])
except Exception as e:
except Exception as exp:
m = 'ddp2 only works in SLURM or via torchelastic with the WORLD_SIZE, LOCAL_RANK, GROUP_RANK flags'
raise MisconfigurationException(m)
raise MisconfigurationException(m) from exp

def train(self):
model = self.trainer.model
Expand Down
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,13 +141,13 @@ def process_dict_result(self, output, train=False):
if train:
try:
loss = output['loss']
except Exception:
except Exception as exp:
if isinstance(output, torch.Tensor):
loss = output
else:
raise RuntimeError(
'No `loss` value in the dictionary returned from `model.training_step()`.'
)
) from exp

# when using dp need to reduce the loss
if self.use_dp or self.use_ddp2:
Expand Down
4 changes: 2 additions & 2 deletions pytorch_lightning/utilities/parsing.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,8 +158,8 @@ class AttributeDict(Dict):
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(f'Missing attribute "{key}"')
except KeyError as exp:
raise AttributeError(f'Missing attribute "{key}"') from exp

def __setattr__(self, key, val):
self[key] = val
Expand Down
4 changes: 2 additions & 2 deletions tests/base/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@

try:
from test_tube import HyperOptArgumentParser
except ImportError:
except ImportError as exp:
# TODO: this should be discussed and moved out of this package
raise ImportError('Missing test-tube package.')
raise ImportError('Missing test-tube package.') from exp

from pytorch_lightning.core.lightning import LightningModule

Expand Down

0 comments on commit ebc1b23

Please sign in to comment.