From 97c8a4baca85c9cdb33fe0faafe8a8a07dfce8ca Mon Sep 17 00:00:00 2001
From: Gregor Koporec
Date: Mon, 14 Dec 2020 12:13:42 +0100
Subject: [PATCH 1/4] [bugfix] Group defaults to WORLD if None

---
 pytorch_lightning/utilities/distributed.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py
index 9724f05247c00..845af2460c5b1 100644
--- a/pytorch_lightning/utilities/distributed.py
+++ b/pytorch_lightning/utilities/distributed.py
@@ -203,6 +203,8 @@ def all_gather_ddp_if_available(
     Return:
         A tensor of shape (world_size, batch, ...)
     """
+    group = group if group is not None else torch.distributed.group.WORLD
+
     if torch.distributed.is_available() and torch.distributed.is_initialized():
         if sync_grads:
             return AllGatherGrad.apply(tensor, group)

From c5f8424b784571a50352822dafaf22ab440e6f28 Mon Sep 17 00:00:00 2001
From: Rohit Gupta
Date: Mon, 14 Dec 2020 22:44:10 +0530
Subject: [PATCH 2/4] fix no_grad

---
 pytorch_lightning/utilities/distributed.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py
index 845af2460c5b1..847d41b2f79be 100644
--- a/pytorch_lightning/utilities/distributed.py
+++ b/pytorch_lightning/utilities/distributed.py
@@ -209,6 +209,6 @@ def all_gather_ddp_if_available(
         if sync_grads:
             return AllGatherGrad.apply(tensor, group)
         else:
-            with torch.no_grad:
+            with torch.no_grad():
                 return AllGatherGrad.apply(tensor, group)
     return tensor

From 5fc3e88fbf161e91ecfcd6d80a50bc930715f5e7 Mon Sep 17 00:00:00 2001
From: Rohit Gupta
Date: Mon, 14 Dec 2020 22:46:05 +0530
Subject: [PATCH 3/4] Update pytorch_lightning/utilities/distributed.py

---
 pytorch_lightning/utilities/distributed.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py
index 847d41b2f79be..7de69897c4ce1 100644
--- a/pytorch_lightning/utilities/distributed.py
+++ b/pytorch_lightning/utilities/distributed.py
@@ -204,7 +204,6 @@ def all_gather_ddp_if_available(
         A tensor of shape (world_size, batch, ...)
     """
     group = group if group is not None else torch.distributed.group.WORLD
-
     if torch.distributed.is_available() and torch.distributed.is_initialized():
         if sync_grads:
             return AllGatherGrad.apply(tensor, group)

From e44070b1dff42620591cb412a2f4b292e6d3a631 Mon Sep 17 00:00:00 2001
From: Rohit Gupta
Date: Mon, 14 Dec 2020 22:52:50 +0530
Subject: [PATCH 4/4] Update pytorch_lightning/utilities/distributed.py

---
 pytorch_lightning/utilities/distributed.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytorch_lightning/utilities/distributed.py b/pytorch_lightning/utilities/distributed.py
index 7de69897c4ce1..dd765ea6e78a1 100644
--- a/pytorch_lightning/utilities/distributed.py
+++ b/pytorch_lightning/utilities/distributed.py
@@ -208,6 +208,6 @@ def all_gather_ddp_if_available(
         if sync_grads:
             return AllGatherGrad.apply(tensor, group)
         else:
-            with torch.no_grad():
+            with torch.no_grad:
                 return AllGatherGrad.apply(tensor, group)
     return tensor
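
A minimal usage sketch of the patched helper (not part of the patch series). The import path, the parameter names, and the (world_size, batch, ...) output shape are taken from the diffs above; the example assumes it runs without an initialized torch.distributed process group, in which case the helper falls through and returns its input unchanged:

    import torch
    from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available

    # After PATCH 1/4, group=None resolves to torch.distributed.group.WORLD
    # inside the helper instead of being forwarded to the collective as None.
    local = torch.randn(8, 3)  # shape (batch, ...)

    # Without an initialized process group this returns `local` unchanged;
    # under DDP it would return a tensor of shape (world_size, batch, ...).
    gathered = all_gather_ddp_if_available(local, group=None)

    # sync_grads=True keeps the gather on the autograd tape via AllGatherGrad,
    # so gradients can flow back to the local tensor.
    gathered = all_gather_ddp_if_available(local, group=None, sync_grads=True)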