From 16f94e5db03a3efa28d862a3680eb1a93bc53713 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Adrian=20W=C3=A4lchli?=
Date: Mon, 28 Jun 2021 11:10:42 +0200
Subject: [PATCH] x

---
 pytorch_lightning/plugins/training_type/ddp.py | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/pytorch_lightning/plugins/training_type/ddp.py b/pytorch_lightning/plugins/training_type/ddp.py
index 1ef704e7320c97..909faf99836279 100644
--- a/pytorch_lightning/plugins/training_type/ddp.py
+++ b/pytorch_lightning/plugins/training_type/ddp.py
@@ -324,12 +324,17 @@ def post_dispatch(self) -> None:
         self.cluster_environment.teardown()
 
     def barrier(self, *args, **kwargs):
-        if torch_distrib.is_initialized():
-            if self.global_rank == 0:
-                import time
-                print("sleeping to test barrier")
-                time.sleep(10)
+        if not torch_distrib.is_initialized():
+            return
+
+        if self.global_rank == 0:
+            import time
+            print("sleeping to test barrier")
+            time.sleep(10)
+
         if _TORCH_GREATER_EQUAL_1_8:
             torch_distrib.barrier(device_ids=self.determine_ddp_device_ids())
+        else:
+            torch_distrib.barrier()
 
     def broadcast(self, obj: object, src: int = 0) -> object:
         return self.dist.broadcast(obj)
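
Note (not part of the patch): below is a minimal standalone sketch of the control flow the hunk introduces, assuming a determine_ddp_device_ids-style helper and a _TORCH_GREATER_EQUAL_1_8 flag as in pytorch_lightning; both are stubbed here, and the rank-0 sleep is the same temporary debugging aid the diff adds so the other ranks can be observed waiting at the barrier.

# Standalone sketch under the assumptions stated above; not the plugin's actual class.
import time

import torch
import torch.distributed as torch_distrib
from packaging.version import Version

_TORCH_GREATER_EQUAL_1_8 = Version(torch.__version__) >= Version("1.8.0")


def determine_ddp_device_ids(local_rank: int):
    # Stub of the plugin helper: None on CPU, otherwise this process's GPU index.
    return None if not torch.cuda.is_available() else [local_rank]


def barrier(global_rank: int, local_rank: int) -> None:
    # Early return when no process group exists (e.g. single-process runs).
    if not torch_distrib.is_available() or not torch_distrib.is_initialized():
        return

    # Temporary debugging aid from the patch: rank 0 stalls for 10 s so the
    # remaining ranks can be seen blocking in the barrier below.
    if global_rank == 0:
        print("sleeping to test barrier")
        time.sleep(10)

    # torch >= 1.8 accepts device_ids, which pins the barrier to this
    # process's GPU instead of defaulting to device 0; older versions do not.
    # (device_ids is only meaningful for the NCCL backend.)
    if _TORCH_GREATER_EQUAL_1_8:
        torch_distrib.barrier(device_ids=determine_ddp_device_ids(local_rank))
    else:
        torch_distrib.barrier()

Launched across several processes after init_process_group, ranks other than 0 wait in the barrier for the full 10 seconds that rank 0 sleeps, which is what the temporary print/sleep is meant to demonstrate.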