From b0860b0ca611492caa2810980f25606d99805f6b Mon Sep 17 00:00:00 2001
From: Jirka
Date: Thu, 23 Jul 2020 12:08:00 +0200
Subject: [PATCH] format

---
 pytorch_lightning/trainer/distrib_data_parallel.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/pytorch_lightning/trainer/distrib_data_parallel.py b/pytorch_lightning/trainer/distrib_data_parallel.py
index f016669c8499e..ac85af782af89 100644
--- a/pytorch_lightning/trainer/distrib_data_parallel.py
+++ b/pytorch_lightning/trainer/distrib_data_parallel.py
@@ -162,9 +162,9 @@ def train_fx(trial_hparams, cluster_manager, _):
 else:
     XLA_AVAILABLE = True
 
-pid = os.getpid()
-rng1 = np.random.RandomState(pid)
-RANDOM_PORTS = rng1.randint(10000, 19999, 100)
+PID = os.getpid()
+RNG1 = np.random.RandomState(PID)
+RANDOM_PORTS = RNG1.randint(10000, 19999, 100)
 
 
 class TrainerDDPMixin(ABC):
@@ -345,7 +345,6 @@ def configure_slurm_ddp(self, num_gpu_nodes):
 
     def determine_local_rank(self):
         if self.is_slurm_managing_tasks:
            return int(os.environ['SLURM_LOCALID'])
-        else:
            return int(os.environ.get('LOCAL_RANK', 0))
 
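
Note: the renamed module-level constants in the first hunk seed a per-process NumPy RNG with the process ID, so each launched process draws its own reproducible list of 100 candidate ports. A minimal standalone sketch of that idea follows; only the names PID, RNG1, RANDOM_PORTS and the 10000-19999 range come from the hunk above, and the final print is purely illustrative:

    import os

    import numpy as np

    # Seed the RNG with this process's PID: the drawn port list is stable
    # within one process but differs between independently launched processes.
    PID = os.getpid()
    RNG1 = np.random.RandomState(PID)

    # Draw 100 candidate ports in the half-open range [10000, 19999).
    RANDOM_PORTS = RNG1.randint(10000, 19999, 100)

    print(RANDOM_PORTS[:5])  # inspect the first few candidates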