From 9e4535881c3cf9df2d2bd5ab49e74bcb2bc903b3 Mon Sep 17 00:00:00 2001
From: Chris Chow
Date: Thu, 16 Sep 2021 13:46:23 -0700
Subject: [PATCH 1/3] guard against None in pytorch get_xla_supported_devices

---
 pytorch_lightning/utilities/xla_device.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/utilities/xla_device.py b/pytorch_lightning/utilities/xla_device.py
index b922a749e7742..c7f9cbf05056f 100644
--- a/pytorch_lightning/utilities/xla_device.py
+++ b/pytorch_lightning/utilities/xla_device.py
@@ -72,7 +72,8 @@ def _is_device_tpu() -> bool:
         # `xm.get_xla_supported_devices("TPU")` won't be possible.
         if xm.xrt_world_size() > 1:
             return True
-        return len(xm.get_xla_supported_devices("TPU")) > 0
+        tpus = xm.get_xla_supported_devices("TPU")
+        return False if tpus is None else len(tpus) > 0
 
     @staticmethod
     def xla_available() -> bool:

From 2edaad9c59d60508fec2a925e7555cc574893c34 Mon Sep 17 00:00:00 2001
From: Chris Chow
Date: Thu, 16 Sep 2021 14:03:52 -0700
Subject: [PATCH 2/3] changelog

---
 CHANGELOG.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42a720e5c0d1a..4da22e6224285 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -340,6 +340,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed `BasePredictionWriter` not returning the batch_indices in a non-distributed setting ([#9432](https://github.com/PyTorchLightning/pytorch-lightning/pull/9432))
 
 
+- Fixed an error when running in XLA environments with no TPU attached ([#9572](https://github.com/PyTorchLightning/pytorch-lightning/pull/9572))
+
+
 ## [1.4.7] - 2021-09-14
 
 - Fixed logging of nan parameters ([#9364](https://github.com/PyTorchLightning/pytorch-lightning/pull/9364))

From a3e9139037b93c8e24cf314dc1f323adfff52ae6 Mon Sep 17 00:00:00 2001
From: tchaton
Date: Tue, 12 Oct 2021 12:15:04 +0100
Subject: [PATCH 3/3] update on comments

---
 pytorch_lightning/utilities/xla_device.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/pytorch_lightning/utilities/xla_device.py b/pytorch_lightning/utilities/xla_device.py
index c7f9cbf05056f..2feef71c563f2 100644
--- a/pytorch_lightning/utilities/xla_device.py
+++ b/pytorch_lightning/utilities/xla_device.py
@@ -70,10 +70,7 @@ def _is_device_tpu() -> bool:
         # we would have to use `torch_xla.distributed.xla_dist` for
         # multiple VMs and TPU_CONFIG won't be available, running
         # `xm.get_xla_supported_devices("TPU")` won't be possible.
-        if xm.xrt_world_size() > 1:
-            return True
-        tpus = xm.get_xla_supported_devices("TPU")
-        return False if tpus is None else len(tpus) > 0
+        return (xm.xrt_world_size() > 1) or bool(xm.get_xla_supported_devices("TPU"))
 
     @staticmethod
     def xla_available() -> bool:
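
Note on the final form of the check: the single expression in PATCH 3/3 relies on Python truthiness, since `bool(None)` and `bool([])` are both `False` while a non-empty device list is truthy, so it covers the same cases as the explicit `is None` guard from PATCH 1/3. A minimal standalone sketch of that logic follows; `is_device_tpu`, `world_size`, and `devices` are hypothetical stand-ins for `XLADeviceUtils._is_device_tpu`, `xm.xrt_world_size()`, and `xm.get_xla_supported_devices("TPU")` (which may return None when no TPU is attached), not the actual library code.

    from typing import List, Optional


    def is_device_tpu(world_size: int, devices: Optional[List[str]]) -> bool:
        # bool(None) and bool([]) are both False, so a single truthiness check
        # replaces the explicit "if devices is None" guard from the earlier patch.
        return (world_size > 1) or bool(devices)


    assert is_device_tpu(1, None) is False       # XLA installed, no TPU attached
    assert is_device_tpu(1, []) is False         # empty device list
    assert is_device_tpu(1, ["xla:0"]) is True   # one TPU device reported
    assert is_device_tpu(8, None) is True        # multi-VM case, world size > 1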