We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent a7de60e commit 64aea56Copy full SHA for 64aea56
apex/__init__.py
@@ -24,7 +24,6 @@
24
# load time) the error message is timely and visible.
25
from . import optimizers
26
from . import normalization
27
-from . import transformer
28
29
30
# Logging utilities for apex.transformer module
apex/transformer/utils.py
@@ -8,6 +8,7 @@
8
# The following 4 lines are for backward compatibility with
9
# older PyTorch.
10
if "all_gather_into_tensor" not in dir(torch.distributed):
11
+ assert torch.distributed.is_available(), "PyTorch Distributed is Not available or Disabled."
12
torch.distributed.all_gather_into_tensor = torch.distributed._all_gather_base
13
14
def ensure_divisibility(numerator, denominator):
0 commit comments