Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions apex/contrib/test/optimizers/test_distributed_fused_lamb.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,22 @@
import os
import inspect

import torch
from torch.cuda.amp import GradScaler
from torch.testing._internal import common_utils
from apex.parallel.distributed import flat_dist_call
from torch.distributed.distributed_c10d import _coalescing_manager

from apex.contrib.optimizers.distributed_fused_lamb import DistributedFusedLAMB
from apex.transformer.testing.distributed_test_base import NcclDistributedTestBase


def flat_dist_call(param_list: list[torch.Tensor], op, args):
    """Apply the collective *op* to every tensor in *param_list*.

    All calls are issued inside a single coalescing window so the backend
    can batch them into one (or few) communication launches; the final
    ``wait()`` blocks until the coalesced work has completed.

    Args:
        param_list: tensors to run the collective on.
        op: a ``torch.distributed`` collective callable taking a tensor first.
        args: extra positional arguments forwarded to ``op`` after the tensor.
    """
    with _coalescing_manager(async_ops=True) as coalescer:
        for tensor in param_list:
            op(tensor, *args)
    # The manager records the async work; block here until it finishes.
    coalescer.wait()


def get_init_weights_func():
@torch.no_grad()
def init_weights(m):
Expand Down