Add FusedLAMB optimizer
Summary: Pull Request resolved: fairinternal/fairseq-py#971

Differential Revision: D19265752

Pulled By: myleott

fbshipit-source-id: 062748d3b44ef3627d35d65dccef8659709c2b5e
myleott authored and facebook-github-bot committed Jan 3, 2020
1 parent 1e324a5 commit f75411a
Showing 1 changed file with 46 additions and 0 deletions: fairseq/optim/fused_lamb.py
@@ -0,0 +1,46 @@
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from fairseq.optim import FairseqOptimizer, register_optimizer


@register_optimizer('lamb')
class FairseqLAMB(FairseqOptimizer):
    """LAMB optimizer."""

    def __init__(self, args, params):
        super().__init__(args)
        try:
            from apex.optimizers import FusedLAMB
            self._optimizer = FusedLAMB(params, **self.optimizer_config)
        except ImportError:
            raise ImportError('Please install apex to use LAMB optimizer')

    @staticmethod
    def add_args(parser):
        """Add optimizer-specific arguments to the parser."""
        # fmt: off
        parser.add_argument('--lamb-betas', default='(0.9, 0.999)', metavar='B',
                            help='betas for LAMB optimizer')
        parser.add_argument('--lamb-eps', type=float, default=1e-8, metavar='D',
                            help='epsilon for LAMB optimizer')
        parser.add_argument('--weight-decay', '--wd', default=0.0, type=float, metavar='WD',
                            help='weight decay')
        # fmt: on

    @property
    def optimizer_config(self):
        """
        Return a kwarg dictionary that will be used to override optimizer
        args stored in checkpoints. This allows us to load a checkpoint and
        resume training using a different set of optimizer args, e.g., with a
        different learning rate.
        """
        return {
            'lr': self.args.lr[0],
            'betas': eval(self.args.lamb_betas),
            'eps': self.args.lamb_eps,
            'weight_decay': self.args.weight_decay,
        }
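
For reference, below is a minimal sketch (not part of the commit, and requiring neither fairseq nor apex) of how the command-line flags registered by add_args() map onto the kwargs that optimizer_config passes to FusedLAMB. The argparse setup only mimics fairseq's real argument handling; in particular, --lr is assumed to arrive as a list, which is why lr[0] is used.

# Hypothetical, standalone sketch of the flag -> kwarg mapping above.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, nargs='+', default=[0.0025])  # stand-in for fairseq's --lr list
parser.add_argument('--lamb-betas', default='(0.9, 0.999)', metavar='B')
parser.add_argument('--lamb-eps', type=float, default=1e-8, metavar='D')
parser.add_argument('--weight-decay', '--wd', default=0.0, type=float, metavar='WD')

args = parser.parse_args(['--lr', '0.0025', '--lamb-betas', '(0.9, 0.98)'])

# Same mapping as FairseqLAMB.optimizer_config: the betas string is evaluated
# into a tuple, and the first learning rate in the list is used.
config = {
    'lr': args.lr[0],
    'betas': eval(args.lamb_betas),
    'eps': args.lamb_eps,
    'weight_decay': args.weight_decay,
}
print(config)  # {'lr': 0.0025, 'betas': (0.9, 0.98), 'eps': 1e-08, 'weight_decay': 0.0}

In a fairseq training run, this optimizer would typically be selected with --optimizer lamb, the name used in register_optimizer above.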
