Skip to content

Commit

Permalink
Change EMA adjustment.
Browse files Browse the repository at this point in the history
  • Loading branch information
datumbox committed Sep 29, 2021
1 parent 7ecc6d8 commit 0563f9e
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions references/classification/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,10 +260,10 @@ def main(args):
# Decay adjustment that aims to keep the decay independent from other hyper-parameters originally proposed at:
# https://github.com/facebookresearch/pycls/blob/f8cd9627/pycls/core/net.py#L123
#
# total_ema_updates = (Dataset_size / n_GPUs) * epochs / (batch_size * EMA_steps)
# We consider constant = (Dataset_size / n_GPUs) for a given dataset/setup and omit it. Thus:
# adjust = 1 / total_ema_updates ~= batch_size * EMA_steps / epochs
adjust = args.batch_size * args.model_ema_steps / args.epochs
# total_ema_updates = (Dataset_size / n_GPUs) * epochs / (batch_size_per_gpu * EMA_steps)
# We consider constant = Dataset_size for a given dataset/setup and omit it. Thus:
# adjust = 1 / total_ema_updates ~= n_GPUs * batch_size_per_gpu * EMA_steps / epochs
adjust = args.world_size * args.batch_size * args.model_ema_steps / args.epochs
alpha = 1.0 - args.model_ema_decay
alpha = min(1.0, alpha * adjust)
model_ema = utils.ExponentialMovingAverage(model_without_ddp, device=device, decay=1.0 - alpha)
Expand Down Expand Up @@ -397,8 +397,8 @@ def get_args_parser(add_help=True):
'--model-ema-steps', type=int, default=32,
help='the number of iterations that controls how often to update the EMA model (default: 32)')
parser.add_argument(
'--model-ema-decay', type=float, default=0.99999,
help='decay factor for Exponential Moving Average of model parameters (default: 0.99999)')
'--model-ema-decay', type=float, default=0.99998,
help='decay factor for Exponential Moving Average of model parameters (default: 0.99998)')

return parser

Expand Down

0 comments on commit 0563f9e

Please sign in to comment.