From 434996ad5b7da93981d353d96365c321ea4aaece Mon Sep 17 00:00:00 2001
From: Stas Bekman
Date: Thu, 12 Aug 2021 22:23:41 -0700
Subject: [PATCH] fix arg help

---
 megatron/arguments.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/megatron/arguments.py b/megatron/arguments.py
index 761641b05..326c948ee 100644
--- a/megatron/arguments.py
+++ b/megatron/arguments.py
@@ -396,14 +396,14 @@ def _add_training_args(parser):
     group.add_argument('--rampup-batch-size', nargs='*', default=None,
                        help='Batch size ramp up with the following values:'
                        '  --rampup-batch-size <start batch size> '
-                       '                      <batch size incerement> '
+                       '                      <batch size increment> '
                        '                      <ramp-up samples> '
-                       'For example:'
-                       '   --rampup-batch-size 16 8 300000 \ '
-                       '   --global-batch-size 1024'
+                       'For example: '
+                       '   --rampup-batch-size 16 8 300000 '
+                       '   --global-batch-size 1024 '
                        'will start with global batch size 16 and over '
-                       ' (1024 - 16) / 8 = 126 intervals will increase'
-                       'the batch size linearly to 1024. In each interval'
+                       ' (1024 - 16) / 8 = 126 intervals will increase '
+                       'the batch size linearly to 1024. In each interval '
                        'we will use approximately 300000 / 126 = 2380 samples.')
     group.add_argument('--checkpoint-activations', action='store_true',
                        help='Checkpoint activation to allow for training '
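
For reference, a minimal sketch of the ramp-up arithmetic the help text describes, using the example values 16 8 300000 with --global-batch-size 1024. The function name and signature below are hypothetical, not Megatron's actual schedule code.

def rampup_schedule(start, increment, rampup_samples, global_batch_size):
    """Yield (batch_size, samples_in_interval) pairs for a linear batch-size ramp-up."""
    assert (global_batch_size - start) % increment == 0
    # (1024 - 16) / 8 = 126 intervals
    num_intervals = (global_batch_size - start) // increment
    # 300000 / 126 ~= 2380 samples consumed at each intermediate batch size
    samples_per_interval = rampup_samples // num_intervals
    for i in range(num_intervals):
        yield start + i * increment, samples_per_interval

if __name__ == "__main__":
    schedule = list(rampup_schedule(16, 8, 300000, 1024))
    # 126 intervals, from (16, 2380) up to (1016, 2380); training then continues at 1024
    print(len(schedule), schedule[0], schedule[-1])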