From de70639f76451481335a53b172a3b538975bd4b8 Mon Sep 17 00:00:00 2001
From: Dmytro Pykhtar
Date: Mon, 19 Jun 2023 14:08:15 -0700
Subject: [PATCH] removed unnecessary print

Signed-off-by: Dmytro Pykhtar
---
 .../nlp/models/language_modeling/megatron_gpt_model.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index 1ce153bcf0fb..c4bfdbbad143 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -518,7 +518,6 @@ def training_step(self, dataloader_iter, batch_idx):
         if self.rampup_batch_size:
             num_microbatch_calculator = apex.transformer.pipeline_parallel.utils._GLOBAL_NUM_MICROBATCHES_CALCULATOR
             current_global_batch_size = num_microbatch_calculator.current_global_batch_size
-            logging.info(current_global_batch_size)
             # do validation and save the checkpoint when gbs is changed
             if self.prev_global_batch_size != current_global_batch_size and self.prev_global_batch_size:
                 self.trainer.should_stop = True
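
Note: the logic the patch leaves in place follows a simple pattern: read the current
global batch size from the microbatch calculator each training step, compare it
against the value remembered from the previous step, and request a stop (so the
trainer can validate and checkpoint) only when the value changes and a previous
value exists. Below is a minimal standalone sketch of that pattern, not the NeMo or
apex implementation; RampupCalculator, Trainer, and training_step here are
hypothetical stand-ins for apex's _GLOBAL_NUM_MICROBATCHES_CALCULATOR, the
Lightning trainer, and the model's training_step.

    # Sketch only: stand-ins for the real apex calculator and Lightning trainer.
    class RampupCalculator:
        """Grows the global batch size by a fixed increment every N steps."""

        def __init__(self, start: int, increment: int, every_n_steps: int):
            self.start = start
            self.increment = increment
            self.every_n_steps = every_n_steps
            self.step = 0

        @property
        def current_global_batch_size(self) -> int:
            return self.start + self.increment * (self.step // self.every_n_steps)


    class Trainer:
        should_stop = False


    def training_step(calc: RampupCalculator, trainer: Trainer, prev_gbs: int) -> int:
        """Returns the new 'previous' global batch size for the next step."""
        current_gbs = calc.current_global_batch_size
        # Stop (to validate and save a checkpoint) only on a *change*, and only
        # if a previous value exists, i.e. never on the very first step.
        if prev_gbs and prev_gbs != current_gbs:
            trainer.should_stop = True
        return current_gbs


    calc = RampupCalculator(start=32, increment=32, every_n_steps=2)
    trainer = Trainer()
    prev = 0
    for step in range(5):
        calc.step = step
        prev = training_step(calc, trainer, prev)
        print(step, prev, trainer.should_stop)  # should_stop flips True at step 2
        trainer.should_stop = False

With these toy numbers the batch size jumps from 32 to 64 at step 2, which is the
first point where should_stop is set; step 0 does not trigger because prev_gbs is
still 0. The removed logging.info call sat between reading current_gbs and this
comparison, printing the batch size on every step, which is why the patch drops it.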