Commit ebed265: Merge branch 'main' into gpt_pipeline_eval

ericharper authored Feb 16, 2022
2 parents a1b287f + a8f29af

Showing 2 changed files with 2 additions and 14 deletions.
nemo/utils/exp_manager.py (12 changes: 0 additions & 12 deletions)

@@ -756,18 +756,6 @@ def nemo_topk_check_previous_run(self):
         self.best_model_path = best_k_models[0]
         self.best_model_score = self.best_k_models[self.best_model_path]
 
-    # TODO remove _save_last_checkpoint after fix for issue https://github.com/PyTorchLightning/pytorch-lightning/issues/11451
-    def _save_last_checkpoint(self, trainer, monitor_candidates) -> None:
-        if not self.save_last:
-            return
-
-        filepath = self.format_checkpoint_name(monitor_candidates, self.CHECKPOINT_NAME_LAST)
-        if self.last_model_path and self.last_model_path != filepath:
-            trainer.training_type_plugin.remove_checkpoint(self.last_model_path)
-
-        self.last_model_path = filepath
-        trainer.save_checkpoint(filepath, self.save_weights_only)
-
     def on_save_checkpoint(self, trainer, pl_module, checkpoint):
         # output = None
         output = super().on_save_checkpoint(trainer, pl_module, checkpoint)
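Context for this removal: the deleted _save_last_checkpoint override was a near-verbatim copy of PyTorch Lightning's own ModelCheckpoint internals, carried in NeMo only as a workaround for the issue referenced in the TODO. The requirements bump below to pytorch-lightning>=1.5.10 suggests the upstream fix shipped in that release, making the override redundant. A minimal sketch of the upstream-only usage that remains sufficient after this commit, assuming a generic training setup (names like "checkpoints/" and "val_loss" are illustrative, not from this repo):

import pytorch_lightning as pl
from pytorch_lightning.callbacks import ModelCheckpoint

# With pytorch-lightning>=1.5.10, the stock callback keeps "last.ckpt" up to date
# itself, so subclasses no longer need to override _save_last_checkpoint.
checkpoint_callback = ModelCheckpoint(
    dirpath="checkpoints/",   # assumption: any checkpoint directory
    monitor="val_loss",       # assumption: any logged metric
    save_top_k=3,
    save_last=True,           # writes/updates last.ckpt alongside the top-k files
)

trainer = pl.Trainer(max_epochs=10, callbacks=[checkpoint_callback])
# trainer.fit(model)  # model: any LightningModule
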
requirements/requirements_lightning.txt (4 changes: 2 additions & 2 deletions)

@@ -1,7 +1,7 @@
-pytorch-lightning>=1.5.9
+pytorch-lightning>=1.5.10
 torchmetrics>=0.4.1rc0
 transformers>=4.0.1
 webdataset>=0.1.48,<=0.1.62
 omegaconf>=2.1.0
 hydra-core>=1.1.0
-pyyaml<6 # Pinned until omegaconf works with pyyaml>=6
+pyyaml<6 # Pinned until omegaconf works with pyyaml>=6
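Note on this hunk: only the pytorch-lightning pin actually changes (1.5.9 to 1.5.10, matching the workaround removed from exp_manager.py above). The pyyaml line appears as both a deletion and an addition with identical text, which most plausibly reflects a trailing-newline fix at the end of the file; that would account for the "2 additions & 2 deletions" count.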
