From 0211f7f9b2c7d7544b26f19d1c91354730ef51a0 Mon Sep 17 00:00:00 2001
From: Sean Naren
Date: Thu, 17 Dec 2020 01:08:12 +0000
Subject: [PATCH] Disable pl optimizer temporarily to fix AMP issues (#5163)

* Disable pl optimizer temporarily to fix AMP issues

* Add todo and enable pl optimizer in the test
---
 pytorch_lightning/trainer/trainer.py | 2 +-
 tests/callbacks/test_callbacks.py    | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 014e0a62679dd..06cdc43674d1b 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -133,7 +133,7 @@ def __init__(
         distributed_backend: Optional[str] = None,
         automatic_optimization: Optional[bool] = None,
         move_metrics_to_cpu: bool = False,
-        enable_pl_optimizer: bool = True,
+        enable_pl_optimizer: bool = False,
         multiple_trainloader_mode: str = 'max_size_cycle',
     ):
         r"""
diff --git a/tests/callbacks/test_callbacks.py b/tests/callbacks/test_callbacks.py
index c00c712bb3b13..070bb4e9f6989 100644
--- a/tests/callbacks/test_callbacks.py
+++ b/tests/callbacks/test_callbacks.py
@@ -33,6 +33,8 @@ def test_trainer_callback_system(torch_save):
         limit_train_batches=3,
         limit_test_batches=2,
         progress_bar_refresh_rate=0,
+        # todo: enabled since internally we wrap the model for optimizer step, this should be fixed
+        enable_pl_optimizer=True
     )

     # no call yet
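
With this patch, LightningOptimizer wrapping becomes opt-in rather than the default. A minimal sketch of how a caller would keep the previous behaviour after upgrading, mirroring the updated test (the other flags shown are only illustrative, not required):

    from pytorch_lightning import Trainer

    # The default is now enable_pl_optimizer=False; pass the flag explicitly
    # to keep optimizers wrapped in LightningOptimizer, as the updated test does.
    trainer = Trainer(
        limit_train_batches=3,
        limit_test_batches=2,
        progress_bar_refresh_rate=0,
        enable_pl_optimizer=True,
    )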