Change default interval to step and add lr in prog_bar in nemo.lightning (NVIDIA#10033)

Signed-off-by: Hemil Desai <[email protected]>
Signed-off-by: Hainan Xu <[email protected]>
hemildesai authored and Hainan Xu committed Nov 5, 2024
1 parent 6257a4f commit bc31182
Showing 2 changed files with 19 additions and 19 deletions.
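
For context, the "interval" value these scheduler modules hand to PyTorch Lightning controls how often the scheduler is stepped. Below is a minimal sketch of a standard Lightning configure_optimizers (not part of this diff; the model class, optimizer, and hyperparameters are placeholders) illustrating what the field means:

import torch
import lightning.pytorch as pl

class DemoModule(pl.LightningModule):
    # Model layers elided; this sketch only illustrates the scheduler config dict.
    def configure_optimizers(self):
        optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=1000)
        return {
            "optimizer": optimizer,
            "lr_scheduler": {
                "scheduler": scheduler,
                # "step": Lightning calls scheduler.step() after every optimizer step.
                # "epoch" (the previous NeMo default) steps it once per epoch instead.
                "interval": "step",
                "frequency": 1,
            },
        }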
2 changes: 1 addition & 1 deletion  —  nemo/lightning/pytorch/optim/base.py

@@ -152,7 +152,7 @@ def optimizers(self, model) -> List[Optimizer]:
     def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx) -> None:
         if self._optimizers is not None:
             lr = self._optimizers[0].param_groups[0]['lr']
-            pl_module.log('lr', lr, rank_zero_only=True, batch_size=1)
+            pl_module.log('lr', lr, rank_zero_only=True, batch_size=1, prog_bar=True)
 
     def __call__(self, model: L.LightningModule, megatron_parallel=None) -> OptimizerLRScheduler:
         """Calls the setup and optimizers methods.
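
The prog_bar=True flag makes the logged learning rate appear in Lightning's progress bar in addition to the logger backend. A hypothetical standalone callback (a sketch of the same pattern, not NeMo's actual class) would look like this:

import lightning.pytorch as pl

class LogLRToProgressBar(pl.Callback):
    """Hypothetical callback mirroring the change above: log the first param
    group's learning rate so it is rendered in the progress bar each step."""

    def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx):
        lr = trainer.optimizers[0].param_groups[0]["lr"]
        # prog_bar=True is the new part: without it the value only goes to the logger.
        pl_module.log("lr", lr, rank_zero_only=True, batch_size=1, prog_bar=True)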
36 changes: 18 additions & 18 deletions  —  nemo/lightning/pytorch/optim/lr_scheduler.py

@@ -25,7 +25,7 @@ def __init__(
         warmup_ratio: Optional[float] = None,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -68,7 +68,7 @@ def __init__(
         hold_ratio: Optional[float] = None,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -111,7 +111,7 @@ def __init__(
         self,
         max_steps: int = 10,
         min_lr: float = 1e-5,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -142,7 +142,7 @@ def __init__(
         self,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -176,7 +176,7 @@ def __init__(
         warmup_ratio: Optional[float] = None,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -218,7 +218,7 @@ def __init__(
         max_steps: int = 10,
         decay_rate: float = 0.5,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -252,7 +252,7 @@ def __init__(
         self,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -283,7 +283,7 @@ def __init__(
         self,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -314,7 +314,7 @@ def __init__(
         self,
         max_steps: int = 10,
         min_lr: float = 0.0,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -347,7 +347,7 @@ def __init__(
         min_lr: float = 0.0,
         power: float = 1.0,
         cycle: bool = False,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -384,7 +384,7 @@ def __init__(
         min_lr: float = 0.0,
         power: float = 1.0,
         cycle: bool = False,
-        interval: str = "epoch",
+        interval: str = "step",
         frequency: int = 1,
         monitor: str = "val_loss",
     ):
@@ -415,13 +415,13 @@ def scheduler(self, model, optimizer):
 class CosineAnnealingScheduler(LRSchedulerModule):
     def __init__(
         self,
-        max_steps=10,
-        warmup_steps=750,
-        constant_steps=80000,
-        min_lr=int(6e-5),
-        interval="epoch",
-        frequency=1,
-        monitor="val_loss",
+        max_steps: int = 10,
+        warmup_steps: int = 750,
+        constant_steps: int = 80000,
+        min_lr: float = 6e-5,
+        interval: str = "step",
+        frequency: int = 1,
+        monitor: str = "val_loss",
     ):
         super().__init__()
         self.max_steps = max_steps
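
With the new defaults, constructing one of these scheduler modules without specifying interval now requests per-step scheduling. A short usage sketch (argument values are illustrative only, not prescribed by this commit):

from nemo.lightning.pytorch.optim.lr_scheduler import CosineAnnealingScheduler

# interval now defaults to "step", so Lightning advances the schedule after every
# optimizer step; the previous default of "epoch" advanced it once per epoch.
lr_scheduler = CosineAnnealingScheduler(
    max_steps=100000,
    warmup_steps=750,
    constant_steps=0,
    min_lr=6e-5,
)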
