Skip to content

Commit

Permalink
Add api MultiplicativeDecay (#38250)
Browse files Browse the repository at this point in the history
* delete the modification of dygraph

* CI

* check CI

* modify the return value of get_lr
  • Loading branch information
guguguzi authored Jan 7, 2022
1 parent c8fbd3c commit 4a3a2d6
Show file tree
Hide file tree
Showing 2 changed files with 144 additions and 66 deletions.
11 changes: 11 additions & 0 deletions python/paddle/fluid/tests/unittests/test_lr_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,13 @@ def lambda_lr(epoch_num, learning_rate, lr_lambda, verbose=False):
return learning_rate * lr_lambda(epoch_num)


def multiplicative_lr(epoch_num, learning_rate, lr_lambda, verbose=False):
    """Reference implementation for MultiplicativeDecay.

    Starting from the base ``learning_rate``, multiplies by
    ``lr_lambda(epoch)`` once for each epoch from 1 through ``epoch_num``,
    returning the resulting learning rate. ``verbose`` is accepted for
    signature parity with the scheduler API but is unused here.
    """
    lr = learning_rate
    # Epochs are 1-based: the factor for epoch t is lr_lambda(t).
    for epoch in range(1, epoch_num + 1):
        lr *= lr_lambda(epoch)
    return lr


def piecewise_lr(epoch_num, boundaries, values, verbose=False):
assert len(boundaries) + 1 == len(values)
for i in range(len(boundaries)):
Expand Down Expand Up @@ -519,6 +526,10 @@ def test_scheduler(self):
"learning_rate": 0.5,
"lr_lambda": lambda x: 0.95**x,
"verbose": True
}), (multiplicative_lr, paddle.optimizer.lr.MultiplicativeDecay, {
"learning_rate": 0.5,
"lr_lambda": lambda x: 0.95,
"verbose": True
}), (cosine_annealing_lr, paddle.optimizer.lr.CosineAnnealingDecay, {
"learning_rate": 0.5,
"T_max": 10,
Expand Down
Loading

0 comments on commit 4a3a2d6

Please sign in to comment.