modify adam to adamw in AdamW (#36028)
* adam to adamw in AdamW

* add lr_ratio in adamw

* refine logic bug in cpu adamw

* delete fix bug for cpu adamw

* delete fix bug for cpu adamw
zhangbo9674 authored Sep 26, 2021
1 parent b23b17c commit 49c8253
Showing 2 changed files with 10 additions and 3 deletions.
9 changes: 8 additions & 1 deletion paddle/fluid/pybind/op_function_generator.cc
@@ -71,6 +71,9 @@ std::map<std::string, std::set<std::string>> op_ins_map = {
     {"adam",
      {"Param", "Grad", "LearningRate", "Moment1", "Moment2", "Beta1Pow",
       "Beta2Pow", "MasterParam"}},
+    {"adamw",
+     {"Param", "Grad", "LearningRate", "Moment1", "Moment2", "Beta1Pow",
+      "Beta2Pow", "MasterParam"}},
 };

 // NOTE(zhiqiu): Like op_ins_map.
@@ -110,6 +113,9 @@ std::map<std::string, std::set<std::string>> op_outs_map = {
     {"adam",
      {"ParamOut", "Moment1Out", "Moment2Out", "Beta1PowOut", "Beta2PowOut",
       "MasterParamOut"}},
+    {"adamw",
+     {"ParamOut", "Moment1Out", "Moment2Out", "Beta1PowOut", "Beta2PowOut",
+      "MasterParamOut"}},
 };

 // NOTE(zhiqiu): Commonly, the outputs in auto-generated OP function are
@@ -129,7 +135,8 @@ std::map<std::string, std::set<std::string>> op_passing_outs_map = {
      {"ParamOut", "Moment1Out", "Moment2Out", "Beta1PowOut", "Beta2PowOut",
       "MasterParamOut"}},
     {"adamw",
-     {"ParamOut", "Moment1Out", "Moment2Out", "Beta1PowOut", "Beta2PowOut"}},
+     {"ParamOut", "Moment1Out", "Moment2Out", "Beta1PowOut", "Beta2PowOut",
+      "MasterParamOut"}},
     {"average_accumulates",
      {"out_sum_1", "out_sum_2", "out_sum_3", "out_num_accumulates",
       "out_old_num_accumulates", "out_num_updates"}},
4 changes: 2 additions & 2 deletions python/paddle/optimizer/adamw.py
@@ -298,14 +298,14 @@ def _append_optimize_op(self, block, param_and_grad):
             _beta2 = self._beta2 if not isinstance(
                 self._beta2, Variable) else self._beta2.numpy().item(0)

-            _, _, _, _, _, _ = _C_ops.adam(
+            _, _, _, _, _, _ = _C_ops.adamw(
                 param_and_grad[0], param_and_grad[1], lr, moment1, moment2,
                 beta1_pow_acc, beta2_pow_acc, master_weight, param_and_grad[0],
                 moment1, moment2, beta1_pow_acc, beta2_pow_acc, master_weight,
                 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode,
                 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1,
                 'beta2', _beta2, 'coeff', self._coeff, 'multi_precision',
-                find_master)
+                find_master, "lr_ratio", lr_ratio_)

             return None

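For context, here is a minimal usage sketch (not part of this commit) of how the "lr_ratio" attribute wired through _C_ops.adamw above is typically driven from the Python API. It assumes paddle.optimizer.AdamW exposes an optional lr_ratio callable that maps a parameter to a per-parameter learning-rate multiplier; the callable name layerwise_lr is hypothetical, and exact availability depends on the Paddle version and device.

import paddle

linear = paddle.nn.Linear(10, 10)

def layerwise_lr(param):
    # Hypothetical rule: halve the learning rate for every parameter.
    # A real layer-wise decay scheme would inspect param.name instead.
    return 0.5

opt = paddle.optimizer.AdamW(
    learning_rate=0.001,
    parameters=linear.parameters(),
    weight_decay=0.01,
    lr_ratio=layerwise_lr)

x = paddle.rand([4, 10])
loss = linear(x).mean()
loss.backward()
opt.step()        # in dygraph mode this dispatches to the _C_ops.adamw call shown above
opt.clear_grad()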
