[python-package] Allow to pass early stopping min delta in params (#6274)

* [python-package] Allow to pass early stopping min delta in params

* Fix test

* Add separate test

* Fix

* Add to cpp config

* Adjust test

* Adjust test

* Debug

* Revert

* Apply suggestions from code review

---------

Co-authored-by: James Lamb <[email protected]>
borchero and jameslamb authored May 1, 2024
1 parent da9bb5f commit 9f5fbb6
Showing 8 changed files with 47 additions and 1 deletion.
1 change: 1 addition & 0 deletions R-package/tests/testthat/test_lgb.Booster.R
@@ -850,6 +850,7 @@ test_that("all parameters are stored correctly with save_model_to_string()", {
, "[extra_trees: 0]"
, "[extra_seed: 6642]"
, "[early_stopping_round: 0]"
, "[early_stopping_min_delta: 0]"
, "[first_metric_only: 0]"
, "[max_delta_step: 0]"
, "[lambda_l1: 0]"
4 changes: 4 additions & 0 deletions docs/Parameters.rst
@@ -410,6 +410,10 @@ Learning Control Parameters

- can be used to speed up training

- ``early_stopping_min_delta`` :raw-html:`<a id="early_stopping_min_delta" title="Permalink to this parameter" href="#early_stopping_min_delta">&#x1F517;&#xFE0E;</a>`, default = ``0.0``, type = double, constraints: ``early_stopping_min_delta >= 0.0``

- when early stopping is used (i.e. ``early_stopping_round > 0``), require the early stopping metric to improve by at least this delta to be considered an improvement

- ``first_metric_only`` :raw-html:`<a id="first_metric_only" title="Permalink to this parameter" href="#first_metric_only">&#x1F517;&#xFE0E;</a>`, default = ``false``, type = bool

- LightGBM allows you to provide multiple evaluation metrics. Set this to ``true``, if you want to use only the first metric for early stopping
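For readers who want to try the documented parameter end-to-end, here is a minimal usage sketch. The synthetic data, parameter values, and variable names are illustrative and not part of this commit; only `early_stopping_round` and `early_stopping_min_delta` correspond to the parameters documented above.

```python
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import train_test_split

# Illustrative synthetic data; any binary-classification dataset works the same way.
rng = np.random.default_rng(42)
X = rng.random((1000, 10))
y = (X[:, 0] + X[:, 1] > 1.0).astype(int)
X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.2, random_state=42)

train_set = lgb.Dataset(X_train, y_train)
valid_set = lgb.Dataset(X_valid, y_valid, reference=train_set)

params = {
    "objective": "binary",
    "metric": "binary_logloss",
    "verbose": -1,
    "early_stopping_round": 10,        # enable early stopping
    "early_stopping_min_delta": 1e-3,  # only count improvements larger than this
}

# With this change, min_delta can be supplied directly through params
# instead of only via the lgb.early_stopping() callback.
booster = lgb.train(params, train_set, num_boost_round=200, valid_sets=[valid_set])
print(booster.best_iteration)
```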
4 changes: 4 additions & 0 deletions include/LightGBM/config.h
@@ -394,6 +394,10 @@ struct Config {
// desc = can be used to speed up training
int early_stopping_round = 0;

// check = >=0.0
// desc = when early stopping is used (i.e. ``early_stopping_round > 0``), require the early stopping metric to improve by at least this delta to be considered an improvement
double early_stopping_min_delta = 0.0;

// desc = LightGBM allows you to provide multiple evaluation metrics. Set this to ``true``, if you want to use only the first metric for early stopping
bool first_metric_only = false;

2 changes: 2 additions & 0 deletions python-package/lightgbm/engine.py
@@ -241,6 +241,7 @@ def train(
callback.early_stopping(
stopping_rounds=params["early_stopping_round"], # type: ignore[arg-type]
first_metric_only=first_metric_only,
min_delta=params.get("early_stopping_min_delta", 0.0),
verbose=_choose_param_value(
main_param_name="verbosity",
params=params,
@@ -765,6 +766,7 @@ def cv(
callback.early_stopping(
stopping_rounds=params["early_stopping_round"], # type: ignore[arg-type]
first_metric_only=first_metric_only,
min_delta=params.get("early_stopping_min_delta", 0.0),
verbose=_choose_param_value(
main_param_name="verbosity",
params=params,
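As the two hunks above show, `train()` and `cv()` simply read `early_stopping_min_delta` out of `params` and forward it to the built-in early-stopping callback. Passing the value in `params` should therefore behave the same as constructing the callback explicitly, which was already possible before this commit. A sketch of that older route for comparison; it reuses the `train_set`/`valid_set` objects from the previous sketch:

```python
import lightgbm as lgb

# Explicit-callback form: min_delta goes to lgb.early_stopping() rather than params.
callbacks = [lgb.early_stopping(stopping_rounds=10, first_metric_only=False, min_delta=1e-3)]

booster = lgb.train(
    {"objective": "binary", "metric": "binary_logloss", "verbose": -1},
    train_set,
    num_boost_round=200,
    valid_sets=[valid_set],
    callbacks=callbacks,
)
```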
4 changes: 3 additions & 1 deletion src/boosting/gbdt.cpp
@@ -30,6 +30,7 @@ GBDT::GBDT()
config_(nullptr),
objective_function_(nullptr),
early_stopping_round_(0),
early_stopping_min_delta_(0.0),
es_first_metric_only_(false),
max_feature_idx_(0),
num_tree_per_iteration_(1),
@@ -65,6 +66,7 @@ void GBDT::Init(const Config* config, const Dataset* train_data, const Objective
num_class_ = config->num_class;
config_ = std::unique_ptr<Config>(new Config(*config));
early_stopping_round_ = config_->early_stopping_round;
early_stopping_min_delta_ = config->early_stopping_min_delta;
es_first_metric_only_ = config_->first_metric_only;
shrinkage_rate_ = config_->learning_rate;

@@ -576,7 +578,7 @@ std::string GBDT::OutputMetric(int iter) {
if (es_first_metric_only_ && j > 0) { continue; }
if (ret.empty() && early_stopping_round_ > 0) {
auto cur_score = valid_metrics_[i][j]->factor_to_bigger_better() * test_scores.back();
if (cur_score > best_score_[i][j]) {
if (cur_score - best_score_[i][j] > early_stopping_min_delta_) {
best_score_[i][j] = cur_score;
best_iter_[i][j] = iter;
meet_early_stopping_pairs.emplace_back(i, j);
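The change to `GBDT::OutputMetric` above is the core of the C++ side: a new score only counts as an improvement when it beats the stored best score by more than `early_stopping_min_delta_`, with scores normalised by `factor_to_bigger_better()` so that larger is always better. A small self-contained sketch of that rule, written in Python for readability; the function and variable names are illustrative, not the actual API:

```python
def is_improvement(cur_score, best_score, factor_to_bigger_better, min_delta=0.0):
    # Multiply by factor_to_bigger_better so 'bigger is better' for every metric,
    # then require the gain to exceed min_delta. With min_delta == 0.0 this
    # reduces to the previous strict 'cur > best' comparison.
    cur = factor_to_bigger_better * cur_score
    best = factor_to_bigger_better * best_score
    return cur - best > min_delta


# For a loss metric (lower is better) the factor is -1:
print(is_improvement(0.48, 0.50, -1, min_delta=0.01))   # True: gain of 0.02 > 0.01
print(is_improvement(0.495, 0.50, -1, min_delta=0.01))  # False: gain of 0.005 <= 0.01
```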
2 changes: 2 additions & 0 deletions src/boosting/gbdt.h
@@ -532,6 +532,8 @@ class GBDT : public GBDTBase {
std::vector<std::vector<const Metric*>> valid_metrics_;
/*! \brief Number of rounds for early stopping */
int early_stopping_round_;
/*! \brief Minimum improvement for early stopping */
double early_stopping_min_delta_;
/*! \brief Only use first metric for early stopping */
bool es_first_metric_only_;
/*! \brief Best iteration(s) for early stopping */
7 changes: 7 additions & 0 deletions src/io/config_auto.cpp
@@ -214,6 +214,7 @@ const std::unordered_set<std::string>& Config::parameter_set() {
"extra_trees",
"extra_seed",
"early_stopping_round",
"early_stopping_min_delta",
"first_metric_only",
"max_delta_step",
"lambda_l1",
@@ -392,6 +393,9 @@ void Config::GetMembersFromString(const std::unordered_map<std::string, std::str

GetInt(params, "early_stopping_round", &early_stopping_round);

GetDouble(params, "early_stopping_min_delta", &early_stopping_min_delta);
CHECK_GE(early_stopping_min_delta, 0.0);

GetBool(params, "first_metric_only", &first_metric_only);

GetDouble(params, "max_delta_step", &max_delta_step);
@@ -690,6 +694,7 @@ std::string Config::SaveMembersToString() const {
str_buf << "[extra_trees: " << extra_trees << "]\n";
str_buf << "[extra_seed: " << extra_seed << "]\n";
str_buf << "[early_stopping_round: " << early_stopping_round << "]\n";
str_buf << "[early_stopping_min_delta: " << early_stopping_min_delta << "]\n";
str_buf << "[first_metric_only: " << first_metric_only << "]\n";
str_buf << "[max_delta_step: " << max_delta_step << "]\n";
str_buf << "[lambda_l1: " << lambda_l1 << "]\n";
@@ -814,6 +819,7 @@ const std::unordered_map<std::string, std::vector<std::string>>& Config::paramet
{"extra_trees", {"extra_tree"}},
{"extra_seed", {}},
{"early_stopping_round", {"early_stopping_rounds", "early_stopping", "n_iter_no_change"}},
{"early_stopping_min_delta", {}},
{"first_metric_only", {}},
{"max_delta_step", {"max_tree_output", "max_leaf_output"}},
{"lambda_l1", {"reg_alpha", "l1_regularization"}},
@@ -957,6 +963,7 @@ const std::unordered_map<std::string, std::string>& Config::ParameterTypes() {
{"extra_trees", "bool"},
{"extra_seed", "int"},
{"early_stopping_round", "int"},
{"early_stopping_min_delta", "double"},
{"first_metric_only", "bool"},
{"max_delta_step", "double"},
{"lambda_l1", "double"},
24 changes: 24 additions & 0 deletions tests/python_package_test/test_engine.py
@@ -1067,6 +1067,29 @@ def test_early_stopping_min_delta(first_only, single_metric, greater_is_better):
assert np.greater_equal(last_score, best_score - min_delta).any()


@pytest.mark.parametrize("early_stopping_min_delta", [1e3, 0.0])
def test_early_stopping_min_delta_via_global_params(early_stopping_min_delta):
X, y = load_breast_cancer(return_X_y=True)
num_trees = 5
params = {
"num_trees": num_trees,
"num_leaves": 5,
"objective": "binary",
"metric": "None",
"verbose": -1,
"early_stopping_round": 2,
"early_stopping_min_delta": early_stopping_min_delta,
}
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)
lgb_train = lgb.Dataset(X_train, y_train)
lgb_eval = lgb.Dataset(X_test, y_test, reference=lgb_train)
gbm = lgb.train(params, lgb_train, feval=decreasing_metric, valid_sets=lgb_eval)
if early_stopping_min_delta == 0:
assert gbm.best_iteration == num_trees
else:
assert gbm.best_iteration == 1


def test_early_stopping_can_be_triggered_via_custom_callback():
X, y = make_synthetic_regression()

@@ -1556,6 +1579,7 @@ def test_all_expected_params_are_written_out_to_model_text(tmp_path):
"[extra_trees: 0]",
"[extra_seed: 6642]",
"[early_stopping_round: 0]",
"[early_stopping_min_delta: 0]",
"[first_metric_only: 0]",
"[max_delta_step: 0]",
"[lambda_l1: 0]",
