Keep searching hyperparameters when r2_score raises an error (#1325)
* Keep searching hyperparameters when `r2_score` raises an error

* Add log info

---------

Co-authored-by: Li Jiang <[email protected]>
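
For context, scikit-learn's `r2_score` validates its inputs and raises a `ValueError` when the predictions contain `NaN` or `inf`, which is the failure this commit guards against. A minimal reproduction (the toy arrays below are illustrative only, not from FLAML):

```python
import numpy as np
from sklearn.metrics import r2_score

y_true = np.array([1.0, 2.0, 3.0])
y_pred = np.array([1.1, np.nan, 2.9])  # a diverging model can emit NaN/inf predictions

try:
    r2_score(y_true, y_pred)
except ValueError as e:
    # scikit-learn rejects non-finite inputs, e.g. "Input contains NaN ..."
    print(f"r2_score raised: {e}")
```
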
Atry and thinkall authored Aug 6, 2024
1 parent 8e63dd4 commit 853c950
Showing 1 changed file with 13 additions and 8 deletions.
21 changes: 13 additions & 8 deletions flaml/automl/ml.py
@@ -567,14 +567,19 @@ def _eval_estimator(
 
     pred_time = (time.time() - pred_start) / num_val_rows
 
-    val_loss = metric_loss_score(
-        eval_metric,
-        y_processed_predict=val_pred_y,
-        y_processed_true=y_val,
-        labels=labels,
-        sample_weight=weight_val,
-        groups=groups_val,
-    )
+    try:
+        val_loss = metric_loss_score(
+            eval_metric,
+            y_processed_predict=val_pred_y,
+            y_processed_true=y_val,
+            labels=labels,
+            sample_weight=weight_val,
+            groups=groups_val,
+        )
+    except ValueError as e:
+        # `r2_score` and other metrics may raise a `ValueError` when a model returns `inf` or `nan` values. In this case, we set the val_loss to infinity.
+        val_loss = np.inf
+        logger.warning(f"ValueError {e} happened in `metric_loss_score`, set `val_loss` to `np.inf`")
     metric_for_logging = {"pred_time": pred_time}
     if log_training_metric:
         train_pred_y = get_y_pred(estimator, X_train, eval_metric, task)
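
In isolation, the pattern added above looks like the sketch below; `safe_metric_loss` is a hypothetical helper standing in for the inline try/except around FLAML's `metric_loss_score`, used here only for illustration:

```python
import logging

import numpy as np

logger = logging.getLogger(__name__)


def safe_metric_loss(metric_fn, y_true, y_pred, **kwargs):
    """Hypothetical helper mirroring the try/except added in _eval_estimator:
    return the metric's loss, or np.inf if the metric rejects the inputs."""
    try:
        return metric_fn(y_true, y_pred, **kwargs)
    except ValueError as e:
        # e.g. r2_score raising on NaN/inf predictions
        logger.warning(f"ValueError {e} happened in the metric, set loss to np.inf")
        return np.inf
```

Because `np.inf` is the worst possible loss, a configuration whose predictions break the metric is simply ranked last, and the hyperparameter search moves on to the next trial instead of stopping on the error, which is what the commit title describes.
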
