From 8ac9a393b8a9cc7690db7d0263a47ee7af56cb78 Mon Sep 17 00:00:00 2001
From: Li Jiang
Date: Thu, 13 Jul 2023 20:54:39 +0800
Subject: [PATCH] Add log metric (#1125)

* Add original metric to mlflow logging

* Update metric
---
 flaml/automl/automl.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/flaml/automl/automl.py b/flaml/automl/automl.py
index c53f70ec82..e37e7abde3 100644
--- a/flaml/automl/automl.py
+++ b/flaml/automl/automl.py
@@ -1785,6 +1785,7 @@ def is_to_reverse_metric(metric, task):
         else:
             error_metric = "customized metric"
         logger.info(f"Minimizing error metric: {error_metric}")
+        self._state.error_metric = error_metric
 
         is_spark_dataframe = isinstance(X_train, psDataFrame) or isinstance(dataframe, psDataFrame)
         estimator_list = task.default_estimator_list(estimator_list, is_spark_dataframe)
@@ -2159,6 +2160,14 @@ def _log_trial(self, search_state, estimator):
                 mlflow.log_metric("best_validation_loss", search_state.best_loss)
                 mlflow.log_param("best_config", search_state.best_config)
                 mlflow.log_param("best_learner", self._best_estimator)
+                mlflow.log_metric(
+                    self._state.metric if isinstance(self._state.metric, str) else self._state.error_metric,
+                    1 - search_state.val_loss
+                    if self._state.error_metric.startswith("1-")
+                    else -search_state.val_loss
+                    if self._state.error_metric.startswith("-")
+                    else search_state.val_loss,
+                )
 
     def _search_sequential(self):
         try:
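
Note (not part of the patch): the added mlflow.log_metric call reports the metric under its
original name by undoing FLAML's internal naming convention, in which the minimized error
metric is spelled either "1-<metric>" (e.g. "1-accuracy"), "-<metric>" (e.g. "-r2"), or the
metric name itself (e.g. "rmse"). The sketch below mirrors that conversion in isolation; the
helper name and the example values are hypothetical and only illustrate the mapping, not
FLAML's API.

from math import isclose

def original_metric_value(error_metric: str, val_loss: float) -> float:
    # Map the internal validation loss back to the user-facing metric value.
    if error_metric.startswith("1-"):
        return 1 - val_loss   # e.g. accuracy = 1 - (1 - accuracy)
    if error_metric.startswith("-"):
        return -val_loss      # e.g. r2 = -(-r2)
    return val_loss           # already an error metric, e.g. rmse

# Hypothetical checks: accuracy 0.95 from loss 0.05, r2 0.9 from loss -0.9, rmse unchanged.
assert isclose(original_metric_value("1-accuracy", 0.05), 0.95)
assert isclose(original_metric_value("-r2", -0.9), 0.9)
assert isclose(original_metric_value("rmse", 1.2), 1.2)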