Commit

Merge pull request #3605 from ZipRecruiter/ntravis.metrics
Add per-task metrics
alanakbik authored Feb 3, 2025
2 parents 3d24c35 + 94a5ed1 commit f97264a
Showing 1 changed file with 7 additions and 1 deletion.
flair/models/multitask_model.py (7 additions, 1 deletion)
@@ -164,6 +164,7 @@ def evaluate( # type: ignore[override]
         main_score = 0.0
         all_detailed_results = ""
         all_classification_report: dict[str, dict[str, Any]] = {}
+        scores: dict[Any, float] = {}
 
         for task_id, split in batch_split.items():
             result = self.tasks[task_id].evaluate(
@@ -194,7 +195,12 @@ def evaluate( # type: ignore[override]
             )
             all_classification_report[task_id] = result.classification_report
 
-        scores = {"loss": loss.item() / len(batch_split)}
+            # Add metrics so they will be available to _publish_eval_result.
+            for avg_type in ("micro avg", "macro avg"):
+                for metric_type in ("f1-score", "precision", "recall"):
+                    scores[(task_id, avg_type, metric_type)] = result.classification_report[avg_type][metric_type]
+
+        scores["loss"] = loss.item() / len(batch_split)
 
         return Result(
             main_score=main_score / len(batch_split),
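For context, after this change scores holds one entry per (task, average, metric) combination next to the existing "loss" entry, keyed by tuples. Below is a minimal standalone sketch of that resulting shape; the task name "ner" and all numeric values are made-up illustration data, not output from an actual flair run.

from typing import Any

# Illustrative stand-in for result.classification_report of one task.
classification_report: dict[str, dict[str, float]] = {
    "micro avg": {"f1-score": 0.91, "precision": 0.90, "recall": 0.92},
    "macro avg": {"f1-score": 0.87, "precision": 0.88, "recall": 0.86},
}

scores: dict[Any, float] = {}
task_id = "ner"  # hypothetical task id

# Same nesting as the diff: tuple keys (task_id, avg_type, metric_type).
for avg_type in ("micro avg", "macro avg"):
    for metric_type in ("f1-score", "precision", "recall"):
        scores[(task_id, avg_type, metric_type)] = classification_report[avg_type][metric_type]

scores["loss"] = 0.42  # stand-in for loss.item() / len(batch_split)

print(scores[("ner", "macro avg", "f1-score")])  # -> 0.87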
