@@ -0,0 +1,95 @@
{
  "dataset_revision": "b48bc27d383cfca5b6a47135a52390fa5f66b253",
  "task_name": "AmazonCounterfactualVNClassification",
  "mteb_version": "1.38.41",
  "scores": {
    "test": [
      {
        "accuracy": 0.619742,
        "f1": 0.561867,
        "f1_weighted": 0.660984,
        "ap": 0.257592,
        "ap_weighted": 0.257592,
        "scores_per_experiment": [
          {
            "accuracy": 0.587983,
            "f1": 0.555485,
            "f1_weighted": 0.631829,
            "ap": 0.276246,
            "ap_weighted": 0.276246
          },
          {
            "accuracy": 0.736052,
            "f1": 0.655812,
            "f1_weighted": 0.761372,
            "ap": 0.318313,
            "ap_weighted": 0.318313
          },
          {
            "accuracy": 0.652361,
            "f1": 0.594521,
            "f1_weighted": 0.691796,
            "ap": 0.280129,
            "ap_weighted": 0.280129
          },
          {
            "accuracy": 0.51073,
            "f1": 0.482958,
            "f1_weighted": 0.559073,
            "ap": 0.224991,
            "ap_weighted": 0.224991
          },
          {
            "accuracy": 0.622318,
            "f1": 0.539112,
            "f1_weighted": 0.6635,
            "ap": 0.219724,
            "ap_weighted": 0.219724
          },
          {
            "accuracy": 0.583691,
            "f1": 0.53557,
            "f1_weighted": 0.630528,
            "ap": 0.241011,
            "ap_weighted": 0.241011
          },
          {
            "accuracy": 0.690987,
            "f1": 0.626203,
            "f1_weighted": 0.725049,
            "ap": 0.302755,
            "ap_weighted": 0.302755
          },
          {
            "accuracy": 0.594421,
            "f1": 0.549602,
            "f1_weighted": 0.639849,
            "ap": 0.255337,
            "ap_weighted": 0.255337
          },
          {
            "accuracy": 0.618026,
            "f1": 0.556143,
            "f1_weighted": 0.661415,
            "ap": 0.244741,
            "ap_weighted": 0.244741
          },
          {
            "accuracy": 0.600858,
            "f1": 0.523266,
            "f1_weighted": 0.645433,
            "ap": 0.212676,
            "ap_weighted": 0.212676
          }
        ],
        "main_score": 0.619742,
        "hf_subset": "default",
        "languages": [
          "vie-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 1.7541632652282715,
  "kg_co2_emissions": null
}
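The aggregate metrics in each result file appear to be the mean over the ten entries in scores_per_experiment; for this file, the per-run accuracies average to roughly 0.61974, which agrees with the reported main_score of 0.619742 up to rounding of the per-run values. A minimal sketch of that check, assuming the file above has been saved locally as AmazonCounterfactualVNClassification.json (the path is hypothetical):

import json
import statistics

# Load one MTEB result file (the filename is an assumption; adjust to where the JSON lives).
with open("AmazonCounterfactualVNClassification.json") as fh:
    result = json.load(fh)

test_split = result["scores"]["test"][0]
per_run_accuracy = [run["accuracy"] for run in test_split["scores_per_experiment"]]

# The mean over the ten experiments should match main_score up to rounding.
print(statistics.mean(per_run_accuracy))  # ~0.61974
print(test_split["main_score"])           # 0.619742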
@@ -0,0 +1,95 @@
{
  "dataset_revision": "4e9a0d6e6bd97ab32f23c50c043d751eed2a5f8a",
  "task_name": "AmazonPolarityVNClassification",
  "mteb_version": "1.38.41",
  "scores": {
    "test": [
      {
        "accuracy": 0.887758,
        "f1": 0.887555,
        "f1_weighted": 0.88755,
        "ap": 0.85126,
        "ap_weighted": 0.85126,
        "scores_per_experiment": [
          {
            "accuracy": 0.887756,
            "f1": 0.887564,
            "f1_weighted": 0.88753,
            "ap": 0.86203,
            "ap_weighted": 0.86203
          },
          {
            "accuracy": 0.886623,
            "f1": 0.886583,
            "f1_weighted": 0.886599,
            "ap": 0.840929,
            "ap_weighted": 0.840929
          },
          {
            "accuracy": 0.887771,
            "f1": 0.887457,
            "f1_weighted": 0.887413,
            "ap": 0.86644,
            "ap_weighted": 0.86644
          },
          {
            "accuracy": 0.879607,
            "f1": 0.879353,
            "f1_weighted": 0.879312,
            "ap": 0.852779,
            "ap_weighted": 0.852779
          },
          {
            "accuracy": 0.894125,
            "f1": 0.894014,
            "f1_weighted": 0.893989,
            "ap": 0.86726,
            "ap_weighted": 0.86726
          },
          {
            "accuracy": 0.870804,
            "f1": 0.870803,
            "f1_weighted": 0.870801,
            "ap": 0.827334,
            "ap_weighted": 0.827334
          },
          {
            "accuracy": 0.896821,
            "f1": 0.896763,
            "f1_weighted": 0.896744,
            "ap": 0.86776,
            "ap_weighted": 0.86776
          },
          {
            "accuracy": 0.901696,
            "f1": 0.901691,
            "f1_weighted": 0.901696,
            "ap": 0.863676,
            "ap_weighted": 0.863676
          },
          {
            "accuracy": 0.90081,
            "f1": 0.900772,
            "f1_weighted": 0.900787,
            "ap": 0.858558,
            "ap_weighted": 0.858558
          },
          {
            "accuracy": 0.871574,
            "f1": 0.870546,
            "f1_weighted": 0.870631,
            "ap": 0.805833,
            "ap_weighted": 0.805833
          }
        ],
        "main_score": 0.887758,
        "hf_subset": "default",
        "languages": [
          "vie-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 1072.2194910049438,
  "kg_co2_emissions": null
}
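For context, result files in this format are what mteb writes out when a task is evaluated. A minimal sketch of producing one, following the usage documented in the mteb README; the model name here is only a placeholder, since the model behind these numbers is not named in this excerpt:

import mteb
from sentence_transformers import SentenceTransformer

# Placeholder model; substitute the embedding model actually being benchmarked.
model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")

# Select the task and run it; each task's result is written as a JSON file like the ones above.
tasks = mteb.get_tasks(tasks=["AmazonPolarityVNClassification"])
evaluation = mteb.MTEB(tasks=tasks)
evaluation.run(model, output_folder="results")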
@@ -0,0 +1,73 @@
{
  "dataset_revision": "27da94deb6d4f44af789a3d70750fa506b79f189",
  "task_name": "AmazonReviewsVNClassification",
  "mteb_version": "1.38.41",
  "scores": {
    "test": [
      {
        "accuracy": 0.444801,
        "f1": 0.426392,
        "f1_weighted": 0.432322,
        "scores_per_experiment": [
          {
            "accuracy": 0.450058,
            "f1": 0.42758,
            "f1_weighted": 0.434702
          },
          {
            "accuracy": 0.470502,
            "f1": 0.448032,
            "f1_weighted": 0.455157
          },
          {
            "accuracy": 0.436624,
            "f1": 0.42657,
            "f1_weighted": 0.430991
          },
          {
            "accuracy": 0.451811,
            "f1": 0.435001,
            "f1_weighted": 0.441204
          },
          {
            "accuracy": 0.462325,
            "f1": 0.433835,
            "f1_weighted": 0.440433
          },
          {
            "accuracy": 0.439836,
            "f1": 0.413124,
            "f1_weighted": 0.419804
          },
          {
            "accuracy": 0.418808,
            "f1": 0.415917,
            "f1_weighted": 0.420269
          },
          {
            "accuracy": 0.463785,
            "f1": 0.457309,
            "f1_weighted": 0.461625
          },
          {
            "accuracy": 0.439252,
            "f1": 0.428232,
            "f1_weighted": 0.433021
          },
          {
            "accuracy": 0.415012,
            "f1": 0.378321,
            "f1_weighted": 0.386011
          }
        ],
        "main_score": 0.444801,
        "hf_subset": "default",
        "languages": [
          "vie-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 11.328449964523315,
  "kg_co2_emissions": null
}
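Across the three tasks added here, the headline numbers are main_score 0.619742 (AmazonCounterfactualVNClassification), 0.887758 (AmazonPolarityVNClassification), and 0.444801 (AmazonReviewsVNClassification). A minimal sketch for collecting such a summary from the result files, assuming all three JSONs sit in a single directory named results (the path is hypothetical):

import json
from pathlib import Path

results_dir = Path("results")  # hypothetical location of the three result files

# Print each task name with its reported main_score on the test split.
for path in sorted(results_dir.glob("*VNClassification.json")):
    data = json.loads(path.read_text())
    main_score = data["scores"]["test"][0]["main_score"]
    print(f"{data['task_name']:<42} {main_score:.6f}")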