@@ -0,0 +1,26 @@
{
"dataset_revision": "b44c3b011063adb25877c13823db83bb193913c4",
"evaluation_time": 17.779419898986816,
"kg_co2_emissions": null,
"mteb_version": "1.14.5",
"scores": {
"validation": [
{
"cosine_pearson": 0.4250659691365476,
"cosine_spearman": 0.44183304700630943,
"euclidean_pearson": 0.42526113703430274,
"euclidean_spearman": 0.4417992717285455,
"hf_subset": "default",
"languages": [
"cmn-Hans"
],
"main_score": 0.44183304700630943,
"manhattan_pearson": 0.42253416555440887,
"manhattan_spearman": 0.4389046556586693,
"pearson": 0.4250659691365476,
"spearman": 0.44183304700630943
}
]
},
"task_name": "AFQMC"
}
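The file above is a single-task MTEB result for AFQMC, a Chinese STS-style task: main_score is the Spearman correlation of cosine similarities on the validation split. A minimal sketch of how a file like this could be produced with the mteb library (the file itself reports mteb_version 1.14.5); the model name below is a hypothetical placeholder, not necessarily the model evaluated in this PR:

import mteb
from sentence_transformers import SentenceTransformer

# Hypothetical model choice; swap in the model actually under evaluation.
model = SentenceTransformer("BAAI/bge-small-zh-v1.5")
tasks = mteb.get_tasks(tasks=["AFQMC"])
evaluation = mteb.MTEB(tasks=tasks)
# Writes AFQMC.json under the output folder, with the fields shown above
# (scores, dataset_revision, evaluation_time, mteb_version, task_name).
evaluation.run(model, output_folder="results")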
@@ -0,0 +1,43 @@
{
"dataset_revision": "0f319b1142f28d00e055a6770f3f726ae9b7d865",
"evaluation_time": 36.5313138961792,
"kg_co2_emissions": null,
"mteb_version": "1.14.5",
"scores": {
"test": [
{
"cosine_pearson": 0.4798403609373523,
"cosine_spearman": 0.497533476641491,
"euclidean_pearson": 0.518004622348761,
"euclidean_spearman": 0.49753581843355005,
"hf_subset": "default",
"languages": [
"cmn-Hans"
],
"main_score": 0.497533476641491,
"manhattan_pearson": 0.5164257039005756,
"manhattan_spearman": 0.49578269527810714,
"pearson": 0.4798403609373523,
"spearman": 0.497533476641491
}
],
"validation": [
{
"cosine_pearson": 0.48003489808011685,
"cosine_spearman": 0.5035318309318482,
"euclidean_pearson": 0.5160293065116957,
"euclidean_spearman": 0.5035375253666186,
"hf_subset": "default",
"languages": [
"cmn-Hans"
],
"main_score": 0.5035318309318482,
"manhattan_pearson": 0.5140614907941444,
"manhattan_spearman": 0.5015052656661405,
"pearson": 0.48003489808011685,
"spearman": 0.5035318309318482
}
]
},
"task_name": "ATEC"
}
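ATEC reports the same STS metric family on both test and validation splits. These fields measure the Pearson and Spearman correlation between embedding-space similarity (cosine, Euclidean, Manhattan) and the gold similarity labels. An illustrative sketch of the computation, assuming paired embedding matrices and gold scores as NumPy arrays (this is not mteb's internal code):

import numpy as np
from scipy.stats import pearsonr, spearmanr

def sts_scores(emb1, emb2, gold):
    # Cosine similarity per sentence pair.
    cos = (emb1 * emb2).sum(axis=1) / (
        np.linalg.norm(emb1, axis=1) * np.linalg.norm(emb2, axis=1))
    # Distances are negated so larger still means "more similar".
    euc = -np.linalg.norm(emb1 - emb2, axis=1)
    man = -np.abs(emb1 - emb2).sum(axis=1)
    return {
        "cosine_pearson": pearsonr(gold, cos)[0],
        "cosine_spearman": spearmanr(gold, cos)[0],
        "euclidean_spearman": spearmanr(gold, euc)[0],
        "manhattan_spearman": spearmanr(gold, man)[0],
    }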
@@ -0,0 +1,179 @@
{
"dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
"evaluation_time": 21.349095821380615,
"kg_co2_emissions": null,
"mteb_version": "1.14.5",
"scores": {
"test": [
{
"accuracy": 0.9643178410794603,
"ap": 0.7222960776019591,
"ap_weighted": 0.7222960776019591,
"f1": 0.9109270963593274,
"f1_weighted": 0.9655171548723889,
"hf_subset": "en-ext",
"languages": [
"eng-Latn"
],
"main_score": 0.9643178410794603,
"scores_per_experiment": [
{
"accuracy": 0.9647676161919041,
"ap": 0.725266929551478,
"ap_weighted": 0.725266929551478,
"f1": 0.9119987311693032,
"f1_weighted": 0.9659425284431332
},
{
"accuracy": 0.9640179910044977,
"ap": 0.717476337167374,
"ap_weighted": 0.717476337167374,
"f1": 0.9093365049698412,
"f1_weighted": 0.9650736181480238
},
{
"accuracy": 0.9647676161919041,
"ap": 0.725266929551478,
"ap_weighted": 0.725266929551478,
"f1": 0.9119987311693032,
"f1_weighted": 0.9659425284431332
},
{
"accuracy": 0.9632683658170914,
"ap": 0.7165319121453486,
"ap_weighted": 0.7165319121453486,
"f1": 0.9087825061575925,
"f1_weighted": 0.9645895554305612
},
{
"accuracy": 0.9640179910044977,
"ap": 0.7208726263164927,
"ap_weighted": 0.7208726263164927,
"f1": 0.9103860988731085,
"f1_weighted": 0.9652652443098788
},
{
"accuracy": 0.9647676161919041,
"ap": 0.725266929551478,
"ap_weighted": 0.725266929551478,
"f1": 0.9119987311693032,
"f1_weighted": 0.9659425284431332
},
{
"accuracy": 0.9647676161919041,
"ap": 0.725266929551478,
"ap_weighted": 0.725266929551478,
"f1": 0.9119987311693032,
"f1_weighted": 0.9659425284431332
},
{
"accuracy": 0.9647676161919041,
"ap": 0.725266929551478,
"ap_weighted": 0.725266929551478,
"f1": 0.9119987311693032,
"f1_weighted": 0.9659425284431332
},
{
"accuracy": 0.9640179910044977,
"ap": 0.7208726263164927,
"ap_weighted": 0.7208726263164927,
"f1": 0.9103860988731085,
"f1_weighted": 0.9652652443098788
},
{
"accuracy": 0.9640179910044977,
"ap": 0.7208726263164927,
"ap_weighted": 0.7208726263164927,
"f1": 0.9103860988731085,
"f1_weighted": 0.9652652443098788
}
]
},
{
"accuracy": 0.9525373134328357,
"ap": 0.799257555670098,
"ap_weighted": 0.799257555670098,
"f1": 0.9281773595632006,
"f1_weighted": 0.9536488319681403,
"hf_subset": "en",
"languages": [
"eng-Latn"
],
"main_score": 0.9525373134328357,
"scores_per_experiment": [
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9537313432835821,
"ap": 0.8035156382313725,
"ap_weighted": 0.8035156382313725,
"f1": 0.9298355848779977,
"f1_weighted": 0.9547702893012162
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9537313432835821,
"ap": 0.8035156382313725,
"ap_weighted": 0.8035156382313725,
"f1": 0.9298355848779977,
"f1_weighted": 0.9547702893012162
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
},
{
"accuracy": 0.9522388059701492,
"ap": 0.7981930350297796,
"ap_weighted": 0.7981930350297796,
"f1": 0.9277628032345013,
"f1_weighted": 0.9533684676348715
}
]
}
]
},
"task_name": "AmazonCounterfactualClassification"
}
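Unlike the STS files, the classification results carry a scores_per_experiment list: ten runs with resampled training examples per subset. The headline accuracy/ap/f1 values appear to be the mean over those ten runs, which can be checked directly against the file above (the local path is a placeholder):

import json
from statistics import mean

with open("AmazonCounterfactualClassification.json") as f:
    result = json.load(f)

for subset in result["scores"]["test"]:
    runs = subset["scores_per_experiment"]
    # e.g. for "en-ext": mean accuracy over 10 runs == 0.9643178410794603
    assert abs(mean(r["accuracy"] for r in runs) - subset["accuracy"]) < 1e-9
    print(subset["hf_subset"], subset["main_score"])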
@@ -0,0 +1,95 @@
{
"dataset_revision": "e2d317d38cd51312af73b3d32a06d1a08b442046",
"evaluation_time": 626.0383305549622,
"kg_co2_emissions": null,
"mteb_version": "1.14.5",
"scores": {
"test": [
{
"accuracy": 0.9667155000000001,
"ap": 0.9506125484469905,
"ap_weighted": 0.9506125484469905,
"f1": 0.9667152624774944,
"f1_weighted": 0.9667152624774944,
"hf_subset": "default",
"languages": [
"eng-Latn"
],
"main_score": 0.9667155000000001,
"scores_per_experiment": [
{
"accuracy": 0.9666475,
"ap": 0.9497472243024325,
"ap_weighted": 0.9497472243024325,
"f1": 0.9666471820590516,
"f1_weighted": 0.9666471820590515
},
{
"accuracy": 0.9667525,
"ap": 0.9504797117267496,
"ap_weighted": 0.9504797117267496,
"f1": 0.9667523996286114,
"f1_weighted": 0.9667523996286115
},
{
"accuracy": 0.966815,
"ap": 0.951738573264202,
"ap_weighted": 0.951738573264202,
"f1": 0.9668149700505104,
"f1_weighted": 0.9668149700505104
},
{
"accuracy": 0.9667825,
"ap": 0.9520501113812326,
"ap_weighted": 0.9520501113812326,
"f1": 0.9667823962263373,
"f1_weighted": 0.9667823962263373
},
{
"accuracy": 0.96678,
"ap": 0.9523640592148901,
"ap_weighted": 0.9523640592148901,
"f1": 0.9667797940314009,
"f1_weighted": 0.9667797940314009
},
{
"accuracy": 0.966605,
"ap": 0.9492883595726318,
"ap_weighted": 0.9492883595726318,
"f1": 0.966604461656408,
"f1_weighted": 0.9666044616564079
},
{
"accuracy": 0.9668225,
"ap": 0.951117878410981,
"ap_weighted": 0.951117878410981,
"f1": 0.9668224917883593,
"f1_weighted": 0.9668224917883593
},
{
"accuracy": 0.9667075,
"ap": 0.9504910863800623,
"ap_weighted": 0.9504910863800623,
"f1": 0.9667074187192839,
"f1_weighted": 0.9667074187192839
},
{
"accuracy": 0.96667,
"ap": 0.9498578909038486,
"ap_weighted": 0.9498578909038486,
"f1": 0.9666697187249231,
"f1_weighted": 0.9666697187249231
},
{
"accuracy": 0.9665725,
"ap": 0.9489905893128749,
"ap_weighted": 0.9489905893128749,
"f1": 0.9665717918900587,
"f1_weighted": 0.9665717918900587
}
]
}
]
},
"task_name": "AmazonPolarityClassification"
}
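With several result files landing in one PR, a short script can tabulate the headline numbers across tasks and splits. The glob pattern below assumes the usual results/<model>/<revision>/<TaskName>.json output layout, which is an assumption about this repository rather than something stated in the diff:

import glob
import json

# Assumed layout: results/<model>/<revision>/<TaskName>.json
for path in sorted(glob.glob("results/**/*.json", recursive=True)):
    with open(path) as f:
        res = json.load(f)
    for split, entries in res["scores"].items():
        for e in entries:
            print(f'{res["task_name"]:<40} {split:<12} '
                  f'{e["hf_subset"]:<8} {e["main_score"]:.4f}')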