results/multilingual-e5-large/MassiveScenarioClassification.json
{
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
"mteb_dataset_name": "MassiveScenarioClassification",
"mteb_version": "1.0.3.dev0",
"test": {
"da": {
"accuracy": 0.7073638197713517,
"accuracy_stderr": 0.015435224307893354,
"f1": 0.6946432292439878,
"f1_stderr": 0.01770631067782571,
"main_score": 0.7073638197713517
},
"evaluation_time": 296.6,
"nb": {
"accuracy": 0.6903496973772698,
"accuracy_stderr": 0.02128871451555135,
"f1": 0.6784970863633023,
"f1_stderr": 0.02071048980607578,
"main_score": 0.6903496973772698
},
"sv": {
"accuracy": 0.726025554808339,
"accuracy_stderr": 0.01763476177781155,
"f1": 0.7165731654076601,
"f1_stderr": 0.017408014747716232,
"main_score": 0.726025554808339
}
},
"validation": {
"da": {
"accuracy": 0.7001967535661584,
"accuracy_stderr": 0.012258177463848441,
"f1": 0.685452841446531,
"f1_stderr": 0.012429166161672134,
"main_score": 0.7001967535661584
},
"evaluation_time": 237.88,
"nb": {
"accuracy": 0.685981308411215,
"accuracy_stderr": 0.01645599826004996,
"f1": 0.6732216168533821,
"f1_stderr": 0.0144618478383849,
"main_score": 0.685981308411215
},
"sv": {
"accuracy": 0.7155435317265125,
"accuracy_stderr": 0.015148187903307278,
"f1": 0.7015078144973212,
"f1_stderr": 0.014432305122183504,
"main_score": 0.7155435317265125
}
}
}
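
A minimal sketch of reading the per-language main scores from this file. It assumes the JSON above is saved locally as MassiveScenarioClassification.json; the path and the script itself are illustrative and not part of the MTEB output format.

import json

# Load the results file (the path is an assumption for this sketch).
with open("MassiveScenarioClassification.json") as f:
    results = json.load(f)

# Each split maps language codes ("da", "nb", "sv") to metric dicts;
# "evaluation_time" is a sibling scalar, so skip non-dict entries.
for split in ("test", "validation"):
    for lang, metrics in results[split].items():
        if isinstance(metrics, dict):
            print(f"{split} {lang}: main_score={metrics['main_score']:.4f}")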