{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.5463012777404169,
      "accuracy_stderr": 0.016338340737891303,
      "f1": 0.5130844114989241,
      "f1_stderr": 0.015361853327497454,
      "main_score": 0.5463012777404169
    },
    "evaluation_time": 109.44,
    "nb": {
      "accuracy": 0.5396099529253531,
      "accuracy_stderr": 0.018252261416206998,
      "f1": 0.5038616252538654,
      "f1_stderr": 0.015047174447154914,
      "main_score": 0.5396099529253531
    },
    "sv": {
      "accuracy": 0.5660390047074646,
      "accuracy_stderr": 0.02211077102889151,
      "f1": 0.5335715009690439,
      "f1_stderr": 0.018736645213923295,
      "main_score": 0.5660390047074646
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.5514018691588787,
      "accuracy_stderr": 0.01588107229255058,
      "f1": 0.5063911290316304,
      "f1_stderr": 0.0112308371355632,
      "main_score": 0.5514018691588787
    },
    "evaluation_time": 99.67,
    "nb": {
      "accuracy": 0.5375799311362519,
      "accuracy_stderr": 0.019202948984155192,
      "f1": 0.48284338435163193,
      "f1_stderr": 0.013409153186098251,
      "main_score": 0.5375799311362519
    },
    "sv": {
      "accuracy": 0.5631087063453025,
      "accuracy_stderr": 0.01797739821131694,
      "f1": 0.5163792207302368,
      "f1_stderr": 0.01972904403458276,
      "main_score": 0.5631087063453025
    }
  }
}