{
    "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
    "mteb_dataset_name": "MassiveIntentClassification",
    "mteb_version": "1.0.3.dev0",
    "test": {
        "da": {
            "accuracy": 0.6016476126429052,
            "accuracy_stderr": 0.016738269771835478,
            "f1": 0.5634231157127344,
            "f1_stderr": 0.016818445263435046,
            "main_score": 0.6016476126429052
        },
        "evaluation_time": 187.54,
        "nb": {
            "accuracy": 0.5982851378614661,
            "accuracy_stderr": 0.015367686940145293,
            "f1": 0.5604910761332663,
            "f1_stderr": 0.016086224068945554,
            "main_score": 0.5982851378614661
        },
        "sv": {
            "accuracy": 0.6178211163416274,
            "accuracy_stderr": 0.016452228664678048,
            "f1": 0.5797049972201151,
            "f1_stderr": 0.016076116616999002,
            "main_score": 0.6178211163416274
        }
    },
    "validation": {
        "da": {
            "accuracy": 0.6019183472700442,
            "accuracy_stderr": 0.0122775052969324,
            "f1": 0.5536043106249855,
            "f1_stderr": 0.009964189135381725,
            "main_score": 0.6019183472700442
        },
        "evaluation_time": 169.49,
        "nb": {
            "accuracy": 0.5980324643384161,
            "accuracy_stderr": 0.015128609314066288,
            "f1": 0.5459511913562556,
            "f1_stderr": 0.013453351640031277,
            "main_score": 0.5980324643384161
        },
        "sv": {
            "accuracy": 0.6121495327102804,
            "accuracy_stderr": 0.014359725112636508,
            "f1": 0.5617276847284391,
            "f1_stderr": 0.011568490797277911,
            "main_score": 0.6121495327102804
        }
    }
}