results/results/all-MiniLM-L6-v2/MassiveIntentClassification.json
{
  "test": {
    "en": {
      "accuracy": 0.6740416946872899,
      "accuracy_stderr": 0.009932912138607529,
      "f1": 0.6482901615433794,
      "f1_stderr": 0.01101873248863987,
      "main_score": 0.6740416946872899
    },
    "da": {
      "accuracy": 0.4098520511096167,
      "accuracy_stderr": 0.013546600921886436,
      "f1": 0.39031952711786977,
      "f1_stderr": 0.01465861207914092,
      "main_score": 0.4098520511096167
    },
    "nb": {
      "accuracy": 0.3934431741761937,
      "accuracy_stderr": 0.010632494048802496,
      "f1": 0.3725650363380114,
      "f1_stderr": 0.0047186150764588165,
      "main_score": 0.3934431741761937
    },
    "sv": {
      "accuracy": 0.38103564223268327,
      "accuracy_stderr": 0.015896221776346243,
      "f1": 0.36129998730373103,
      "f1_stderr": 0.009758357654155333,
      "main_score": 0.38103564223268327
    },
    "evaluation_time": 20.8
  },
  "validation": {
    "da": {
      "accuracy": 0.396212493851451,
      "accuracy_stderr": 0.016305413904394876,
      "f1": 0.37526615524002804,
      "f1_stderr": 0.016303136707962292,
      "main_score": 0.396212493851451
    },
    "evaluation_time": 77.55,
    "nb": {
      "accuracy": 0.38145597638957207,
      "accuracy_stderr": 0.008588270625147559,
      "f1": 0.3557170505087034,
      "f1_stderr": 0.008932333735728392,
      "main_score": 0.38145597638957207
    },
    "sv": {
      "accuracy": 0.37968519429414654,
      "accuracy_stderr": 0.01211163638368146,
      "f1": 0.3555505582605072,
      "f1_stderr": 0.00793761524455261,
      "main_score": 0.37968519429414654
    }
  },
  "mteb_version": "1.0.3.dev0",
  "mteb_dataset_name": "MassiveIntentClassification",
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7"
}
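
A file with this layout is what the MTEB harness writes out per task and per split. The sketch below shows how such a results file might be produced and then read back; the exact call signature is an assumption based on the mteb 1.x Python API (the file above reports mteb_version 1.0.3.dev0), and the model name, language list, and output path simply mirror the file shown here.

```python
# Minimal sketch, assuming the mteb 1.x Python API and sentence-transformers.
# Keyword arguments may differ across mteb versions; treat this as illustrative.
import json

from mteb import MTEB
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
evaluation = MTEB(
    tasks=["MassiveIntentClassification"],
    task_langs=["en", "da", "nb", "sv"],
)
evaluation.run(model, output_folder="results/all-MiniLM-L6-v2")

# Read the per-language scores back out of the JSON written above.
with open("results/all-MiniLM-L6-v2/MassiveIntentClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    for lang, scores in results.get(split, {}).items():
        if lang == "evaluation_time":  # scalar timing entry, not a language block
            continue
        print(f"{split}/{lang}: accuracy={scores['accuracy']:.4f} "
              f"(stderr {scores['accuracy_stderr']:.4f}), f1={scores['f1']:.4f}")
```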