{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.37982515131136513,
      "accuracy_stderr": 0.011440520880727738,
      "f1": 0.3448174943380284,
      "f1_stderr": 0.009748846860402905,
      "main_score": 0.37982515131136513
    },
    "evaluation_time": 211.15,
    "nb": {
      "accuracy": 0.3574983187626093,
      "accuracy_stderr": 0.01486943985314982,
      "f1": 0.3338735327824476,
      "f1_stderr": 0.011441579699662988,
      "main_score": 0.3574983187626093
    },
    "sv": {
      "accuracy": 0.5275386684599865,
      "accuracy_stderr": 0.00907799693547891,
      "f1": 0.4917374109552649,
      "f1_stderr": 0.007623106603827818,
      "main_score": 0.5275386684599865
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.3873585833743237,
      "accuracy_stderr": 0.017159982148168464,
      "f1": 0.35577459105364684,
      "f1_stderr": 0.013577678220635193,
      "main_score": 0.3873585833743237
    },
    "evaluation_time": 189.08,
    "nb": {
      "accuracy": 0.366699458927693,
      "accuracy_stderr": 0.01506522475665767,
      "f1": 0.33492665872289507,
      "f1_stderr": 0.01340872506343075,
      "main_score": 0.366699458927693
    },
    "sv": {
      "accuracy": 0.5293654697491392,
      "accuracy_stderr": 0.008084871937552125,
      "f1": 0.49166671254083794,
      "f1_stderr": 0.01180695722613713,
      "main_score": 0.5293654697491392
    }
  }
}