{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.5955615332885004,
      "accuracy_stderr": 0.01577699539582055,
      "f1": 0.5794034655925369,
      "f1_stderr": 0.013384379870283205,
      "main_score": 0.5955615332885004
    },
    "evaluation_time": 95.61,
    "nb": {
      "accuracy": 0.47552118359112316,
      "accuracy_stderr": 0.018872921265933037,
      "f1": 0.4640926498110063,
      "f1_stderr": 0.01476824680252287,
      "main_score": 0.47552118359112316
    },
    "sv": {
      "accuracy": 0.40141223940820436,
      "accuracy_stderr": 0.01808423841391776,
      "f1": 0.3822692785842296,
      "f1_stderr": 0.013452966848637938,
      "main_score": 0.40141223940820436
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.594392523364486,
      "accuracy_stderr": 0.01934448803580611,
      "f1": 0.5796578284338747,
      "f1_stderr": 0.01787448188066123,
      "main_score": 0.594392523364486
    },
    "evaluation_time": 75.72,
    "nb": {
      "accuracy": 0.4664043285784555,
      "accuracy_stderr": 0.018904987337221752,
      "f1": 0.4617714246937276,
      "f1_stderr": 0.013988693425569182,
      "main_score": 0.4664043285784555
    },
    "sv": {
      "accuracy": 0.39596655189375307,
      "accuracy_stderr": 0.015554735170528184,
      "f1": 0.379545671770527,
      "f1_stderr": 0.013408542819112435,
      "main_score": 0.39596655189375307
    }
  }
}