results/results/bert-base-swedish-cased/MassiveScenarioClassification.json
{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.40437121721587077,
      "accuracy_stderr": 0.0217415653668484,
      "f1": 0.3723288747252761,
      "f1_stderr": 0.019283422341780296,
      "main_score": 0.40437121721587077
    },
    "evaluation_time": 95.92,
    "nb": {
      "accuracy": 0.35763281775386685,
      "accuracy_stderr": 0.02580146772548029,
      "f1": 0.3412599346756071,
      "f1_stderr": 0.0237566412933957,
      "main_score": 0.35763281775386685
    },
    "sv": {
      "accuracy": 0.5609280430396772,
      "accuracy_stderr": 0.024760603110657826,
      "f1": 0.547979935150986,
      "f1_stderr": 0.02207147319053608,
      "main_score": 0.5609280430396772
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.39454008853910477,
      "accuracy_stderr": 0.021005746966988717,
      "f1": 0.3718680770887862,
      "f1_stderr": 0.019314996889392557,
      "main_score": 0.39454008853910477
    },
    "evaluation_time": 79.78,
    "nb": {
      "accuracy": 0.35125430398425966,
      "accuracy_stderr": 0.023884953034132034,
      "f1": 0.33753794511461643,
      "f1_stderr": 0.021553023585863994,
      "main_score": 0.35125430398425966
    },
    "sv": {
      "accuracy": 0.5571077225774717,
      "accuracy_stderr": 0.02686172612607831,
      "f1": 0.5479549119696884,
      "f1_stderr": 0.02461664558075998,
      "main_score": 0.5571077225774717
    }
  }
}
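
For reference, here is a minimal Python sketch of how these scores could be read back. It assumes the JSON above is saved locally as MassiveScenarioClassification.json; the path and helper name are illustrative and not part of MTEB's API. Note that in every split above, main_score equals accuracy.

```python
import json

# Minimal sketch (assumption: the JSON above is saved locally as
# "MassiveScenarioClassification.json"; the path and function name are illustrative).
def print_main_scores(path: str = "MassiveScenarioClassification.json") -> None:
    with open(path, encoding="utf-8") as f:
        results = json.load(f)

    for split in ("test", "validation"):
        for lang, metrics in results.get(split, {}).items():
            # Entries such as "evaluation_time" are plain numbers, not per-language dicts.
            if not isinstance(metrics, dict):
                continue
            print(
                f"{split}/{lang}: main_score={metrics['main_score']:.4f} "
                f"(accuracy {metrics['accuracy']:.4f} ± {metrics['accuracy_stderr']:.4f})"
            )

if __name__ == "__main__":
    print_main_scores()
```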