results/electra-small-nordic/MassiveScenarioClassification.json
{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.28934095494283796,
      "accuracy_stderr": 0.012764345785374599,
      "f1": 0.2721739007115139,
      "f1_stderr": 0.010201933258922631,
      "main_score": 0.28934095494283796
    },
    "evaluation_time": 31.95,
    "nb": {
      "accuracy": 0.2729657027572293,
      "accuracy_stderr": 0.012971310319977877,
      "f1": 0.2550361435799058,
      "f1_stderr": 0.008997820373006915,
      "main_score": 0.2729657027572293
    },
    "sv": {
      "accuracy": 0.2992938802958978,
      "accuracy_stderr": 0.018575017991964805,
      "f1": 0.28544183384003097,
      "f1_stderr": 0.017308765160599954,
      "main_score": 0.2992938802958978
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.2778652238071815,
      "accuracy_stderr": 0.014022488073356017,
      "f1": 0.2632414850295354,
      "f1_stderr": 0.012019925114669594,
      "main_score": 0.2778652238071815
    },
    "evaluation_time": 28.12,
    "nb": {
      "accuracy": 0.2600098376783079,
      "accuracy_stderr": 0.01341896825021553,
      "f1": 0.24711791209015893,
      "f1_stderr": 0.00692112398934422,
      "main_score": 0.2600098376783079
    },
    "sv": {
      "accuracy": 0.2881947860304968,
      "accuracy_stderr": 0.015705584597804585,
      "f1": 0.27551464915396506,
      "f1_stderr": 0.01628580137003319,
      "main_score": 0.2881947860304968
    }
  }
}