results/electra-small-swedish-cased-discriminator/MassiveScenarioClassification.json
{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.11499663752521858,
      "accuracy_stderr": 0.006917224409212533,
      "f1": 0.1002213125938091,
      "f1_stderr": 0.006521016859703459,
      "main_score": 0.11499663752521858
    },
    "evaluation_time": 34.25,
    "nb": {
      "accuracy": 0.11264290517821118,
      "accuracy_stderr": 0.0071975812851916175,
      "f1": 0.09663541948337996,
      "f1_stderr": 0.006912988810159833,
      "main_score": 0.11264290517821118
    },
    "sv": {
      "accuracy": 0.12155346334902488,
      "accuracy_stderr": 0.009327155941023088,
      "f1": 0.10615364153403199,
      "f1_stderr": 0.00862797693736984,
      "main_score": 0.12155346334902488
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.1159370388588293,
      "accuracy_stderr": 0.008613026410880726,
      "f1": 0.10116369545244888,
      "f1_stderr": 0.009977072575369818,
      "main_score": 0.1159370388588293
    },
    "evaluation_time": 28.77,
    "nb": {
      "accuracy": 0.1057058534185932,
      "accuracy_stderr": 0.009865798472529346,
      "f1": 0.09205814262378638,
      "f1_stderr": 0.008275827745048385,
      "main_score": 0.1057058534185932
    },
    "sv": {
      "accuracy": 0.11815051647811117,
      "accuracy_stderr": 0.012952154406783094,
      "f1": 0.10299869319299178,
      "f1_stderr": 0.010370090341977974,
      "main_score": 0.11815051647811117
    }
  }
}
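For context, a minimal sketch (not part of the original file) of how these scores could be read with Python's standard json module. The local file path is an assumption; each split maps language codes ("da", "nb", "sv") to metric dictionaries alongside a scalar "evaluation_time" entry.

import json

# Hypothetical local path to this results file; adjust to wherever the repo is checked out.
path = "results/electra-small-swedish-cased-discriminator/MassiveScenarioClassification.json"

with open(path, encoding="utf-8") as f:
    results = json.load(f)

# Print accuracy and F1 per split and language, skipping the scalar
# "evaluation_time" entries that sit next to the per-language dicts.
for split in ("test", "validation"):
    for lang, metrics in results[split].items():
        if not isinstance(metrics, dict):
            continue  # "evaluation_time" is a float, not a metrics dict
        print(f"{split}/{lang}: accuracy={metrics['accuracy']:.4f}, f1={metrics['f1']:.4f}")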