{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.2629791526563551,
      "accuracy_stderr": 0.008387942541855315,
      "f1": 0.22980369734119951,
      "f1_stderr": 0.007579204651192388,
      "main_score": 0.2629791526563551
    },
    "evaluation_time": 54.92,
    "nb": {
      "accuracy": 0.24603227975790182,
      "accuracy_stderr": 0.011219838214051958,
      "f1": 0.21966728945303612,
      "f1_stderr": 0.011543763578443968,
      "main_score": 0.24603227975790182
    },
    "sv": {
      "accuracy": 0.27575655682582384,
      "accuracy_stderr": 0.015714273262737038,
      "f1": 0.24809328854347754,
      "f1_stderr": 0.014263974098066344,
      "main_score": 0.27575655682582384
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.24451549434333497,
      "accuracy_stderr": 0.0069125005086168295,
      "f1": 0.21796392378919002,
      "f1_stderr": 0.0072307709119297965,
      "main_score": 0.24451549434333497
    },
    "evaluation_time": 50.68,
    "nb": {
      "accuracy": 0.2399901623216921,
      "accuracy_stderr": 0.009305450499151588,
      "f1": 0.21453643098814323,
      "f1_stderr": 0.010078518406127232,
      "main_score": 0.2399901623216921
    },
    "sv": {
      "accuracy": 0.2617314313821938,
      "accuracy_stderr": 0.013711920338807113,
      "f1": 0.24030872466568934,
      "f1_stderr": 0.01465892806848656,
      "main_score": 0.2617314313821938
    }
  }
}