{
"test": {
"en": {
"accuracy": 0.7575655682582381,
"accuracy_stderr": 0.011626580127316636,
"f1": 0.7493126114560369,
"f1_stderr": 0.0116798506229795,
"main_score": 0.7575655682582381
},
"da": {
"accuracy": 0.47007397444519167,
"accuracy_stderr": 0.016525460362257268,
"f1": 0.44591837506202986,
"f1_stderr": 0.011715003849608418,
"main_score": 0.47007397444519167
},
"nb": {
"accuracy": 0.44670477471418957,
"accuracy_stderr": 0.01725133080004754,
"f1": 0.42175654729028544,
"f1_stderr": 0.015659543471027913,
"main_score": 0.44670477471418957
},
"sv": {
"accuracy": 0.4293207800941493,
"accuracy_stderr": 0.018548430093723933,
"f1": 0.3990548911517426,
"f1_stderr": 0.015416056576756836,
"main_score": 0.4293207800941493
},
"evaluation_time": 17.89
},
"validation": {
"da": {
"accuracy": 0.44692572552877524,
"accuracy_stderr": 0.012853520608224854,
"f1": 0.43967186299229216,
"f1_stderr": 0.00882990108429513,
"main_score": 0.44692572552877524
},
"evaluation_time": 27.56,
"nb": {
"accuracy": 0.4338908017707821,
"accuracy_stderr": 0.012673911928725553,
"f1": 0.4207858934675214,
"f1_stderr": 0.008274838306434095,
"main_score": 0.4338908017707821
},
"sv": {
"accuracy": 0.429021151008362,
"accuracy_stderr": 0.021686224117471994,
"f1": 0.4083151156205155,
"f1_stderr": 0.01881794915834628,
"main_score": 0.429021151008362
}
},
"mteb_version": "1.0.3.dev0",
"mteb_dataset_name": "MassiveScenarioClassification",
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634"
}