{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.49926025554808345,
      "accuracy_stderr": 0.016129227599473626,
      "f1": 0.47224689228546657,
      "f1_stderr": 0.018442568597481064,
      "main_score": 0.49926025554808345
    },
    "evaluation_time": 101.9,
    "nb": {
      "accuracy": 0.4857767316745125,
      "accuracy_stderr": 0.019317146922155894,
      "f1": 0.45651634870157637,
      "f1_stderr": 0.01685437076119252,
      "main_score": 0.4857767316745125
    },
    "sv": {
      "accuracy": 0.4706455951580363,
      "accuracy_stderr": 0.01540369478672511,
      "f1": 0.44281581197784325,
      "f1_stderr": 0.01354462829679692,
      "main_score": 0.4706455951580363
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.48189867191342833,
      "accuracy_stderr": 0.012438051005294087,
      "f1": 0.4647165417573832,
      "f1_stderr": 0.012549474260390768,
      "main_score": 0.48189867191342833
    },
    "evaluation_time": 89.1,
    "nb": {
      "accuracy": 0.4771765863256272,
      "accuracy_stderr": 0.017531297554490753,
      "f1": 0.4568245987716901,
      "f1_stderr": 0.012664379744733356,
      "main_score": 0.4771765863256272
    },
    "sv": {
      "accuracy": 0.4689129365469749,
      "accuracy_stderr": 0.016626913280547637,
      "f1": 0.4462324789649177,
      "f1_stderr": 0.01384705589706275,
      "main_score": 0.4689129365469749
    }
  }
}