results/dfm-sentence-encoder-large-1/MassiveScenarioClassification.json
{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.7160726294552792,
      "accuracy_stderr": 0.02100360122291461,
      "f1": 0.7041873241706947,
      "f1_stderr": 0.019159130877734326,
      "main_score": 0.7160726294552792
    },
    "evaluation_time": 262.2,
    "nb": {
      "accuracy": 0.6365837256220579,
      "accuracy_stderr": 0.015808495581346612,
      "f1": 0.6196857169074892,
      "f1_stderr": 0.013663579561431998,
      "main_score": 0.6365837256220579
    },
    "sv": {
      "accuracy": 0.5715870880968392,
      "accuracy_stderr": 0.018867319112281,
      "f1": 0.5542408157073229,
      "f1_stderr": 0.014907747726350446,
      "main_score": 0.5715870880968392
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.7087555336940482,
      "accuracy_stderr": 0.02438916009084682,
      "f1": 0.701567767327123,
      "f1_stderr": 0.020460017961556956,
      "main_score": 0.7087555336940482
    },
    "evaluation_time": 225.05,
    "nb": {
      "accuracy": 0.641957697983276,
      "accuracy_stderr": 0.024085692494393,
      "f1": 0.6304056832897913,
      "f1_stderr": 0.019372053896422343,
      "main_score": 0.641957697983276
    },
    "sv": {
      "accuracy": 0.5650270536153468,
      "accuracy_stderr": 0.021761124536612407,
      "f1": 0.5525645796934578,
      "f1_stderr": 0.016657811347352468,
      "main_score": 0.5650270536153468
    }
  }
}
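
A minimal sketch of reading the per-language scores back out of this file. The local filename is an assumption (it matches the repo path above), and note that each split dict mixes language keys ("da", "nb", "sv") with a scalar "evaluation_time" entry, so the latter has to be skipped when iterating:

import json

# Load the results file (path is illustrative; adjust to where the file lives).
with open("MassiveScenarioClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    scores = results[split]
    print(f"{split} (evaluation_time: {scores['evaluation_time']}s)")
    for lang, metrics in scores.items():
        # "evaluation_time" sits alongside the language entries; it is not a language.
        if lang == "evaluation_time":
            continue
        print(f"  {lang}: accuracy={metrics['accuracy']:.4f} "
              f"(stderr {metrics['accuracy_stderr']:.4f}), f1={metrics['f1']:.4f}")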