{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.6415601882985877,
      "accuracy_stderr": 0.01681304638870262,
      "f1": 0.623984081357014,
      "f1_stderr": 0.014082572901473661,
      "main_score": 0.6415601882985877
    },
    "evaluation_time": 310.0,
    "nb": {
      "accuracy": 0.5459314055144586,
      "accuracy_stderr": 0.019140841546015294,
      "f1": 0.5225983817325339,
      "f1_stderr": 0.021011670214319638,
      "main_score": 0.5459314055144586
    },
    "sv": {
      "accuracy": 0.5009751176866174,
      "accuracy_stderr": 0.016466825531738845,
      "f1": 0.47320217900892514,
      "f1_stderr": 0.014959598967962843,
      "main_score": 0.5009751176866174
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.6362026561731431,
      "accuracy_stderr": 0.022801565267154378,
      "f1": 0.6226378879262205,
      "f1_stderr": 0.018728240491811456,
      "main_score": 0.6362026561731431
    },
    "evaluation_time": 241.39,
    "nb": {
      "accuracy": 0.5439252336448599,
      "accuracy_stderr": 0.026715953873539436,
      "f1": 0.5268650081470168,
      "f1_stderr": 0.02905488290688128,
      "main_score": 0.5439252336448599
    },
    "sv": {
      "accuracy": 0.4899163797343826,
      "accuracy_stderr": 0.014599923344472037,
      "f1": 0.47232304891259247,
      "f1_stderr": 0.01402905143010114,
      "main_score": 0.4899163797343826
    }
  }
}