results/dfm-encoder-large-v1/MassiveIntentClassification.json
{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.6055480833893746,
      "accuracy_stderr": 0.010974707671310587,
      "f1": 0.5575931847636777,
      "f1_stderr": 0.005463323455870516,
      "main_score": 0.6055480833893746
    },
    "evaluation_time": 494.47,
    "nb": {
      "accuracy": 0.5248823133826497,
      "accuracy_stderr": 0.012975842025794888,
      "f1": 0.4916043573899421,
      "f1_stderr": 0.00929974027333486,
      "main_score": 0.5248823133826497
    },
    "sv": {
      "accuracy": 0.4974445191661063,
      "accuracy_stderr": 0.011126895967691426,
      "f1": 0.46779266847135725,
      "f1_stderr": 0.009957825087612915,
      "main_score": 0.4974445191661063
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.6100836202656174,
      "accuracy_stderr": 0.01107569235371189,
      "f1": 0.5654048677321653,
      "f1_stderr": 0.0049041945696630335,
      "main_score": 0.6100836202656174
    },
    "evaluation_time": 455.02,
    "nb": {
      "accuracy": 0.5286276438760453,
      "accuracy_stderr": 0.010684302908423324,
      "f1": 0.49794685904777597,
      "f1_stderr": 0.00846388499335464,
      "main_score": 0.5286276438760453
    },
    "sv": {
      "accuracy": 0.4912936546974914,
      "accuracy_stderr": 0.017198782824594354,
      "f1": 0.4654452120067443,
      "f1_stderr": 0.013571094945839175,
      "main_score": 0.4912936546974914
    }
  }
}
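For reference, a minimal sketch of how scores could be read back from a results file with this layout: each split maps language codes ("da", "nb", "sv") to metric dictionaries, alongside a scalar "evaluation_time". The relative path below is an assumption; adjust it to wherever this file sits in your local checkout.

```python
import json
from pathlib import Path

# Assumed local path to this results file; adjust as needed.
RESULTS_PATH = Path("results/dfm-encoder-large-v1/MassiveIntentClassification.json")

with RESULTS_PATH.open(encoding="utf-8") as f:
    results = json.load(f)

for split in ("test", "validation"):
    split_results = results.get(split, {})
    print(f"{split} (evaluation_time: {split_results.get('evaluation_time')} s)")
    for lang, metrics in split_results.items():
        if not isinstance(metrics, dict):
            continue  # skip the scalar "evaluation_time" entry
        print(
            f"  {lang}: accuracy={metrics['accuracy']:.4f} "
            f"f1={metrics['f1']:.4f} main_score={metrics['main_score']:.4f}"
        )
```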