{
"dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
"mteb_dataset_name": "MassiveIntentClassification",
"mteb_version": "1.0.3.dev0",
"test": {
"da": {
"accuracy": 0.41893073301950234,
"accuracy_stderr": 0.011477274617836132,
"f1": 0.3916826783567854,
"f1_stderr": 0.012505345549790286,
"main_score": 0.41893073301950234
},
"evaluation_time": 191.94,
"nb": {
"accuracy": 0.40248823133826495,
"accuracy_stderr": 0.01653777085051505,
"f1": 0.37594036649527024,
"f1_stderr": 0.01620910514389266,
"main_score": 0.40248823133826495
},
"sv": {
"accuracy": 0.4007061197041022,
"accuracy_stderr": 0.01818788065442142,
"f1": 0.37856957810370473,
"f1_stderr": 0.014384271559521303,
"main_score": 0.4007061197041022
}
},
"validation": {
"da": {
"accuracy": 0.40590260698475156,
"accuracy_stderr": 0.013282323069554338,
"f1": 0.37169393884824814,
"f1_stderr": 0.010953166715913714,
"main_score": 0.40590260698475156
},
"evaluation_time": 224.51,
"nb": {
"accuracy": 0.38996556812592226,
"accuracy_stderr": 0.016635278401427818,
"f1": 0.3580128917829644,
"f1_stderr": 0.015100608221640612,
"main_score": 0.38996556812592226
},
"sv": {
"accuracy": 0.40009837678307913,
"accuracy_stderr": 0.016363108244717105,
"f1": 0.36646972324933075,
"f1_stderr": 0.015617988216503663,
"main_score": 0.40009837678307913
}
}
}