results/e5-base/MassiveIntentClassification.json
{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.4425353059852051,
      "accuracy_stderr": 0.011987069326364846,
      "f1": 0.42176846706873306,
      "f1_stderr": 0.011356143085800888,
      "main_score": 0.4425353059852051
    },
    "evaluation_time": 1773.34,
    "nb": {
      "accuracy": 0.4156691324815064,
      "accuracy_stderr": 0.01598275910181979,
      "f1": 0.38586961267500197,
      "f1_stderr": 0.015427786451280444,
      "main_score": 0.4156691324815064
    },
    "sv": {
      "accuracy": 0.4134498991257566,
      "accuracy_stderr": 0.018471099583417097,
      "f1": 0.3929727867759662,
      "f1_stderr": 0.013898327460181962,
      "main_score": 0.4134498991257566
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.4398425971470733,
      "accuracy_stderr": 0.011472183999356698,
      "f1": 0.4100657510704496,
      "f1_stderr": 0.011047044189643786,
      "main_score": 0.4398425971470733
    },
    "evaluation_time": 794.93,
    "nb": {
      "accuracy": 0.4046237088047221,
      "accuracy_stderr": 0.010695958905995989,
      "f1": 0.3715515469232034,
      "f1_stderr": 0.011460635159926987,
      "main_score": 0.4046237088047221
    },
    "sv": {
      "accuracy": 0.40550909985243483,
      "accuracy_stderr": 0.015942199767989006,
      "f1": 0.3785372697407485,
      "f1_stderr": 0.018483194344065323,
      "main_score": 0.40550909985243483
    }
  }
}
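
For reference, a minimal Python sketch of one way to read the per-language main scores out of this structure. The local file name is an assumption for the example; note that "evaluation_time" sits alongside the language codes inside each split and is a scalar, not a metric dict, so it has to be skipped.

import json

# Load the MTEB result file (local path is an assumption for this example).
with open("MassiveIntentClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    for key, value in results[split].items():
        # Each split maps language codes ("da", "nb", "sv") to metric dicts,
        # plus a scalar "evaluation_time" entry that is not a language.
        if key == "evaluation_time":
            continue
        print(f"{split}/{key}: main_score={value['main_score']:.4f}")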