{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.42286482851378615,
      "accuracy_stderr": 0.012383040984379165,
      "f1": 0.4045450712997618,
      "f1_stderr": 0.011450412901048949,
      "main_score": 0.42286482851378615
    },
    "evaluation_time": 526.53,
    "nb": {
      "accuracy": 0.4063214525891056,
      "accuracy_stderr": 0.014827865675936818,
      "f1": 0.3789828222208245,
      "f1_stderr": 0.012769924322664489,
      "main_score": 0.4063214525891056
    },
    "sv": {
      "accuracy": 0.4068594485541358,
      "accuracy_stderr": 0.016741613025560997,
      "f1": 0.3868424150540931,
      "f1_stderr": 0.014421268110823617,
      "main_score": 0.4068594485541358
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.42193802262666014,
      "accuracy_stderr": 0.010585908699213985,
      "f1": 0.39827047128659854,
      "f1_stderr": 0.010040840264160085,
      "main_score": 0.42193802262666014
    },
    "evaluation_time": 469.62,
    "nb": {
      "accuracy": 0.39163797343826856,
      "accuracy_stderr": 0.012746913290575083,
      "f1": 0.3589018215724903,
      "f1_stderr": 0.014090109687736802,
      "main_score": 0.39163797343826856
    },
    "sv": {
      "accuracy": 0.40270536153467784,
      "accuracy_stderr": 0.018089093686221885,
      "f1": 0.3700168488921628,
      "f1_stderr": 0.015663183947950413,
      "main_score": 0.40270536153467784
    }
  }
}