{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.6478816408876933,
      "accuracy_stderr": 0.012939019665516943,
      "f1": 0.6113513880493857,
      "f1_stderr": 0.014027371072647229,
      "main_score": 0.6478816408876933
    },
    "evaluation_time": 555.7,
    "nb": {
      "accuracy": 0.6411230665770007,
      "accuracy_stderr": 0.015346336375965275,
      "f1": 0.6047137612883793,
      "f1_stderr": 0.015787583497599157,
      "main_score": 0.6411230665770007
    },
    "sv": {
      "accuracy": 0.6730665770006725,
      "accuracy_stderr": 0.015159541447212228,
      "f1": 0.6451692239685072,
      "f1_stderr": 0.017514814756512953,
      "main_score": 0.6730665770006725
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.6499262174126905,
      "accuracy_stderr": 0.011689981127354004,
      "f1": 0.5985671994320264,
      "f1_stderr": 0.0105996382499193,
      "main_score": 0.6499262174126905
    },
    "evaluation_time": 471.11,
    "nb": {
      "accuracy": 0.6467289719626168,
      "accuracy_stderr": 0.014081632249095965,
      "f1": 0.5892843869626572,
      "f1_stderr": 0.014060982441520598,
      "main_score": 0.6467289719626168
    },
    "sv": {
      "accuracy": 0.6664043285784554,
      "accuracy_stderr": 0.013269565796336365,
      "f1": 0.6203530828870285,
      "f1_stderr": 0.013514319514088017,
      "main_score": 0.6664043285784554
    }
  }
}