results/results/allenai-specter/AmazonReviewsClassification.json
{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.24081999999999998,
      "accuracy_stderr": 0.01794490456926422,
      "f1": 0.23683977230436776,
      "f1_stderr": 0.016995564976731978,
      "main_score": 0.24081999999999998
    },
    "en": {
      "accuracy": 0.26261999999999996,
      "accuracy_stderr": 0.014452805955938106,
      "f1": 0.2588719574606898,
      "f1_stderr": 0.013433530981801752,
      "main_score": 0.26261999999999996
    },
    "es": {
      "accuracy": 0.23884,
      "accuracy_stderr": 0.01168445120662498,
      "f1": 0.23485858394715126,
      "f1_stderr": 0.009909978267392322,
      "main_score": 0.23884
    },
    "evaluation_time": 66.85,
    "fr": {
      "accuracy": 0.23306,
      "accuracy_stderr": 0.015508720127721691,
      "f1": 0.231095594077893,
      "f1_stderr": 0.015224512099661771,
      "main_score": 0.23306
    },
    "ja": {
      "accuracy": 0.20248,
      "accuracy_stderr": 0.00880963109329784,
      "f1": 0.19271369437799923,
      "f1_stderr": 0.010805728131787673,
      "main_score": 0.20248
    },
    "zh": {
      "accuracy": 0.20492,
      "accuracy_stderr": 0.011744683903792387,
      "f1": 0.19215547744535208,
      "f1_stderr": 0.014355793182874239,
      "main_score": 0.20492
    }
  },
  "mteb_dataset_name": "AmazonReviewsClassification",
  "dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}