results/all-MiniLM-L12-v2/AmazonReviewsClassification.json
{
    "mteb_version": "0.0.2",
    "test": {
        "de": {
            "accuracy": 0.25908,
            "accuracy_stderr": 0.00901119303977004,
            "f1": 0.25538149526380544,
            "f1_stderr": 0.011673588756159523,
            "main_score": 0.25908
        },
        "en": {
            "accuracy": 0.30792,
            "accuracy_stderr": 0.016674819339351176,
            "f1": 0.3025456531557544,
            "f1_stderr": 0.01789579738481092,
            "main_score": 0.30792
        },
        "es": {
            "accuracy": 0.27634000000000003,
            "accuracy_stderr": 0.01265165601808711,
            "f1": 0.27287076320171727,
            "f1_stderr": 0.01282496330105531,
            "main_score": 0.27634000000000003
        },
        "evaluation_time": 41.37,
        "fr": {
            "accuracy": 0.27540000000000003,
            "accuracy_stderr": 0.017711013522664368,
            "f1": 0.2721486019130574,
            "f1_stderr": 0.016500699285740404,
            "main_score": 0.27540000000000003
        },
        "ja": {
            "accuracy": 0.23566000000000004,
            "accuracy_stderr": 0.01216817159642319,
            "f1": 0.23349265077190498,
            "f1_stderr": 0.012794109244058882,
            "main_score": 0.23566000000000004
        },
        "zh": {
            "accuracy": 0.2299,
            "accuracy_stderr": 0.011964698073917285,
            "f1": 0.2247175043426865,
            "f1_stderr": 0.011696175211431542,
            "main_score": 0.2299
        }
    },
    "mteb_dataset_name": "AmazonReviewsClassification",
    "dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}