{
  "test": {
    "de": {
      "accuracy": 0.2479,
      "accuracy_stderr": 0.01405709785126361,
      "f1": 0.245833598854121,
      "f1_stderr": 0.015461604594068135,
      "main_score": 0.2479
    },
    "en": {
      "accuracy": 0.31165999999999994,
      "accuracy_stderr": 0.018214291092436177,
      "f1": 0.30908870050167436,
      "f1_stderr": 0.01830210891332752,
      "main_score": 0.31165999999999994
    },
    "es": {
      "accuracy": 0.26643999999999995,
      "accuracy_stderr": 0.011960869533608332,
      "f1": 0.2639012792213563,
      "f1_stderr": 0.013800540988456476,
      "main_score": 0.26643999999999995
    },
    "evaluation_time": 735.82,
    "fr": {
      "accuracy": 0.26386000000000004,
      "accuracy_stderr": 0.017606828220892035,
      "f1": 0.2627686779145487,
      "f1_stderr": 0.01750120871837706,
      "main_score": 0.26386000000000004
    },
    "ja": {
      "accuracy": 0.22078000000000003,
      "accuracy_stderr": 0.011609117106825998,
      "f1": 0.21797960290226842,
      "f1_stderr": 0.011959444775888735,
      "main_score": 0.22078000000000003
    },
    "zh": {
      "accuracy": 0.24274,
      "accuracy_stderr": 0.007931733732293337,
      "f1": 0.23887054434822627,
      "f1_stderr": 0.008252486720387382,
      "main_score": 0.24274
    }
  },
  "validation": {
    "de": {
      "accuracy": 0.25168,
      "accuracy_stderr": 0.015328457195686715,
      "f1": 0.2502088133771333,
      "f1_stderr": 0.01650287553492419,
      "main_score": 0.25168
    },
    "en": {
      "accuracy": 0.32438,
      "accuracy_stderr": 0.022893920590410018,
      "f1": 0.3212565428623893,
      "f1_stderr": 0.021985479345985583,
      "main_score": 0.32438
    },
    "es": {
      "accuracy": 0.26704,
      "accuracy_stderr": 0.012879068289282413,
      "f1": 0.2641578465769073,
      "f1_stderr": 0.01402375418117518,
      "main_score": 0.26704
    },
    "evaluation_time": 740.71,
    "fr": {
      "accuracy": 0.26348,
      "accuracy_stderr": 0.017907584985139675,
      "f1": 0.26220101963133713,
      "f1_stderr": 0.01807845581838548,
      "main_score": 0.26348
    },
    "ja": {
      "accuracy": 0.21562,
      "accuracy_stderr": 0.011403666077187636,
      "f1": 0.21286585397745714,
      "f1_stderr": 0.011890703738952032,
      "main_score": 0.21562
    },
    "zh": {
      "accuracy": 0.23962,
      "accuracy_stderr": 0.012780125195004927,
      "f1": 0.23540552974934634,
      "f1_stderr": 0.012407243106519977,
      "main_score": 0.23962
    }
  },
  "mteb_version": "0.0.2",
  "mteb_dataset_name": "AmazonReviewsClassification",
  "dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}