{
"mteb_version": "0.0.2",
"test": {
"de": {
"accuracy": 0.39918,
"accuracy_stderr": 0.017999877777362824,
"f1": 0.3872903904122544,
"f1_stderr": 0.015663086517712376,
"main_score": 0.39918
},
"en": {
"accuracy": 0.35802,
"accuracy_stderr": 0.031141284495023635,
"f1": 0.3543599382787903,
"f1_stderr": 0.02817265004916295,
"main_score": 0.35802
},
"es": {
"accuracy": 0.39386,
"accuracy_stderr": 0.016084539160324117,
"f1": 0.38117957858608553,
"f1_stderr": 0.01650338423324477,
"main_score": 0.39386
},
"evaluation_time": 89.32,
"fr": {
"accuracy": 0.3852399999999999,
"accuracy_stderr": 0.0278640700544626,
"f1": 0.3747344348473724,
"f1_stderr": 0.029456947779604455,
"main_score": 0.3852399999999999
},
"ja": {
"accuracy": 0.36444,
"accuracy_stderr": 0.01249121291148302,
"f1": 0.35658652904670474,
"f1_stderr": 0.008082128063389675,
"main_score": 0.36444
},
"zh": {
"accuracy": 0.3645,
"accuracy_stderr": 0.023223996210816087,
"f1": 0.3558331060182865,
"f1_stderr": 0.02222378177037217,
"main_score": 0.3645
}
},
"mteb_dataset_name": "AmazonReviewsClassification",
"dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}