results/distiluse-base-multilingual-cased-v2/AmazonReviewsClassification.json
{
"dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
"mteb_dataset_name": "AmazonReviewsClassification",
"mteb_version": "1.0.2",
"test": {
"de": {
"accuracy": 0.35032,
"accuracy_stderr": 0.024274793510965235,
"f1": 0.33939764470643535,
"f1_stderr": 0.02282070229582006,
"main_score": 0.35032
},
"en": {
"accuracy": 0.35454,
"accuracy_stderr": 0.022858880112551442,
"f1": 0.35019589142407004,
"f1_stderr": 0.02054968398529996,
"main_score": 0.35454
},
"es": {
"accuracy": 0.36242,
"accuracy_stderr": 0.010545691063178358,
"f1": 0.3498879083946539,
"f1_stderr": 0.01321622772317041,
"main_score": 0.36242
},
"evaluation_time": 62.08,
"fr": {
"accuracy": 0.357,
"accuracy_stderr": 0.022324336496299283,
"f1": 0.3474911268048424,
"f1_stderr": 0.022703709419436332,
"main_score": 0.357
},
"ja": {
"accuracy": 0.31076,
"accuracy_stderr": 0.01528968279592484,
"f1": 0.30525865114811995,
"f1_stderr": 0.01261552083352967,
"main_score": 0.31076
},
"zh": {
"accuracy": 0.33894,
"accuracy_stderr": 0.013566149048274542,
"f1": 0.32638513658296125,
"f1_stderr": 0.01590406062662379,
"main_score": 0.33894
}
},
"validation": {
"de": {
"accuracy": 0.35140000000000005,
"accuracy_stderr": 0.03167813125801457,
"f1": 0.33995664837547496,
"f1_stderr": 0.029651988528941266,
"main_score": 0.35140000000000005
},
"en": {
"accuracy": 0.35572,
"accuracy_stderr": 0.02285260597831241,
"f1": 0.35182678284471613,
"f1_stderr": 0.021081362317817858,
"main_score": 0.35572
},
"es": {
"accuracy": 0.36605999999999994,
"accuracy_stderr": 0.01066547701699273,
"f1": 0.35388988527330134,
"f1_stderr": 0.011414525934042363,
"main_score": 0.36605999999999994
},
"evaluation_time": 64.9,
"fr": {
"accuracy": 0.35146,
"accuracy_stderr": 0.021103374137800814,
"f1": 0.3420240760686955,
"f1_stderr": 0.020450899496203788,
"main_score": 0.35146
},
"ja": {
"accuracy": 0.31432,
"accuracy_stderr": 0.01748123565426655,
"f1": 0.3085361473217993,
"f1_stderr": 0.01532611041574829,
"main_score": 0.31432
},
"zh": {
"accuracy": 0.33161999999999997,
"accuracy_stderr": 0.011498330313571618,
"f1": 0.31936819236765673,
"f1_stderr": 0.011252640258108498,
"main_score": 0.33161999999999997
}
}
}
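A minimal sketch (not part of the results file itself) of how one might load this MTEB result JSON and print the per-language scores. The file path is assumed from the repository layout above; the skip of the scalar "evaluation_time" key follows the structure visible in the file.

import json

# Path assumed from the repo layout shown above.
path = "results/distiluse-base-multilingual-cased-v2/AmazonReviewsClassification.json"
with open(path) as f:
    results = json.load(f)

for split in ("test", "validation"):
    scores = results[split]
    print(f"{split} (evaluation_time: {scores['evaluation_time']}s)")
    for lang, metrics in scores.items():
        if lang == "evaluation_time":
            continue  # scalar timing entry, not a language block
        print(f"  {lang}: accuracy={metrics['accuracy']:.4f}  f1={metrics['f1']:.4f}")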