{
"test": {
"de": {
"accuracy": 0.37895999999999996,
"accuracy_stderr": 0.023904108433489005,
"f1": 0.3542842923094611,
"f1_stderr": 0.01820199525217839,
"main_score": 0.37895999999999996
},
"en": {
"accuracy": 0.44943999999999995,
"accuracy_stderr": 0.012888382365525945,
"f1": 0.4268178385594884,
"f1_stderr": 0.013996886728022986,
"main_score": 0.44943999999999995
},
"es": {
"accuracy": 0.37328,
"accuracy_stderr": 0.011726619291168291,
"f1": 0.34263354567525534,
"f1_stderr": 0.01469109977557661,
"main_score": 0.37328
},
"evaluation_time": 391.4,
"fr": {
"accuracy": 0.3735,
"accuracy_stderr": 0.023887946751447686,
"f1": 0.3464493197423049,
"f1_stderr": 0.029775832242787608,
"main_score": 0.3735
},
"ja": {
"accuracy": 0.22290000000000001,
"accuracy_stderr": 0.013659648604557881,
"f1": 0.20438677904046304,
"f1_stderr": 0.011009769205853488,
"main_score": 0.22290000000000001
},
"zh": {
"accuracy": 0.2153,
"accuracy_stderr": 0.014814654906544397,
"f1": 0.18273004097867843,
"f1_stderr": 0.01238010675840164,
"main_score": 0.2153
}
},
"validation": {
"de": {
"accuracy": 0.37720000000000004,
"accuracy_stderr": 0.025560125195311548,
"f1": 0.35303162574612035,
"f1_stderr": 0.0197396858767609,
"main_score": 0.37720000000000004
},
"en": {
"accuracy": 0.44264000000000003,
"accuracy_stderr": 0.0203384955195806,
"f1": 0.4081419540275914,
"f1_stderr": 0.017055499037526824,
"main_score": 0.44264000000000003
},
"es": {
"accuracy": 0.37464,
"accuracy_stderr": 0.011471634582743645,
"f1": 0.34386580692321533,
"f1_stderr": 0.010636033299775806,
"main_score": 0.37464
},
"evaluation_time": 401.74,
"fr": {
"accuracy": 0.37306000000000006,
"accuracy_stderr": 0.019889102543855515,
"f1": 0.34816347443508533,
"f1_stderr": 0.026673046264337336,
"main_score": 0.37306000000000006
},
"ja": {
"accuracy": 0.22454000000000002,
"accuracy_stderr": 0.018130427463245318,
"f1": 0.2059579566027344,
"f1_stderr": 0.014241989110262138,
"main_score": 0.22454000000000002
},
"zh": {
"accuracy": 0.2165,
"accuracy_stderr": 0.013457265695526709,
"f1": 0.1860797990809301,
"f1_stderr": 0.013475463117734147,
"main_score": 0.2165
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}