{
"test": {
"de": {
"accuracy": 0.24516,
"accuracy_stderr": 0.013653365885377858,
"f1": 0.2421748200448397,
"f1_stderr": 0.014364828062848542,
"main_score": 0.24516
},
"en": {
"accuracy": 0.35098,
"accuracy_stderr": 0.016031331822403287,
"f1": 0.34732656514357263,
"f1_stderr": 0.014226778450962598,
"main_score": 0.35098
},
"es": {
"accuracy": 0.29097999999999996,
"accuracy_stderr": 0.012527234331647189,
"f1": 0.28620040162757093,
"f1_stderr": 0.012203369115937461,
"main_score": 0.29097999999999996
},
"evaluation_time": 458.68,
"fr": {
"accuracy": 0.27396,
"accuracy_stderr": 0.019808240709361352,
"f1": 0.27146888644986283,
"f1_stderr": 0.019022355135502007,
"main_score": 0.27396
},
"ja": {
"accuracy": 0.21724000000000002,
"accuracy_stderr": 0.010203842413522466,
"f1": 0.2137230564276654,
"f1_stderr": 0.01242184804054769,
"main_score": 0.21724000000000002
},
"zh": {
"accuracy": 0.23975999999999997,
"accuracy_stderr": 0.012464605890279888,
"f1": 0.23741137981755484,
"f1_stderr": 0.01276710263102806,
"main_score": 0.23975999999999997
}
},
"validation": {
"de": {
"accuracy": 0.24498000000000003,
"accuracy_stderr": 0.014547425889139284,
"f1": 0.24228823605944716,
"f1_stderr": 0.01555608008031814,
"main_score": 0.24498000000000003
},
"en": {
"accuracy": 0.35306,
"accuracy_stderr": 0.0140618775417794,
"f1": 0.3497346805075623,
"f1_stderr": 0.012226804543653744,
"main_score": 0.35306
},
"es": {
"accuracy": 0.29144,
"accuracy_stderr": 0.010667633289535211,
"f1": 0.28677674422287674,
"f1_stderr": 0.010680354361873633,
"main_score": 0.29144
},
"evaluation_time": 451.13,
"fr": {
"accuracy": 0.27192,
"accuracy_stderr": 0.01950327152043985,
"f1": 0.2689250735785097,
"f1_stderr": 0.01790250686687033,
"main_score": 0.27192
},
"ja": {
"accuracy": 0.21986,
"accuracy_stderr": 0.008387156848420087,
"f1": 0.21661564151001683,
"f1_stderr": 0.009895205366288119,
"main_score": 0.21986
},
"zh": {
"accuracy": 0.23889999999999997,
"accuracy_stderr": 0.007997124483212702,
"f1": 0.2370552262227123,
"f1_stderr": 0.008576952995377527,
"main_score": 0.23889999999999997
}
},
"mteb_version": "0.0.2",
"mteb_dataset_name": "AmazonReviewsClassification",
"dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}