results/SGPT-5.8B-weightedmean-nli-bitfit/AmazonReviewsClassification.json
{
"test": {
"de": {
"accuracy": 0.35288,
"accuracy_stderr": 0.02107058613328068,
"f1": 0.346729955585181,
"f1_stderr": 0.01764134720311681,
"main_score": 0.35288
},
"en": {
"accuracy": 0.41584000000000004,
"accuracy_stderr": 0.020039520952358114,
"f1": 0.41203137944390117,
"f1_stderr": 0.017279005050930135,
"main_score": 0.41584000000000004
},
"es": {
"accuracy": 0.3834,
"accuracy_stderr": 0.01835952069091129,
"f1": 0.37608755629529456,
"f1_stderr": 0.015971492728360393,
"main_score": 0.3834
},
"evaluation_time": 13281.74,
"fr": {
"accuracy": 0.37839999999999996,
"accuracy_stderr": 0.019133844360190666,
"f1": 0.3686898201563507,
"f1_stderr": 0.021239321437143587,
"main_score": 0.37839999999999996
},
"ja": {
"accuracy": 0.30936,
"accuracy_stderr": 0.020697594063078927,
"f1": 0.3049401738527071,
"f1_stderr": 0.02099229173543672,
"main_score": 0.30936
},
"zh": {
"accuracy": 0.3375,
"accuracy_stderr": 0.019404587086562817,
"f1": 0.3338338946025617,
"f1_stderr": 0.018337942577180567,
"main_score": 0.3375
}
},
"validation": {
"de": {
"accuracy": 0.3484999999999999,
"accuracy_stderr": 0.022654491828332846,
"f1": 0.34293557697793375,
"f1_stderr": 0.018122235678018475,
"main_score": 0.3484999999999999
},
"en": {
"accuracy": 0.41007999999999994,
"accuracy_stderr": 0.017001223485384798,
"f1": 0.4067593570080389,
"f1_stderr": 0.016187821351959022,
"main_score": 0.41007999999999994
},
"es": {
"accuracy": 0.37961999999999996,
"accuracy_stderr": 0.01318103182607492,
"f1": 0.372414075307037,
"f1_stderr": 0.01108305113023458,
"main_score": 0.37961999999999996
},
"evaluation_time": 13235.6,
"fr": {
"accuracy": 0.37964000000000003,
"accuracy_stderr": 0.01537161019542195,
"f1": 0.3700936263936513,
"f1_stderr": 0.019073104819252437,
"main_score": 0.37964000000000003
},
"ja": {
"accuracy": 0.30918,
"accuracy_stderr": 0.022460712366263007,
"f1": 0.30519655038238686,
"f1_stderr": 0.022185715212270408,
"main_score": 0.30918
},
"zh": {
"accuracy": 0.32920000000000005,
"accuracy_stderr": 0.014646774388922642,
"f1": 0.32556124920509816,
"f1_stderr": 0.013820820607680005,
"main_score": 0.32920000000000005
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}