leaderboard / results / LASER2 / AmazonReviewsClassification.json
{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"de": {
"accuracy": 0.31068,
"accuracy_stderr": 0.030799766232879097,
"f1": 0.2938071341251565,
"f1_stderr": 0.03301311313116498,
"main_score": 0.31068
},
"en": {
"accuracy": 0.2871,
"accuracy_stderr": 0.031948740194254914,
"f1": 0.2763831660571802,
"f1_stderr": 0.03216710195455939,
"main_score": 0.2871
},
"es": {
"accuracy": 0.32724000000000003,
"accuracy_stderr": 0.015933311018115466,
"f1": 0.310782596824498,
"f1_stderr": 0.022188814588153163,
"main_score": 0.32724000000000003
},
"evaluation_time": 295.02,
"fr": {
"accuracy": 0.31116,
"accuracy_stderr": 0.030352304690089024,
"f1": 0.2995469284574527,
"f1_stderr": 0.03141580285250744,
"main_score": 0.31116
},
"ja": {
"accuracy": 0.28935999999999995,
"accuracy_stderr": 0.024075680675735834,
"f1": 0.2818735717046802,
"f1_stderr": 0.023753772760779744,
"main_score": 0.28935999999999995
},
"zh": {
"accuracy": 0.30892000000000003,
"accuracy_stderr": 0.02032696730946355,
"f1": 0.2990186813313857,
"f1_stderr": 0.021581437215568936,
"main_score": 0.30892000000000003
}
}
}
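
A minimal sketch (not part of the results file itself) of how one might read this per-language result JSON and print the main scores; the file path is an assumption based on the repository layout shown above, and the skip of the scalar "evaluation_time" entry reflects how it is stored alongside the language keys in this file.

import json

# Load the LASER2 AmazonReviewsClassification results (path assumed from repo layout).
with open("results/LASER2/AmazonReviewsClassification.json") as f:
    result = json.load(f)

test_split = result["test"]
for lang, scores in sorted(test_split.items()):
    if lang == "evaluation_time":  # scalar stored next to the language entries
        continue
    print(f"{lang}: accuracy={scores['accuracy']:.4f}, f1={scores['f1']:.4f}")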