{
"test": {
"de": {
"accuracy": 0.6663811563169164,
"accuracy_stderr": 0.022068144746026603,
"ap": 0.7857252079915924,
"ap_stderr": 0.01626075149691798,
"f1": 0.645543087846584,
"f1_stderr": 0.02252023574955773,
"main_score": 0.7857252079915924
},
"en": {
"accuracy": 0.7407462686567164,
"accuracy_stderr": 0.04367833372178889,
"ap": 0.3744692407529112,
"ap_stderr": 0.03978200279110947,
"f1": 0.682897100391642,
"f1_stderr": 0.03883514198495523,
"main_score": 0.3744692407529112
},
"en-ext": {
"accuracy": 0.7721889055472263,
"accuracy_stderr": 0.03960185508277702,
"ap": 0.25663426367826714,
"ap_stderr": 0.038619048631651824,
"f1": 0.6426265688503177,
"f1_stderr": 0.03811240272126664,
"main_score": 0.25663426367826714
},
"evaluation_time": 1225.64,
"ja": {
"accuracy": 0.5806209850107067,
"accuracy_stderr": 0.0420214746003877,
"ap": 0.14028219107023915,
"ap_stderr": 0.008575826057837969,
"f1": 0.48103871896607775,
"f1_stderr": 0.024654134473470486,
"main_score": 0.14028219107023915
}
},
"validation": {
"de": {
"accuracy": 0.6793991416309013,
"accuracy_stderr": 0.019054053974612118,
"ap": 0.7928279115687206,
"ap_stderr": 0.015103992216292432,
"f1": 0.6570169873679498,
"f1_stderr": 0.02013652583765241,
"main_score": 0.7928279115687206
},
"en": {
"accuracy": 0.7402985074626866,
"accuracy_stderr": 0.042677172538893186,
"ap": 0.3252957597884174,
"ap_stderr": 0.03879964692606918,
"f1": 0.6613385079364107,
"f1_stderr": 0.039371147450282265,
"main_score": 0.3252957597884174
},
"en-ext": {
"accuracy": 0.7442942942942943,
"accuracy_stderr": 0.033345843498282436,
"ap": 0.2116350024463869,
"ap_stderr": 0.02536293503560953,
"f1": 0.6055803428080194,
"f1_stderr": 0.028861694280347662,
"main_score": 0.2116350024463869
},
"evaluation_time": 700.49,
"ja": {
"accuracy": 0.5723175965665236,
"accuracy_stderr": 0.04301544833997049,
"ap": 0.12638105191736498,
"ap_stderr": 0.012784838196735804,
"f1": 0.46631396940870024,
"f1_stderr": 0.025301090824361976,
"main_score": 0.12638105191736498
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}