{
  "test": {
    "de": {
      "accuracy": 0.6997858672376874,
      "accuracy_stderr": 0.023730122771666,
      "ap": 0.8089622545806847,
      "ap_stderr": 0.017542167168256805,
      "f1": 0.6809770164363411,
      "f1_stderr": 0.02502119740678468,
      "main_score": 0.8089622545806847
    },
    "en": {
      "accuracy": 0.7582089552238807,
      "accuracy_stderr": 0.04411956455850688,
      "ap": 0.4058809426967639,
      "ap_stderr": 0.04121912788521331,
      "f1": 0.705050115572668,
      "f1_stderr": 0.03905000730794773,
      "main_score": 0.4058809426967639
    },
    "en-ext": {
      "accuracy": 0.7680659670164918,
      "accuracy_stderr": 0.041308633119833196,
      "ap": 0.26663544686227125,
      "ap_stderr": 0.03298596316694984,
      "f1": 0.6452406535274052,
      "f1_stderr": 0.03638650148023496,
      "main_score": 0.26663544686227125
    },
    "evaluation_time": 30.88,
    "ja": {
      "accuracy": 0.4604925053533191,
      "accuracy_stderr": 0.19229339644806037,
      "ap": 0.10574096802771449,
      "ap_stderr": 0.0025553531103145156,
      "f1": 0.3674441737116304,
      "f1_stderr": 0.09710742779603422,
      "main_score": 0.10574096802771449
    }
  },
  "validation": {
    "de": {
      "accuracy": 0.7218884120171675,
      "accuracy_stderr": 0.02362854868412155,
      "ap": 0.8269257610518839,
      "ap_stderr": 0.018337677553463085,
      "f1": 0.7040539344360247,
      "f1_stderr": 0.024452294561550222,
      "main_score": 0.8269257610518839
    },
    "en": {
      "accuracy": 0.7740298507462686,
      "accuracy_stderr": 0.04399416717449703,
      "ap": 0.37647941571812804,
      "ap_stderr": 0.047866726510032605,
      "f1": 0.701145596143169,
      "f1_stderr": 0.040686841158888855,
      "main_score": 0.37647941571812804
    },
    "en-ext": {
      "accuracy": 0.7606606606606606,
      "accuracy_stderr": 0.03904019402310873,
      "ap": 0.2370716655469026,
      "ap_stderr": 0.02503636151493095,
      "f1": 0.6271435481941479,
      "f1_stderr": 0.03197535545570233,
      "main_score": 0.2370716655469026
    },
    "evaluation_time": 31.36,
    "ja": {
      "accuracy": 0.4600858369098713,
      "accuracy_stderr": 0.1951402610701886,
      "ap": 0.10013210405763454,
      "ap_stderr": 0.0024966947430583863,
      "f1": 0.3633870320465661,
      "f1_stderr": 0.09790436643153545,
      "main_score": 0.10013210405763454
    }
  },
  "dataset_version": null,
  "mteb_version": "0.0.2"
}