results/sentence-t5-base/MTOPDomainClassification.json
{
"test": {
"de": {
"accuracy": 0.7698224852071005,
"accuracy_stderr": 0.019125260039426613,
"f1": 0.7510443724253961,
"f1_stderr": 0.017817696810049216,
"main_score": 0.7698224852071005
},
"en": {
"accuracy": 0.903374373005016,
"accuracy_stderr": 0.009585581302154669,
"f1": 0.9025497662319413,
"f1_stderr": 0.008774337932563447,
"main_score": 0.903374373005016
},
"es": {
"accuracy": 0.7360907271514343,
"accuracy_stderr": 0.021917325740495066,
"f1": 0.7315530983235772,
"f1_stderr": 0.02084097198193435,
"main_score": 0.7360907271514343
},
"evaluation_time": 97.69,
"fr": {
"accuracy": 0.7502975258377701,
"accuracy_stderr": 0.017879028966274964,
"f1": 0.7553083321964739,
"f1_stderr": 0.014707898465849939,
"main_score": 0.7502975258377701
},
"hi": {
"accuracy": 0.21401936177841518,
"accuracy_stderr": 0.03232394536614279,
"f1": 0.15465217146460256,
"f1_stderr": 0.013127707447034693,
"main_score": 0.21401936177841518
},
"th": {
"accuracy": 0.1620614828209765,
"accuracy_stderr": 0.01731719441697952,
"f1": 0.11580229602870344,
"f1_stderr": 0.015003572454865558,
"main_score": 0.1620614828209765
}
},
"validation": {
"de": {
"accuracy": 0.7694214876033058,
"accuracy_stderr": 0.020960186844768337,
"f1": 0.7549915141551,
"f1_stderr": 0.020028947473894634,
"main_score": 0.7694214876033058
},
"en": {
"accuracy": 0.9105145413870247,
"accuracy_stderr": 0.009186980057421447,
"f1": 0.9103403259746903,
"f1_stderr": 0.008234973136419275,
"main_score": 0.9105145413870247
},
"es": {
"accuracy": 0.7462999345121152,
"accuracy_stderr": 0.019473590869351017,
"f1": 0.747102192483126,
"f1_stderr": 0.020083594260645363,
"main_score": 0.7462999345121152
},
"evaluation_time": 71.45,
"fr": {
"accuracy": 0.7581483830057071,
"accuracy_stderr": 0.017703521039433696,
"f1": 0.7638880900825402,
"f1_stderr": 0.014708643403670018,
"main_score": 0.7581483830057071
},
"hi": {
"accuracy": 0.21187872763419485,
"accuracy_stderr": 0.024355956011372425,
"f1": 0.1617015230116376,
"f1_stderr": 0.010097732047053417,
"main_score": 0.21187872763419485
},
"th": {
"accuracy": 0.17061639736684617,
"accuracy_stderr": 0.010599219527604675,
"f1": 0.12206372413431979,
"f1_stderr": 0.01779085112704867,
"main_score": 0.17061639736684617
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}