{
    "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba",
    "mteb_dataset_name": "MTOPIntentClassification",
    "mteb_version": "1.0.2",
    "test": {
        "de": {
            "accuracy": 0.5920822766976612,
            "accuracy_stderr": 0.02474701600685877,
            "f1": 0.36594153749624536,
            "f1_stderr": 0.010942758271326127,
            "main_score": 0.5920822766976612
        },
        "en": {
            "accuracy": 0.6640218878248975,
            "accuracy_stderr": 0.01798938840121496,
            "f1": 0.440157655128108,
            "f1_stderr": 0.009354607672096033,
            "main_score": 0.6640218878248975
        },
        "es": {
            "accuracy": 0.5721147431621081,
            "accuracy_stderr": 0.014623897660868575,
            "f1": 0.3846167201793877,
            "f1_stderr": 0.012539758475101621,
            "main_score": 0.5721147431621081
        },
        "evaluation_time": 90.97,
        "fr": {
            "accuracy": 0.5340745380519887,
            "accuracy_stderr": 0.027571336367074026,
            "f1": 0.3687813951228687,
            "f1_stderr": 0.0069233558553558824,
            "main_score": 0.5340745380519887
        },
        "hi": {
            "accuracy": 0.4554320544998208,
            "accuracy_stderr": 0.026878830594223708,
            "f1": 0.2809108688148479,
            "f1_stderr": 0.010259727365242678,
            "main_score": 0.4554320544998208
        },
        "th": {
            "accuracy": 0.4773236889692586,
            "accuracy_stderr": 0.024172988197644237,
            "f1": 0.2987429451601028,
            "f1_stderr": 0.009367086874615469,
            "main_score": 0.4773236889692586
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.5873278236914601,
            "accuracy_stderr": 0.023941280077355232,
            "f1": 0.36695799030694254,
            "f1_stderr": 0.016786355460051364,
            "main_score": 0.5873278236914601
        },
        "en": {
            "accuracy": 0.6718568232662192,
            "accuracy_stderr": 0.017732125620084332,
            "f1": 0.43355788345559115,
            "f1_stderr": 0.015160468260757431,
            "main_score": 0.6718568232662192
        },
        "es": {
            "accuracy": 0.5925343811394892,
            "accuracy_stderr": 0.014038867441106525,
            "f1": 0.399761961435884,
            "f1_stderr": 0.005740116482098513,
            "main_score": 0.5925343811394892
        },
        "evaluation_time": 88.26,
        "fr": {
            "accuracy": 0.5419784400760939,
            "accuracy_stderr": 0.020250135790377707,
            "f1": 0.3466031643658519,
            "f1_stderr": 0.009462256492570287,
            "main_score": 0.5419784400760939
        },
        "hi": {
            "accuracy": 0.45248508946322064,
            "accuracy_stderr": 0.020827599876121216,
            "f1": 0.29262307534575477,
            "f1_stderr": 0.007752611943651198,
            "main_score": 0.45248508946322064
        },
        "th": {
            "accuracy": 0.4751645721125075,
            "accuracy_stderr": 0.022562213559374794,
            "f1": 0.29277985694657527,
            "f1_stderr": 0.012170368969005575,
            "main_score": 0.4751645721125075
        }
    }
}