{
"dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf",
"mteb_dataset_name": "MTOPDomainClassification",
"mteb_version": "1.0.2",
"test": {
"de": {
"accuracy": 0.8619047619047618,
"accuracy_stderr": 0.009279975268553665,
"f1": 0.8442185095665184,
"f1_stderr": 0.01099157459261156,
"main_score": 0.8619047619047618
},
"en": {
"accuracy": 0.9159142726858185,
"accuracy_stderr": 0.005747905916342653,
"f1": 0.9116731589297895,
"f1_stderr": 0.006083518480038355,
"main_score": 0.9159142726858185
},
"es": {
"accuracy": 0.8774516344229486,
"accuracy_stderr": 0.009217490654903758,
"f1": 0.8689629934160831,
"f1_stderr": 0.010461264595061925,
"main_score": 0.8774516344229486
},
"evaluation_time": 9.29,
"fr": {
"accuracy": 0.8461321641089883,
"accuracy_stderr": 0.008683815611798849,
"f1": 0.8386194715158407,
"f1_stderr": 0.00994551331915671,
"main_score": 0.8461321641089883
},
"hi": {
"accuracy": 0.7641448547866619,
"accuracy_stderr": 0.009813835549335584,
"f1": 0.7466143814759416,
"f1_stderr": 0.011321537828004031,
"main_score": 0.7641448547866619
},
"th": {
"accuracy": 0.7361663652802893,
"accuracy_stderr": 0.009496339933087771,
"f1": 0.7159773512640322,
"f1_stderr": 0.013316284527249577,
"main_score": 0.7361663652802893
}
},
"validation": {
"de": {
"accuracy": 0.8556473829201103,
"accuracy_stderr": 0.008056144011651441,
"f1": 0.8387377179851955,
"f1_stderr": 0.010951159819488653,
"main_score": 0.8556473829201103
},
"en": {
"accuracy": 0.9127516778523491,
"accuracy_stderr": 0.007890466258858354,
"f1": 0.9110037602177025,
"f1_stderr": 0.008500038055347383,
"main_score": 0.9127516778523491
},
"es": {
"accuracy": 0.8756385068762278,
"accuracy_stderr": 0.01476443206023662,
"f1": 0.8712497428347236,
"f1_stderr": 0.016363301926595412,
"main_score": 0.8756385068762278
},
"evaluation_time": 6.95,
"fr": {
"accuracy": 0.8384908053265694,
"accuracy_stderr": 0.01185660167420024,
"f1": 0.8351704915222973,
"f1_stderr": 0.01309398488848049,
"main_score": 0.8384908053265694
},
"hi": {
"accuracy": 0.7288270377733598,
"accuracy_stderr": 0.011434004373595383,
"f1": 0.7182544532441011,
"f1_stderr": 0.012353541684199358,
"main_score": 0.7288270377733598
},
"th": {
"accuracy": 0.7256134051466188,
"accuracy_stderr": 0.007217393977886261,
"f1": 0.7120168370041309,
"f1_stderr": 0.011166807351882722,
"main_score": 0.7256134051466188
}
}
}