{
  "test": {
    "de": {
      "accuracy": 0.6273034657650043,
      "accuracy_stderr": 0.026750019843962445,
      "f1": 0.6078623915840713,
      "f1_stderr": 0.024545709453124315,
      "main_score": 0.6273034657650043
    },
    "en": {
      "accuracy": 0.8695622435020519,
      "accuracy_stderr": 0.005091511188494251,
      "f1": 0.8658363130708494,
      "f1_stderr": 0.0051871184408422396,
      "main_score": 0.8695622435020519
    },
    "es": {
      "accuracy": 0.6754503002001334,
      "accuracy_stderr": 0.018521804912221235,
      "f1": 0.6534879794116112,
      "f1_stderr": 0.017925402376902783,
      "main_score": 0.6754503002001334
    },
    "evaluation_time": 184.8,
    "fr": {
      "accuracy": 0.653523332289383,
      "accuracy_stderr": 0.021385772427901146,
      "f1": 0.6299400188244665,
      "f1_stderr": 0.019299664587033034,
      "main_score": 0.653523332289383
    },
    "hi": {
      "accuracy": 0.45371100752958055,
      "accuracy_stderr": 0.00950289876773436,
      "f1": 0.4426285860740745,
      "f1_stderr": 0.008239449950704894,
      "main_score": 0.45371100752958055
    },
    "th": {
      "accuracy": 0.5527667269439421,
      "accuracy_stderr": 0.026744140679562094,
      "f1": 0.5328388179869588,
      "f1_stderr": 0.024911722678940297,
      "main_score": 0.5527667269439421
    }
  },
  "validation": {
    "de": {
      "accuracy": 0.623030303030303,
      "accuracy_stderr": 0.018007006691627983,
      "f1": 0.6090030451296102,
      "f1_stderr": 0.016252335030927607,
      "main_score": 0.623030303030303
    },
    "en": {
      "accuracy": 0.8647874720357942,
      "accuracy_stderr": 0.008134813907732269,
      "f1": 0.8643221005490525,
      "f1_stderr": 0.007924300687945415,
      "main_score": 0.8647874720357942
    },
    "es": {
      "accuracy": 0.6768172888015718,
      "accuracy_stderr": 0.017930410630693398,
      "f1": 0.6614808491907962,
      "f1_stderr": 0.01724199330369825,
      "main_score": 0.6768172888015718
    },
    "evaluation_time": 129.45,
    "fr": {
      "accuracy": 0.6497780596068484,
      "accuracy_stderr": 0.025761089702497523,
      "f1": 0.6384542819967916,
      "f1_stderr": 0.02439328627444013,
      "main_score": 0.6497780596068484
    },
    "hi": {
      "accuracy": 0.44483101391650104,
      "accuracy_stderr": 0.010657528607643997,
      "f1": 0.441310151844022,
      "f1_stderr": 0.007308843847135203,
      "main_score": 0.44483101391650104
    },
    "th": {
      "accuracy": 0.5390783961699582,
      "accuracy_stderr": 0.021304608019150205,
      "f1": 0.5230342245719538,
      "f1_stderr": 0.0192316272968326,
      "main_score": 0.5390783961699582
    }
  },
  "mteb_version": "0.0.2",
  "mteb_dataset_name": "MTOPDomainClassification",
  "dataset_revision": "a7e2a951126a26fc8c6a69f835f33a346ba259e3"
}