{
"test": {
"de": {
"accuracy": 0.4956043956043956,
"accuracy_stderr": 0.013804089435250997,
"f1": 0.32863336734985976,
"f1_stderr": 0.010106922158537998,
"main_score": 0.4956043956043956
},
"en": {
"accuracy": 0.6225262197902417,
"accuracy_stderr": 0.011612339032627523,
"f1": 0.43440840371488526,
"f1_stderr": 0.007521212698716522,
"main_score": 0.6225262197902417
},
"es": {
"accuracy": 0.4993995997331555,
"accuracy_stderr": 0.017333218216550737,
"f1": 0.34726671876888127,
"f1_stderr": 0.009821507962106622,
"main_score": 0.4993995997331555
},
"evaluation_time": 336.12,
"fr": {
"accuracy": 0.46329470717193855,
"accuracy_stderr": 0.021175134055927164,
"f1": 0.3232527361598279,
"f1_stderr": 0.015670594458593846,
"main_score": 0.46329470717193855
},
"hi": {
"accuracy": 0.3220867694514163,
"accuracy_stderr": 0.007943432090835067,
"f1": 0.21321851228151392,
"f1_stderr": 0.006969308099265944,
"main_score": 0.3220867694514163
},
"th": {
"accuracy": 0.43627486437613017,
"accuracy_stderr": 0.013057059666123479,
"f1": 0.27048729223475076,
"f1_stderr": 0.007192319024223997,
"main_score": 0.43627486437613017
}
},
"validation": {
"de": {
"accuracy": 0.4886501377410468,
"accuracy_stderr": 0.015115810673577453,
"f1": 0.29305729988528817,
"f1_stderr": 0.013274418090447234,
"main_score": 0.4886501377410468
},
"en": {
"accuracy": 0.6184787472035794,
"accuracy_stderr": 0.01880584337530569,
"f1": 0.43633936644556937,
"f1_stderr": 0.012308022004906816,
"main_score": 0.6184787472035794
},
"es": {
"accuracy": 0.4988212180746562,
"accuracy_stderr": 0.018919199418584817,
"f1": 0.31473396095509043,
"f1_stderr": 0.012676416625486455,
"main_score": 0.4988212180746562
},
"evaluation_time": 282.16,
"fr": {
"accuracy": 0.46119213696892836,
"accuracy_stderr": 0.021845344865718933,
"f1": 0.28044706940385955,
"f1_stderr": 0.010116190733840541,
"main_score": 0.46119213696892836
},
"hi": {
"accuracy": 0.3071570576540755,
"accuracy_stderr": 0.008319669784761797,
"f1": 0.19812640558669523,
"f1_stderr": 0.0072837780604048784,
"main_score": 0.3071570576540755
},
"th": {
"accuracy": 0.42160383004189106,
"accuracy_stderr": 0.010170908236196702,
"f1": 0.26845354880486205,
"f1_stderr": 0.010016659553097792,
"main_score": 0.42160383004189106
}
},
"mteb_version": "0.0.2",
"mteb_dataset_name": "MTOPIntentClassification",
"dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}