{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "en": {
      "accuracy": 0.9256269949840401,
      "accuracy_stderr": 0.0038850782151341893,
      "f1": 0.921020975473988,
      "f1_stderr": 0.0044060167819252195,
      "main_score": 0.9256269949840401
    },
    "evaluation_time": 45.61
  }
}