{
    "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
    "mteb_dataset_name": "MassiveScenarioClassification",
    "mteb_version": "1.0.3.dev0",
    "test": {
        "da": {
            "accuracy": 0.5299260255548084,
            "accuracy_stderr": 0.00994860772595831,
            "f1": 0.498560032028262,
            "f1_stderr": 0.011851321957858672,
            "main_score": 0.5299260255548084
        },
        "evaluation_time": 106.25,
        "nb": {
            "accuracy": 0.503261600537996,
            "accuracy_stderr": 0.023111260517713856,
            "f1": 0.4690655880152623,
            "f1_stderr": 0.02016718494227476,
            "main_score": 0.503261600537996
        },
        "sv": {
            "accuracy": 0.5000336247478143,
            "accuracy_stderr": 0.016636232640655457,
            "f1": 0.4690590587313784,
            "f1_stderr": 0.015175993334931649,
            "main_score": 0.5000336247478143
        }
    },
    "validation": {
        "da": {
            "accuracy": 0.5171667486473192,
            "accuracy_stderr": 0.011807673132943994,
            "f1": 0.4974806088306596,
            "f1_stderr": 0.010871699242333253,
            "main_score": 0.5171667486473192
        },
        "evaluation_time": 78.77,
        "nb": {
            "accuracy": 0.49665518937530734,
            "accuracy_stderr": 0.02356226642261646,
            "f1": 0.47463894012003527,
            "f1_stderr": 0.022075754462668067,
            "main_score": 0.49665518937530734
        },
        "sv": {
            "accuracy": 0.5000983767830793,
            "accuracy_stderr": 0.015305495827354468,
            "f1": 0.47399924919005876,
            "f1_stderr": 0.013426657789642702,
            "main_score": 0.5000983767830793
        }
    }
}