results/e5-large/MassiveScenarioClassification.json
{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.5294889038332213,
      "accuracy_stderr": 0.01179845398050312,
      "f1": 0.497844188181838,
      "f1_stderr": 0.015948599545340024,
      "main_score": 0.5294889038332213
    },
    "evaluation_time": 326.08,
    "nb": {
      "accuracy": 0.51906523201076,
      "accuracy_stderr": 0.020893959423191263,
      "f1": 0.48469253168791776,
      "f1_stderr": 0.018431620456520796,
      "main_score": 0.51906523201076
    },
    "sv": {
      "accuracy": 0.5096503026227303,
      "accuracy_stderr": 0.01508477542287528,
      "f1": 0.47970318432652237,
      "f1_stderr": 0.013314843525331407,
      "main_score": 0.5096503026227303
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.5165272995573045,
      "accuracy_stderr": 0.011849094541299784,
      "f1": 0.4961063551503173,
      "f1_stderr": 0.01124975130200044,
      "main_score": 0.5165272995573045
    },
    "evaluation_time": 264.09,
    "nb": {
      "accuracy": 0.5115100836202656,
      "accuracy_stderr": 0.01940848275049482,
      "f1": 0.4856689476783229,
      "f1_stderr": 0.018493463799682273,
      "main_score": 0.5115100836202656
    },
    "sv": {
      "accuracy": 0.5080177078209542,
      "accuracy_stderr": 0.01546550615630958,
      "f1": 0.4789298536921504,
      "f1_stderr": 0.013200788328562323,
      "main_score": 0.5080177078209542
    }
  }
}
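
A minimal sketch of how a result file with this shape can be read back, assuming it is saved locally as MassiveScenarioClassification.json (the path is illustrative, not part of the file above). It prints the main_score for each split and language, skipping the scalar evaluation_time field that sits alongside the language entries.

import json

# Load the MTEB result file; the filename is an assumption for illustration.
with open("MassiveScenarioClassification.json") as f:
    results = json.load(f)

for split in ("test", "validation"):
    for lang, metrics in results.get(split, {}).items():
        if lang == "evaluation_time":  # scalar field, not a language entry
            continue
        print(f"{split}/{lang}: main_score={metrics['main_score']:.4f}")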