{
  "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
  "task_name": "MassiveScenarioClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.726126,
        "f1": 0.724117,
        "f1_weighted": 0.726333,
        "scores_per_experiment": [
          {
            "accuracy": 0.726295,
            "f1": 0.724368,
            "f1_weighted": 0.72364
          },
          {
            "accuracy": 0.734701,
            "f1": 0.737144,
            "f1_weighted": 0.733128
          },
          {
            "accuracy": 0.711163,
            "f1": 0.711532,
            "f1_weighted": 0.709362
          },
          {
            "accuracy": 0.705783,
            "f1": 0.699515,
            "f1_weighted": 0.709819
          },
          {
            "accuracy": 0.737727,
            "f1": 0.733102,
            "f1_weighted": 0.739757
          },
          {
            "accuracy": 0.740081,
            "f1": 0.729691,
            "f1_weighted": 0.74056
          },
          {
            "accuracy": 0.72226,
            "f1": 0.724384,
            "f1_weighted": 0.726585
          },
          {
            "accuracy": 0.722596,
            "f1": 0.725063,
            "f1_weighted": 0.721339
          },
          {
            "accuracy": 0.736382,
            "f1": 0.733771,
            "f1_weighted": 0.736615
          },
          {
            "accuracy": 0.724277,
            "f1": 0.722599,
            "f1_weighted": 0.722527
          }
        ],
        "main_score": 0.726126,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 4.415860176086426,
  "kg_co2_emissions": null
}
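
For context, a minimal Python sketch showing how such a result file can be loaded and sanity-checked. The filename below is an assumption (use whatever path the file is stored under); the check itself reflects what the data above shows, namely that the aggregate "accuracy"/"main_score" (0.726126) is the mean of the ten per-experiment accuracies.

    import json
    from statistics import mean

    # Hypothetical path; point this at the actual result file.
    with open("MassiveScenarioClassification.json") as f:
        result = json.load(f)

    split = result["scores"]["test"][0]
    per_exp = [e["accuracy"] for e in split["scores_per_experiment"]]

    # The reported main_score should match the mean over the ten
    # experiments to within rounding (six decimal places here).
    assert abs(mean(per_exp) - split["main_score"]) < 1e-6
    print(f"main_score: {split['main_score']}, "
          f"mean over experiments: {mean(per_exp):.6f}")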