{
  "dataset_revision": "0fd18e25b25c072e09e0d92ab615fda904d66300",
  "task_name": "Banking77Classification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.82026,
        "f1": 0.819738,
        "f1_weighted": 0.819738,
        "scores_per_experiment": [
          {
            "accuracy": 0.820779,
            "f1": 0.820659,
            "f1_weighted": 0.820659
          },
          {
            "accuracy": 0.827273,
            "f1": 0.827807,
            "f1_weighted": 0.827807
          },
          {
            "accuracy": 0.837013,
            "f1": 0.836189,
            "f1_weighted": 0.836189
          },
          {
            "accuracy": 0.827273,
            "f1": 0.827111,
            "f1_weighted": 0.827111
          },
          {
            "accuracy": 0.816558,
            "f1": 0.816573,
            "f1_weighted": 0.816573
          },
          {
            "accuracy": 0.800649,
            "f1": 0.79907,
            "f1_weighted": 0.79907
          },
          {
            "accuracy": 0.816558,
            "f1": 0.815777,
            "f1_weighted": 0.815777
          },
          {
            "accuracy": 0.812338,
            "f1": 0.811554,
            "f1_weighted": 0.811554
          },
          {
            "accuracy": 0.828247,
            "f1": 0.828126,
            "f1_weighted": 0.828126
          },
          {
            "accuracy": 0.815909,
            "f1": 0.814514,
            "f1_weighted": 0.814514
          }
        ],
        "main_score": 0.82026,
        "hf_subset": "default",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 11.350855588912964,
  "kg_co2_emissions": null
}