{
"results": {
"anli_r1": {
"acc": 0.339,
"acc_stderr": 0.01497675877162034
},
"anli_r2": {
"acc": 0.336,
"acc_stderr": 0.014944140233795027
},
"anli_r3": {
"acc": 0.3358333333333333,
"acc_stderr": 0.01363926119093288
},
"cb": {
"acc": 0.4107142857142857,
"acc_stderr": 0.0663363415035954,
"f1": 0.1940928270042194
},
"copa": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099
},
"hellaswag": {
"acc": 0.297450707030472,
"acc_stderr": 0.004562022467161891,
"acc_norm": 0.32374029077872934,
"acc_norm_stderr": 0.004669459891917689
},
"rte": {
"acc": 0.5234657039711191,
"acc_stderr": 0.03006330041190266
},
"winogrande": {
"acc": 0.5090765588003157,
"acc_stderr": 0.014050170094497707
},
"storycloze_2016": {
"acc": 0.5905932656333511,
"acc_stderr": 0.01137105952719707
},
"boolq": {
"acc": 0.5944954128440367,
"acc_stderr": 0.008587459055441612
},
"arc_easy": {
"acc": 0.43308080808080807,
"acc_stderr": 0.010167478013701799,
"acc_norm": 0.38173400673400676,
"acc_norm_stderr": 0.009968648851839667
},
"arc_challenge": {
"acc": 0.1885665529010239,
"acc_stderr": 0.011430897647675803,
"acc_norm": 0.22610921501706485,
"acc_norm_stderr": 0.01222420209706328
},
"sciq": {
"acc": 0.735,
"acc_stderr": 0.013963164754809953,
"acc_norm": 0.668,
"acc_norm_stderr": 0.014899597242811476
},
"piqa": {
"acc": 0.6158868335146899,
"acc_stderr": 0.011348160741479148,
"acc_norm": 0.6218715995647442,
"acc_norm_stderr": 0.011313980666854533
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}