lm5-2b8-55b-c4 / evaluation / rankeval_r_denoiser / checkpoints_2b855b55bc4ul2ndfixnew_3_lm-eval_global_step52452_2023-02-09-23-24-23_3shots_backup.json
{
"results": {
"anli_r1": {
"acc": 0.312,
"acc_stderr": 0.014658474370509008
},
"anli_r2": {
"acc": 0.344,
"acc_stderr": 0.015029633724408947
},
"anli_r3": {
"acc": 0.3425,
"acc_stderr": 0.013704669762934725
},
"cb": {
"acc": 0.5178571428571429,
"acc_stderr": 0.06737697508644647,
"f1": 0.3422885572139303
},
"copa": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084
},
"hellaswag": {
"acc": 0.291575383389763,
"acc_stderr": 0.004535589759202657,
"acc_norm": 0.32284405496912966,
"acc_norm_stderr": 0.004666080865179641
},
"rte": {
"acc": 0.5054151624548736,
"acc_stderr": 0.030094698123239966
},
"winogrande": {
"acc": 0.5011838989739542,
"acc_stderr": 0.014052446290529015
},
"storycloze_2016": {
"acc": 0.5740245857830037,
"acc_stderr": 0.011435014262181197
},
"boolq": {
"acc": 0.4688073394495413,
"acc_stderr": 0.008728020822889253
},
"arc_easy": {
"acc": 0.42424242424242425,
"acc_stderr": 0.010141333654958574,
"acc_norm": 0.38425925925925924,
"acc_norm_stderr": 0.009981120724601443
},
"arc_challenge": {
"acc": 0.18686006825938567,
"acc_stderr": 0.011391015649694391,
"acc_norm": 0.22440273037542663,
"acc_norm_stderr": 0.012191404938603838
},
"sciq": {
"acc": 0.723,
"acc_stderr": 0.014158794845306265,
"acc_norm": 0.682,
"acc_norm_stderr": 0.014734079309311901
},
"piqa": {
"acc": 0.6284004352557128,
"acc_stderr": 0.011274603006724743,
"acc_norm": 0.6196953210010882,
"acc_norm_stderr": 0.011326620892570314
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}
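
The file above follows the standard lm-eval harness output layout: a "results" object keyed by task name, each holding metrics such as acc, acc_stderr, and (for some tasks) acc_norm, plus a "versions" object recording the task version used. The sketch below shows one way such a file could be read and summarized in Python; the path "results.json" is a hypothetical placeholder for this checkpoint's output, and the interval shown is simply the common approximation acc ± 1.96 × acc_stderr.

import json

# Minimal sketch (hypothetical path "results.json"): load an lm-eval
# results file shaped like the JSON above and print each task's
# accuracy with an approximate 95% confidence interval.
with open("results.json") as f:
    data = json.load(f)

for task, metrics in sorted(data["results"].items()):
    acc = metrics.get("acc")
    stderr = metrics.get("acc_stderr", 0.0)
    if acc is None:
        continue
    low, high = acc - 1.96 * stderr, acc + 1.96 * stderr
    print(f"{task:16s} acc={acc:.3f}  95% CI [{low:.3f}, {high:.3f}]")

For tasks that also report acc_norm (e.g. hellaswag, arc_easy, arc_challenge, sciq, piqa here), the same pattern applies with the acc_norm / acc_norm_stderr pair.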