lm1-misc-pile/220m7b57b5/evaluation/lm1-220m-7b5-results_lm-eval_global_step14324_2023-01-24-13-53-29_2shots.json
{
  "results": {
    "anli_r1": {
      "acc": 0.327,
      "acc_stderr": 0.014842213153411249
    },
    "anli_r2": {
      "acc": 0.34,
      "acc_stderr": 0.014987482264363937
    },
    "anli_r3": {
      "acc": 0.3408333333333333,
      "acc_stderr": 0.013688600793296934
    },
    "cb": {
      "acc": 0.4107142857142857,
      "acc_stderr": 0.0663363415035954,
      "f1": 0.29081632653061223
    },
    "copa": {
      "acc": 0.62,
      "acc_stderr": 0.048783173121456316
    },
    "hellaswag": {
      "acc": 0.2689703246365266,
      "acc_stderr": 0.004425182676353203,
      "acc_norm": 0.27763393746265685,
      "acc_norm_stderr": 0.004469165728600334
    },
    "rte": {
      "acc": 0.4981949458483754,
      "acc_stderr": 0.030096267148976633
    },
    "winogrande": {
      "acc": 0.5201262825572218,
      "acc_stderr": 0.014041096664344329
    },
    "storycloze_2016": {
      "acc": 0.5398182789951897,
      "acc_stderr": 0.011525709570367516
    },
    "boolq": {
      "acc": 0.5908256880733945,
      "acc_stderr": 0.008599563442397349
    },
    "arc_easy": {
      "acc": 0.40404040404040403,
      "acc_stderr": 0.010069061649549545,
      "acc_norm": 0.37542087542087543,
      "acc_norm_stderr": 0.009936218527114304
    },
    "arc_challenge": {
      "acc": 0.16552901023890784,
      "acc_stderr": 0.010860860440277693,
      "acc_norm": 0.2098976109215017,
      "acc_norm_stderr": 0.011900548748047446
    },
    "sciq": {
      "acc": 0.712,
      "acc_stderr": 0.01432694179723156,
      "acc_norm": 0.689,
      "acc_norm_stderr": 0.014645596385722697
    },
    "piqa": {
      "acc": 0.5908596300326442,
      "acc_stderr": 0.011471593460443318,
      "acc_norm": 0.5859630032644179,
      "acc_norm_stderr": 0.01149211848141758
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}