{
"results": {
"anli_r1": {
"acc": 0.329,
"acc_stderr": 0.014865395385928362
},
"anli_r2": {
"acc": 0.335,
"acc_stderr": 0.014933117490932568
},
"anli_r3": {
"acc": 0.3333333333333333,
"acc_stderr": 0.013613950010225606
},
"cb": {
"acc": 0.26785714285714285,
"acc_stderr": 0.05971290310957635,
"f1": 0.24172051976930028
},
"copa": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256
},
"hellaswag": {
"acc": 0.4371639115714001,
"acc_stderr": 0.004950221546187576,
"acc_norm": 0.5686118303126867,
"acc_norm_stderr": 0.004942578520987359
},
"rte": {
"acc": 0.5018050541516246,
"acc_stderr": 0.030096267148976626
},
"winogrande": {
"acc": 0.5651144435674822,
"acc_stderr": 0.013932814110418029
},
"storycloze_2016": {
"acc": 0.7071084981293426,
"acc_stderr": 0.010523873293246304
},
"boolq": {
"acc": 0.5165137614678899,
"acc_stderr": 0.008740284046486644
},
"arc_easy": {
"acc": 0.5778619528619529,
"acc_stderr": 0.010134620524592271,
"acc_norm": 0.5357744107744108,
"acc_norm_stderr": 0.010233488709726544
},
"arc_challenge": {
"acc": 0.2645051194539249,
"acc_stderr": 0.012889272949313366,
"acc_norm": 0.2858361774744027,
"acc_norm_stderr": 0.013203196088537367
},
"sciq": {
"acc": 0.866,
"acc_stderr": 0.010777762298369683,
"acc_norm": 0.828,
"acc_norm_stderr": 0.011939788882495321
},
"piqa": {
"acc": 0.7475516866158868,
"acc_stderr": 0.010135665547362364,
"acc_norm": 0.7589771490750816,
"acc_norm_stderr": 0.009979042717267312
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}