{
"results": {
"anli_r1": {
"acc": 0.335,
"acc_stderr": 0.014933117490932573
},
"anli_r2": {
"acc": 0.342,
"acc_stderr": 0.015008706182121728
},
"anli_r3": {
"acc": 0.35,
"acc_stderr": 0.013774667009018554
},
"cb": {
"acc": 0.30357142857142855,
"acc_stderr": 0.06199938655510754,
"f1": 0.264400871459695
},
"copa": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446
},
"hellaswag": {
"acc": 0.4386576379207329,
"acc_stderr": 0.004952087083128898,
"acc_norm": 0.5731925911173074,
"acc_norm_stderr": 0.004936029827672035
},
"rte": {
"acc": 0.5379061371841155,
"acc_stderr": 0.030009848912529113
},
"winogrande": {
"acc": 0.5674822415153907,
"acc_stderr": 0.013923911578623839
},
"storycloze_2016": {
"acc": 0.711918760021379,
"acc_stderr": 0.01047253701982258
},
"boolq": {
"acc": 0.5079510703363914,
"acc_stderr": 0.00874394919013925
},
"arc_easy": {
"acc": 0.5572390572390572,
"acc_stderr": 0.010192333348394457,
"acc_norm": 0.5122053872053872,
"acc_norm_stderr": 0.010256726235129021
},
"arc_challenge": {
"acc": 0.2508532423208191,
"acc_stderr": 0.012668198621315433,
"acc_norm": 0.26706484641638223,
"acc_norm_stderr": 0.012928933196496354
},
"sciq": {
"acc": 0.845,
"acc_stderr": 0.011450157470799478,
"acc_norm": 0.816,
"acc_norm_stderr": 0.012259457340938584
},
"piqa": {
"acc": 0.749183895538629,
"acc_stderr": 0.010113869547069044,
"acc_norm": 0.7529923830250272,
"acc_norm_stderr": 0.01006226814077264
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}