{
"results": {
"anli_r1": {
"acc": 0.335,
"acc_stderr": 0.014933117490932577
},
"anli_r2": {
"acc": 0.348,
"acc_stderr": 0.01507060460376841
},
"anli_r3": {
"acc": 0.32166666666666666,
"acc_stderr": 0.013490095282989521
},
"cb": {
"acc": 0.5178571428571429,
"acc_stderr": 0.06737697508644647,
"f1": 0.34263448969331317
},
"copa": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099
},
"hellaswag": {
"acc": 0.3777136028679546,
"acc_stderr": 0.0048382464107862705,
"acc_norm": 0.4529974108743278,
"acc_norm_stderr": 0.004967685204073105
},
"rte": {
"acc": 0.5018050541516246,
"acc_stderr": 0.030096267148976633
},
"winogrande": {
"acc": 0.5074980268350434,
"acc_stderr": 0.014050905521228577
},
"storycloze_2016": {
"acc": 0.6306787814003206,
"acc_stderr": 0.011160545865067172
},
"boolq": {
"acc": 0.4948012232415902,
"acc_stderr": 0.008744582253526255
},
"arc_easy": {
"acc": 0.4675925925925926,
"acc_stderr": 0.010238210368801893,
"acc_norm": 0.4393939393939394,
"acc_norm_stderr": 0.010184134315437665
},
"arc_challenge": {
"acc": 0.23378839590443687,
"acc_stderr": 0.01236822537850714,
"acc_norm": 0.2431740614334471,
"acc_norm_stderr": 0.012536554144587092
},
"sciq": {
"acc": 0.718,
"acc_stderr": 0.01423652621529135,
"acc_norm": 0.71,
"acc_norm_stderr": 0.014356395999905694
},
"piqa": {
"acc": 0.6844396082698585,
"acc_stderr": 0.010843119201758945,
"acc_norm": 0.6926006528835691,
"acc_norm_stderr": 0.010765602506939063
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}