{
  "results": {
    "anli_r1": {
      "acc": 0.34,
      "acc_stderr": 0.014987482264363935
    },
    "anli_r2": {
      "acc": 0.338,
      "acc_stderr": 0.014965960710224482
    },
    "anli_r3": {
      "acc": 0.35333333333333333,
      "acc_stderr": 0.013804572162314926
    },
    "cb": {
      "acc": 0.44642857142857145,
      "acc_stderr": 0.06703189227942398,
      "f1": 0.3011143410852713
    },
    "copa": {
      "acc": 0.79,
      "acc_stderr": 0.040936018074033256
    },
    "hellaswag": {
      "acc": 0.42202748456482775,
      "acc_stderr": 0.004928735103635839,
      "acc_norm": 0.5423222465644294,
      "acc_norm_stderr": 0.004971874159777697
    },
    "rte": {
      "acc": 0.5415162454873647,
      "acc_stderr": 0.029992535385373314
    },
    "winogrande": {
      "acc": 0.5414364640883977,
      "acc_stderr": 0.014004146853791892
    },
    "storycloze_2016": {
      "acc": 0.6766435061464458,
      "acc_stderr": 0.01081682863306821
    },
    "boolq": {
      "acc": 0.5831804281345566,
      "acc_stderr": 0.008623192108843679
    },
    "arc_easy": {
      "acc": 0.5648148148148148,
      "acc_stderr": 0.01017321643037092,
      "acc_norm": 0.5349326599326599,
      "acc_norm_stderr": 0.01023471305272368
    },
    "arc_challenge": {
      "acc": 0.257679180887372,
      "acc_stderr": 0.0127807705627684,
      "acc_norm": 0.2832764505119454,
      "acc_norm_stderr": 0.013167478735134576
    },
    "sciq": {
      "acc": 0.861,
      "acc_stderr": 0.010945263761042955,
      "acc_norm": 0.835,
      "acc_norm_stderr": 0.01174363286691616
    },
    "piqa": {
      "acc": 0.719260065288357,
      "acc_stderr": 0.010484325438311829,
      "acc_norm": 0.7263329706202394,
      "acc_norm_stderr": 0.010402184206229218
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}