{
    "results": {
        "anli_r1": {
            "acc": 0.339,
            "acc_stderr": 0.014976758771620342
        },
        "anli_r2": {
            "acc": 0.323,
            "acc_stderr": 0.01479492784334864
        },
        "anli_r3": {
            "acc": 0.3475,
            "acc_stderr": 0.013751753243291852
        },
        "cb": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.0646095738380922,
            "f1": 0.2627450980392157
        },
        "copa": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814
        },
        "hellaswag": {
            "acc": 0.4334793865763792,
            "acc_stderr": 0.0049454247716115935,
            "acc_norm": 0.5575582553276239,
            "acc_norm_stderr": 0.0049566093272183885
        },
        "rte": {
            "acc": 0.5379061371841155,
            "acc_stderr": 0.030009848912529117
        },
        "winogrande": {
            "acc": 0.5509076558800315,
            "acc_stderr": 0.01397945938914085
        },
        "storycloze_2016": {
            "acc": 0.6841261357562801,
            "acc_stderr": 0.010749892827011111
        },
        "boolq": {
            "acc": 0.5620795107033639,
            "acc_stderr": 0.008677388652709261
        },
        "arc_easy": {
            "acc": 0.5833333333333334,
            "acc_stderr": 0.010116282977781239,
            "acc_norm": 0.5361952861952862,
            "acc_norm_stderr": 0.010232865550346736
        },
        "arc_challenge": {
            "acc": 0.2551194539249147,
            "acc_stderr": 0.012739038695202102,
            "acc_norm": 0.28498293515358364,
            "acc_norm_stderr": 0.013191348179838795
        },
        "sciq": {
            "acc": 0.877,
            "acc_stderr": 0.010391293421849877,
            "acc_norm": 0.84,
            "acc_norm_stderr": 0.011598902298689007
        },
        "piqa": {
            "acc": 0.7323177366702938,
            "acc_stderr": 0.01033011118937043,
            "acc_norm": 0.7410228509249184,
            "acc_norm_stderr": 0.010220966031405617
        }
    },
    "versions": {
        "anli_r1": 0,
        "anli_r2": 0,
        "anli_r3": 0,
        "cb": 1,
        "copa": 0,
        "hellaswag": 0,
        "rte": 0,
        "winogrande": 0,
        "storycloze_2016": 0,
        "boolq": 1,
        "arc_easy": 0,
        "arc_challenge": 0,
        "sciq": 0,
        "piqa": 0
    }
}