{
  "results": {
    "anli_r1": {
      "acc": 0.351,
      "acc_stderr": 0.015100563798316402
    },
    "anli_r2": {
      "acc": 0.352,
      "acc_stderr": 0.015110404505648668
    },
    "anli_r3": {
      "acc": 0.36833333333333335,
      "acc_stderr": 0.013930121355353778
    },
    "cb": {
      "acc": 0.48214285714285715,
      "acc_stderr": 0.0673769750864465,
      "f1": 0.3366858237547892
    },
    "copa": {
      "acc": 0.76,
      "acc_stderr": 0.04292346959909283
    },
    "hellaswag": {
      "acc": 0.4567815176259709,
      "acc_stderr": 0.0049711062650465545,
      "acc_norm": 0.602370045807608,
      "acc_norm_stderr": 0.004884079750433882
    },
    "rte": {
      "acc": 0.5667870036101083,
      "acc_stderr": 0.02982676408213828
    },
    "winogrande": {
      "acc": 0.5595895816890292,
      "acc_stderr": 0.013952330311915603
    },
    "storycloze_2016": {
      "acc": 0.7156600748262961,
      "acc_stderr": 0.010431614128665242
    },
    "boolq": {
      "acc": 0.6070336391437309,
      "acc_stderr": 0.008542335147970564
    },
    "arc_easy": {
      "acc": 0.6456228956228957,
      "acc_stderr": 0.009815004030251746,
      "acc_norm": 0.6464646464646465,
      "acc_norm_stderr": 0.0098097289481515
    },
    "arc_challenge": {
      "acc": 0.2986348122866894,
      "acc_stderr": 0.013374078615068756,
      "acc_norm": 0.3310580204778157,
      "acc_norm_stderr": 0.013752062419817836
    },
    "sciq": {
      "acc": 0.922,
      "acc_stderr": 0.008484573530118581,
      "acc_norm": 0.929,
      "acc_norm_stderr": 0.008125578442487917
    },
    "piqa": {
      "acc": 0.7410228509249184,
      "acc_stderr": 0.010220966031405609,
      "acc_norm": 0.7431991294885746,
      "acc_norm_stderr": 0.010192864802278042
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}