{
"results": {
"anli_r1": {
"acc": 0.343,
"acc_stderr": 0.015019206922356951
},
"anli_r2": {
"acc": 0.325,
"acc_stderr": 0.014818724459095527
},
"anli_r3": {
"acc": 0.33666666666666667,
"acc_stderr": 0.013647602942406393
},
"cb": {
"acc": 0.4642857142857143,
"acc_stderr": 0.06724777654937658,
"f1": 0.39080213903743316
},
"copa": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079
},
"hellaswag": {
"acc": 0.47759410476000796,
"acc_stderr": 0.004984768912326932,
"acc_norm": 0.6294562836088429,
"acc_norm_stderr": 0.004819633668832537
},
"rte": {
"acc": 0.5523465703971119,
"acc_stderr": 0.029931070362939533
},
"winogrande": {
"acc": 0.5730071033938438,
"acc_stderr": 0.013901878072575057
},
"storycloze_2016": {
"acc": 0.706574024585783,
"acc_stderr": 0.010529489334744471
},
"boolq": {
"acc": 0.617125382262997,
"acc_stderr": 0.008501734385335953
},
"arc_easy": {
"acc": 0.617003367003367,
"acc_stderr": 0.009974920384536462,
"acc_norm": 0.5761784511784511,
"acc_norm_stderr": 0.01014000609521361
},
"arc_challenge": {
"acc": 0.30887372013651876,
"acc_stderr": 0.013501770929344003,
"acc_norm": 0.3165529010238908,
"acc_norm_stderr": 0.01359243151906808
},
"sciq": {
"acc": 0.906,
"acc_stderr": 0.009233052000787726,
"acc_norm": 0.885,
"acc_norm_stderr": 0.010093407594904628
},
"piqa": {
"acc": 0.750272034820457,
"acc_stderr": 0.010099232969867492,
"acc_norm": 0.7584330794341676,
"acc_norm_stderr": 0.009986718001804461
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}