{
"results": {
"anli_r1": {
"acc": 0.331,
"acc_stderr": 0.014888272588203934
},
"anli_r2": {
"acc": 0.352,
"acc_stderr": 0.015110404505648668
},
"anli_r3": {
"acc": 0.35583333333333333,
"acc_stderr": 0.01382651874849331
},
"cb": {
"acc": 0.2857142857142857,
"acc_stderr": 0.06091449038731724,
"f1": 0.1717171717171717
},
"copa": {
"acc": 0.8,
"acc_stderr": 0.040201512610368445
},
"hellaswag": {
"acc": 0.5319657438757219,
"acc_stderr": 0.004979573765575866,
"acc_norm": 0.7045409281019717,
"acc_norm_stderr": 0.004553164013379556
},
"rte": {
"acc": 0.5667870036101083,
"acc_stderr": 0.029826764082138277
},
"winogrande": {
"acc": 0.6243093922651933,
"acc_stderr": 0.013611257508380437
},
"storycloze_2016": {
"acc": 0.7525387493319081,
"acc_stderr": 0.009979234591920141
},
"boolq": {
"acc": 0.6284403669724771,
"acc_stderr": 0.008451598145076575
},
"arc_easy": {
"acc": 0.6637205387205387,
"acc_stderr": 0.009694178072725206,
"acc_norm": 0.5896464646464646,
"acc_norm_stderr": 0.010093531255765452
},
"arc_challenge": {
"acc": 0.32081911262798635,
"acc_stderr": 0.013640943091946526,
"acc_norm": 0.33447098976109213,
"acc_norm_stderr": 0.013787460322441374
},
"sciq": {
"acc": 0.89,
"acc_stderr": 0.00989939381972444,
"acc_norm": 0.815,
"acc_norm_stderr": 0.012285191326386684
},
"piqa": {
"acc": 0.7731229597388466,
"acc_stderr": 0.009771584259215172,
"acc_norm": 0.7829162132752993,
"acc_norm_stderr": 0.009618708415756788
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}