{
"results": {
"anli_r1": {
"acc": 0.322,
"acc_stderr": 0.014782913600996664
},
"anli_r2": {
"acc": 0.313,
"acc_stderr": 0.014671272822977892
},
"anli_r3": {
"acc": 0.3491666666666667,
"acc_stderr": 0.013767075395077249
},
"cb": {
"acc": 0.42857142857142855,
"acc_stderr": 0.06672848092813058,
"f1": 0.2745098039215686
},
"copa": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446
},
"hellaswag": {
"acc": 0.4613622784305915,
"acc_stderr": 0.004974860878464439,
"acc_norm": 0.6013742282413862,
"acc_norm_stderr": 0.004886147907627404
},
"rte": {
"acc": 0.5487364620938628,
"acc_stderr": 0.029953149241808943
},
"winogrande": {
"acc": 0.5509076558800315,
"acc_stderr": 0.013979459389140844
},
"storycloze_2016": {
"acc": 0.7033671833244255,
"acc_stderr": 0.010562819181563226
},
"boolq": {
"acc": 0.6174311926605505,
"acc_stderr": 0.008500443818876165
},
"arc_easy": {
"acc": 0.6022727272727273,
"acc_stderr": 0.010042861602178061,
"acc_norm": 0.5803872053872053,
"acc_norm_stderr": 0.010126315840891539
},
"arc_challenge": {
"acc": 0.2832764505119454,
"acc_stderr": 0.013167478735134575,
"acc_norm": 0.3165529010238908,
"acc_norm_stderr": 0.013592431519068079
},
"sciq": {
"acc": 0.867,
"acc_stderr": 0.010743669132397332,
"acc_norm": 0.85,
"acc_norm_stderr": 0.011297239823409296
},
"piqa": {
"acc": 0.7475516866158868,
"acc_stderr": 0.01013566554736236,
"acc_norm": 0.7383025027203483,
"acc_norm_stderr": 0.010255630772708229
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}