{
"results": {
"anli_r1": {
"acc": 0.311,
"acc_stderr": 0.014645596385722694
},
"anli_r2": {
"acc": 0.299,
"acc_stderr": 0.014484778521220477
},
"anli_r3": {
"acc": 0.335,
"acc_stderr": 0.013630871843821474
},
"cb": {
"acc": 0.5178571428571429,
"acc_stderr": 0.06737697508644647,
"f1": 0.43401043401043404
},
"copa": {
"acc": 0.82,
"acc_stderr": 0.038612291966536955
},
"hellaswag": {
"acc": 0.45140410276837284,
"acc_stderr": 0.004966158142645416,
"acc_norm": 0.601274646484764,
"acc_norm_stderr": 0.0048863535635718415
},
"rte": {
"acc": 0.49097472924187724,
"acc_stderr": 0.030091559826331334
},
"winogrande": {
"acc": 0.590370955011839,
"acc_stderr": 0.013821049109655491
},
"storycloze_2016": {
"acc": 0.7097808658471406,
"acc_stderr": 0.010495529690730063
},
"boolq": {
"acc": 0.6241590214067279,
"acc_stderr": 0.008471147248160114
},
"arc_easy": {
"acc": 0.6401515151515151,
"acc_stderr": 0.009848484848484843,
"acc_norm": 0.6346801346801347,
"acc_norm_stderr": 0.009880576614806924
},
"arc_challenge": {
"acc": 0.28924914675767915,
"acc_stderr": 0.013250012579393443,
"acc_norm": 0.318259385665529,
"acc_norm_stderr": 0.013611993916971453
},
"sciq": {
"acc": 0.927,
"acc_stderr": 0.008230354715244055,
"acc_norm": 0.928,
"acc_norm_stderr": 0.008178195576218681
},
"piqa": {
"acc": 0.7453754080522307,
"acc_stderr": 0.010164432237060487,
"acc_norm": 0.7448313384113167,
"acc_norm_stderr": 0.010171571592521834
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}