{
"results": {
"anli_r1": {
"acc": 0.327,
"acc_stderr": 0.014842213153411249
},
"anli_r2": {
"acc": 0.35,
"acc_stderr": 0.015090650341444233
},
"anli_r3": {
"acc": 0.325,
"acc_stderr": 0.013526454480351025
},
"cb": {
"acc": 0.4107142857142857,
"acc_stderr": 0.06633634150359541,
"f1": 0.26894586894586897
},
"copa": {
"acc": 0.82,
"acc_stderr": 0.038612291966536955
},
"hellaswag": {
"acc": 0.4736108344951205,
"acc_stderr": 0.0049828269166871525,
"acc_norm": 0.6298546106353317,
"acc_norm_stderr": 0.004818566366066934
},
"rte": {
"acc": 0.5487364620938628,
"acc_stderr": 0.029953149241808943
},
"winogrande": {
"acc": 0.585635359116022,
"acc_stderr": 0.013844846232268563
},
"storycloze_2016": {
"acc": 0.7188669160876536,
"acc_stderr": 0.010395836091628108
},
"boolq": {
"acc": 0.6342507645259939,
"acc_stderr": 0.00842393006885078
},
"arc_easy": {
"acc": 0.6372053872053872,
"acc_stderr": 0.009865936757013938,
"acc_norm": 0.6077441077441077,
"acc_norm_stderr": 0.010018744689650043
},
"arc_challenge": {
"acc": 0.2841296928327645,
"acc_stderr": 0.013179442447653886,
"acc_norm": 0.30802047781569963,
"acc_norm_stderr": 0.013491429517292038
},
"sciq": {
"acc": 0.911,
"acc_stderr": 0.009008893392651528,
"acc_norm": 0.885,
"acc_norm_stderr": 0.010093407594904638
},
"piqa": {
"acc": 0.7611534276387377,
"acc_stderr": 0.0099481203853375,
"acc_norm": 0.7584330794341676,
"acc_norm_stderr": 0.009986718001804451
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}