{
"results": {
"anli_r1": {
"acc": 0.338,
"acc_stderr": 0.014965960710224473
},
"anli_r2": {
"acc": 0.328,
"acc_stderr": 0.014853842487270333
},
"anli_r3": {
"acc": 0.3425,
"acc_stderr": 0.013704669762934728
},
"cb": {
"acc": 0.4642857142857143,
"acc_stderr": 0.0672477765493766,
"f1": 0.26271604938271603
},
"copa": {
"acc": 0.8,
"acc_stderr": 0.040201512610368445
},
"hellaswag": {
"acc": 0.46863174666401114,
"acc_stderr": 0.0049799521665955405,
"acc_norm": 0.6181039633539136,
"acc_norm_stderr": 0.004848583243606704
},
"rte": {
"acc": 0.5126353790613718,
"acc_stderr": 0.030086851767188564
},
"winogrande": {
"acc": 0.5643251775848461,
"acc_stderr": 0.013935709739615715
},
"storycloze_2016": {
"acc": 0.6958845537145911,
"acc_stderr": 0.010638172655194789
},
"boolq": {
"acc": 0.6100917431192661,
"acc_stderr": 0.00853043797286262
},
"arc_easy": {
"acc": 0.5984848484848485,
"acc_stderr": 0.010058790020755572,
"acc_norm": 0.571969696969697,
"acc_norm_stderr": 0.01015294331642626
},
"arc_challenge": {
"acc": 0.2815699658703072,
"acc_stderr": 0.01314337673500901,
"acc_norm": 0.3242320819112628,
"acc_norm_stderr": 0.01367881039951882
},
"sciq": {
"acc": 0.864,
"acc_stderr": 0.010845350230472995,
"acc_norm": 0.862,
"acc_norm_stderr": 0.01091215263250441
},
"piqa": {
"acc": 0.7393906420021763,
"acc_stderr": 0.010241826155811627,
"acc_norm": 0.7383025027203483,
"acc_norm_stderr": 0.010255630772708229
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}