{
"results": {
"anli_r1": {
"acc": 0.336,
"acc_stderr": 0.014944140233795027
},
"anli_r2": {
"acc": 0.354,
"acc_stderr": 0.015129868238451772
},
"anli_r3": {
"acc": 0.3325,
"acc_stderr": 0.013605417345710526
},
"cb": {
"acc": 0.48214285714285715,
"acc_stderr": 0.0673769750864465,
"f1": 0.40945083014048533
},
"copa": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506
},
"hellaswag": {
"acc": 0.4340768771161123,
"acc_stderr": 0.004946221512145273,
"acc_norm": 0.5635331607249552,
"acc_norm_stderr": 0.004949335356881862
},
"rte": {
"acc": 0.49458483754512633,
"acc_stderr": 0.030094698123239966
},
"winogrande": {
"acc": 0.5453827940015785,
"acc_stderr": 0.013994481027065997
},
"storycloze_2016": {
"acc": 0.6905398182789952,
"acc_stderr": 0.01068995674518907
},
"boolq": {
"acc": 0.6085626911314985,
"acc_stderr": 0.008536430524403957
},
"arc_easy": {
"acc": 0.5841750841750841,
"acc_stderr": 0.010113348244647869,
"acc_norm": 0.5614478114478114,
"acc_norm_stderr": 0.010182010275471116
},
"arc_challenge": {
"acc": 0.27986348122866894,
"acc_stderr": 0.013119040897725922,
"acc_norm": 0.29692832764505117,
"acc_norm_stderr": 0.013352025976725223
},
"sciq": {
"acc": 0.888,
"acc_stderr": 0.009977753031397234,
"acc_norm": 0.869,
"acc_norm_stderr": 0.010674874844837952
},
"piqa": {
"acc": 0.736126224156692,
"acc_stderr": 0.010282996367695562,
"acc_norm": 0.7421109902067464,
"acc_norm_stderr": 0.010206956662056246
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}