{
  "results": {
    "anli_r1": {
      "acc": 0.336,
      "acc_stderr": 0.014944140233795023
    },
    "anli_r2": {
      "acc": 0.333,
      "acc_stderr": 0.014910846164229864
    },
    "anli_r3": {
      "acc": 0.3516666666666667,
      "acc_stderr": 0.013789711695404785
    },
    "cb": {
      "acc": 0.2857142857142857,
      "acc_stderr": 0.06091449038731724,
      "f1": 0.24845800389121164
    },
    "copa": {
      "acc": 0.78,
      "acc_stderr": 0.04163331998932261
    },
    "hellaswag": {
      "acc": 0.47918741286596295,
      "acc_stderr": 0.004985456752161002,
      "acc_norm": 0.6287592113124876,
      "acc_norm_stderr": 0.004821492994082102
    },
    "rte": {
      "acc": 0.5306859205776173,
      "acc_stderr": 0.03003973059219781
    },
    "winogrande": {
      "acc": 0.5816890292028414,
      "acc_stderr": 0.013863669961195904
    },
    "storycloze_2016": {
      "acc": 0.7087119187600214,
      "acc_stderr": 0.010506919924163614
    },
    "boolq": {
      "acc": 0.6100917431192661,
      "acc_stderr": 0.008530437972862622
    },
    "arc_easy": {
      "acc": 0.6018518518518519,
      "acc_stderr": 0.010044662374653398,
      "acc_norm": 0.5214646464646465,
      "acc_norm_stderr": 0.010250325159456652
    },
    "arc_challenge": {
      "acc": 0.27559726962457337,
      "acc_stderr": 0.01305716965576184,
      "acc_norm": 0.30631399317406144,
      "acc_norm_stderr": 0.013470584417276511
    },
    "sciq": {
      "acc": 0.852,
      "acc_stderr": 0.01123486636423524,
      "acc_norm": 0.768,
      "acc_norm_stderr": 0.01335493745228157
    },
    "piqa": {
      "acc": 0.750816104461371,
      "acc_stderr": 0.010091882770120216,
      "acc_norm": 0.7616974972796517,
      "acc_norm_stderr": 0.009940334245876219
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}