{
"results": {
"anli_r1": {
"acc": 0.304,
"acc_stderr": 0.014553205687950434
},
"anli_r2": {
"acc": 0.332,
"acc_stderr": 0.014899597242811482
},
"anli_r3": {
"acc": 0.34833333333333333,
"acc_stderr": 0.013759437498874061
},
"cb": {
"acc": 0.42857142857142855,
"acc_stderr": 0.06672848092813058,
"f1": 0.30465949820788535
},
"copa": {
"acc": 0.77,
"acc_stderr": 0.042295258468165065
},
"hellaswag": {
"acc": 0.45429197371041624,
"acc_stderr": 0.004968888130290068,
"acc_norm": 0.5927106154152559,
"acc_norm_stderr": 0.004903254264177628
},
"rte": {
"acc": 0.5595667870036101,
"acc_stderr": 0.02988212336311872
},
"winogrande": {
"acc": 0.5603788476716653,
"acc_stderr": 0.013949649776015696
},
"storycloze_2016": {
"acc": 0.6734366648850882,
"acc_stderr": 0.010844543793668893
},
"boolq": {
"acc": 0.6155963302752293,
"acc_stderr": 0.008508133844703919
},
"arc_easy": {
"acc": 0.5096801346801347,
"acc_stderr": 0.010257860554461122,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.010231597249131062
},
"arc_challenge": {
"acc": 0.2508532423208191,
"acc_stderr": 0.01266819862131543,
"acc_norm": 0.2764505119453925,
"acc_norm_stderr": 0.013069662474252425
},
"sciq": {
"acc": 0.827,
"acc_stderr": 0.011967214137559941,
"acc_norm": 0.789,
"acc_norm_stderr": 0.01290913032104209
},
"piqa": {
"acc": 0.6953210010881393,
"acc_stderr": 0.010738889044325161,
"acc_norm": 0.6953210010881393,
"acc_norm_stderr": 0.010738889044325161
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}