lm1-misc-pile/619m2b72b7/evaluation/lm1-619m-2b7-results_lm-eval_global_step5111_2023-01-24-13-53-29_2shots.json
{
"results": {
"anli_r1": {
"acc": 0.312,
"acc_stderr": 0.014658474370509
},
"anli_r2": {
"acc": 0.343,
"acc_stderr": 0.015019206922356951
},
"anli_r3": {
"acc": 0.3433333333333333,
"acc_stderr": 0.01371263383046586
},
"cb": {
"acc": 0.375,
"acc_stderr": 0.06527912098338669,
"f1": 0.26285714285714284
},
"copa": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975
},
"hellaswag": {
"acc": 0.26847241585341564,
"acc_stderr": 0.00442259026238513,
"acc_norm": 0.2740489942242581,
"acc_norm_stderr": 0.004451222241494057
},
"rte": {
"acc": 0.5234657039711191,
"acc_stderr": 0.030063300411902666
},
"winogrande": {
"acc": 0.5138121546961326,
"acc_stderr": 0.014047122916440412
},
"storycloze_2016": {
"acc": 0.5339390700160342,
"acc_stderr": 0.01153576488164141
},
"boolq": {
"acc": 0.5782874617737003,
"acc_stderr": 0.008637194202160971
},
"arc_easy": {
"acc": 0.36826599326599324,
"acc_stderr": 0.009897286209010894,
"acc_norm": 0.3468013468013468,
"acc_norm_stderr": 0.009766326091716005
},
"arc_challenge": {
"acc": 0.15443686006825938,
"acc_stderr": 0.010560149230392597,
"acc_norm": 0.197098976109215,
"acc_norm_stderr": 0.011625047669880624
},
"sciq": {
"acc": 0.67,
"acc_stderr": 0.014876872027456736,
"acc_norm": 0.635,
"acc_norm_stderr": 0.0152317762262649
},
"piqa": {
"acc": 0.5767138193688792,
"acc_stderr": 0.011527699473614478,
"acc_norm": 0.5832426550598476,
"acc_norm_stderr": 0.011503015163618312
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}
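
A minimal sketch (not part of the results file) of how a results JSON like this one, as emitted by the lm-evaluation-harness, could be loaded and summarized. The path below is assumed to point at this file locally; every task in this file reports "acc", and "acc_norm" is printed where present.

import json

# Assumed local path to this results file; adjust as needed.
path = "lm1-619m-2b7-results_lm-eval_global_step5111_2023-01-24-13-53-29_2shots.json"
with open(path) as f:
    data = json.load(f)

# Print accuracy (and normalized accuracy where present) per task,
# alongside the task version recorded under "versions".
for task, metrics in sorted(data["results"].items()):
    version = data["versions"][task]
    line = f"{task:<16} v{version}  acc={metrics['acc']:.4f}"
    if "acc_norm" in metrics:
        line += f"  acc_norm={metrics['acc_norm']:.4f}"
    print(line)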