{
"results": {
"anli_r1": {
"acc": 0.332,
"acc_stderr": 0.014899597242811483
},
"anli_r2": {
"acc": 0.337,
"acc_stderr": 0.014955087918653607
},
"anli_r3": {
"acc": 0.3516666666666667,
"acc_stderr": 0.013789711695404792
},
"cb": {
"acc": 0.30357142857142855,
"acc_stderr": 0.06199938655510753,
"f1": 0.2584656084656085
},
"copa": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256
},
"hellaswag": {
"acc": 0.4444333798048198,
"acc_stderr": 0.004958872288442143,
"acc_norm": 0.5771758613821948,
"acc_norm_stderr": 0.00492998369279507
},
"rte": {
"acc": 0.5306859205776173,
"acc_stderr": 0.030039730592197812
},
"winogrande": {
"acc": 0.5722178374112076,
"acc_stderr": 0.013905134013839953
},
"storycloze_2016": {
"acc": 0.7156600748262961,
"acc_stderr": 0.01043161412866525
},
"boolq": {
"acc": 0.5425076452599389,
"acc_stderr": 0.00871339478784262
},
"arc_easy": {
"acc": 0.5589225589225589,
"acc_stderr": 0.010188293221040569,
"acc_norm": 0.494949494949495,
"acc_norm_stderr": 0.010259260102565853
},
"arc_challenge": {
"acc": 0.24829351535836178,
"acc_stderr": 0.012624912868089762,
"acc_norm": 0.2713310580204778,
"acc_norm_stderr": 0.012993807727545796
},
"sciq": {
"acc": 0.816,
"acc_stderr": 0.012259457340938577,
"acc_norm": 0.734,
"acc_norm_stderr": 0.01397996564514515
},
"piqa": {
"acc": 0.7464635473340587,
"acc_stderr": 0.010150090834551791,
"acc_norm": 0.7540805223068553,
"acc_norm_stderr": 0.01004733186562518
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}