{
"results": {
"anli_r1": {
"acc": 0.334,
"acc_stderr": 0.014922019523732963
},
"anli_r2": {
"acc": 0.336,
"acc_stderr": 0.014944140233795027
},
"anli_r3": {
"acc": 0.33666666666666667,
"acc_stderr": 0.013647602942406396
},
"cb": {
"acc": 0.39285714285714285,
"acc_stderr": 0.0658538889806635,
"f1": 0.18803418803418803
},
"copa": {
"acc": 0.76,
"acc_stderr": 0.04292346959909283
},
"hellaswag": {
"acc": 0.4736108344951205,
"acc_stderr": 0.004982826916687148,
"acc_norm": 0.6192989444333798,
"acc_norm_stderr": 0.004845668799108534
},
"rte": {
"acc": 0.5306859205776173,
"acc_stderr": 0.030039730592197812
},
"winogrande": {
"acc": 0.5974743488555643,
"acc_stderr": 0.013782866831703044
},
"storycloze_2016": {
"acc": 0.7177979690005345,
"acc_stderr": 0.010407834479647673
},
"boolq": {
"acc": 0.608868501529052,
"acc_stderr": 0.008535239054221164
},
"arc_easy": {
"acc": 0.5989057239057239,
"acc_stderr": 0.010057051106534364,
"acc_norm": 0.5290404040404041,
"acc_norm_stderr": 0.010242463826395614
},
"arc_challenge": {
"acc": 0.2815699658703072,
"acc_stderr": 0.013143376735009019,
"acc_norm": 0.29948805460750855,
"acc_norm_stderr": 0.013385021637313562
},
"sciq": {
"acc": 0.829,
"acc_stderr": 0.011912216456264604,
"acc_norm": 0.746,
"acc_norm_stderr": 0.01377220656516854
},
"piqa": {
"acc": 0.7568008705114254,
"acc_stderr": 0.01000961195385892,
"acc_norm": 0.7546245919477693,
"acc_norm_stderr": 0.010039831320422386
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}