{
"results": {
"anli_r1": {
"acc": 0.348,
"acc_stderr": 0.01507060460376841
},
"anli_r2": {
"acc": 0.335,
"acc_stderr": 0.014933117490932575
},
"anli_r3": {
"acc": 0.3416666666666667,
"acc_stderr": 0.013696658778002505
},
"cb": {
"acc": 0.44642857142857145,
"acc_stderr": 0.06703189227942398,
"f1": 0.2712571726656234
},
"copa": {
"acc": 0.65,
"acc_stderr": 0.04793724854411019
},
"hellaswag": {
"acc": 0.29396534554869547,
"acc_stderr": 0.004546451825028366,
"acc_norm": 0.3170683130850428,
"acc_norm_stderr": 0.004643832742876639
},
"rte": {
"acc": 0.5487364620938628,
"acc_stderr": 0.029953149241808946
},
"winogrande": {
"acc": 0.5067087608524072,
"acc_stderr": 0.014051220692330349
},
"storycloze_2016": {
"acc": 0.5783003741314805,
"acc_stderr": 0.011419774841868156
},
"boolq": {
"acc": 0.5587155963302752,
"acc_stderr": 0.008684548127832634
},
"arc_easy": {
"acc": 0.41203703703703703,
"acc_stderr": 0.010099765857562773,
"acc_norm": 0.3720538720538721,
"acc_norm_stderr": 0.009918187193096468
},
"arc_challenge": {
"acc": 0.181740614334471,
"acc_stderr": 0.011269198948880236,
"acc_norm": 0.2167235494880546,
"acc_norm_stderr": 0.012040156713481192
},
"sciq": {
"acc": 0.685,
"acc_stderr": 0.014696631960792492,
"acc_norm": 0.632,
"acc_norm_stderr": 0.0152580735615218
},
"piqa": {
"acc": 0.6294885745375408,
"acc_stderr": 0.011267826475447665,
"acc_norm": 0.6262241566920566,
"acc_norm_stderr": 0.011287972563201017
}
},
"versions": {
"anli_r1": 0,
"anli_r2": 0,
"anli_r3": 0,
"cb": 1,
"copa": 0,
"hellaswag": 0,
"rte": 0,
"winogrande": 0,
"storycloze_2016": 0,
"boolq": 1,
"arc_easy": 0,
"arc_challenge": 0,
"sciq": 0,
"piqa": 0
}
}