{
  "results": {
    "anli_r1": {
      "acc": 0.321,
      "acc_stderr": 0.01477082181793464
    },
    "anli_r2": {
      "acc": 0.342,
      "acc_stderr": 0.01500870618212173
    },
    "anli_r3": {
      "acc": 0.32083333333333336,
      "acc_stderr": 0.013480882752851552
    },
    "cb": {
      "acc": 0.5178571428571429,
      "acc_stderr": 0.06737697508644647,
      "f1": 0.35968427443837275
    },
    "copa": {
      "acc": 0.82,
      "acc_stderr": 0.038612291966536955
    },
    "hellaswag": {
      "acc": 0.465345548695479,
      "acc_stderr": 0.004977782217582457,
      "acc_norm": 0.6182035451105358,
      "acc_norm_stderr": 0.004848341560492137
    },
    "rte": {
      "acc": 0.5595667870036101,
      "acc_stderr": 0.029882123363118712
    },
    "winogrande": {
      "acc": 0.5753749013417522,
      "acc_stderr": 0.013891893150264224
    },
    "storycloze_2016": {
      "acc": 0.7242116515232496,
      "acc_stderr": 0.010334748387645672
    },
    "boolq": {
      "acc": 0.6125382262996942,
      "acc_stderr": 0.00852066653613694
    },
    "arc_easy": {
      "acc": 0.6224747474747475,
      "acc_stderr": 0.009947227833469432,
      "acc_norm": 0.601010101010101,
      "acc_norm_stderr": 0.010048240683798745
    },
    "arc_challenge": {
      "acc": 0.28071672354948807,
      "acc_stderr": 0.013131238126975588,
      "acc_norm": 0.3191126279863481,
      "acc_norm_stderr": 0.013621696119173297
    },
    "sciq": {
      "acc": 0.905,
      "acc_stderr": 0.009276910103103305,
      "acc_norm": 0.902,
      "acc_norm_stderr": 0.009406619184621235
    },
    "piqa": {
      "acc": 0.7464635473340587,
      "acc_stderr": 0.010150090834551782,
      "acc_norm": 0.7557127312295974,
      "acc_norm_stderr": 0.010024765172284256
    }
  },
  "versions": {
    "anli_r1": 0,
    "anli_r2": 0,
    "anli_r3": 0,
    "cb": 1,
    "copa": 0,
    "hellaswag": 0,
    "rte": 0,
    "winogrande": 0,
    "storycloze_2016": 0,
    "boolq": 1,
    "arc_easy": 0,
    "arc_challenge": 0,
    "sciq": 0,
    "piqa": 0
  }
}