{
"results": {
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.56,
"multiple_choice_grade_stderr": 0.04988876515698589
},
"bigbench_analogies": {
"multiple_choice_grade": 0.36923076923076925,
"multiple_choice_grade_stderr": 0.042490254996217565
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.4117647058823529,
"multiple_choice_grade_stderr": 0.06960093862470136
},
"bigbench_emotions": {
"multiple_choice_grade": 0.13125,
"multiple_choice_grade_stderr": 0.02677925573528598
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3220338983050847,
"multiple_choice_grade_stderr": 0.06135370413564329
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3559322033898305,
"multiple_choice_grade_stderr": 0.06286883855871885
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.2714285714285714,
"multiple_choice_grade_stderr": 0.05353518442674902
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.3434343434343434,
"multiple_choice_grade_stderr": 0.0479675905875748
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.04408440022768079
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.75,
"multiple_choice_grade_stderr": 0.04351941398892446
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.042923469599092816
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.5260115606936416,
"multiple_choice_grade_stderr": 0.01899513985787891
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.535,
"multiple_choice_grade_stderr": 0.035357115664894224
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.7391304347826086,
"multiple_choice_grade_stderr": 0.09361833424764435
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.6666666666666666,
"multiple_choice_grade_stderr": 0.06666666666666664
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.35,
"multiple_choice_grade_stderr": 0.0479372485441102
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.58,
"multiple_choice_grade_stderr": 0.049604496374885836
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.44029850746268656,
"multiple_choice_grade_stderr": 0.0430453277257087
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5348837209302325,
"multiple_choice_grade_stderr": 0.07696367820108108
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.42105263157894735,
"multiple_choice_grade_stderr": 0.0570107958220067
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.41379310344827586,
"multiple_choice_grade_stderr": 0.06523484847771846
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5333333333333333,
"multiple_choice_grade_stderr": 0.06494964005966064
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.31,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.45,
"multiple_choice_grade_stderr": 0.05
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.62,
"multiple_choice_grade_stderr": 0.04878317312145633
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5098039215686274,
"multiple_choice_grade_stderr": 0.07069708383262727
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.46,
"multiple_choice_grade_stderr": 0.05009082659620333
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.33,
"multiple_choice_grade_stderr": 0.04725815626252604
}
},
"versions": {
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_analogies": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_general_knowledge": 0,
"bigbench_empirical_judgments": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_intent_recognition": 0,
"bigbench_paraphrase": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_misconceptions": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_multiplication": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=bloom",
"num_fewshot": 3,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}