{
"results": {
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.04560480215720684
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.050251890762960605
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.46,
"multiple_choice_grade_stderr": 0.05009082659620333
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.044084400227680794
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5298507462686567,
"multiple_choice_grade_stderr": 0.04327816419216089
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.37373737373737376,
"multiple_choice_grade_stderr": 0.048870690395024875
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.5921052631578947,
"multiple_choice_grade_stderr": 0.05674699645486274
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.14,
"multiple_choice_grade_stderr": 0.03487350880197771
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.48333333333333334,
"multiple_choice_grade_stderr": 0.06505828185300304
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.04408440022768078
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5686274509803921,
"multiple_choice_grade_stderr": 0.07004145529212454
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.049604496374885836
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.3620689655172414,
"multiple_choice_grade_stderr": 0.06365684176876799
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.04960449637488584
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3728813559322034,
"multiple_choice_grade_stderr": 0.0634959746661109
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.47058823529411764,
"multiple_choice_grade_stderr": 0.07058823529411762
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.04923659639173309
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.42857142857142855,
"multiple_choice_grade_stderr": 0.059575546873449965
},
"bigbench_emotions": {
"multiple_choice_grade": 0.53125,
"multiple_choice_grade_stderr": 0.039575057062617526
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3898305084745763,
"multiple_choice_grade_stderr": 0.06403968100905791
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.59,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5813953488372093,
"multiple_choice_grade_stderr": 0.07612251984976479
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.56,
"multiple_choice_grade_stderr": 0.04988876515698589
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.7647058823529411,
"multiple_choice_grade_stderr": 0.05998846486579748
},
"bigbench_analogies": {
"multiple_choice_grade": 0.4307692307692308,
"multiple_choice_grade_stderr": 0.04359851186846965
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.36,
"multiple_choice_grade_stderr": 0.04824181513244218
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.0354440602504168
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.51,
"multiple_choice_grade_stderr": 0.05024183937956912
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.042923469599092816
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.6956521739130435,
"multiple_choice_grade_stderr": 0.09810018692482896
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.42485549132947975,
"multiple_choice_grade_stderr": 0.01880485907341134
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.04688261722621505
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.36,
"multiple_choice_grade_stderr": 0.04824181513244218
}
},
"versions": {
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_misconceptions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_general_knowledge": 0,
"bigbench_emotions": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_analogies": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_paraphrase": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_intent_recognition": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom-finnish-176b",
"num_fewshot": 0,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
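
A minimal sketch of consuming this file, assuming it has been saved locally as "bloom-finnish-176b-bigbench.json" (a hypothetical filename); the "config" block mirrors the results format emitted by EleutherAI's lm-evaluation-harness, and the schema used below ("results" -> task name -> metric) is taken directly from the JSON above:

import json

# Load the evaluation results; the filename is an assumption for illustration.
with open("bloom-finnish-176b-bigbench.json") as f:
    data = json.load(f)

# Rank the BIG-bench tasks by multiple_choice_grade, best first.
rows = sorted(
    data["results"].items(),
    key=lambda kv: kv[1]["multiple_choice_grade"],
    reverse=True,
)

# Print one line per task: score with its bootstrap standard error.
for task, metrics in rows:
    grade = metrics["multiple_choice_grade"]
    stderr = metrics["multiple_choice_grade_stderr"]
    print(f"{task:55s} {grade:.4f} +/- {stderr:.4f}")

Run against the data above, this would list bigbench_cause_and_effect_one_sentence_no_prompt (0.7647) first and bigbench_arithmetic_2_digit_addition (0.1400) last.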