{
  "results": {
    "bigbench_arithmetic_3_digit_subtraction": {
      "multiple_choice_grade": 0.55,
      "multiple_choice_grade_stderr": 0.05
    },
    "bigbench_arithmetic_1_digit_addition": {
      "multiple_choice_grade": 0.59,
      "multiple_choice_grade_stderr": 0.04943110704237101
    },
    "bigbench_arithmetic_3_digit_multiplication": {
      "multiple_choice_grade": 0.21,
      "multiple_choice_grade_stderr": 0.040936018074033256
    },
    "bigbench_similarities_abstraction": {
      "multiple_choice_grade": 0.39473684210526316,
      "multiple_choice_grade_stderr": 0.05644108049875581
    },
    "bigbench_misconceptions": {
      "multiple_choice_grade": 0.43283582089552236,
      "multiple_choice_grade_stderr": 0.04296256221665221
    },
    "bigbench_sentence_ambiguity": {
      "multiple_choice_grade": 0.5666666666666667,
      "multiple_choice_grade_stderr": 0.0645132433593152
    },
    "bigbench_arithmetic_3_digit_addition": {
      "multiple_choice_grade": 0.53,
      "multiple_choice_grade_stderr": 0.05016135580465919
    },
    "bigbench_arithmetic_5_digit_subtraction": {
      "multiple_choice_grade": 0.54,
      "multiple_choice_grade_stderr": 0.05009082659620332
    },
    "bigbench_arithmetic_4_digit_multiplication": {
      "multiple_choice_grade": 0.41,
      "multiple_choice_grade_stderr": 0.04943110704237102
    },
    "bigbench_arithmetic_2_digit_addition": {
      "multiple_choice_grade": 0.43,
      "multiple_choice_grade_stderr": 0.049756985195624284
    },
    "bigbench_cause_and_effect_one_sentence_no_prompt": {
      "multiple_choice_grade": 0.4117647058823529,
      "multiple_choice_grade_stderr": 0.06960093862470136
    },
    "bigbench_empirical_judgments": {
      "multiple_choice_grade": 0.3333333333333333,
      "multiple_choice_grade_stderr": 0.04761904761904759
    },
    "bigbench_cause_and_effect_two_sentences": {
      "multiple_choice_grade": 0.45098039215686275,
      "multiple_choice_grade_stderr": 0.0703700331173583
    },
    "bigbench_hhh_alignment_honest": {
      "multiple_choice_grade": 0.3559322033898305,
      "multiple_choice_grade_stderr": 0.06286883855871885
    },
    "bigbench_arithmetic_2_digit_division": {
      "multiple_choice_grade": 0.56,
      "multiple_choice_grade_stderr": 0.04988876515698589
    },
    "bigbench_arithmetic_3_digit_division": {
      "multiple_choice_grade": 0.33,
      "multiple_choice_grade_stderr": 0.04725815626252605
    },
    "bigbench_arithmetic_1_digit_multiplication": {
      "multiple_choice_grade": 0.54,
      "multiple_choice_grade_stderr": 0.05009082659620333
    },
    "bigbench_arithmetic_4_digit_subtraction": {
      "multiple_choice_grade": 0.52,
      "multiple_choice_grade_stderr": 0.050211673156867795
    },
    "bigbench_arithmetic_1_digit_subtraction": {
      "multiple_choice_grade": 0.62,
      "multiple_choice_grade_stderr": 0.048783173121456316
    },
    "bigbench_emotions": {
      "multiple_choice_grade": 0.14375,
      "multiple_choice_grade_stderr": 0.02782310897584524
    },
    "bigbench_analogies": {
      "multiple_choice_grade": 0.3230769230769231,
      "multiple_choice_grade_stderr": 0.041174446886055975
    },
    "bigbench_general_knowledge": {
      "multiple_choice_grade": 0.15714285714285714,
      "multiple_choice_grade_stderr": 0.043812674294856725
    },
    "bigbench_arithmetic_5_digit_multiplication": {
      "multiple_choice_grade": 0.28,
      "multiple_choice_grade_stderr": 0.045126085985421296
    },
    "bigbench_arithmetic_1_digit_division": {
      "multiple_choice_grade": 0.7391304347826086,
      "multiple_choice_grade_stderr": 0.09361833424764436
    },
    "bigbench_arithmetic_4_digit_division": {
      "multiple_choice_grade": 0.36,
      "multiple_choice_grade_stderr": 0.04824181513244218
    },
    "bigbench_hhh_alignment_other": {
      "multiple_choice_grade": 0.5116279069767442,
      "multiple_choice_grade_stderr": 0.07713080907803253
    },
    "bigbench_arithmetic_2_digit_subtraction": {
      "multiple_choice_grade": 0.56,
      "multiple_choice_grade_stderr": 0.04988876515698589
    },
    "bigbench_arithmetic_4_digit_addition": {
      "multiple_choice_grade": 0.36,
      "multiple_choice_grade_stderr": 0.04824181513244218
    },
    "bigbench_arithmetic_2_digit_multiplication": {
      "multiple_choice_grade": 0.28,
      "multiple_choice_grade_stderr": 0.04512608598542127
    },
    "bigbench_hhh_alignment_harmless": {
      "multiple_choice_grade": 0.39655172413793105,
      "multiple_choice_grade_stderr": 0.06479366091762498
    },
    "bigbench_arithmetic_5_digit_addition": {
      "multiple_choice_grade": 0.56,
      "multiple_choice_grade_stderr": 0.049888765156985884
    },
    "bigbench_paraphrase": {
      "multiple_choice_grade": 0.5,
      "multiple_choice_grade_stderr": 0.0354440602504168
    },
    "bigbench_intent_recognition": {
      "multiple_choice_grade": 0.18786127167630057,
      "multiple_choice_grade_stderr": 0.014859175625220964
    },
    "bigbench_arithmetic_5_digit_division": {
      "multiple_choice_grade": 0.32,
      "multiple_choice_grade_stderr": 0.04688261722621505
    },
    "bigbench_hhh_alignment_helpful": {
      "multiple_choice_grade": 0.288135593220339,
      "multiple_choice_grade_stderr": 0.059467967781548406
    },
    "bigbench_cause_and_effect_one_sentence": {
      "multiple_choice_grade": 0.5098039215686274,
      "multiple_choice_grade_stderr": 0.07069708383262727
    }
  },
"versions": {
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_misconceptions": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_empirical_judgments": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_emotions": 0,
"bigbench_analogies": 0,
"bigbench_general_knowledge": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_paraphrase": 0,
"bigbench_intent_recognition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_cause_and_effect_one_sentence": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
"num_fewshot": 0,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}