{
  "results": {
    "bigbench_empirical_judgments": {
      "multiple_choice_grade": 0.3434343434343434,
      "multiple_choice_grade_stderr": 0.0479675905875748
    },
    "bigbench_arithmetic_4_digit_subtraction": {
      "multiple_choice_grade": 0.46,
      "multiple_choice_grade_stderr": 0.05009082659620332
    },
    "bigbench_arithmetic_5_digit_addition": {
      "multiple_choice_grade": 0.56,
      "multiple_choice_grade_stderr": 0.049888765156985884
    },
    "bigbench_paraphrase": {
      "multiple_choice_grade": 0.52,
      "multiple_choice_grade_stderr": 0.03541569365103447
    },
    "bigbench_cause_and_effect_one_sentence": {
      "multiple_choice_grade": 0.5490196078431373,
      "multiple_choice_grade_stderr": 0.07037003311735827
    },
    "bigbench_arithmetic_1_digit_subtraction": {
      "multiple_choice_grade": 0.77,
      "multiple_choice_grade_stderr": 0.04229525846816508
    },
    "bigbench_cause_and_effect_one_sentence_no_prompt": {
      "multiple_choice_grade": 0.6274509803921569,
      "multiple_choice_grade_stderr": 0.06837488538887332
    },
    "bigbench_arithmetic_2_digit_multiplication": {
      "multiple_choice_grade": 0.36,
      "multiple_choice_grade_stderr": 0.04824181513244218
    },
    "bigbench_analogies": {
      "multiple_choice_grade": 0.36153846153846153,
      "multiple_choice_grade_stderr": 0.042300915595389274
    },
    "bigbench_arithmetic_2_digit_subtraction": {
      "multiple_choice_grade": 0.54,
      "multiple_choice_grade_stderr": 0.05009082659620332
    },
    "bigbench_intent_recognition": {
      "multiple_choice_grade": 0.5289017341040463,
      "multiple_choice_grade_stderr": 0.018989093135982828
    },
    "bigbench_hhh_alignment_other": {
      "multiple_choice_grade": 0.5348837209302325,
      "multiple_choice_grade_stderr": 0.07696367820108108
    },
    "bigbench_similarities_abstraction": {
      "multiple_choice_grade": 0.42105263157894735,
      "multiple_choice_grade_stderr": 0.0570107958220067
    },
    "bigbench_hhh_alignment_honest": {
      "multiple_choice_grade": 0.3559322033898305,
      "multiple_choice_grade_stderr": 0.06286883855871885
    },
    "bigbench_arithmetic_3_digit_subtraction": {
      "multiple_choice_grade": 0.58,
      "multiple_choice_grade_stderr": 0.049604496374885836
    },
    "bigbench_emotions": {
      "multiple_choice_grade": 0.08125,
      "multiple_choice_grade_stderr": 0.02166764934878465
    },
    "bigbench_arithmetic_5_digit_division": {
      "multiple_choice_grade": 0.27,
      "multiple_choice_grade_stderr": 0.04461960433384739
    },
    "bigbench_arithmetic_4_digit_multiplication": {
      "multiple_choice_grade": 0.31,
      "multiple_choice_grade_stderr": 0.04648231987117316
    },
    "bigbench_arithmetic_3_digit_addition": {
      "multiple_choice_grade": 0.38,
      "multiple_choice_grade_stderr": 0.04878317312145633
    },
    "bigbench_hhh_alignment_harmless": {
      "multiple_choice_grade": 0.39655172413793105,
      "multiple_choice_grade_stderr": 0.06479366091762498
    },
    "bigbench_general_knowledge": {
      "multiple_choice_grade": 0.22857142857142856,
      "multiple_choice_grade_stderr": 0.05055152782453618
    },
    "bigbench_arithmetic_3_digit_division": {
      "multiple_choice_grade": 0.4,
      "multiple_choice_grade_stderr": 0.04923659639173309
    },
    "bigbench_arithmetic_3_digit_multiplication": {
      "multiple_choice_grade": 0.22,
      "multiple_choice_grade_stderr": 0.041633319989322674
    },
    "bigbench_arithmetic_4_digit_addition": {
      "multiple_choice_grade": 0.34,
      "multiple_choice_grade_stderr": 0.04760952285695235
    },
    "bigbench_cause_and_effect_two_sentences": {
      "multiple_choice_grade": 0.3137254901960784,
      "multiple_choice_grade_stderr": 0.0656203942379667
    },
    "bigbench_arithmetic_4_digit_division": {
      "multiple_choice_grade": 0.31,
      "multiple_choice_grade_stderr": 0.04648231987117316
    },
    "bigbench_hhh_alignment_helpful": {
      "multiple_choice_grade": 0.288135593220339,
      "multiple_choice_grade_stderr": 0.0594679677815484
    },
    "bigbench_arithmetic_2_digit_division": {
      "multiple_choice_grade": 0.52,
      "multiple_choice_grade_stderr": 0.050211673156867795
    },
    "bigbench_arithmetic_1_digit_multiplication": {
      "multiple_choice_grade": 0.45,
      "multiple_choice_grade_stderr": 0.05
    },
    "bigbench_arithmetic_5_digit_multiplication": {
      "multiple_choice_grade": 0.28,
      "multiple_choice_grade_stderr": 0.04512608598542128
    },
    "bigbench_misconceptions": {
      "multiple_choice_grade": 0.4925373134328358,
      "multiple_choice_grade_stderr": 0.04335066912520505
    },
    "bigbench_arithmetic_1_digit_addition": {
      "multiple_choice_grade": 0.62,
      "multiple_choice_grade_stderr": 0.04878317312145634
    },
    "bigbench_arithmetic_2_digit_addition": {
      "multiple_choice_grade": 0.49,
      "multiple_choice_grade_stderr": 0.05024183937956912
    },
    "bigbench_sentence_ambiguity": {
      "multiple_choice_grade": 0.5166666666666667,
      "multiple_choice_grade_stderr": 0.06505828185300304
    },
    "bigbench_arithmetic_5_digit_subtraction": {
      "multiple_choice_grade": 0.49,
      "multiple_choice_grade_stderr": 0.050241839379569095
    },
    "bigbench_arithmetic_1_digit_division": {
      "multiple_choice_grade": 0.8260869565217391,
      "multiple_choice_grade_stderr": 0.08081046758996392
    }
  },
  "versions": {
    "bigbench_empirical_judgments": 0,
    "bigbench_arithmetic_4_digit_subtraction": 0,
    "bigbench_arithmetic_5_digit_addition": 0,
    "bigbench_paraphrase": 0,
    "bigbench_cause_and_effect_one_sentence": 0,
    "bigbench_arithmetic_1_digit_subtraction": 0,
    "bigbench_cause_and_effect_one_sentence_no_prompt": 0,
    "bigbench_arithmetic_2_digit_multiplication": 0,
    "bigbench_analogies": 0,
    "bigbench_arithmetic_2_digit_subtraction": 0,
    "bigbench_intent_recognition": 0,
    "bigbench_hhh_alignment_other": 0,
    "bigbench_similarities_abstraction": 0,
    "bigbench_hhh_alignment_honest": 0,
    "bigbench_arithmetic_3_digit_subtraction": 0,
    "bigbench_emotions": 0,
    "bigbench_arithmetic_5_digit_division": 0,
    "bigbench_arithmetic_4_digit_multiplication": 0,
    "bigbench_arithmetic_3_digit_addition": 0,
    "bigbench_hhh_alignment_harmless": 0,
    "bigbench_general_knowledge": 0,
    "bigbench_arithmetic_3_digit_division": 0,
    "bigbench_arithmetic_3_digit_multiplication": 0,
    "bigbench_arithmetic_4_digit_addition": 0,
    "bigbench_cause_and_effect_two_sentences": 0,
    "bigbench_arithmetic_4_digit_division": 0,
    "bigbench_hhh_alignment_helpful": 0,
    "bigbench_arithmetic_2_digit_division": 0,
    "bigbench_arithmetic_1_digit_multiplication": 0,
    "bigbench_arithmetic_5_digit_multiplication": 0,
    "bigbench_misconceptions": 0,
    "bigbench_arithmetic_1_digit_addition": 0,
    "bigbench_arithmetic_2_digit_addition": 0,
    "bigbench_sentence_ambiguity": 0,
    "bigbench_arithmetic_5_digit_subtraction": 0,
    "bigbench_arithmetic_1_digit_division": 0
  },
  "config": {
    "model": "hf-causal-experimental",
    "model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom",
    "num_fewshot": 2,
    "batch_size": null,
    "device": "cuda:0",
    "no_cache": true,
    "limit": null,
    "bootstrap_iters": 100000,
    "description_dict": {}
  }
}