{
"results": {
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.41379310344827586,
"multiple_choice_grade_stderr": 0.06523484847771846
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.425,
"multiple_choice_grade_stderr": 0.035043046034511346
},
"bigbench_analogies": {
"multiple_choice_grade": 0.6846153846153846,
"multiple_choice_grade_stderr": 0.04091181286955817
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3389830508474576,
"multiple_choice_grade_stderr": 0.062155747381159164
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.05021167315686781
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8235294117647058,
"multiple_choice_grade_stderr": 0.05391265523477458
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.050251890762960605
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5333333333333333,
"multiple_choice_grade_stderr": 0.06494964005966064
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.79,
"multiple_choice_grade_stderr": 0.04093601807403326
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.4067796610169492,
"multiple_choice_grade_stderr": 0.06450206738198198
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.5857142857142857,
"multiple_choice_grade_stderr": 0.05930186364895277
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.43,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.59,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.8695652173913043,
"multiple_choice_grade_stderr": 0.07180198468215394
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.49019607843137253,
"multiple_choice_grade_stderr": 0.07069708383262727
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.71,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5895522388059702,
"multiple_choice_grade_stderr": 0.04265444264822084
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5813953488372093,
"multiple_choice_grade_stderr": 0.07612251984976479
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.6,
"multiple_choice_grade_stderr": 0.049236596391733084
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.33,
"multiple_choice_grade_stderr": 0.04725815626252605
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.88,
"multiple_choice_grade_stderr": 0.03265986323710906
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.45098039215686275,
"multiple_choice_grade_stderr": 0.0703700331173583
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.05021167315686779
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.04923659639173309
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.69,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.6,
"multiple_choice_grade_stderr": 0.049236596391733084
},
"bigbench_emotions": {
"multiple_choice_grade": 0.525,
"multiple_choice_grade_stderr": 0.03960298254443846
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.04292346959909282
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6052631578947368,
"multiple_choice_grade_stderr": 0.05644108049875581
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.494949494949495,
"multiple_choice_grade_stderr": 0.05050505050505048
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.7904624277456648,
"multiple_choice_grade_stderr": 0.015482195463332293
}
},
"versions": {
"bigbench_hhh_alignment_harmless": 0,
"bigbench_paraphrase": 0,
"bigbench_analogies": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_general_knowledge": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_misconceptions": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_emotions": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_empirical_judgments": 0,
"bigbench_intent_recognition": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom-finnish-176b",
"num_fewshot": 2,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
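
The `config` block (model type `hf-causal-experimental`, `num_fewshot`, `no_cache`, `bootstrap_iters`) indicates this file was produced by an EleutherAI lm-evaluation-harness style run. Below is a minimal sketch for loading and summarizing the scores; the file name `results.json` is an assumption, and the macro-average is a simple unweighted mean over the 36 subtasks, not a metric computed by the harness itself.

```python
# A minimal sketch, assuming this JSON is saved as "results.json".
import json

with open("results.json") as f:
    data = json.load(f)

results = data["results"]

# Per-task accuracy with its standard error, sorted from best to worst.
for task, metrics in sorted(
    results.items(),
    key=lambda kv: kv[1]["multiple_choice_grade"],
    reverse=True,
):
    grade = metrics["multiple_choice_grade"]
    stderr = metrics["multiple_choice_grade_stderr"]
    print(f"{task:55s} {grade:.4f} +/- {stderr:.4f}")

# Unweighted macro-average over all subtasks (illustrative only;
# the harness does not emit this aggregate).
avg = sum(m["multiple_choice_grade"] for m in results.values()) / len(results)
print(f"\nmacro-average multiple_choice_grade: {avg:.4f}")
```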