{
"results": {
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.64,
"multiple_choice_grade_stderr": 0.048241815132442176
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.43,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.5490196078431373,
"multiple_choice_grade_stderr": 0.07037003311735829
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.41379310344827586,
"multiple_choice_grade_stderr": 0.06523484847771846
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.34,
"multiple_choice_grade_stderr": 0.047609522856952344
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.63,
"multiple_choice_grade_stderr": 0.04852365870939098
},
"bigbench_analogies": {
"multiple_choice_grade": 0.7230769230769231,
"multiple_choice_grade_stderr": 0.03939825345266469
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.4067796610169492,
"multiple_choice_grade_stderr": 0.06450206738198198
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.45,
"multiple_choice_grade_stderr": 0.06476816543825593
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5813953488372093,
"multiple_choice_grade_stderr": 0.07612251984976479
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.39215686274509803,
"multiple_choice_grade_stderr": 0.06904634063395691
},
"bigbench_emotions": {
"multiple_choice_grade": 0.5875,
"multiple_choice_grade_stderr": 0.03904067786683381
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.81,
"multiple_choice_grade_stderr": 0.03942772444036623
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.5858585858585859,
"multiple_choice_grade_stderr": 0.04975740158605099
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6578947368421053,
"multiple_choice_grade_stderr": 0.05478068419647717
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.45,
"multiple_choice_grade_stderr": 0.05
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8431372549019608,
"multiple_choice_grade_stderr": 0.051430890386682346
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.4067796610169492,
"multiple_choice_grade_stderr": 0.06450206738198198
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.57,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.86,
"multiple_choice_grade_stderr": 0.03487350880197771
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5074626865671642,
"multiple_choice_grade_stderr": 0.04335066912520505
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.8020231213872833,
"multiple_choice_grade_stderr": 0.01515868949062073
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.71,
"multiple_choice_grade_stderr": 0.04560480215720684
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.415,
"multiple_choice_grade_stderr": 0.034928138718973496
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542127
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.31,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.65,
"multiple_choice_grade_stderr": 0.047937248544110196
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.9130434782608695,
"multiple_choice_grade_stderr": 0.060073850409370216
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.6571428571428571,
"multiple_choice_grade_stderr": 0.05714285714285713
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.57,
"multiple_choice_grade_stderr": 0.049756985195624284
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.0440844002276808
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.62,
"multiple_choice_grade_stderr": 0.048783173121456316
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.54,
"multiple_choice_grade_stderr": 0.05009082659620333
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.04408440022768078
}
},
"versions": {
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_analogies": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_emotions": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_empirical_judgments": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_misconceptions": 0,
"bigbench_intent_recognition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_paraphrase": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_general_knowledge": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_2_digit_multiplication": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom-finnish-176b",
"num_fewshot": 3,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}