{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.3923076923076923,
"multiple_choice_grade_stderr": 0.04298930521985775
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.57,
"multiple_choice_grade_stderr": 0.04975698519562426
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.6956521739130435,
"multiple_choice_grade_stderr": 0.09810018692482896
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.57,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.54,
"multiple_choice_grade_stderr": 0.05009082659620332
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.04292346959909284
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.67,
"multiple_choice_grade_stderr": 0.047258156262526066
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.46,
"multiple_choice_grade_stderr": 0.05009082659620333
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.43,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.0440844002276808
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.66,
"multiple_choice_grade_stderr": 0.04760952285695237
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.049431107042371025
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.31,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542127
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.049431107042371025
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.04560480215720685
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.22,
"multiple_choice_grade_stderr": 0.04163331998932268
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.65,
"multiple_choice_grade_stderr": 0.047937248544110196
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5882352941176471,
"multiple_choice_grade_stderr": 0.06960093862470136
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.9215686274509803,
"multiple_choice_grade_stderr": 0.03802101848953983
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.49019607843137253,
"multiple_choice_grade_stderr": 0.07069708383262727
},
"bigbench_emotions": {
"multiple_choice_grade": 0.36875,
"multiple_choice_grade_stderr": 0.03826204233503226
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.29292929292929293,
"multiple_choice_grade_stderr": 0.04597267625418179
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.37142857142857144,
"multiple_choice_grade_stderr": 0.05816884316617732
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.4482758620689655,
"multiple_choice_grade_stderr": 0.06587130109529918
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3220338983050847,
"multiple_choice_grade_stderr": 0.06135370413564329
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3898305084745763,
"multiple_choice_grade_stderr": 0.06403968100905791
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5348837209302325,
"multiple_choice_grade_stderr": 0.07696367820108108
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.40028901734104044,
"multiple_choice_grade_stderr": 0.01863883718792749
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.4925373134328358,
"multiple_choice_grade_stderr": 0.04335066912520505
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.03538020341900046
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.48333333333333334,
"multiple_choice_grade_stderr": 0.06505828185300304
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6578947368421053,
"multiple_choice_grade_stderr": 0.054780684196477164
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-3B",
"num_fewshot": 2,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}