{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.4461538461538462,
"multiple_choice_grade_stderr": 0.0437665221706563
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.04923659639173309
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.782608695652174,
"multiple_choice_grade_stderr": 0.08793911249520547
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.05016135580465919
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.044619604333847394
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.03,
"multiple_choice_grade_stderr": 0.017144660799776508
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.56,
"multiple_choice_grade_stderr": 0.04988876515698589
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.15,
"multiple_choice_grade_stderr": 0.0358870281282637
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.37,
"multiple_choice_grade_stderr": 0.04852365870939099
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.049431107042371025
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.18,
"multiple_choice_grade_stderr": 0.03861229196653696
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.06,
"multiple_choice_grade_stderr": 0.023868325657594197
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.35,
"multiple_choice_grade_stderr": 0.0479372485441102
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.21,
"multiple_choice_grade_stderr": 0.04093601807403326
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.045126085985421276
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.09,
"multiple_choice_grade_stderr": 0.028762349126466125
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.37,
"multiple_choice_grade_stderr": 0.048523658709391
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.38,
"multiple_choice_grade_stderr": 0.048783173121456316
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.17,
"multiple_choice_grade_stderr": 0.03775251680686371
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.08,
"multiple_choice_grade_stderr": 0.027265992434429086
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.38,
"multiple_choice_grade_stderr": 0.048783173121456316
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5686274509803921,
"multiple_choice_grade_stderr": 0.07004145529212454
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.7843137254901961,
"multiple_choice_grade_stderr": 0.05816626264388756
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.47058823529411764,
"multiple_choice_grade_stderr": 0.07058823529411762
},
"bigbench_emotions": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.036342189215581536
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.3434343434343434,
"multiple_choice_grade_stderr": 0.04796759058757477
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.058976782461958845
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.3793103448275862,
"multiple_choice_grade_stderr": 0.06426835284800642
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3389830508474576,
"multiple_choice_grade_stderr": 0.062155747381159164
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3898305084745763,
"multiple_choice_grade_stderr": 0.06403968100905791
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5348837209302325,
"multiple_choice_grade_stderr": 0.07696367820108108
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.17630057803468208,
"multiple_choice_grade_stderr": 0.014496802592691349
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5522388059701493,
"multiple_choice_grade_stderr": 0.04311822418389971
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.515,
"multiple_choice_grade_stderr": 0.03542810683297719
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.48333333333333334,
"multiple_choice_grade_stderr": 0.06505828185300304
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.618421052631579,
"multiple_choice_grade_stderr": 0.05609235887280058
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-3B",
"num_fewshot": 0,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}