{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.3923076923076923,
"multiple_choice_grade_stderr": 0.042989305219857755
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.58,
"multiple_choice_grade_stderr": 0.04960449637488584
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.5217391304347826,
"multiple_choice_grade_stderr": 0.10649955403405122
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.049604496374885836
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.58,
"multiple_choice_grade_stderr": 0.049604496374885836
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.59,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.05016135580465919
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.21,
"multiple_choice_grade_stderr": 0.040936018074033256
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.65,
"multiple_choice_grade_stderr": 0.04793724854411019
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.49,
"multiple_choice_grade_stderr": 0.05024183937956911
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.0446196043338474
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.64,
"multiple_choice_grade_stderr": 0.048241815132442176
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.44,
"multiple_choice_grade_stderr": 0.04988876515698589
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.34,
"multiple_choice_grade_stderr": 0.04760952285695235
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.56,
"multiple_choice_grade_stderr": 0.049888765156985884
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.049431107042371025
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.23,
"multiple_choice_grade_stderr": 0.04229525846816506
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.65,
"multiple_choice_grade_stderr": 0.04793724854411019
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5490196078431373,
"multiple_choice_grade_stderr": 0.07037003311735827
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8235294117647058,
"multiple_choice_grade_stderr": 0.05391265523477458
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.45098039215686275,
"multiple_choice_grade_stderr": 0.0703700331173583
},
"bigbench_emotions": {
"multiple_choice_grade": 0.41875,
"multiple_choice_grade_stderr": 0.039125538756915115
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.40404040404040403,
"multiple_choice_grade_stderr": 0.04956872738042618
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.35714285714285715,
"multiple_choice_grade_stderr": 0.05768377522051772
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.4482758620689655,
"multiple_choice_grade_stderr": 0.06587130109529918
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3559322033898305,
"multiple_choice_grade_stderr": 0.06286883855871885
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3898305084745763,
"multiple_choice_grade_stderr": 0.06403968100905791
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5581395348837209,
"multiple_choice_grade_stderr": 0.07662832288817804
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.41184971098265893,
"multiple_choice_grade_stderr": 0.018722960301969214
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.48507462686567165,
"multiple_choice_grade_stderr": 0.04333617784312701
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.565,
"multiple_choice_grade_stderr": 0.03514328173714408
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.4666666666666667,
"multiple_choice_grade_stderr": 0.06494964005966064
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6578947368421053,
"multiple_choice_grade_stderr": 0.054780684196477164
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-3B",
"num_fewshot": 3,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
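A minimal reproduction sketch, assuming this file was produced with EleutherAI's lm-evaluation-harness: the keys under "config" (model, model_args, num_fewshot, batch_size, device, no_cache, limit, bootstrap_iters, description_dict) mirror the arguments of that harness's evaluator.simple_evaluate entry point. The task list below is abbreviated, and the exact harness revision that registers these bigbench_* tasks is an assumption, as is the TurkuNLP org prefix for the model.

import json
from lm_eval import evaluator  # EleutherAI lm-evaluation-harness (assumed revision)

# Abbreviated task list; the full set is the keys of "results" above.
tasks = ["bigbench_analogies", "bigbench_arithmetic_1_digit_addition"]

results = evaluator.simple_evaluate(
    model="hf-causal",
    model_args="pretrained=gpt3-finnish-3B",  # assumed to resolve to TurkuNLP/gpt3-finnish-3B on the Hub
    tasks=tasks,
    num_fewshot=3,            # matches "num_fewshot": 3 in "config"
    device="cuda:0",
    bootstrap_iters=100000,   # controls the stderr bootstrap, as in "config"
)

# Write results in the same shape as this file.
with open("gpt3_finnish_3b_bigbench_3shot.json", "w") as f:
    json.dump(results, f, indent=2)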