{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.38461538461538464,
"multiple_choice_grade_stderr": 0.04283431126074645
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.4782608695652174,
"multiple_choice_grade_stderr": 0.10649955403405124
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.05016135580465919
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.1,
"multiple_choice_grade_stderr": 0.030151134457776348
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.18,
"multiple_choice_grade_stderr": 0.03861229196653697
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.25,
"multiple_choice_grade_stderr": 0.04351941398892446
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542127
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.04605661864718381
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.26,
"multiple_choice_grade_stderr": 0.0440844002276808
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.22,
"multiple_choice_grade_stderr": 0.0416333199893227
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.044619604333847394
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.042923469599092816
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.34,
"multiple_choice_grade_stderr": 0.04760952285695235
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.25,
"multiple_choice_grade_stderr": 0.04351941398892446
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.042923469599092816
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5490196078431373,
"multiple_choice_grade_stderr": 0.07037003311735829
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8431372549019608,
"multiple_choice_grade_stderr": 0.05143089038668235
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.5490196078431373,
"multiple_choice_grade_stderr": 0.07037003311735827
},
"bigbench_emotions": {
"multiple_choice_grade": 0.2625,
"multiple_choice_grade_stderr": 0.03489370652018762
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.36363636363636365,
"multiple_choice_grade_stderr": 0.048592953074986234
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.058976782461958845
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.43103448275862066,
"multiple_choice_grade_stderr": 0.06559361295281742
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.288135593220339,
"multiple_choice_grade_stderr": 0.059467967781548406
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3728813559322034,
"multiple_choice_grade_stderr": 0.0634959746661109
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5813953488372093,
"multiple_choice_grade_stderr": 0.07612251984976479
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.27601156069364163,
"multiple_choice_grade_stderr": 0.017005540046345993
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.4253731343283582,
"multiple_choice_grade_stderr": 0.0428698704965525
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.515,
"multiple_choice_grade_stderr": 0.03542810683297719
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5333333333333333,
"multiple_choice_grade_stderr": 0.06494964005966064
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.631578947368421,
"multiple_choice_grade_stderr": 0.05570002760135977
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-large",
"num_fewshot": 3,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}