{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.4076923076923077,
"multiple_choice_grade_stderr": 0.043265835022121765
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.37,
"multiple_choice_grade_stderr": 0.048523658709391
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.5652173913043478,
"multiple_choice_grade_stderr": 0.10568965974008646
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.37,
"multiple_choice_grade_stderr": 0.04852365870939099
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.38,
"multiple_choice_grade_stderr": 0.04878317312145633
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.15,
"multiple_choice_grade_stderr": 0.03588702812826371
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.38,
"multiple_choice_grade_stderr": 0.04878317312145632
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.21,
"multiple_choice_grade_stderr": 0.04093601807403326
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.04960449637488584
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.38,
"multiple_choice_grade_stderr": 0.04878317312145633
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.044619604333847394
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.35,
"multiple_choice_grade_stderr": 0.0479372485441102
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.14,
"multiple_choice_grade_stderr": 0.0348735088019777
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.2,
"multiple_choice_grade_stderr": 0.04020151261036845
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.6078431372549019,
"multiple_choice_grade_stderr": 0.0690463406339569
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.9215686274509803,
"multiple_choice_grade_stderr": 0.03802101848953982
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.5294117647058824,
"multiple_choice_grade_stderr": 0.07058823529411762
},
"bigbench_emotions": {
"multiple_choice_grade": 0.38125,
"multiple_choice_grade_stderr": 0.03851802138867094
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.42424242424242425,
"multiple_choice_grade_stderr": 0.049924513396843256
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.34285714285714286,
"multiple_choice_grade_stderr": 0.05714285714285713
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.43103448275862066,
"multiple_choice_grade_stderr": 0.06559361295281742
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3728813559322034,
"multiple_choice_grade_stderr": 0.0634959746661109
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3898305084745763,
"multiple_choice_grade_stderr": 0.06403968100905791
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5348837209302325,
"multiple_choice_grade_stderr": 0.07696367820108108
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.4320809248554913,
"multiple_choice_grade_stderr": 0.018844592526348247
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5074626865671642,
"multiple_choice_grade_stderr": 0.04335066912520505
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.565,
"multiple_choice_grade_stderr": 0.03514328173714407
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.06509445549041193
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6578947368421053,
"multiple_choice_grade_stderr": 0.054780684196477164
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-13B",
"num_fewshot": 1,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}