{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.3384615384615385,
"multiple_choice_grade_stderr": 0.0416617354083896
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.41,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.6086956521739131,
"multiple_choice_grade_stderr": 0.10405096111532161
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.43,
"multiple_choice_grade_stderr": 0.04975698519562428
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.44,
"multiple_choice_grade_stderr": 0.04988876515698589
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.08,
"multiple_choice_grade_stderr": 0.027265992434429086
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.4,
"multiple_choice_grade_stderr": 0.049236596391733084
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.2,
"multiple_choice_grade_stderr": 0.04020151261036843
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.19,
"multiple_choice_grade_stderr": 0.03942772444036623
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.04461960433384741
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.31,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.24,
"multiple_choice_grade_stderr": 0.04292346959909284
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.22,
"multiple_choice_grade_stderr": 0.041633319989322695
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.21,
"multiple_choice_grade_stderr": 0.040936018074033256
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.35,
"multiple_choice_grade_stderr": 0.047937248544110196
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.34,
"multiple_choice_grade_stderr": 0.04760952285695236
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.18,
"multiple_choice_grade_stderr": 0.038612291966536934
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542128
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.44,
"multiple_choice_grade_stderr": 0.049888765156985884
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5490196078431373,
"multiple_choice_grade_stderr": 0.07037003311735829
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.7843137254901961,
"multiple_choice_grade_stderr": 0.05816626264388755
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.47058823529411764,
"multiple_choice_grade_stderr": 0.07058823529411762
},
"bigbench_emotions": {
"multiple_choice_grade": 0.25,
"multiple_choice_grade_stderr": 0.03434014098717226
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.36363636363636365,
"multiple_choice_grade_stderr": 0.048592953074986234
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.45714285714285713,
"multiple_choice_grade_stderr": 0.05997140203803455
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.43103448275862066,
"multiple_choice_grade_stderr": 0.06559361295281742
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.2711864406779661,
"multiple_choice_grade_stderr": 0.05837517703884877
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.3728813559322034,
"multiple_choice_grade_stderr": 0.0634959746661109
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.5813953488372093,
"multiple_choice_grade_stderr": 0.07612251984976479
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.20086705202312138,
"multiple_choice_grade_stderr": 0.01524139953133884
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.48507462686567165,
"multiple_choice_grade_stderr": 0.04333617784312701
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.515,
"multiple_choice_grade_stderr": 0.03542810683297719
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.43333333333333335,
"multiple_choice_grade_stderr": 0.0645132433593152
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6052631578947368,
"multiple_choice_grade_stderr": 0.056441080498755805
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-large",
"num_fewshot": 2,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}