evaluation/gpt3_finnish_3b_bigbench_3shot.csv
task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.3923076923076923,0.042989305219857755,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.58,0.04960449637488584,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.5217391304347826,0.10649955403405122,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.42,0.049604496374885836,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.58,0.049604496374885836,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.59,0.04943110704237102,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.47,0.05016135580465919,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.21,0.040936018074033256,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.65,0.04793724854411019,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.49,0.05024183937956911,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.41,0.04943110704237102,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.27,0.0446196043338474,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.64,0.048241815132442176,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.44,0.04988876515698589,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.34,0.04760952285695235,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.3,0.046056618647183814,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.56,0.049888765156985884,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.23,0.04229525846816506,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.65,0.04793724854411019,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5490196078431373,0.07037003311735827,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.8235294117647058,0.05391265523477458,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.45098039215686275,0.0703700331173583,0
bigbench_emotions,multiple_choice_grade,0.41875,0.039125538756915115,0
bigbench_empirical_judgments,multiple_choice_grade,0.40404040404040403,0.04956872738042618,0
bigbench_general_knowledge,multiple_choice_grade,0.35714285714285715,0.05768377522051772,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.4482758620689655,0.06587130109529918,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3559322033898305,0.06286883855871885,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3898305084745763,0.06403968100905791,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5581395348837209,0.07662832288817804,0
bigbench_intent_recognition,multiple_choice_grade,0.41184971098265893,0.018722960301969214,0
bigbench_misconceptions,multiple_choice_grade,0.48507462686567165,0.04333617784312701,0
bigbench_paraphrase,multiple_choice_grade,0.565,0.03514328173714408,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.4666666666666667,0.06494964005966064,0
bigbench_similarities_abstraction,multiple_choice_grade,0.6578947368421053,0.054780684196477164,0