evaluation/gpt3_finnish_3b_bigbench_1shot.csv
task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.36153846153846153,0.042300915595389274,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.58,0.049604496374885836,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6521739130434783,0.10154334054280735,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.49,0.05024183937956912,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.57,0.04975698519562428,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.59,0.04943110704237102,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.21,0.040936018074033256,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.56,0.049888765156985884,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.4,0.049236596391733084,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.24,0.04292346959909284,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.61,0.04902071300001975,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.38,0.04878317312145633,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.33,0.047258156262526045,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.25,0.04351941398892446,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.46,0.05009082659620332,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.47,0.05016135580465919,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.3,0.046056618647183814,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.59,0.04943110704237102,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.6274509803921569,0.0683748853888733,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.9019607843137255,0.042054138410837674,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.31875,0.036955560385363254,0
bigbench_empirical_judgments,multiple_choice_grade,0.37373737373737376,0.048870690395024875,0
bigbench_general_knowledge,multiple_choice_grade,0.35714285714285715,0.05768377522051772,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.43103448275862066,0.06559361295281742,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3559322033898305,0.06286883855871885,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5348837209302325,0.07696367820108108,0
bigbench_intent_recognition,multiple_choice_grade,0.35260115606936415,0.018175601909899322,0
bigbench_misconceptions,multiple_choice_grade,0.48507462686567165,0.04333617784312701,0
bigbench_paraphrase,multiple_choice_grade,0.5,0.0354440602504168,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.55,0.06476816543825593,0
bigbench_similarities_abstraction,multiple_choice_grade,0.6578947368421053,0.054780684196477164,0