task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.3923076923076923,0.04298930521985775,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.57,0.04975698519562426,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6956521739130435,0.09810018692482896,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.57,0.04975698519562428,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.54,0.05009082659620332,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.24,0.04292346959909284,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.67,0.047258156262526066,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.46,0.05009082659620333,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.43,0.04975698519562428,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.26,0.0440844002276808,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.66,0.04760952285695237,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.31,0.04648231987117316,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.28,0.04512608598542127,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.53,0.050161355804659205,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.29,0.04560480215720685,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.22,0.04163331998932268,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.65,0.047937248544110196,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5882352941176471,0.06960093862470136,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.9215686274509803,0.03802101848953983,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.49019607843137253,0.07069708383262727,0
bigbench_emotions,multiple_choice_grade,0.36875,0.03826204233503226,0
bigbench_empirical_judgments,multiple_choice_grade,0.29292929292929293,0.04597267625418179,0
bigbench_general_knowledge,multiple_choice_grade,0.37142857142857144,0.05816884316617732,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.4482758620689655,0.06587130109529918,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3220338983050847,0.06135370413564329,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3898305084745763,0.06403968100905791,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5348837209302325,0.07696367820108108,0
bigbench_intent_recognition,multiple_choice_grade,0.40028901734104044,0.01863883718792749,0
bigbench_misconceptions,multiple_choice_grade,0.4925373134328358,0.04335066912520505,0
bigbench_paraphrase,multiple_choice_grade,0.53,0.03538020341900046,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.48333333333333334,0.06505828185300304,0
bigbench_similarities_abstraction,multiple_choice_grade,0.6578947368421053,0.054780684196477164,0