task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.34615384615384615,0.04188683174943867,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.49,0.05024183937956911,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6086956521739131,0.10405096111532161,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.23,0.04229525846816506,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.44,0.04988876515698589,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.4,0.049236596391733084,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.32,0.04688261722621504,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.13,0.03379976689896309,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.25,0.04351941398892446,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.36,0.048241815132442176,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.2,0.04020151261036846,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.28,0.045126085985421276,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.3,0.046056618647183814,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.23,0.04229525846816506,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.19,0.03942772444036624,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.19,0.039427724440366234,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.38,0.04878317312145632,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.36,0.048241815132442176,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.08,0.027265992434429086,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.21,0.04093601807403325,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.6470588235294118,0.06758308995927091,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.6862745098039216,0.0656203942379667,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.15,0.02831760569774277,0
bigbench_empirical_judgments,multiple_choice_grade,0.24242424242424243,0.043290043290043274,0
bigbench_general_knowledge,multiple_choice_grade,0.2,0.04815434123430765,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.41379310344827586,0.06523484847771846,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3050847457627119,0.060459168847106955,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5348837209302325,0.07696367820108108,0
bigbench_intent_recognition,multiple_choice_grade,0.18063583815028902,0.01463529287638173,0
bigbench_misconceptions,multiple_choice_grade,0.44776119402985076,0.043118224183899725,0
bigbench_paraphrase,multiple_choice_grade,0.505,0.035442288003096976,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5166666666666667,0.06505828185300304,0
bigbench_similarities_abstraction,multiple_choice_grade,0.4342105263157895,0.05723306097613564,0