task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.3384615384615385,0.0416617354083896,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.41,0.04943110704237102,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6086956521739131,0.10405096111532161,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.43,0.04975698519562428,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.44,0.04988876515698589,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.08,0.027265992434429086,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.4,0.049236596391733084,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.2,0.04020151261036843,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.19,0.03942772444036623,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.32,0.046882617226215034,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.27,0.04461960433384741,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.31,0.04648231987117316,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.24,0.04292346959909284,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.22,0.041633319989322695,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.21,0.040936018074033256,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.35,0.047937248544110196,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.34,0.04760952285695236,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.18,0.038612291966536934,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.28,0.04512608598542128,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.44,0.049888765156985884,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5490196078431373,0.07037003311735829,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.7843137254901961,0.05816626264388755,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.25,0.03434014098717226,0
bigbench_empirical_judgments,multiple_choice_grade,0.36363636363636365,0.048592953074986234,0
bigbench_general_knowledge,multiple_choice_grade,0.45714285714285713,0.05997140203803455,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.43103448275862066,0.06559361295281742,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.2711864406779661,0.05837517703884877,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.20086705202312138,0.01524139953133884,0
bigbench_misconceptions,multiple_choice_grade,0.48507462686567165,0.04333617784312701,0
bigbench_paraphrase,multiple_choice_grade,0.515,0.03542810683297719,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.43333333333333335,0.0645132433593152,0
bigbench_similarities_abstraction,multiple_choice_grade,0.6052631578947368,0.056441080498755805,0