evaluation/gpt3_finnish_large_bigbench_3shot.csv
task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.38461538461538464,0.04283431126074645,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.47,0.050161355804659205,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.4782608695652174,0.10649955403405124,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.47,0.050161355804659205,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.47,0.05016135580465919,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.1,0.030151134457776348,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.41,0.04943110704237102,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.18,0.03861229196653697,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.25,0.04351941398892446,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.28,0.04512608598542127,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.3,0.04605661864718381,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.26,0.0440844002276808,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.22,0.0416333199893227,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.27,0.044619604333847394,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.24,0.042923469599092816,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.34,0.04760952285695235,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.32,0.046882617226215034,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.25,0.04351941398892446,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.24,0.042923469599092816,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.48,0.050211673156867795,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5490196078431373,0.07037003311735829,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.8431372549019608,0.05143089038668235,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.5490196078431373,0.07037003311735827,0
bigbench_emotions,multiple_choice_grade,0.2625,0.03489370652018762,0
bigbench_empirical_judgments,multiple_choice_grade,0.36363636363636365,0.048592953074986234,0
bigbench_general_knowledge,multiple_choice_grade,0.4,0.058976782461958845,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.43103448275862066,0.06559361295281742,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.288135593220339,0.059467967781548406,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.27601156069364163,0.017005540046345993,0
bigbench_misconceptions,multiple_choice_grade,0.4253731343283582,0.0428698704965525,0
bigbench_paraphrase,multiple_choice_grade,0.515,0.03542810683297719,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5333333333333333,0.06494964005966064,0
bigbench_similarities_abstraction,multiple_choice_grade,0.631578947368421,0.05570002760135977,0