task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.4461538461538462,0.0437665221706563,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.4,0.04923659639173309,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.782608695652174,0.08793911249520547,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.53,0.05016135580465919,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.27,0.044619604333847394,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.03,0.017144660799776508,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.56,0.04988876515698589,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.15,0.0358870281282637,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.37,0.04852365870939099,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.18,0.03861229196653696,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.06,0.023868325657594197,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.35,0.0479372485441102,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.21,0.04093601807403326,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.28,0.045126085985421276,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.09,0.028762349126466125,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.37,0.048523658709391,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.38,0.048783173121456316,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.17,0.03775251680686371,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.08,0.027265992434429086,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.38,0.048783173121456316,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5686274509803921,0.07004145529212454,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.7843137254901961,0.05816626264388756,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.3,0.036342189215581536,0
bigbench_empirical_judgments,multiple_choice_grade,0.3434343434343434,0.04796759058757477,0
bigbench_general_knowledge,multiple_choice_grade,0.4,0.058976782461958845,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.3793103448275862,0.06426835284800642,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3389830508474576,0.062155747381159164,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3898305084745763,0.06403968100905791,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5348837209302325,0.07696367820108108,0
bigbench_intent_recognition,multiple_choice_grade,0.17630057803468208,0.014496802592691349,0
bigbench_misconceptions,multiple_choice_grade,0.5522388059701493,0.04311822418389971,0
bigbench_paraphrase,multiple_choice_grade,0.515,0.03542810683297719,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.48333333333333334,0.06505828185300304,0
bigbench_similarities_abstraction,multiple_choice_grade,0.618421052631579,0.05609235887280058,0