task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.23076923076923078,0.03709560170541631,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.35,0.047937248544110196,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.5652173913043478,0.10568965974008646,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.19,0.039427724440366234,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.48,0.050211673156867795,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.1,0.030151134457776358,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.36,0.04824181513244218,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.12,0.03265986323710906,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.2,0.040201512610368445,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.09,0.02876234912646613,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.12,0.03265986323710906,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.18,0.03861229196653697,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.27,0.04461960433384741,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.15,0.035887028128263714,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.17,0.0377525168068637,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.09,0.028762349126466125,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.22,0.0416333199893227,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.12,0.03265986323710906,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.11,0.031446603773522035,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.16,0.03684529491774708,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5882352941176471,0.06960093862470136,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.6078431372549019,0.0690463406339569,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.18125,0.030550343799854465,0
bigbench_empirical_judgments,multiple_choice_grade,0.35353535353535354,0.04829206502361188,0
bigbench_general_knowledge,multiple_choice_grade,0.2571428571428571,0.052615698346701524,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.43103448275862066,0.06559361295281742,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.288135593220339,0.059467967781548406,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3389830508474576,0.062155747381159164,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.17341040462427745,0.014402686887147381,0
bigbench_misconceptions,multiple_choice_grade,0.5074626865671642,0.04335066912520504,0
bigbench_paraphrase,multiple_choice_grade,0.47,0.03538020341900045,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5166666666666667,0.06505828185300304,0
bigbench_similarities_abstraction,multiple_choice_grade,0.5526315789473685,0.05741427428755636,0