task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.34615384615384615,0.04188683174943868,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.47,0.05016135580465919,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.5652173913043478,0.10568965974008646,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.33,0.04725815626252605,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.51,0.05024183937956911,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.44,0.04988876515698589,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.25,0.04351941398892446,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.64,0.048241815132442176,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.58,0.049604496374885836,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.26,0.0440844002276808,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.27,0.0446196043338474,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.61,0.04902071300001974,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.51,0.05024183937956912,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.27,0.0446196043338474,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.33,0.04725815626252605,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.52,0.050211673156867795,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.59,0.04943110704237101,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.22,0.04163331998932269,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.35,0.047937248544110196,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.64,0.04824181513244218,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.6078431372549019,0.0690463406339569,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.8431372549019608,0.051430890386682346,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.6078431372549019,0.0690463406339569,0
bigbench_emotions,multiple_choice_grade,0.38125,0.038518021388670956,0
bigbench_empirical_judgments,multiple_choice_grade,0.36363636363636365,0.048592953074986234,0
bigbench_general_knowledge,multiple_choice_grade,0.4,0.058976782461958845,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.4482758620689655,0.06587130109529918,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.4067796610169492,0.06450206738198198,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.588150289017341,0.018722960301969224,0
bigbench_misconceptions,multiple_choice_grade,0.4626865671641791,0.043234602868397164,0
bigbench_paraphrase,multiple_choice_grade,0.47,0.03538020341900046,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5666666666666667,0.06451324335931521,0
bigbench_similarities_abstraction,multiple_choice_grade,0.7105263157894737,0.052367759847716855,0