task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.3153846153846154,0.04091181286955817,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.35,0.047937248544110175,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6086956521739131,0.10405096111532161,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.15,0.03588702812826371,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.36,0.048241815132442176,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.15,0.03588702812826371,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.31,0.04648231987117316,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.04,0.019694638556693237,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.14,0.0348735088019777,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.09,0.028762349126466143,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.18,0.038612291966536955,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.05,0.021904291355759026,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.26,0.0440844002276808,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.1,0.030151134457776334,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.15,0.03588702812826371,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.07,0.025643239997624294,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.29,0.045604802157206845,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.12,0.03265986323710906,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.14,0.0348735088019777,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.08,0.0272659924344291,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.26,0.04408440022768079,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5490196078431373,0.07037003311735829,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.6470588235294118,0.06758308995927091,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.6274509803921569,0.06837488538887332,0
bigbench_emotions,multiple_choice_grade,0.14375,0.02782310897584524,0
bigbench_empirical_judgments,multiple_choice_grade,0.23232323232323232,0.04266016017054687,0
bigbench_general_knowledge,multiple_choice_grade,0.11428571428571428,0.03830171786284782,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.5,0.06622661785325219,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.22033898305084745,0.05442326385157391,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3389830508474576,0.062155747381159164,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5813953488372093,0.07612251984976479,0
bigbench_intent_recognition,multiple_choice_grade,0.13439306358381503,0.012975071726816043,0
bigbench_misconceptions,multiple_choice_grade,0.47761194029850745,0.043312014931941,0
bigbench_paraphrase,multiple_choice_grade,0.49,0.035436970729343674,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5,0.06509445549041193,0
bigbench_similarities_abstraction,multiple_choice_grade,0.4868421052631579,0.05771503210384553,0