evaluation/gpt3_finnish_medium_bigbench_1shot.csv
task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.33076923076923076,0.041424344670265414,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.38,0.04878317312145633,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.6086956521739131,0.10405096111532161,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.26,0.04408440022768079,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.41,0.049431107042371025,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.3,0.046056618647183814,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.33,0.04725815626252604,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.11,0.031446603773522035,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.33,0.047258156262526045,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.2,0.04020151261036845,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.22,0.041633319989322695,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.23,0.04229525846816505,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.34,0.04760952285695236,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.2,0.04020151261036843,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.18,0.03861229196653696,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.21,0.040936018074033256,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.3,0.046056618647183814,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.2,0.04020151261036845,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.14,0.03487350880197769,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.14,0.034873508801977704,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.4,0.049236596391733084,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.5686274509803921,0.07004145529212454,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.8235294117647058,0.05391265523477458,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.43137254901960786,0.07004145529212454,0
bigbench_emotions,multiple_choice_grade,0.23125,0.033437582657277455,0
bigbench_empirical_judgments,multiple_choice_grade,0.40404040404040403,0.049568727380426184,0
bigbench_general_knowledge,multiple_choice_grade,0.3,0.055167728436737035,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.39655172413793105,0.06479366091762498,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3050847457627119,0.060459168847106955,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.3728813559322034,0.0634959746661109,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.5348837209302325,0.07696367820108108,0
bigbench_intent_recognition,multiple_choice_grade,0.16329479768786126,0.014061556151875897,0
bigbench_misconceptions,multiple_choice_grade,0.4701492537313433,0.04327816419216089,0
bigbench_paraphrase,multiple_choice_grade,0.485,0.03542810683297719,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5166666666666667,0.06505828185300304,0
bigbench_similarities_abstraction,multiple_choice_grade,0.5394736842105263,0.0575548235169272,0