task,metric,value,err,version
bigbench_analogies,multiple_choice_grade,0.5692307692307692,0.04359851186846964,0
bigbench_arithmetic_1_digit_addition,multiple_choice_grade,0.73,0.044619604333847394,0
bigbench_arithmetic_1_digit_division,multiple_choice_grade,0.782608695652174,0.08793911249520547,0
bigbench_arithmetic_1_digit_multiplication,multiple_choice_grade,0.94,0.02386832565759419,0
bigbench_arithmetic_1_digit_subtraction,multiple_choice_grade,0.66,0.04760952285695238,0
bigbench_arithmetic_2_digit_addition,multiple_choice_grade,0.5,0.050251890762960605,0
bigbench_arithmetic_2_digit_division,multiple_choice_grade,0.74,0.04408440022768079,0
bigbench_arithmetic_2_digit_multiplication,multiple_choice_grade,0.27,0.044619604333847394,0
bigbench_arithmetic_2_digit_subtraction,multiple_choice_grade,0.56,0.049888765156985884,0
bigbench_arithmetic_3_digit_addition,multiple_choice_grade,0.42,0.04960449637488584,0
bigbench_arithmetic_3_digit_division,multiple_choice_grade,0.57,0.049756985195624284,0
bigbench_arithmetic_3_digit_multiplication,multiple_choice_grade,0.22,0.0416333199893227,0
bigbench_arithmetic_3_digit_subtraction,multiple_choice_grade,0.58,0.049604496374885836,0
bigbench_arithmetic_4_digit_addition,multiple_choice_grade,0.39,0.04902071300001975,0
bigbench_arithmetic_4_digit_division,multiple_choice_grade,0.5,0.050251890762960605,0
bigbench_arithmetic_4_digit_multiplication,multiple_choice_grade,0.28,0.04512608598542129,0
bigbench_arithmetic_4_digit_subtraction,multiple_choice_grade,0.48,0.050211673156867795,0
bigbench_arithmetic_5_digit_addition,multiple_choice_grade,0.5,0.050251890762960605,0
bigbench_arithmetic_5_digit_division,multiple_choice_grade,0.32,0.04688261722621505,0
bigbench_arithmetic_5_digit_multiplication,multiple_choice_grade,0.17,0.03775251680686371,0
bigbench_arithmetic_5_digit_subtraction,multiple_choice_grade,0.52,0.05021167315686779,0
bigbench_cause_and_effect_one_sentence,multiple_choice_grade,0.4117647058823529,0.06960093862470136,0
bigbench_cause_and_effect_one_sentence_no_prompt,multiple_choice_grade,0.8823529411764706,0.04556450995538139,0
bigbench_cause_and_effect_two_sentences,multiple_choice_grade,0.47058823529411764,0.07058823529411762,0
bigbench_emotions,multiple_choice_grade,0.56875,0.0392759498401892,0
bigbench_empirical_judgments,multiple_choice_grade,0.5656565656565656,0.050070278709660826,0
bigbench_general_knowledge,multiple_choice_grade,0.5714285714285714,0.05957554687344998,0
bigbench_hhh_alignment_harmless,multiple_choice_grade,0.39655172413793105,0.06479366091762498,0
bigbench_hhh_alignment_helpful,multiple_choice_grade,0.3559322033898305,0.06286883855871885,0
bigbench_hhh_alignment_honest,multiple_choice_grade,0.4067796610169492,0.06450206738198198,0
bigbench_hhh_alignment_other,multiple_choice_grade,0.6046511627906976,0.07544284088704808,0
bigbench_intent_recognition,multiple_choice_grade,0.7543352601156069,0.01637625506781708,0
bigbench_misconceptions,multiple_choice_grade,0.5223880597014925,0.043312014931941,0
bigbench_paraphrase,multiple_choice_grade,0.42,0.0349874349304872,0
bigbench_sentence_ambiguity,multiple_choice_grade,0.5333333333333333,0.06494964005966064,0
bigbench_similarities_abstraction,multiple_choice_grade,0.6052631578947368,0.05644108049875581,0