{
"results": {
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.66,
"multiple_choice_grade_stderr": 0.04760952285695238
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.0349874349304872
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.050251890762960605
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3559322033898305,
"multiple_choice_grade_stderr": 0.06286883855871885
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.58,
"multiple_choice_grade_stderr": 0.049604496374885836
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.6046511627906976,
"multiple_choice_grade_stderr": 0.07544284088704808
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.56,
"multiple_choice_grade_stderr": 0.049888765156985884
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.4067796610169492,
"multiple_choice_grade_stderr": 0.06450206738198198
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.52,
"multiple_choice_grade_stderr": 0.05021167315686779
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.04688261722621505
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.782608695652174,
"multiple_choice_grade_stderr": 0.08793911249520547
},
"bigbench_analogies": {
"multiple_choice_grade": 0.5692307692307692,
"multiple_choice_grade_stderr": 0.04359851186846964
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.044619604333847394
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.5714285714285714,
"multiple_choice_grade_stderr": 0.05957554687344998
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.050251890762960605
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.73,
"multiple_choice_grade_stderr": 0.044619604333847394
},
"bigbench_emotions": {
"multiple_choice_grade": 0.56875,
"multiple_choice_grade_stderr": 0.0392759498401892
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542129
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.22,
"multiple_choice_grade_stderr": 0.0416333199893227
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.4117647058823529,
"multiple_choice_grade_stderr": 0.06960093862470136
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.17,
"multiple_choice_grade_stderr": 0.03775251680686371
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.5,
"multiple_choice_grade_stderr": 0.050251890762960605
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5223880597014925,
"multiple_choice_grade_stderr": 0.043312014931941
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.74,
"multiple_choice_grade_stderr": 0.04408440022768079
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.57,
"multiple_choice_grade_stderr": 0.049756985195624284
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6052631578947368,
"multiple_choice_grade_stderr": 0.05644108049875581
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.47058823529411764,
"multiple_choice_grade_stderr": 0.07058823529411762
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5333333333333333,
"multiple_choice_grade_stderr": 0.06494964005966064
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.5656565656565656,
"multiple_choice_grade_stderr": 0.050070278709660826
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.39655172413793105,
"multiple_choice_grade_stderr": 0.06479366091762498
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8823529411764706,
"multiple_choice_grade_stderr": 0.04556450995538139
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.39,
"multiple_choice_grade_stderr": 0.04902071300001975
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.42,
"multiple_choice_grade_stderr": 0.04960449637488584
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.7543352601156069,
"multiple_choice_grade_stderr": 0.01637625506781708
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.94,
"multiple_choice_grade_stderr": 0.02386832565759419
}
},
"versions": {
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_paraphrase": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_analogies": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_general_knowledge": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_emotions": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_misconceptions": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_similarities_abstraction": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_empirical_judgments": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_intent_recognition": 0,
"bigbench_arithmetic_1_digit_multiplication": 0
},
"config": {
"model": "hf-causal-experimental",
"model_args": "pretrained=/pfs/lustrep2/scratch/project_462000185/muennighoff/bloom-finnish-176b",
"num_fewshot": 1,
"batch_size": null,
"device": "cuda:0",
"no_cache": true,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}