{
"results": {
"bigbench_analogies": {
"multiple_choice_grade": 0.4076923076923077,
"multiple_choice_grade_stderr": 0.04326583502212175
},
"bigbench_arithmetic_1_digit_addition": {
"multiple_choice_grade": 0.49,
"multiple_choice_grade_stderr": 0.05024183937956914
},
"bigbench_arithmetic_1_digit_division": {
"multiple_choice_grade": 0.6956521739130435,
"multiple_choice_grade_stderr": 0.09810018692482896
},
"bigbench_arithmetic_1_digit_multiplication": {
"multiple_choice_grade": 0.45,
"multiple_choice_grade_stderr": 0.05
},
"bigbench_arithmetic_1_digit_subtraction": {
"multiple_choice_grade": 0.66,
"multiple_choice_grade_stderr": 0.047609522856952385
},
"bigbench_arithmetic_2_digit_addition": {
"multiple_choice_grade": 0.45,
"multiple_choice_grade_stderr": 0.05
},
"bigbench_arithmetic_2_digit_division": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.050161355804659205
},
"bigbench_arithmetic_2_digit_multiplication": {
"multiple_choice_grade": 0.21,
"multiple_choice_grade_stderr": 0.04093601807403326
},
"bigbench_arithmetic_2_digit_subtraction": {
"multiple_choice_grade": 0.59,
"multiple_choice_grade_stderr": 0.04943110704237102
},
"bigbench_arithmetic_3_digit_addition": {
"multiple_choice_grade": 0.47,
"multiple_choice_grade_stderr": 0.05016135580465919
},
"bigbench_arithmetic_3_digit_division": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_3_digit_multiplication": {
"multiple_choice_grade": 0.32,
"multiple_choice_grade_stderr": 0.046882617226215034
},
"bigbench_arithmetic_3_digit_subtraction": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.05016135580465919
},
"bigbench_arithmetic_4_digit_addition": {
"multiple_choice_grade": 0.27,
"multiple_choice_grade_stderr": 0.04461960433384741
},
"bigbench_arithmetic_4_digit_division": {
"multiple_choice_grade": 0.31,
"multiple_choice_grade_stderr": 0.04648231987117316
},
"bigbench_arithmetic_4_digit_multiplication": {
"multiple_choice_grade": 0.3,
"multiple_choice_grade_stderr": 0.046056618647183814
},
"bigbench_arithmetic_4_digit_subtraction": {
"multiple_choice_grade": 0.48,
"multiple_choice_grade_stderr": 0.050211673156867795
},
"bigbench_arithmetic_5_digit_addition": {
"multiple_choice_grade": 0.51,
"multiple_choice_grade_stderr": 0.05024183937956911
},
"bigbench_arithmetic_5_digit_division": {
"multiple_choice_grade": 0.29,
"multiple_choice_grade_stderr": 0.045604802157206845
},
"bigbench_arithmetic_5_digit_multiplication": {
"multiple_choice_grade": 0.28,
"multiple_choice_grade_stderr": 0.04512608598542128
},
"bigbench_arithmetic_5_digit_subtraction": {
"multiple_choice_grade": 0.63,
"multiple_choice_grade_stderr": 0.04852365870939099
},
"bigbench_cause_and_effect_one_sentence": {
"multiple_choice_grade": 0.5098039215686274,
"multiple_choice_grade_stderr": 0.07069708383262727
},
"bigbench_cause_and_effect_one_sentence_no_prompt": {
"multiple_choice_grade": 0.8431372549019608,
"multiple_choice_grade_stderr": 0.05143089038668236
},
"bigbench_cause_and_effect_two_sentences": {
"multiple_choice_grade": 0.5686274509803921,
"multiple_choice_grade_stderr": 0.07004145529212454
},
"bigbench_emotions": {
"multiple_choice_grade": 0.44375,
"multiple_choice_grade_stderr": 0.039400853796259426
},
"bigbench_empirical_judgments": {
"multiple_choice_grade": 0.32323232323232326,
"multiple_choice_grade_stderr": 0.04724590344515123
},
"bigbench_general_knowledge": {
"multiple_choice_grade": 0.5428571428571428,
"multiple_choice_grade_stderr": 0.05997140203803455
},
"bigbench_hhh_alignment_harmless": {
"multiple_choice_grade": 0.4482758620689655,
"multiple_choice_grade_stderr": 0.06587130109529918
},
"bigbench_hhh_alignment_helpful": {
"multiple_choice_grade": 0.3559322033898305,
"multiple_choice_grade_stderr": 0.06286883855871885
},
"bigbench_hhh_alignment_honest": {
"multiple_choice_grade": 0.4067796610169492,
"multiple_choice_grade_stderr": 0.06450206738198198
},
"bigbench_hhh_alignment_other": {
"multiple_choice_grade": 0.6046511627906976,
"multiple_choice_grade_stderr": 0.07544284088704808
},
"bigbench_intent_recognition": {
"multiple_choice_grade": 0.5144508670520231,
"multiple_choice_grade_stderr": 0.019012950588334448
},
"bigbench_misconceptions": {
"multiple_choice_grade": 0.5298507462686567,
"multiple_choice_grade_stderr": 0.04327816419216089
},
"bigbench_paraphrase": {
"multiple_choice_grade": 0.53,
"multiple_choice_grade_stderr": 0.03538020341900046
},
"bigbench_sentence_ambiguity": {
"multiple_choice_grade": 0.5166666666666667,
"multiple_choice_grade_stderr": 0.06505828185300304
},
"bigbench_similarities_abstraction": {
"multiple_choice_grade": 0.6447368421052632,
"multiple_choice_grade_stderr": 0.05526315789473685
}
},
"versions": {
"bigbench_analogies": 0,
"bigbench_arithmetic_1_digit_addition": 0,
"bigbench_arithmetic_1_digit_division": 0,
"bigbench_arithmetic_1_digit_multiplication": 0,
"bigbench_arithmetic_1_digit_subtraction": 0,
"bigbench_arithmetic_2_digit_addition": 0,
"bigbench_arithmetic_2_digit_division": 0,
"bigbench_arithmetic_2_digit_multiplication": 0,
"bigbench_arithmetic_2_digit_subtraction": 0,
"bigbench_arithmetic_3_digit_addition": 0,
"bigbench_arithmetic_3_digit_division": 0,
"bigbench_arithmetic_3_digit_multiplication": 0,
"bigbench_arithmetic_3_digit_subtraction": 0,
"bigbench_arithmetic_4_digit_addition": 0,
"bigbench_arithmetic_4_digit_division": 0,
"bigbench_arithmetic_4_digit_multiplication": 0,
"bigbench_arithmetic_4_digit_subtraction": 0,
"bigbench_arithmetic_5_digit_addition": 0,
"bigbench_arithmetic_5_digit_division": 0,
"bigbench_arithmetic_5_digit_multiplication": 0,
"bigbench_arithmetic_5_digit_subtraction": 0,
"bigbench_cause_and_effect_one_sentence": 0,
"bigbench_cause_and_effect_one_sentence_no_prompt": 0,
"bigbench_cause_and_effect_two_sentences": 0,
"bigbench_emotions": 0,
"bigbench_empirical_judgments": 0,
"bigbench_general_knowledge": 0,
"bigbench_hhh_alignment_harmless": 0,
"bigbench_hhh_alignment_helpful": 0,
"bigbench_hhh_alignment_honest": 0,
"bigbench_hhh_alignment_other": 0,
"bigbench_intent_recognition": 0,
"bigbench_misconceptions": 0,
"bigbench_paraphrase": 0,
"bigbench_sentence_ambiguity": 0,
"bigbench_similarities_abstraction": 0
},
"config": {
"model": "hf-causal",
"model_args": "pretrained=gpt3-finnish-8B",
"num_fewshot": 3,
"batch_size": null,
"device": "cuda:0",
"no_cache": false,
"limit": null,
"bootstrap_iters": 100000,
"description_dict": {}
}
}
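
This file follows the results layout emitted by EleutherAI's lm-evaluation-harness: a per-task "results" map, a task "versions" map, and the run "config" (here: the hf-causal backend, pretrained=gpt3-finnish-8B, 3-shot). Below is a minimal sketch of how one might aggregate these scores, assuming the file is saved locally under a hypothetical name; the path and the digit-bucket grouping are illustrative, not part of the harness.

import json
from statistics import mean

# Hypothetical local path; adjust to wherever this results file lives.
PATH = "gpt3_finnish_8b_bigbench_3shot.json"

with open(PATH) as f:
    data = json.load(f)

results = data["results"]

# Macro-average accuracy over all BIG-bench subtasks in this file.
grades = [r["multiple_choice_grade"] for r in results.values()]
print(f"tasks: {len(grades)}, mean multiple_choice_grade: {mean(grades):.4f}")

# Break out the arithmetic subtasks by operand width, e.g. to see how
# accuracy changes as the digit count grows from 1 to 5.
for digits in range(1, 6):
    subset = [v["multiple_choice_grade"]
              for k, v in results.items()
              if k.startswith(f"bigbench_arithmetic_{digits}_digit")]
    print(f"{digits}-digit arithmetic mean: {mean(subset):.4f}")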