{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2226962457337884,
"acc_stderr": 0.012158314774829928,
"acc_norm": 0.2841296928327645,
"acc_norm_stderr": 0.013179442447653887
},
"harness|ko_hellaswag|10": {
"acc": 0.26628161720772753,
"acc_stderr": 0.004411099046251013,
"acc_norm": 0.29107747460665206,
"acc_norm_stderr": 0.004533307758521328
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03615507630310935,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03615507630310935
},
"harness|ko_mmlu_management|5": {
"acc": 0.20388349514563106,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.20388349514563106,
"acc_norm_stderr": 0.039891398595317706
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3205619412515964,
"acc_stderr": 0.016688893310803775,
"acc_norm": 0.3205619412515964,
"acc_norm_stderr": 0.016688893310803775
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03944624162501116,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03944624162501116
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2851063829787234,
"acc_stderr": 0.029513196625539355,
"acc_norm": 0.2851063829787234,
"acc_norm_stderr": 0.029513196625539355
},
"harness|ko_mmlu_virology|5": {
"acc": 0.27710843373493976,
"acc_stderr": 0.034843315926805875,
"acc_norm": 0.27710843373493976,
"acc_norm_stderr": 0.034843315926805875
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3504823151125402,
"acc_stderr": 0.027098652621301757,
"acc_norm": 0.3504823151125402,
"acc_norm_stderr": 0.027098652621301757
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3632286995515695,
"acc_stderr": 0.032277904428505,
"acc_norm": 0.3632286995515695,
"acc_norm_stderr": 0.032277904428505
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3053435114503817,
"acc_stderr": 0.0403931497872456,
"acc_norm": 0.3053435114503817,
"acc_norm_stderr": 0.0403931497872456
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421296,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421296
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2878787878787879,
"acc_stderr": 0.03225883512300993,
"acc_norm": 0.2878787878787879,
"acc_norm_stderr": 0.03225883512300993
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.30344827586206896,
"acc_stderr": 0.038312260488503336,
"acc_norm": 0.30344827586206896,
"acc_norm_stderr": 0.038312260488503336
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.04023382273617746,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617746
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3235294117647059,
"acc_stderr": 0.03038835355188685,
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.03038835355188685
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2153846153846154,
"acc_stderr": 0.020843034557462878,
"acc_norm": 0.2153846153846154,
"acc_norm_stderr": 0.020843034557462878
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.04414343666854932,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.04414343666854932
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.26108374384236455,
"acc_stderr": 0.030903796952114475,
"acc_norm": 0.26108374384236455,
"acc_norm_stderr": 0.030903796952114475
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3,
"acc_stderr": 0.026069362295335134,
"acc_norm": 0.3,
"acc_norm_stderr": 0.026069362295335134
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.43162393162393164,
"acc_stderr": 0.0324483553531149,
"acc_norm": 0.43162393162393164,
"acc_norm_stderr": 0.0324483553531149
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.26037735849056604,
"acc_stderr": 0.027008766090708076,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708076
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.3181818181818182,
"acc_stderr": 0.04461272175910509,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.04461272175910509
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24444444444444444,
"acc_stderr": 0.02620276653465215,
"acc_norm": 0.24444444444444444,
"acc_norm_stderr": 0.02620276653465215
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.03734535676787198,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.03734535676787198
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.36318407960199006,
"acc_stderr": 0.034005985055990146,
"acc_norm": 0.36318407960199006,
"acc_norm_stderr": 0.034005985055990146
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.24277456647398843,
"acc_stderr": 0.0326926380614177,
"acc_norm": 0.24277456647398843,
"acc_norm_stderr": 0.0326926380614177
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.022418042891113942,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.022418042891113942
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3541666666666667,
"acc_stderr": 0.039994111357535424,
"acc_norm": 0.3541666666666667,
"acc_norm_stderr": 0.039994111357535424
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.315028901734104,
"acc_stderr": 0.025009313790069706,
"acc_norm": 0.315028901734104,
"acc_norm_stderr": 0.025009313790069706
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.294478527607362,
"acc_stderr": 0.03581165790474082,
"acc_norm": 0.294478527607362,
"acc_norm_stderr": 0.03581165790474082
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.33024691358024694,
"acc_stderr": 0.026168298456732846,
"acc_norm": 0.33024691358024694,
"acc_norm_stderr": 0.026168298456732846
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.22797927461139897,
"acc_stderr": 0.03027690994517826,
"acc_norm": 0.22797927461139897,
"acc_norm_stderr": 0.03027690994517826
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159394,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159394
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.28256880733944956,
"acc_stderr": 0.01930424349770715,
"acc_norm": 0.28256880733944956,
"acc_norm_stderr": 0.01930424349770715
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.03893259610604673,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.03893259610604673
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.026336613469046637,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.026336613469046637
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952344,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952344
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.32231404958677684,
"acc_stderr": 0.04266416363352168,
"acc_norm": 0.32231404958677684,
"acc_norm_stderr": 0.04266416363352168
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.27631578947368424,
"acc_stderr": 0.03639057569952924,
"acc_norm": 0.27631578947368424,
"acc_norm_stderr": 0.03639057569952924
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.30718954248366015,
"acc_stderr": 0.01866335967146366,
"acc_norm": 0.30718954248366015,
"acc_norm_stderr": 0.01866335967146366
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.25177304964539005,
"acc_stderr": 0.0258921511567094,
"acc_norm": 0.25177304964539005,
"acc_norm_stderr": 0.0258921511567094
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467764,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467764
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.18518518518518517,
"acc_stderr": 0.026491914727355154,
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.026491914727355154
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2681564245810056,
"acc_stderr": 0.014816119635317005,
"acc_norm": 0.2681564245810056,
"acc_norm_stderr": 0.014816119635317005
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.1948529411764706,
"acc_stderr": 0.024060599423487424,
"acc_norm": 0.1948529411764706,
"acc_norm_stderr": 0.024060599423487424
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3306122448979592,
"acc_stderr": 0.030116426296540585,
"acc_norm": 0.3306122448979592,
"acc_norm_stderr": 0.030116426296540585
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2911392405063291,
"acc_stderr": 0.029571601065753374,
"acc_norm": 0.2911392405063291,
"acc_norm_stderr": 0.029571601065753374
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24902216427640156,
"acc_stderr": 0.01104489226404077,
"acc_norm": 0.24902216427640156,
"acc_norm_stderr": 0.01104489226404077
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.18627450980392157,
"acc_stderr": 0.027325470966716305,
"acc_norm": 0.18627450980392157,
"acc_norm_stderr": 0.027325470966716305
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2787878787878788,
"acc_stderr": 0.03501438706296781,
"acc_norm": 0.2787878787878788,
"acc_norm_stderr": 0.03501438706296781
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.23255813953488372,
"mc1_stderr": 0.014789157531080522,
"mc2": 0.494893188252647,
"mc2_stderr": 0.016817822778795313
},
"harness|ko_commongen_v2|2": {
"acc": 0.09681227863046045,
"acc_stderr": 0.010166443512074711,
"acc_norm": 0.3612750885478158,
"acc_norm_stderr": 0.016515463022411997
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "jb723/cross_lingual_epoch2",
"model_sha": "aa1654ae948febe0f7cf3e27d5f81a8df7a58118",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}