{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3993174061433447,
"acc_stderr": 0.014312094557946704,
"acc_norm": 0.4496587030716723,
"acc_norm_stderr": 0.014537144444284738
},
"harness|ko_hellaswag|10": {
"acc": 0.42202748456482775,
"acc_stderr": 0.004928735103635845,
"acc_norm": 0.5664210316669986,
"acc_norm_stderr": 0.004945558069852528
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4678362573099415,
"acc_stderr": 0.03826882417660369,
"acc_norm": 0.4678362573099415,
"acc_norm_stderr": 0.03826882417660369
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.049505043821289195,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.049505043821289195
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5466155810983397,
"acc_stderr": 0.01780208713585031,
"acc_norm": 0.5466155810983397,
"acc_norm_stderr": 0.01780208713585031
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.43703703703703706,
"acc_stderr": 0.04284958639753398,
"acc_norm": 0.43703703703703706,
"acc_norm_stderr": 0.04284958639753398
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4,
"acc_stderr": 0.03202563076101735,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03202563076101735
},
"harness|ko_mmlu_virology|5": {
"acc": 0.45180722891566266,
"acc_stderr": 0.03874371556587952,
"acc_norm": 0.45180722891566266,
"acc_norm_stderr": 0.03874371556587952
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5048231511254019,
"acc_stderr": 0.028396770444111298,
"acc_norm": 0.5048231511254019,
"acc_norm_stderr": 0.028396770444111298
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4977578475336323,
"acc_stderr": 0.033557465352232634,
"acc_norm": 0.4977578475336323,
"acc_norm_stderr": 0.033557465352232634
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48091603053435117,
"acc_stderr": 0.04382094705550989,
"acc_norm": 0.48091603053435117,
"acc_norm_stderr": 0.04382094705550989
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5606060606060606,
"acc_stderr": 0.03536085947529481,
"acc_norm": 0.5606060606060606,
"acc_norm_stderr": 0.03536085947529481
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.041307408795554966,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.041307408795554966
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.043364327079931785,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.44537815126050423,
"acc_stderr": 0.0322841062671639,
"acc_norm": 0.44537815126050423,
"acc_norm_stderr": 0.0322841062671639
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.45384615384615384,
"acc_stderr": 0.025242770987126177,
"acc_norm": 0.45384615384615384,
"acc_norm_stderr": 0.025242770987126177
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.55,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.55,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952344,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952344
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.04826217294139894,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04826217294139894
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3842364532019704,
"acc_stderr": 0.0342239856565755,
"acc_norm": 0.3842364532019704,
"acc_norm_stderr": 0.0342239856565755
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45161290322580644,
"acc_stderr": 0.02831050034856839,
"acc_norm": 0.45161290322580644,
"acc_norm_stderr": 0.02831050034856839
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6239316239316239,
"acc_stderr": 0.03173393632969481,
"acc_norm": 0.6239316239316239,
"acc_norm_stderr": 0.03173393632969481
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.43018867924528303,
"acc_stderr": 0.03047144586718324,
"acc_norm": 0.43018867924528303,
"acc_norm_stderr": 0.03047144586718324
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5636363636363636,
"acc_stderr": 0.04750185058907296,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.04750185058907296
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.27037037037037037,
"acc_stderr": 0.02708037281514565,
"acc_norm": 0.27037037037037037,
"acc_norm_stderr": 0.02708037281514565
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6268656716417911,
"acc_stderr": 0.03419832608176007,
"acc_norm": 0.6268656716417911,
"acc_norm_stderr": 0.03419832608176007
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.37572254335260113,
"acc_stderr": 0.03692820767264867,
"acc_norm": 0.37572254335260113,
"acc_norm_stderr": 0.03692820767264867
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.02391998416404774,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.02391998416404774
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04076663253918567,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04076663253918567
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.65,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.65,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.49421965317919075,
"acc_stderr": 0.02691729617914911,
"acc_norm": 0.49421965317919075,
"acc_norm_stderr": 0.02691729617914911
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.49693251533742333,
"acc_stderr": 0.03928297078179662,
"acc_norm": 0.49693251533742333,
"acc_norm_stderr": 0.03928297078179662
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.027815973433878014,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.027815973433878014
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5440414507772021,
"acc_stderr": 0.035944137112724366,
"acc_norm": 0.5440414507772021,
"acc_norm_stderr": 0.035944137112724366
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.040493392977481404,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.040493392977481404
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5669724770642202,
"acc_stderr": 0.021244146569074345,
"acc_norm": 0.5669724770642202,
"acc_norm_stderr": 0.021244146569074345
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3492063492063492,
"acc_stderr": 0.04263906892795133,
"acc_norm": 0.3492063492063492,
"acc_norm_stderr": 0.04263906892795133
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4084967320261438,
"acc_stderr": 0.028146405993096358,
"acc_norm": 0.4084967320261438,
"acc_norm_stderr": 0.028146405993096358
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6694214876033058,
"acc_stderr": 0.04294340845212095,
"acc_norm": 0.6694214876033058,
"acc_norm_stderr": 0.04294340845212095
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40789473684210525,
"acc_stderr": 0.03999309712777471,
"acc_norm": 0.40789473684210525,
"acc_norm_stderr": 0.03999309712777471
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4035947712418301,
"acc_stderr": 0.01984828016840116,
"acc_norm": 0.4035947712418301,
"acc_norm_stderr": 0.01984828016840116
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3723404255319149,
"acc_stderr": 0.028838921471251458,
"acc_norm": 0.3723404255319149,
"acc_norm_stderr": 0.028838921471251458
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25,
"acc_stderr": 0.04109974682633932,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04109974682633932
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.03214952147802749,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03214952147802749
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2446927374301676,
"acc_stderr": 0.014378169884098424,
"acc_norm": 0.2446927374301676,
"acc_norm_stderr": 0.014378169884098424
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3602941176470588,
"acc_stderr": 0.029163128570670736,
"acc_norm": 0.3602941176470588,
"acc_norm_stderr": 0.029163128570670736
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.46122448979591835,
"acc_stderr": 0.03191282052669277,
"acc_norm": 0.46122448979591835,
"acc_norm_stderr": 0.03191282052669277
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6455696202531646,
"acc_stderr": 0.0311373042971858,
"acc_norm": 0.6455696202531646,
"acc_norm_stderr": 0.0311373042971858
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.35723598435462844,
"acc_stderr": 0.012238615750316498,
"acc_norm": 0.35723598435462844,
"acc_norm_stderr": 0.012238615750316498
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.03503235296367992,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.03503235296367992
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5272727272727272,
"acc_stderr": 0.03898531605579418,
"acc_norm": 0.5272727272727272,
"acc_norm_stderr": 0.03898531605579418
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.28151774785801714,
"mc1_stderr": 0.01574402724825605,
"mc2": 0.4486998920807941,
"mc2_stderr": 0.015146223309438359
},
"harness|ko_commongen_v2|2": {
"acc": 0.46989374262101535,
"acc_stderr": 0.017159163590170213,
"acc_norm": 0.6127508854781583,
"acc_norm_stderr": 0.016747577991642792
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Kaeri-Jenti/LDCC-with-korca",
"model_sha": "50bca191d06902b5359abb3b1007b8106eff41f6",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}