{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3054607508532423, "acc_stderr": 0.013460080478002494, "acc_norm": 0.3583617747440273, "acc_norm_stderr": 0.014012883334859859 }, "harness|ko_hellaswag|10": { "acc": 0.3761202947619996, "acc_stderr": 0.004834207964061325, "acc_norm": 0.4910376419040032, "acc_norm_stderr": 0.004988979750014442 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.03467826685703826, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.03467826685703826 }, "harness|ko_mmlu_management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690879, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690879 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3282247765006386, "acc_stderr": 0.01679168564019289, "acc_norm": 0.3282247765006386, "acc_norm_stderr": 0.01679168564019289 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.04094376269996794, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996794 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.251063829787234, "acc_stderr": 0.02834696377716245, "acc_norm": 0.251063829787234, "acc_norm_stderr": 0.02834696377716245 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.034106466140718564, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.034106466140718564 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3311897106109325, "acc_stderr": 0.026730620728004917, "acc_norm": 0.3311897106109325, "acc_norm_stderr": 0.026730620728004917 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.29596412556053814, "acc_stderr": 0.0306365913486998, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.0306365913486998 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.037683359597287434, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.037683359597287434 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.24242424242424243, "acc_stderr": 0.030532892233932036, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932036 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3448275862068966, "acc_stderr": 0.03960933549451207, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451207 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.02755361446786382, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02755361446786382 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2076923076923077, "acc_stderr": 0.0205675395672468, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.0205675395672468 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3888888888888889, "acc_stderr": 0.0471282125742677, "acc_norm": 0.3888888888888889, 
"acc_norm_stderr": 0.0471282125742677 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2645161290322581, "acc_stderr": 0.025091892378859275, "acc_norm": 0.2645161290322581, "acc_norm_stderr": 0.025091892378859275 }, "harness|ko_mmlu_marketing|5": { "acc": 0.358974358974359, "acc_stderr": 0.03142616993791925, "acc_norm": 0.358974358974359, "acc_norm_stderr": 0.03142616993791925 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.32727272727272727, "acc_stderr": 0.04494290866252088, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252088 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 }, "harness|ko_mmlu_sociology|5": { "acc": 0.373134328358209, "acc_stderr": 0.03419832608176007, "acc_norm": 0.373134328358209, "acc_norm_stderr": 0.03419832608176007 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.033687629322594316, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.033687629322594316 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.02278967314577657, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.02278967314577657 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2976878612716763, "acc_stderr": 0.024617055388676992, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388676992 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.25153374233128833, "acc_stderr": 0.03408997886857529, "acc_norm": 0.25153374233128833, "acc_norm_stderr": 0.03408997886857529 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30246913580246915, "acc_stderr": 0.025557653981868052, "acc_norm": 0.30246913580246915, "acc_norm_stderr": 0.025557653981868052 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.03201867122877795, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877795 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26238532110091745, "acc_stderr": 0.018861885021534738, "acc_norm": 0.26238532110091745, "acc_norm_stderr": 0.018861885021534738 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.1349206349206349, "acc_stderr": 0.030557101589417515, "acc_norm": 0.1349206349206349, "acc_norm_stderr": 0.030557101589417515 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_international_law|5": { "acc": 0.4049586776859504, "acc_stderr": 0.04481137755942469, "acc_norm": 0.4049586776859504, "acc_norm_stderr": 0.04481137755942469 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2894736842105263, "acc_stderr": 0.03690677986137283, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.03690677986137283 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.28921568627450983, "acc_stderr": 0.018342529845275908, "acc_norm": 0.28921568627450983, "acc_norm_stderr": 0.018342529845275908 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590634, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590634 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952689, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952689 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3287037037037037, "acc_stderr": 0.03203614084670058, "acc_norm": 0.3287037037037037, "acc_norm_stderr": 0.03203614084670058 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961441, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961441 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.23529411764705882, "acc_stderr": 0.025767252010855963, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.025767252010855963 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.22857142857142856, "acc_stderr": 0.026882144922307744, "acc_norm": 0.22857142857142856, "acc_norm_stderr": 0.026882144922307744 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3291139240506329, "acc_stderr": 0.03058732629470236, "acc_norm": 0.3291139240506329, "acc_norm_stderr": 0.03058732629470236 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.27835723598435463, "acc_stderr": 0.011446990197380989, "acc_norm": 0.27835723598435463, "acc_norm_stderr": 0.011446990197380989 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.0340150671524904, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.0340150671524904 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2484700122399021, "mc1_stderr": 0.0151274270965207, "mc2": 0.3908977745790188, "mc2_stderr": 0.014711493002685353 }, "harness|ko_commongen_v2|2": { "acc": 0.3305785123966942, "acc_stderr": 0.0161734232988457, "acc_norm": 0.4604486422668241, "acc_norm_stderr": 0.01713648762604985 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "The-matt/llama2_ko-7b_distinctive-snowflake-182_1060", "model_sha": "090368cb655024491c0c4dad13f8ac9a8e7d31cc", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }