{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3575085324232082, "acc_stderr": 0.014005494275916573, "acc_norm": 0.4206484641638225, "acc_norm_stderr": 0.014426211252508401 }, "harness|ko_hellaswag|10": { "acc": 0.3994224258115913, "acc_stderr": 0.004887787255353492, "acc_norm": 0.5336586337382991, "acc_norm_stderr": 0.004978462690966916 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.52046783625731, "acc_stderr": 0.0383161053282193, "acc_norm": 0.52046783625731, "acc_norm_stderr": 0.0383161053282193 }, "harness|ko_mmlu_management|5": { "acc": 0.5922330097087378, "acc_stderr": 0.04865777570410769, "acc_norm": 0.5922330097087378, "acc_norm_stderr": 0.04865777570410769 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5504469987228607, "acc_stderr": 0.017788725283507337, "acc_norm": 0.5504469987228607, "acc_norm_stderr": 0.017788725283507337 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.042992689054808624, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.042992689054808624 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400351, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400351 }, "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5112540192926045, "acc_stderr": 0.028390897396863526, "acc_norm": 0.5112540192926045, "acc_norm_stderr": 0.028390897396863526 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.484304932735426, "acc_stderr": 0.0335412657542081, "acc_norm": 0.484304932735426, "acc_norm_stderr": 0.0335412657542081 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5419847328244275, "acc_stderr": 0.04369802690578757, "acc_norm": 0.5419847328244275, "acc_norm_stderr": 0.04369802690578757 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6515151515151515, "acc_stderr": 0.033948539651564025, "acc_norm": 0.6515151515151515, "acc_norm_stderr": 0.033948539651564025 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5084033613445378, "acc_stderr": 0.0324739027656967, "acc_norm": 0.5084033613445378, "acc_norm_stderr": 0.0324739027656967 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5025641025641026, "acc_stderr": 0.025350672979412188, "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.025350672979412188 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5, "acc_stderr": 0.04833682445228318, "acc_norm": 0.5, "acc_norm_stderr": 0.04833682445228318 }, 
"harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.03481904844438803, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.03481904844438803 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5096774193548387, "acc_stderr": 0.02843867799890955, "acc_norm": 0.5096774193548387, "acc_norm_stderr": 0.02843867799890955 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7136752136752137, "acc_stderr": 0.029614323690456648, "acc_norm": 0.7136752136752137, "acc_norm_stderr": 0.029614323690456648 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500482, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500482 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948496, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948496 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.44508670520231214, "acc_stderr": 0.03789401760283648, "acc_norm": 0.44508670520231214, "acc_norm_stderr": 0.03789401760283648 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.024594975128920938, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.024594975128920938 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4444444444444444, "acc_stderr": 0.041553199555931467, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.041553199555931467 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.02690784985628254, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.02690784985628254 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179662, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179662 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5092592592592593, "acc_stderr": 0.027815973433878014, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.027815973433878014 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5958549222797928, "acc_stderr": 0.0354150857888402, "acc_norm": 0.5958549222797928, "acc_norm_stderr": 0.0354150857888402 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.044045561573747685, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.044045561573747685 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.634862385321101, "acc_stderr": 0.020642801454383998, "acc_norm": 0.634862385321101, "acc_norm_stderr": 0.020642801454383998 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.29365079365079366, 
"acc_stderr": 0.04073524322147125, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147125 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5098039215686274, "acc_stderr": 0.02862441255016795, "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.02862441255016795 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.043207678075366705, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.043207678075366705 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5131578947368421, "acc_stderr": 0.04067533136309173, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309173 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.41830065359477125, "acc_stderr": 0.019955975145835542, "acc_norm": 0.41830065359477125, "acc_norm_stderr": 0.019955975145835542 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.028267657482650144, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.028267657482650144 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.043270409325787317, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.043270409325787317 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.032568505702936464, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.032568505702936464 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961443, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961443 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4227941176470588, "acc_stderr": 0.030008562845003476, "acc_norm": 0.4227941176470588, "acc_norm_stderr": 0.030008562845003476 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4816326530612245, "acc_stderr": 0.03198761546763126, "acc_norm": 0.4816326530612245, "acc_norm_stderr": 0.03198761546763126 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6413502109704642, "acc_stderr": 0.03121956944530185, "acc_norm": 0.6413502109704642, "acc_norm_stderr": 0.03121956944530185 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32529335071707954, "acc_stderr": 0.01196531153657153, "acc_norm": 0.32529335071707954, "acc_norm_stderr": 0.01196531153657153 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5392156862745098, "acc_stderr": 0.03498501649369527, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.03498501649369527 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5575757575757576, "acc_stderr": 0.038783721137112745, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.038783721137112745 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2594859241126071, "mc1_stderr": 0.015345409485557956, "mc2": 0.4121524749596002, "mc2_stderr": 0.014822327944942062 }, "harness|ko_commongen_v2|2": { "acc": 0.5785123966942148, "acc_stderr": 0.016977101932601518, "acc_norm": 0.6269185360094451, "acc_norm_stderr": 0.016627318275137443 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v11", "model_sha": "ca827b1389e67a8b8d8581d8f6f3accd4866ba11", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }