{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.19965870307167236, "acc_stderr": 0.01168162575688868, "acc_norm": 0.2593856655290102, "acc_norm_stderr": 0.012808273573927102 }, "harness|ko_hellaswag|10": { "acc": 0.28759211312487554, "acc_stderr": 0.0045171484341805035, "acc_norm": 0.32423819956184025, "acc_norm_stderr": 0.004671328673217806 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|ko_mmlu_management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266196, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266196 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.25925925925925924, "acc_stderr": 0.015671006009339565, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.015671006009339565 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.32592592592592595, "acc_stderr": 0.040491220417025055, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.040491220417025055 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2553191489361702, "acc_stderr": 0.02850485647051421, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02850485647051421 }, "harness|ko_mmlu_virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2604501607717042, "acc_stderr": 0.02492672322484555, "acc_norm": 0.2604501607717042, "acc_norm_stderr": 0.02492672322484555 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3273542600896861, "acc_stderr": 0.03149384670994131, "acc_norm": 0.3273542600896861, "acc_norm_stderr": 0.03149384670994131 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.20707070707070707, "acc_stderr": 0.028869778460267045, "acc_norm": 0.20707070707070707, "acc_norm_stderr": 0.028869778460267045 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.03724563619774631, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.03724563619774631 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.20168067226890757, "acc_stderr": 0.02606431340630453, "acc_norm": 0.20168067226890757, "acc_norm_stderr": 0.02606431340630453 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02242127361292371, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02242127361292371 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 
0.04186091791394607 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.15763546798029557, "acc_stderr": 0.025639014131172404, "acc_norm": 0.15763546798029557, "acc_norm_stderr": 0.025639014131172404 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.1967741935483871, "acc_stderr": 0.02261640942074202, "acc_norm": 0.1967741935483871, "acc_norm_stderr": 0.02261640942074202 }, "harness|ko_mmlu_marketing|5": { "acc": 0.18803418803418803, "acc_stderr": 0.02559819368665224, "acc_norm": 0.18803418803418803, "acc_norm_stderr": 0.02559819368665224 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2339622641509434, "acc_stderr": 0.02605529690115292, "acc_norm": 0.2339622641509434, "acc_norm_stderr": 0.02605529690115292 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.03895091015724136, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.03895091015724136 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|ko_mmlu_sociology|5": { "acc": 0.21890547263681592, "acc_stderr": 0.029239174636647, "acc_norm": 0.21890547263681592, "acc_norm_stderr": 0.029239174636647 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918417, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918417 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.035590395316173425, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.035590395316173425 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.21296296296296297, "acc_stderr": 0.02277971908873339, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.02277971908873339 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2538860103626943, "acc_stderr": 0.03141024780565317, "acc_norm": 0.2538860103626943, "acc_norm_stderr": 0.03141024780565317 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.038924311065187525, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.038924311065187525 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.24036697247706423, "acc_stderr": 0.01832060732096407, "acc_norm": 0.24036697247706423, "acc_norm_stderr": 0.01832060732096407 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011743, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011743 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.21568627450980393, "acc_stderr": 0.02355083135199509, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.02355083135199509 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2892561983471074, "acc_stderr": 0.04139112727635463, "acc_norm": 0.2892561983471074, "acc_norm_stderr": 0.04139112727635463 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.03110318238312338, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.03110318238312338 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.24183006535947713, "acc_stderr": 0.017322789207784326, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.017322789207784326 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.014288343803925324, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.014288343803925324 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4264705882352941, "acc_stderr": 0.03004261583271486, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.03004261583271486 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2163265306122449, "acc_stderr": 0.026358916334904017, "acc_norm": 0.2163265306122449, "acc_norm_stderr": 0.026358916334904017 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24724602203182375, "mc1_stderr": 0.015102404797359649, "mc2": 0.43499689918333406, "mc2_stderr": 0.015335243970671835 }, "harness|ko_commongen_v2|2": { "acc": 0.33412042502951594, "acc_stderr": 0.016216763304239688, "acc_norm": 0.4462809917355372, "acc_norm_stderr": 0.017090852631668332 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "blueapple8259/ANHSY_0.1", "model_sha": "62bb441a62b634f0fb14e909bebfabae6506ed39", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }