{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.19197952218430034, "acc_stderr": 0.011509598906598086, "acc_norm": 0.23976109215017063, "acc_norm_stderr": 0.012476304127453947 }, "harness|ko_hellaswag|10": { "acc": 0.2806213901613224, "acc_stderr": 0.004483845735187827, "acc_norm": 0.3103963353913563, "acc_norm_stderr": 0.0046171032803720095 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.25146198830409355, "acc_stderr": 0.033275044238468436, "acc_norm": 0.25146198830409355, "acc_norm_stderr": 0.033275044238468436 }, "harness|ko_mmlu_management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266196, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266196 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.28607918263090676, "acc_stderr": 0.016160871405127526, "acc_norm": 0.28607918263090676, "acc_norm_stderr": 0.016160871405127526 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066654, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066654 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.24680851063829787, "acc_stderr": 0.0281854413012341, "acc_norm": 0.24680851063829787, "acc_norm_stderr": 0.0281854413012341 }, "harness|ko_mmlu_virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2797427652733119, "acc_stderr": 0.025494259350694905, "acc_norm": 0.2797427652733119, "acc_norm_stderr": 0.025494259350694905 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3811659192825112, "acc_stderr": 0.032596251184168264, "acc_norm": 0.3811659192825112, "acc_norm_stderr": 0.032596251184168264 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.037683359597287434, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.037683359597287434 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.20202020202020202, "acc_stderr": 0.02860620428922987, "acc_norm": 0.20202020202020202, "acc_norm_stderr": 0.02860620428922987 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.037245636197746325, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.037245636197746325 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3403361344537815, "acc_stderr": 0.030778057422931666, "acc_norm": 0.3403361344537815, "acc_norm_stderr": 0.030778057422931666 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.024283140529467295, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.024283140529467295 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.28703703703703703, "acc_stderr": 0.043733130409147614, "acc_norm": 
0.28703703703703703, "acc_norm_stderr": 0.043733130409147614 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.0319474007226554, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.0319474007226554 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.23870967741935484, "acc_stderr": 0.024251071262208834, "acc_norm": 0.23870967741935484, "acc_norm_stderr": 0.024251071262208834 }, "harness|ko_mmlu_marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2, "acc_stderr": 0.02461829819586651, "acc_norm": 0.2, "acc_norm_stderr": 0.02461829819586651 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721377, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721377 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.0347918557259966, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.0347918557259966 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.02992941540834839, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.02992941540834839 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.28901734104046245, "acc_stderr": 0.03456425745086999, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.03456425745086999 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.22916666666666666, "acc_stderr": 0.03514697467862388, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.03514697467862388 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.0332201579577674, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.0332201579577674 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445796, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445796 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.26424870466321243, "acc_stderr": 0.03182155050916647, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.03182155050916647 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.21100917431192662, "acc_stderr": 0.017493922404112648, "acc_norm": 0.21100917431192662, "acc_norm_stderr": 0.017493922404112648 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.033954900208561116, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.033954900208561116 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2222222222222222, "acc_stderr": 0.023805186524888156, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.023805186524888156 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2173202614379085, "acc_stderr": 0.016684820929148594, "acc_norm": 0.2173202614379085, "acc_norm_stderr": 0.016684820929148594 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432414, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39183673469387753, "acc_stderr": 0.03125127591089165, "acc_norm": 0.39183673469387753, "acc_norm_stderr": 0.03125127591089165 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.028609516716994934, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.028609516716994934 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24641460234680573, "acc_stderr": 0.01100597139992723, "acc_norm": 0.24641460234680573, "acc_norm_stderr": 0.01100597139992723 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.03166009679399812, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.03166009679399812 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2533659730722154, "mc1_stderr": 0.01522589934082682, "mc2": 0.45650352414713125, "mc2_stderr": 0.015641592781139333 }, "harness|ko_commongen_v2|2": { "acc": 0.36363636363636365, "acc_stderr": 0.016538691603327715, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.017161563949916345 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "skt/kogpt2-base-v2", "model_sha": "d0c0df48bf2b2c9350dd855021a5b216f560c0c7", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }