{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.25597269624573377, "acc_stderr": 0.012753013241244508, "acc_norm": 0.295221843003413, "acc_norm_stderr": 0.013329750293382316 }, "harness|ko_hellaswag|10": { "acc": 0.3419637522405895, "acc_stderr": 0.0047339804707992195, "acc_norm": 0.4192391953794065, "acc_norm_stderr": 0.004924261467934419 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.03467826685703826, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.03467826685703826 }, "harness|ko_mmlu_management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.042450224863844935, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.042450224863844935 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.27330779054916987, "acc_stderr": 0.01593668106262856, "acc_norm": 0.27330779054916987, "acc_norm_stderr": 0.01593668106262856 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.21481481481481482, "acc_stderr": 0.03547854198560826, "acc_norm": 0.21481481481481482, "acc_norm_stderr": 0.03547854198560826 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.23404255319148937, "acc_stderr": 0.027678452578212383, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.027678452578212383 }, "harness|ko_mmlu_virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553027, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553027 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2733118971061093, "acc_stderr": 0.02531176597542612, "acc_norm": 0.2733118971061093, "acc_norm_stderr": 0.02531176597542612 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.2914798206278027, "acc_stderr": 0.030500283176545913, "acc_norm": 0.2914798206278027, "acc_norm_stderr": 0.030500283176545913 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.03915345408847835, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.03915345408847835 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.031544498882702866, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.031544498882702866 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2206896551724138, "acc_stderr": 0.03455930201924812, "acc_norm": 0.2206896551724138, "acc_norm_stderr": 0.03455930201924812 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3487394957983193, "acc_stderr": 0.030956636328566548, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.030956636328566548 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02242127361292372, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02242127361292372 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.18, "acc_stderr": 0.03861229196653694, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653694 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542126, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542126 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.0401910747255735, "acc_norm": 
0.2222222222222222, "acc_norm_stderr": 0.0401910747255735 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2413793103448276, "acc_stderr": 0.030108330718011625, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.030108330718011625 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3, "acc_stderr": 0.026069362295335127, "acc_norm": 0.3, "acc_norm_stderr": 0.026069362295335127 }, "harness|ko_mmlu_marketing|5": { "acc": 0.25213675213675213, "acc_stderr": 0.02844796547623101, "acc_norm": 0.25213675213675213, "acc_norm_stderr": 0.02844796547623101 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2528301886792453, "acc_stderr": 0.026749899771241238, "acc_norm": 0.2528301886792453, "acc_norm_stderr": 0.026749899771241238 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2818181818181818, "acc_stderr": 0.0430911870994646, "acc_norm": 0.2818181818181818, "acc_norm_stderr": 0.0430911870994646 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871937, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871937 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2185430463576159, "acc_stderr": 0.03374235550425694, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.03374235550425694 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.03014777593540922, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.03014777593540922 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2023121387283237, "acc_stderr": 0.03063114553919882, "acc_norm": 0.2023121387283237, "acc_norm_stderr": 0.03063114553919882 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.02286083830923207, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.02286083830923207 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0230836585869842, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0230836585869842 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.31901840490797545, "acc_stderr": 0.03661997551073836, "acc_norm": 0.31901840490797545, "acc_norm_stderr": 0.03661997551073836 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2716049382716049, "acc_stderr": 0.024748624490537368, "acc_norm": 0.2716049382716049, "acc_norm_stderr": 0.024748624490537368 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.26424870466321243, "acc_stderr": 0.031821550509166484, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.031821550509166484 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.041857744240220575, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.041857744240220575 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23486238532110093, "acc_stderr": 0.018175110510343595, "acc_norm": 0.23486238532110093, "acc_norm_stderr": 0.018175110510343595 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.26143790849673204, "acc_stderr": 0.025160998214292456, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.025160998214292456 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816507, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816507 }, "harness|ko_mmlu_international_law|5": { "acc": 0.1487603305785124, "acc_stderr": 0.03248470083807195, "acc_norm": 0.1487603305785124, "acc_norm_stderr": 0.03248470083807195 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210325, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210325 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2630718954248366, "acc_stderr": 0.017812676542320657, "acc_norm": 0.2630718954248366, "acc_norm_stderr": 0.017812676542320657 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.02601199293090201, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02601199293090201 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.20535714285714285, "acc_stderr": 0.03834241021419073, "acc_norm": 0.20535714285714285, "acc_norm_stderr": 0.03834241021419073 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2824074074074074, "acc_stderr": 0.030701372111510937, "acc_norm": 0.2824074074074074, "acc_norm_stderr": 0.030701372111510937 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24916201117318434, "acc_stderr": 0.01446589382985992, "acc_norm": 0.24916201117318434, "acc_norm_stderr": 0.01446589382985992 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.28308823529411764, "acc_stderr": 0.027365861131513812, "acc_norm": 0.28308823529411764, "acc_norm_stderr": 0.027365861131513812 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2653061224489796, "acc_stderr": 0.0282638899437846, "acc_norm": 0.2653061224489796, "acc_norm_stderr": 0.0282638899437846 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.23468057366362452, "acc_stderr": 0.010824026872449346, "acc_norm": 0.23468057366362452, "acc_norm_stderr": 0.010824026872449346 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.02933116229425172, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.02933116229425172 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.23636363636363636, "acc_stderr": 0.033175059300091805, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.033175059300091805 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23255813953488372, "mc1_stderr": 0.014789157531080517, "mc2": 0.40663525842480935, "mc2_stderr": 0.01551567406322468 }, "harness|ko_commongen_v2|2": { "acc": 0.3293978748524203, "acc_stderr": 0.016158746868147143, "acc_norm": 0.40731995277449823, "acc_norm_stderr": 0.01689245669519127 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "BM-K/polyglot-ko-1.3b-it-v1.6", "model_sha": "97def0549ef147c96d755ba79a29c3efcdb3f737", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }