{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2713310580204778, "acc_stderr": 0.012993807727545787, "acc_norm": 0.310580204778157, "acc_norm_stderr": 0.013522292098053055 }, "harness|ko_hellaswag|10": { "acc": 0.3331009759012149, "acc_stderr": 0.004703590558552501, "acc_norm": 0.41127265484963155, "acc_norm_stderr": 0.004910588449330016 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.038316105328219316, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.045821241601615506, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.045821241601615506 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.36909323116219667, "acc_stderr": 0.017256283109124613, "acc_norm": 0.36909323116219667, "acc_norm_stderr": 0.017256283109124613 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3037037037037037, "acc_stderr": 0.03972552884785138, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.03972552884785138 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3279742765273312, "acc_stderr": 0.02666441088693762, "acc_norm": 0.3279742765273312, "acc_norm_stderr": 0.02666441088693762 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.032190792004199956 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.366412213740458, "acc_stderr": 0.04225875451969638, "acc_norm": 0.366412213740458, "acc_norm_stderr": 0.04225875451969638 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3282828282828283, "acc_stderr": 0.03345678422756777, "acc_norm": 0.3282828282828283, "acc_norm_stderr": 0.03345678422756777 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416545, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416545 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.030388353551886845, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.030388353551886845 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.30512820512820515, "acc_stderr": 0.023346335293325887, "acc_norm": 0.30512820512820515, "acc_norm_stderr": 0.023346335293325887 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.39814814814814814, "acc_stderr": 0.04732332615978814, "acc_norm": 
0.39814814814814814, "acc_norm_stderr": 0.04732332615978814 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.33497536945812806, "acc_stderr": 0.033208527423483104, "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.033208527423483104 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.32903225806451614, "acc_stderr": 0.02672949906834997, "acc_norm": 0.32903225806451614, "acc_norm_stderr": 0.02672949906834997 }, "harness|ko_mmlu_marketing|5": { "acc": 0.49145299145299143, "acc_stderr": 0.032751303000970296, "acc_norm": 0.49145299145299143, "acc_norm_stderr": 0.032751303000970296 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.028254200344438676, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.028254200344438676 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.35454545454545455, "acc_stderr": 0.04582004841505415, "acc_norm": 0.35454545454545455, "acc_norm_stderr": 0.04582004841505415 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945273, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945273 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2119205298013245, "acc_stderr": 0.033367670865679766, "acc_norm": 0.2119205298013245, "acc_norm_stderr": 0.033367670865679766 }, "harness|ko_mmlu_sociology|5": { "acc": 0.4577114427860697, "acc_stderr": 0.03522865864099597, "acc_norm": 0.4577114427860697, "acc_norm_stderr": 0.03522865864099597 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.03345036916788991, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.03345036916788991 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2751322751322751, "acc_stderr": 0.02300008685906864, "acc_norm": 0.2751322751322751, "acc_norm_stderr": 0.02300008685906864 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.49, "acc_stderr": 0.05024183937956913, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.37283236994219654, "acc_stderr": 0.026033890613576288, "acc_norm": 0.37283236994219654, "acc_norm_stderr": 0.026033890613576288 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664742, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664742 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.36728395061728397, "acc_stderr": 0.02682280175950789, "acc_norm": 0.36728395061728397, "acc_norm_stderr": 0.02682280175950789 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.35751295336787564, "acc_stderr": 0.03458816042181007, "acc_norm": 0.35751295336787564, "acc_norm_stderr": 0.03458816042181007 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.28807339449541286, "acc_stderr": 0.019416445892636018, "acc_norm": 0.28807339449541286, "acc_norm_stderr": 
0.019416445892636018 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147126, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147126 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.38235294117647056, "acc_stderr": 0.027826109307283683, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.027826109307283683 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5206611570247934, "acc_stderr": 0.04560456086387235, "acc_norm": 0.5206611570247934, "acc_norm_stderr": 0.04560456086387235 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351586, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351586 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3088235294117647, "acc_stderr": 0.018690850273595284, "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.018690850273595284 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2872340425531915, "acc_stderr": 0.026992199173064356, "acc_norm": 0.2872340425531915, "acc_norm_stderr": 0.026992199173064356 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37962962962962965, "acc_stderr": 0.03309682581119035, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.03309682581119035 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22426470588235295, "acc_stderr": 0.025336848563332386, "acc_norm": 0.22426470588235295, "acc_norm_stderr": 0.025336848563332386 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39591836734693875, "acc_stderr": 0.03130802899065685, "acc_norm": 0.39591836734693875, "acc_norm_stderr": 0.03130802899065685 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.35864978902953587, "acc_stderr": 0.031219569445301854, "acc_norm": 0.35864978902953587, "acc_norm_stderr": 0.031219569445301854 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2685788787483703, "acc_stderr": 0.011320056629121734, "acc_norm": 0.2685788787483703, "acc_norm_stderr": 0.011320056629121734 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3088235294117647, "acc_stderr": 0.03242661719827218, "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.03242661719827218 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3393939393939394, "acc_stderr": 0.036974422050315967, "acc_norm": 0.3393939393939394, "acc_norm_stderr": 0.036974422050315967 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006516, "mc2": 0.4392204501367092, "mc2_stderr": 0.01533439619345391 }, "harness|ko_commongen_v2|2": { "acc": 0.27036599763872493, "acc_stderr": 0.015270152942068405, "acc_norm": 0.3530106257378985, "acc_norm_stderr": 0.016430745982427126 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "yeen214/test_llama2_7b", "model_sha": "69a4886f51ed752216cdd7f41a584d14240126f9", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }