{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.35409556313993173, "acc_stderr": 0.013975454122756557, "acc_norm": 0.4249146757679181, "acc_norm_stderr": 0.014445698968520767 }, "harness|ko_hellaswag|10": { "acc": 0.3908583947420832, "acc_stderr": 0.004869455150933824, "acc_norm": 0.563433578968333, "acc_norm_stderr": 0.004949462563681344 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5847953216374269, "acc_stderr": 0.037792759455032014, "acc_norm": 0.5847953216374269, "acc_norm_stderr": 0.037792759455032014 }, "harness|ko_mmlu_management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.04656147110012352, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.04656147110012352 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6283524904214559, "acc_stderr": 0.017280802522133182, "acc_norm": 0.6283524904214559, "acc_norm_stderr": 0.017280802522133182 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04316378599511326, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04316378599511326 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4723404255319149, "acc_stderr": 0.03263597118409769, "acc_norm": 0.4723404255319149, "acc_norm_stderr": 0.03263597118409769 }, "harness|ko_mmlu_virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5852090032154341, "acc_stderr": 0.02798268045975956, "acc_norm": 0.5852090032154341, "acc_norm_stderr": 0.02798268045975956 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5336322869955157, "acc_stderr": 0.03348180017060306, "acc_norm": 0.5336322869955157, "acc_norm_stderr": 0.03348180017060306 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5877862595419847, "acc_stderr": 0.04317171194870255, "acc_norm": 0.5877862595419847, "acc_norm_stderr": 0.04317171194870255 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.702020202020202, "acc_stderr": 0.03258630383836556, "acc_norm": 0.702020202020202, "acc_norm_stderr": 0.03258630383836556 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179328, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179328 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.03221943636566197, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.03221943636566197 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5102564102564102, "acc_stderr": 0.025345672221942374, "acc_norm": 0.5102564102564102, "acc_norm_stderr": 0.025345672221942374 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04750077341199985, "acc_norm": 0.5925925925925926, 
"acc_norm_stderr": 0.04750077341199985 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.03452453903822039, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.03452453903822039 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.532258064516129, "acc_stderr": 0.02838474778881334, "acc_norm": 0.532258064516129, "acc_norm_stderr": 0.02838474778881334 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7606837606837606, "acc_stderr": 0.027951826808924336, "acc_norm": 0.7606837606837606, "acc_norm_stderr": 0.027951826808924336 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5433962264150943, "acc_stderr": 0.030656748696739438, "acc_norm": 0.5433962264150943, "acc_norm_stderr": 0.030656748696739438 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5727272727272728, "acc_stderr": 0.04738198703545483, "acc_norm": 0.5727272727272728, "acc_norm_stderr": 0.04738198703545483 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6865671641791045, "acc_stderr": 0.03280188205348641, "acc_norm": 0.6865671641791045, "acc_norm_stderr": 0.03280188205348641 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4161849710982659, "acc_stderr": 0.03758517775404947, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.03758517775404947 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.02533120243894443, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.02533120243894443 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4652777777777778, "acc_stderr": 0.04171115858181617, "acc_norm": 0.4652777777777778, "acc_norm_stderr": 0.04171115858181617 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5751445086705202, "acc_stderr": 0.026613350840261736, "acc_norm": 0.5751445086705202, "acc_norm_stderr": 0.026613350840261736 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.48466257668711654, "acc_stderr": 0.039265223787088424, "acc_norm": 0.48466257668711654, "acc_norm_stderr": 0.039265223787088424 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5771604938271605, "acc_stderr": 0.027487472980871595, "acc_norm": 0.5771604938271605, "acc_norm_stderr": 0.027487472980871595 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6373056994818653, "acc_stderr": 0.034697137917043715, "acc_norm": 0.6373056994818653, "acc_norm_stderr": 0.034697137917043715 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6458715596330276, "acc_stderr": 0.020504729013829114, "acc_norm": 0.6458715596330276, "acc_norm_stderr": 0.020504729013829114 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5620915032679739, "acc_stderr": 0.02840830202033269, "acc_norm": 0.5620915032679739, "acc_norm_stderr": 0.02840830202033269 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309174, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309174 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4738562091503268, "acc_stderr": 0.020200164564804588, "acc_norm": 0.4738562091503268, "acc_norm_stderr": 0.020200164564804588 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.38652482269503546, "acc_stderr": 0.02904919034254346, "acc_norm": 0.38652482269503546, "acc_norm_stderr": 0.02904919034254346 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.044939490686135404, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.044939490686135404 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.03395322726375798, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375798 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2201117318435754, "acc_stderr": 0.013856994024227179, "acc_norm": 0.2201117318435754, "acc_norm_stderr": 0.013856994024227179 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.47794117647058826, "acc_stderr": 0.030343264224213528, "acc_norm": 0.47794117647058826, "acc_norm_stderr": 0.030343264224213528 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5265306122448979, "acc_stderr": 0.03196412734523272, "acc_norm": 0.5265306122448979, "acc_norm_stderr": 0.03196412734523272 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7172995780590717, "acc_stderr": 0.029312814153955924, "acc_norm": 0.7172995780590717, "acc_norm_stderr": 0.029312814153955924 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.37614080834419816, "acc_stderr": 0.0123722144305998, "acc_norm": 0.37614080834419816, "acc_norm_stderr": 0.0123722144305998 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6519607843137255, "acc_stderr": 0.03343311240488418, "acc_norm": 0.6519607843137255, "acc_norm_stderr": 0.03343311240488418 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6606060606060606, "acc_stderr": 0.036974422050315946, "acc_norm": 0.6606060606060606, "acc_norm_stderr": 0.036974422050315946 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237035, "mc2": 0.46444473625227906, "mc2_stderr": 0.016132820386457677 }, "harness|ko_commongen_v2|2": { "acc": 0.4085005903187721, "acc_stderr": 0.016900062879427122, "acc_norm": 0.51357733175915, "acc_norm_stderr": 0.01718401506040145 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "JaeyeonKang/CCK-v3", "model_sha": "2b1c13026f5b57ada825f937bbd9edd7142af375", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }