{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3387372013651877, "acc_stderr": 0.013830568927974332, "acc_norm": 0.3890784982935154, "acc_norm_stderr": 0.014247309976045607 }, "harness|ko_hellaswag|10": { "acc": 0.3805018920533758, "acc_stderr": 0.0048451800342716265, "acc_norm": 0.48297151961760604, "acc_norm_stderr": 0.004986886806565639 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|ko_mmlu_management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.454661558109834, "acc_stderr": 0.017806304585052606, "acc_norm": 0.454661558109834, "acc_norm_stderr": 0.017806304585052606 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.04256193767901407, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.04256193767901407 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101 }, "harness|ko_mmlu_virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.03819486140758397, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.03819486140758397 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4758842443729904, "acc_stderr": 0.028365041542564577, "acc_norm": 0.4758842443729904, "acc_norm_stderr": 0.028365041542564577 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4349775784753363, "acc_stderr": 0.03327283370271344, "acc_norm": 0.4349775784753363, "acc_norm_stderr": 0.03327283370271344 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578757, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578757 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5202020202020202, "acc_stderr": 0.03559443565563918, "acc_norm": 0.5202020202020202, "acc_norm_stderr": 0.03559443565563918 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.041618085035015295, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.041618085035015295 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03242225027115006, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03242225027115006 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.49230769230769234, "acc_stderr": 0.0253480060315348, "acc_norm": 0.49230769230769234, "acc_norm_stderr": 0.0253480060315348 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, 
"acc_norm_stderr": 0.048262172941398944 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.034767257476490385, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490385 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44516129032258067, "acc_stderr": 0.028272410186214906, "acc_norm": 0.44516129032258067, "acc_norm_stderr": 0.028272410186214906 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7478632478632479, "acc_stderr": 0.02844796547623102, "acc_norm": 0.7478632478632479, "acc_norm_stderr": 0.02844796547623102 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641095, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641095 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114986, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114986 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6019900497512438, "acc_stderr": 0.034611994290400135, "acc_norm": 0.6019900497512438, "acc_norm_stderr": 0.034611994290400135 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.36507936507936506, "acc_stderr": 0.02479606060269995, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.02479606060269995 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4913294797687861, "acc_stderr": 0.026915047355369804, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.026915047355369804 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179662, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179662 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4691358024691358, "acc_stderr": 0.027767689606833942, "acc_norm": 0.4691358024691358, "acc_norm_stderr": 0.027767689606833942 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.47668393782383417, "acc_stderr": 0.03604513672442206, "acc_norm": 0.47668393782383417, "acc_norm_stderr": 0.03604513672442206 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 0.04537815354939391, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939391 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.45504587155963305, "acc_stderr": 0.021350503090925167, "acc_norm": 0.45504587155963305, "acc_norm_stderr": 0.021350503090925167 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871136, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871136 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.49019607843137253, "acc_stderr": 0.028624412550167958, "acc_norm": 0.49019607843137253, "acc_norm_stderr": 0.028624412550167958 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.04266416363352168, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352168 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34210526315789475, "acc_stderr": 0.038607315993160904, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.038607315993160904 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3954248366013072, "acc_stderr": 0.019780465954777508, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.019780465954777508 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.02833801742861131, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.02833801742861131 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010213, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010213 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.375, "acc_stderr": 0.033016908987210894, "acc_norm": 0.375, "acc_norm_stderr": 0.033016908987210894 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.293854748603352, "acc_stderr": 0.015235075776719608, "acc_norm": 0.293854748603352, "acc_norm_stderr": 0.015235075776719608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35661764705882354, "acc_stderr": 0.029097209568411962, "acc_norm": 0.35661764705882354, "acc_norm_stderr": 0.029097209568411962 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5061224489795918, "acc_stderr": 0.032006820201639065, "acc_norm": 0.5061224489795918, "acc_norm_stderr": 0.032006820201639065 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5316455696202531, "acc_stderr": 0.032481974005110756, "acc_norm": 0.5316455696202531, "acc_norm_stderr": 0.032481974005110756 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32073011734028684, "acc_stderr": 0.011921199991782613, "acc_norm": 0.32073011734028684, "acc_norm_stderr": 0.011921199991782613 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.36764705882352944, "acc_stderr": 0.03384132045674118, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.03384132045674118 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.47878787878787876, "acc_stderr": 0.03900828913737302, "acc_norm": 0.47878787878787876, "acc_norm_stderr": 0.03900828913737302 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29498164014687883, "mc1_stderr": 0.01596440096558967, "mc2": 0.4614845426101113, "mc2_stderr": 0.015908282639721598 }, "harness|ko_commongen_v2|2": { "acc": 0.47461629279811096, "acc_stderr": 0.017168187201429246, "acc_norm": 0.4970484061393152, "acc_norm_stderr": 0.017190054580194694 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "HanaGroup/Mini_category", "model_sha": "15482113d5d33f4a677f49741dce3c2a53810c4b", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }