{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.22696245733788395, "acc_stderr": 0.01224049153613286, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.01224049153613286 }, "harness|ko_hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|ko_mmlu_management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266196, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266196 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150191, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150191 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102987, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102987 }, "harness|ko_mmlu_virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.022122439772480757, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.022122439772480757 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371393, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371393 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.04236511258094633, "acc_norm": 
0.25925925925925924, "acc_norm_stderr": 0.04236511258094633 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.025308904539380627, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.025308904539380627 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.021732540689329276, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.021732540689329276 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.029745048572674057, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.029745048572674057 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.025288394502891377, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.025288394502891377 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436775, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436775 }, "harness|ko_mmlu_sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014645, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014645 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749884, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749884 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.020940481565334835, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.020940481565334835 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.03259177392742178, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.03259177392742178 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445796, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445796 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860677, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860677 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936097, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 
0.016909276884936097 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.02392915551735129, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.02392915551735129 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.038968789850704164, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.038968789850704164 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.03110318238312338, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.03110318238312338 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432414, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134238, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134238 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574892, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574892 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.02352924218519311, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.02352924218519311 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.025000256039546198, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.025000256039546198 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|ko_truthfulqa_mc|0": { "mc1": 1.0, "mc1_stderr": 0.0, "mc2": NaN, "mc2_stderr": NaN }, "harness|ko_commongen_v2|2": { "acc": 0.24793388429752067, "acc_stderr": 0.014846044968252247, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.014846044968252247 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, 
"harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "F24/llama-2-koen-orca-mini-platypus2-math-13b", "model_sha": "b5ee3b5b459be0a3fd99d5050ed0d38653404690", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }