{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3873720136518771, "acc_stderr": 0.014235872487909865, "acc_norm": 0.44368600682593856, "acc_norm_stderr": 0.014518421825670447 }, "harness|ko_hellaswag|10": { "acc": 0.42093208524198367, "acc_stderr": 0.004926996830194231, "acc_norm": 0.5696076478789086, "acc_norm_stderr": 0.0049411916073179105 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.49707602339181284, "acc_stderr": 0.03834759370936839, "acc_norm": 0.49707602339181284, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.04950504382128921, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.04950504382128921 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5440613026819924, "acc_stderr": 0.01781040392543535, "acc_norm": 0.5440613026819924, "acc_norm_stderr": 0.01781040392543535 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977978, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977978 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711712, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711712 }, "harness|ko_mmlu_virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866767, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866767 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.49517684887459806, "acc_stderr": 0.02839677044411129, "acc_norm": 0.49517684887459806, "acc_norm_stderr": 0.02839677044411129 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5022421524663677, "acc_stderr": 0.033557465352232634, "acc_norm": 0.5022421524663677, "acc_norm_stderr": 0.033557465352232634 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.46564885496183206, "acc_stderr": 0.04374928560599738, "acc_norm": 0.46564885496183206, "acc_norm_stderr": 0.04374928560599738 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03521224908841586, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03521224908841586 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4789915966386555, "acc_stderr": 0.03244980849990029, "acc_norm": 0.4789915966386555, "acc_norm_stderr": 0.03244980849990029 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.43333333333333335, "acc_stderr": 0.02512465352588513, "acc_norm": 0.43333333333333335, "acc_norm_stderr": 0.02512465352588513 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437055, "acc_norm": 
0.5092592592592593, "acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.03476725747649037, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.03476725747649037 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47419354838709676, "acc_stderr": 0.028406095057653315, "acc_norm": 0.47419354838709676, "acc_norm_stderr": 0.028406095057653315 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6324786324786325, "acc_stderr": 0.03158539157745637, "acc_norm": 0.6324786324786325, "acc_norm_stderr": 0.03158539157745637 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4339622641509434, "acc_stderr": 0.0305032920133426, "acc_norm": 0.4339622641509434, "acc_norm_stderr": 0.0305032920133426 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.582089552238806, "acc_stderr": 0.034875586404620636, "acc_norm": 0.582089552238806, "acc_norm_stderr": 0.034875586404620636 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895538, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895538 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29365079365079366, "acc_stderr": 0.023456037383982026, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.023456037383982026 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.040894654493255835, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.040894654493255835 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456344, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456344 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5028901734104047, "acc_stderr": 0.02691864538323901, "acc_norm": 0.5028901734104047, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.49079754601226994, "acc_stderr": 0.03927705600787443, "acc_norm": 0.49079754601226994, "acc_norm_stderr": 0.03927705600787443 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.48148148148148145, "acc_stderr": 0.027801656212323667, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.027801656212323667 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5129533678756477, "acc_stderr": 0.03607228061047749, "acc_norm": 0.5129533678756477, "acc_norm_stderr": 0.03607228061047749 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512321984, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512321984 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5541284403669725, "acc_stderr": 0.021311335009708575, "acc_norm": 0.5541284403669725, "acc_norm_stderr": 0.021311335009708575 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871136, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871136 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4477124183006536, "acc_stderr": 0.028472938478033526, "acc_norm": 0.4477124183006536, "acc_norm_stderr": 0.028472938478033526 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.03999309712777472, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.03999309712777472 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.38562091503267976, "acc_stderr": 0.01969145905235415, "acc_norm": 0.38562091503267976, "acc_norm_stderr": 0.01969145905235415 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.29432624113475175, "acc_stderr": 0.02718712701150381, "acc_norm": 0.29432624113475175, "acc_norm_stderr": 0.02718712701150381 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3897058823529412, "acc_stderr": 0.029624663581159696, "acc_norm": 0.3897058823529412, "acc_norm_stderr": 0.029624663581159696 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.44081632653061226, "acc_stderr": 0.03178419114175363, "acc_norm": 0.44081632653061226, "acc_norm_stderr": 0.03178419114175363 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6244725738396625, "acc_stderr": 0.03152256243091156, "acc_norm": 0.6244725738396625, "acc_norm_stderr": 0.03152256243091156 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.333116036505867, "acc_stderr": 0.012037930451512052, "acc_norm": 0.333116036505867, "acc_norm_stderr": 0.012037930451512052 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4215686274509804, "acc_stderr": 0.03465868196380758, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.03465868196380758 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5393939393939394, "acc_stderr": 0.03892207016552012, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552012 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237024, "mc2": 0.4173314540045968, "mc2_stderr": 0.014766350516789333 }, "harness|ko_commongen_v2|2": { "acc": 0.4769775678866588, "acc_stderr": 0.017172121546727637, "acc_norm": 0.5442739079102715, "acc_norm_stderr": 0.017122829143292658 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MarkrAI/DopeorNope-maestro-v3-DPO-13b", "model_sha": "2b13d8118774db16fd5c520866865674899f3240", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }