{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.34897610921501704, "acc_stderr": 0.013928933461382504, "acc_norm": 0.4129692832764505, "acc_norm_stderr": 0.014388344935398326 }, "harness|ko_hellaswag|10": { "acc": 0.4160525791674965, "acc_stderr": 0.004918951019183889, "acc_norm": 0.5650268870742879, "acc_norm_stderr": 0.004947402907996247 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|ko_mmlu_management|5": { "acc": 0.4368932038834951, "acc_stderr": 0.04911147107365777, "acc_norm": 0.4368932038834951, "acc_norm_stderr": 0.04911147107365777 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5172413793103449, "acc_stderr": 0.01786933015400371, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.01786933015400371 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.40425531914893614, "acc_stderr": 0.03208115750788683, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.03208115750788683 }, "harness|ko_mmlu_virology|5": { "acc": 0.4036144578313253, "acc_stderr": 0.03819486140758397, "acc_norm": 0.4036144578313253, "acc_norm_stderr": 0.03819486140758397 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4565916398713826, "acc_stderr": 0.028290869054197608, "acc_norm": 0.4565916398713826, "acc_norm_stderr": 0.028290869054197608 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4977578475336323, "acc_stderr": 0.03355746535223264, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.03355746535223264 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.37404580152671757, "acc_stderr": 0.042438692422305246, "acc_norm": 0.37404580152671757, "acc_norm_stderr": 0.042438692422305246 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.4696969696969697, "acc_stderr": 0.03555804051763929, "acc_norm": 0.4696969696969697, "acc_norm_stderr": 0.03555804051763929 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3724137931034483, "acc_stderr": 0.0402873153294756, "acc_norm": 0.3724137931034483, "acc_norm_stderr": 0.0402873153294756 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3865546218487395, "acc_stderr": 0.0316314580755238, "acc_norm": 0.3865546218487395, "acc_norm_stderr": 0.0316314580755238 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.382051282051282, "acc_stderr": 0.024635549163908223, "acc_norm": 0.382051282051282, "acc_norm_stderr": 0.024635549163908223 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, 
"acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3399014778325123, "acc_stderr": 0.03332769068410789, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.03332769068410789 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4096774193548387, "acc_stderr": 0.02797605491534736, "acc_norm": 0.4096774193548387, "acc_norm_stderr": 0.02797605491534736 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6196581196581197, "acc_stderr": 0.03180425204384099, "acc_norm": 0.6196581196581197, "acc_norm_stderr": 0.03180425204384099 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4, "acc_stderr": 0.030151134457776285, "acc_norm": 0.4, "acc_norm_stderr": 0.030151134457776285 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.025787874220959316, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.025787874220959316 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5223880597014925, "acc_stderr": 0.03531987930208731, "acc_norm": 0.5223880597014925, "acc_norm_stderr": 0.03531987930208731 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.35260115606936415, "acc_stderr": 0.036430371689585496, "acc_norm": 0.35260115606936415, "acc_norm_stderr": 0.036430371689585496 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.023068188848261114, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.023068188848261114 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.45375722543352603, "acc_stderr": 0.026803720583206167, "acc_norm": 0.45375722543352603, "acc_norm_stderr": 0.026803720583206167 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4110429447852761, "acc_stderr": 0.038656978537853624, "acc_norm": 0.4110429447852761, "acc_norm_stderr": 0.038656978537853624 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.43209876543209874, "acc_stderr": 0.02756301097160668, "acc_norm": 0.43209876543209874, "acc_norm_stderr": 0.02756301097160668 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.44559585492227977, "acc_stderr": 0.035870149860756595, "acc_norm": 0.44559585492227977, "acc_norm_stderr": 0.035870149860756595 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.48990825688073397, "acc_stderr": 0.021432956203453306, "acc_norm": 0.48990825688073397, "acc_norm_stderr": 0.021432956203453306 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.20634920634920634, 
"acc_stderr": 0.0361960452412425, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.0361960452412425 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.39215686274509803, "acc_stderr": 0.02795604616542452, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.02795604616542452 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.038035102483515854, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.038035102483515854 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.35784313725490197, "acc_stderr": 0.019393058402355442, "acc_norm": 0.35784313725490197, "acc_norm_stderr": 0.019393058402355442 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340456, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340456 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.28703703703703703, "acc_stderr": 0.030851992993257017, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.030851992993257017 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.29411764705882354, "acc_stderr": 0.027678468642144696, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.027678468642144696 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3510204081632653, "acc_stderr": 0.03055531675557364, "acc_norm": 0.3510204081632653, "acc_norm_stderr": 0.03055531675557364 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5654008438818565, "acc_stderr": 0.03226759995510145, "acc_norm": 0.5654008438818565, "acc_norm_stderr": 0.03226759995510145 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.29726205997392435, "acc_stderr": 0.011673346173086045, "acc_norm": 0.29726205997392435, "acc_norm_stderr": 0.011673346173086045 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4068627450980392, "acc_stderr": 0.03447891136353382, "acc_norm": 0.4068627450980392, "acc_norm_stderr": 0.03447891136353382 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4909090909090909, "acc_stderr": 0.0390369864774844, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.0390369864774844 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2741738066095471, "mc1_stderr": 0.015616518497219374, "mc2": 0.43479566764760613, "mc2_stderr": 0.014958184938646393 }, "harness|ko_commongen_v2|2": { "acc": 0.4604486422668241, "acc_stderr": 0.017136487626049846, "acc_norm": 0.5678866587957497, "acc_norm_stderr": 0.017031170198851742 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "F24/F23-llama2-13B-x1", "model_sha": "90b8a06c768a8981c6368bcbd0294a9e0f92aa79", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }