{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3412969283276451, "acc_stderr": 0.013855831287497724, "acc_norm": 0.38993174061433444, "acc_norm_stderr": 0.014252959848892893 }, "harness|ko_hellaswag|10": { "acc": 0.37801234813782114, "acc_stderr": 0.004838997427699758, "acc_norm": 0.4923322047400916, "acc_norm_stderr": 0.004989194627707854 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.0383161053282193, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.0383161053282193 }, "harness|ko_mmlu_management|5": { "acc": 0.6019417475728155, "acc_stderr": 0.04846748253977238, "acc_norm": 0.6019417475728155, "acc_norm_stderr": 0.04846748253977238 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4674329501915709, "acc_stderr": 0.017841995750520857, "acc_norm": 0.4674329501915709, "acc_norm_stderr": 0.017841995750520857 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04292596718256981, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04292596718256981 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3659574468085106, "acc_stderr": 0.0314895582974553, "acc_norm": 0.3659574468085106, "acc_norm_stderr": 0.0314895582974553 }, "harness|ko_mmlu_virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.037998574544796354, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.037998574544796354 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4919614147909968, "acc_stderr": 0.028394421370984545, "acc_norm": 0.4919614147909968, "acc_norm_stderr": 0.028394421370984545 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4080717488789238, "acc_stderr": 0.03298574607842821, "acc_norm": 0.4080717488789238, "acc_norm_stderr": 0.03298574607842821 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.04356447202665069, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.04356447202665069 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5555555555555556, "acc_stderr": 0.035402943770953675, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.035402943770953675 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.04154659671707546, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707546 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082635, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082635 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5672268907563025, "acc_stderr": 0.032183581077426124, "acc_norm": 0.5672268907563025, "acc_norm_stderr": 0.032183581077426124 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4564102564102564, "acc_stderr": 0.025254485424799605, "acc_norm": 0.4564102564102564, "acc_norm_stderr": 0.025254485424799605 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.04999999999999999, "acc_norm": 0.55, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04803752235190192, "acc_norm": 0.5555555555555556, 
"acc_norm_stderr": 0.04803752235190192 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3694581280788177, "acc_stderr": 0.033959703819985754, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.033959703819985754 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4612903225806452, "acc_stderr": 0.02835863485983692, "acc_norm": 0.4612903225806452, "acc_norm_stderr": 0.02835863485983692 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7435897435897436, "acc_stderr": 0.028605953702004253, "acc_norm": 0.7435897435897436, "acc_norm_stderr": 0.028605953702004253 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.030635627957961827, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961827 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972745, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972745 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02730914058823018, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823018 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.25165562913907286, "acc_stderr": 0.03543304234389985, "acc_norm": 0.25165562913907286, "acc_norm_stderr": 0.03543304234389985 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5920398009950248, "acc_stderr": 0.03475116365194092, "acc_norm": 0.5920398009950248, "acc_norm_stderr": 0.03475116365194092 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092056, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092056 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.02497695405315525, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.02497695405315525 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49421965317919075, "acc_stderr": 0.026917296179149116, "acc_norm": 0.49421965317919075, "acc_norm_stderr": 0.026917296179149116 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4537037037037037, "acc_stderr": 0.0277012284685426, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.0277012284685426 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5233160621761658, "acc_stderr": 0.03604513672442203, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.03604513672442203 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537318, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537318 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4990825688073395, "acc_stderr": 0.021437287056051215, "acc_norm": 0.4990825688073395, "acc_norm_stderr": 0.021437287056051215 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5032679738562091, "acc_stderr": 0.02862930519400355, "acc_norm": 0.5032679738562091, "acc_norm_stderr": 0.02862930519400355 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4144736842105263, "acc_stderr": 0.04008973785779206, "acc_norm": 0.4144736842105263, "acc_norm_stderr": 0.04008973785779206 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4084967320261438, "acc_stderr": 0.01988622103750187, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.01988622103750187 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509314, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509314 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4583333333333333, "acc_stderr": 0.033981108902946366, "acc_norm": 0.4583333333333333, "acc_norm_stderr": 0.033981108902946366 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.01461446582196634, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.01461446582196634 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.41911764705882354, "acc_stderr": 0.02997280717046463, "acc_norm": 0.41911764705882354, "acc_norm_stderr": 0.02997280717046463 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5510204081632653, "acc_stderr": 0.03184213866687578, "acc_norm": 0.5510204081632653, "acc_norm_stderr": 0.03184213866687578 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5443037974683544, "acc_stderr": 0.032419206846933335, "acc_norm": 0.5443037974683544, "acc_norm_stderr": 0.032419206846933335 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.33116036505867014, "acc_stderr": 0.01202012819598576, "acc_norm": 0.33116036505867014, "acc_norm_stderr": 0.01202012819598576 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.47058823529411764, "acc_stderr": 0.03503235296367992, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.03503235296367992 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.48484848484848486, "acc_stderr": 0.03902551007374449, "acc_norm": 0.48484848484848486, "acc_norm_stderr": 0.03902551007374449 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3084455324357405, "mc1_stderr": 0.01616803938315687, "mc2": 0.4911572350172599, "mc2_stderr": 0.015610028118935604 }, "harness|ko_commongen_v2|2": { "acc": 0.5159386068476978, "acc_stderr": 0.017181617837190192, "acc_norm": 0.5430932703659976, "acc_norm_stderr": 0.01712638909308678 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7", "model_sha": "b191a814d7f0ab540eaa36f8f6ca4c189e4d3a5f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }