{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.44368600682593856, "acc_stderr": 0.014518421825670447, "acc_norm": 0.5034129692832765, "acc_norm_stderr": 0.014611050403244084 }, "harness|ko_hellaswag|10": { "acc": 0.454690300736905, "acc_stderr": 0.004969251445596328, "acc_norm": 0.6224855606452898, "acc_norm_stderr": 0.004837744647345717 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5146198830409356, "acc_stderr": 0.03833185275213026, "acc_norm": 0.5146198830409356, "acc_norm_stderr": 0.03833185275213026 }, "harness|ko_mmlu_management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.048257293373563895, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.048257293373563895 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5478927203065134, "acc_stderr": 0.017797751493865633, "acc_norm": 0.5478927203065134, "acc_norm_stderr": 0.017797751493865633 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3925925925925926, "acc_stderr": 0.04218506215368878, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.04218506215368878 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.032529096196131965, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.032529096196131965 }, "harness|ko_mmlu_virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479636, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479636 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4887459807073955, "acc_stderr": 0.028390897396863526, "acc_norm": 0.4887459807073955, "acc_norm_stderr": 0.028390897396863526 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.42748091603053434, "acc_stderr": 0.04338920305792401, "acc_norm": 0.42748091603053434, "acc_norm_stderr": 0.04338920305792401 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.601010101010101, "acc_stderr": 0.0348890161685273, "acc_norm": 0.601010101010101, "acc_norm_stderr": 0.0348890161685273 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062948, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062948 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.47478991596638653, "acc_stderr": 0.03243718055137411, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.03243718055137411 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5025641025641026, "acc_stderr": 0.025350672979412184, "acc_norm": 0.5025641025641026, "acc_norm_stderr": 0.025350672979412184 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5740740740740741, "acc_stderr": 0.0478034362693679, "acc_norm": 0.5740740740740741, 
"acc_norm_stderr": 0.0478034362693679 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3399014778325123, "acc_stderr": 0.033327690684107895, "acc_norm": 0.3399014778325123, "acc_norm_stderr": 0.033327690684107895 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4935483870967742, "acc_stderr": 0.02844163823354051, "acc_norm": 0.4935483870967742, "acc_norm_stderr": 0.02844163823354051 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7136752136752137, "acc_stderr": 0.029614323690456645, "acc_norm": 0.7136752136752137, "acc_norm_stderr": 0.029614323690456645 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.03063562795796182, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.03063562795796182 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5181818181818182, "acc_stderr": 0.04785964010794915, "acc_norm": 0.5181818181818182, "acc_norm_stderr": 0.04785964010794915 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5920398009950248, "acc_stderr": 0.03475116365194092, "acc_norm": 0.5920398009950248, "acc_norm_stderr": 0.03475116365194092 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.03784271932887467, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.03784271932887467 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.34656084656084657, "acc_stderr": 0.024508777521028417, "acc_norm": 0.34656084656084657, "acc_norm_stderr": 0.024508777521028417 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4236111111111111, "acc_stderr": 0.041321250197233685, "acc_norm": 0.4236111111111111, "acc_norm_stderr": 0.041321250197233685 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4421965317919075, "acc_stderr": 0.0267386036438074, "acc_norm": 0.4421965317919075, "acc_norm_stderr": 0.0267386036438074 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4723926380368098, "acc_stderr": 0.0392237829061099, "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.0392237829061099 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4228395061728395, "acc_stderr": 0.027487472980871598, "acc_norm": 0.4228395061728395, "acc_norm_stderr": 0.027487472980871598 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6269430051813472, "acc_stderr": 0.03490205592048574, "acc_norm": 0.6269430051813472, "acc_norm_stderr": 0.03490205592048574 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3684210526315789, "acc_stderr": 0.04537815354939391, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.04537815354939391 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5669724770642202, "acc_stderr": 0.021244146569074345, "acc_norm": 0.5669724770642202, "acc_norm_stderr": 0.021244146569074345 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02852638345214264, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02852638345214264 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5950413223140496, "acc_stderr": 0.04481137755942469, "acc_norm": 0.5950413223140496, "acc_norm_stderr": 0.04481137755942469 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.39215686274509803, "acc_stderr": 0.019751726508762626, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.019751726508762626 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.046695106638751926, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.046695106638751926 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.28268156424581004, "acc_stderr": 0.015060381730018089, "acc_norm": 0.28268156424581004, "acc_norm_stderr": 0.015060381730018089 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3897058823529412, "acc_stderr": 0.029624663581159685, "acc_norm": 0.3897058823529412, "acc_norm_stderr": 0.029624663581159685 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.45714285714285713, "acc_stderr": 0.03189141832421397, "acc_norm": 0.45714285714285713, "acc_norm_stderr": 0.03189141832421397 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.03178471874564729, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.03178471874564729 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3305084745762712, "acc_stderr": 0.01201414210184297, "acc_norm": 0.3305084745762712, "acc_norm_stderr": 0.01201414210184297 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5637254901960784, "acc_stderr": 0.03480693138457039, "acc_norm": 0.5637254901960784, "acc_norm_stderr": 0.03480693138457039 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5515151515151515, "acc_stderr": 0.038835659779569286, "acc_norm": 0.5515151515151515, "acc_norm_stderr": 0.038835659779569286 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3684210526315789, "mc1_stderr": 0.016886551261046042, "mc2": 0.5450380626898707, "mc2_stderr": 0.016088328593236022 }, "harness|ko_commongen_v2|2": { "acc": 0.48406139315230223, "acc_stderr": 0.017181617837190195, "acc_norm": 0.4852420306965762, "acc_norm_stderr": 0.017182864434998564 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "mssma/ko-solar-10.7b-v0.2b", "model_sha": "840cdeb76f96ef4bdebf585653b6d17061432169", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }