{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3310580204778157, "acc_stderr": 0.013752062419817832, "acc_norm": 0.3916382252559727, "acc_norm_stderr": 0.014264122124938215 }, "harness|ko_hellaswag|10": { "acc": 0.3873730332603067, "acc_stderr": 0.004861544478451855, "acc_norm": 0.5097590121489743, "acc_norm_stderr": 0.004988830884131634 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03811079669833531, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03811079669833531 }, "harness|ko_mmlu_management|5": { "acc": 0.3300970873786408, "acc_stderr": 0.0465614711001235, "acc_norm": 0.3300970873786408, "acc_norm_stderr": 0.0465614711001235 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3895274584929757, "acc_stderr": 0.017438082556264594, "acc_norm": 0.3895274584929757, "acc_norm_stderr": 0.017438082556264594 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4, "acc_stderr": 0.04232073695151589, "acc_norm": 0.4, "acc_norm_stderr": 0.04232073695151589 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.28936170212765955, "acc_stderr": 0.029644006577009618, "acc_norm": 0.28936170212765955, "acc_norm_stderr": 0.029644006577009618 }, "harness|ko_mmlu_virology|5": { "acc": 0.3433734939759036, "acc_stderr": 0.03696584317010601, "acc_norm": 0.3433734939759036, "acc_norm_stderr": 0.03696584317010601 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.40192926045016075, "acc_stderr": 0.027846476005930477, "acc_norm": 0.40192926045016075, "acc_norm_stderr": 0.027846476005930477 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.39461883408071746, "acc_stderr": 0.03280400504755291, "acc_norm": 0.39461883408071746, "acc_norm_stderr": 0.03280400504755291 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48091603053435117, "acc_stderr": 0.04382094705550988, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550988 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.04605661864718381, "acc_norm": 0.3, "acc_norm_stderr": 0.04605661864718381 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.43434343434343436, "acc_stderr": 0.035315058793591834, "acc_norm": 0.43434343434343436, "acc_norm_stderr": 0.035315058793591834 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179962, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179962 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3739495798319328, "acc_stderr": 0.031429466378837076, "acc_norm": 0.3739495798319328, "acc_norm_stderr": 0.031429466378837076 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2948717948717949, "acc_stderr": 0.02311936275823229, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.02311936275823229 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.39814814814814814, "acc_stderr": 0.047323326159788126, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 
0.047323326159788126 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.0307127300709826, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.0307127300709826 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.34516129032258064, "acc_stderr": 0.027045746573534327, "acc_norm": 0.34516129032258064, "acc_norm_stderr": 0.027045746573534327 }, "harness|ko_mmlu_marketing|5": { "acc": 0.48717948717948717, "acc_stderr": 0.032745319388423504, "acc_norm": 0.48717948717948717, "acc_norm_stderr": 0.032745319388423504 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.33584905660377357, "acc_stderr": 0.029067220146644823, "acc_norm": 0.33584905660377357, "acc_norm_stderr": 0.029067220146644823 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.0472457740573157, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.0472457740573157 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.02659393910184408, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.02659393910184408 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.46766169154228854, "acc_stderr": 0.035281314729336065, "acc_norm": 0.46766169154228854, "acc_norm_stderr": 0.035281314729336065 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.32947976878612717, "acc_stderr": 0.03583901754736411, "acc_norm": 0.32947976878612717, "acc_norm_stderr": 0.03583901754736411 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.022418042891113942, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113942 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.039420826399272135, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.039420826399272135 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.407514450867052, "acc_stderr": 0.026454578146931505, "acc_norm": 0.407514450867052, "acc_norm_stderr": 0.026454578146931505 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.32515337423312884, "acc_stderr": 0.03680350371286461, "acc_norm": 0.32515337423312884, "acc_norm_stderr": 0.03680350371286461 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.39197530864197533, "acc_stderr": 0.02716368603827123, "acc_norm": 0.39197530864197533, "acc_norm_stderr": 0.02716368603827123 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.41450777202072536, "acc_stderr": 0.03555300319557672, "acc_norm": 0.41450777202072536, "acc_norm_stderr": 0.03555300319557672 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.42018348623853213, "acc_stderr": 0.021162420048273508, "acc_norm": 0.42018348623853213, "acc_norm_stderr": 0.021162420048273508 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.03852273364924315, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.03852273364924315 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.41830065359477125, "acc_stderr": 0.02824513402438729, "acc_norm": 0.41830065359477125, "acc_norm_stderr": 0.02824513402438729 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5289256198347108, "acc_stderr": 0.04556710331269498, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.04556710331269498 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3026315789473684, "acc_stderr": 0.03738520676119667, "acc_norm": 0.3026315789473684, "acc_norm_stderr": 0.03738520676119667 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31699346405228757, "acc_stderr": 0.018824219512706214, "acc_norm": 0.31699346405228757, "acc_norm_stderr": 0.018824219512706214 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2872340425531915, "acc_stderr": 0.026992199173064356, "acc_norm": 0.2872340425531915, "acc_norm_stderr": 0.026992199173064356 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3287037037037037, "acc_stderr": 0.03203614084670058, "acc_norm": 0.3287037037037037, "acc_norm_stderr": 0.03203614084670058 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.014508979453553977, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553977 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4227941176470588, "acc_stderr": 0.030008562845003476, "acc_norm": 0.4227941176470588, "acc_norm_stderr": 0.030008562845003476 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3877551020408163, "acc_stderr": 0.031192230726795656, "acc_norm": 0.3877551020408163, "acc_norm_stderr": 0.031192230726795656 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5147679324894515, "acc_stderr": 0.032533028078777386, "acc_norm": 0.5147679324894515, "acc_norm_stderr": 0.032533028078777386 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3305084745762712, "acc_stderr": 0.01201414210184297, "acc_norm": 0.3305084745762712, "acc_norm_stderr": 0.01201414210184297 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3627450980392157, "acc_stderr": 0.033744993563193555, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.033744993563193555 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.37575757575757573, "acc_stderr": 0.03781887353205982, "acc_norm": 0.37575757575757573, "acc_norm_stderr": 0.03781887353205982 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23990208078335373, "mc1_stderr": 0.01494881267906214, "mc2": 0.3781293727977648, "mc2_stderr": 0.014917319628125631 }, "harness|ko_commongen_v2|2": { "acc": 0.21133412042502953, "acc_stderr": 0.01403609034293031, "acc_norm": 0.3022432113341204, "acc_norm_stderr": 0.01578865486302237 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DopeorNope/COLA3-7B", "model_sha": "90a961edc95e63c6b777402191b76fbfa3ed3a8d", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }