{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2593856655290102, "acc_stderr": 0.012808273573927094, "acc_norm": 0.3037542662116041, "acc_norm_stderr": 0.01343890918477876 }, "harness|ko_hellaswag|10": { "acc": 0.3390758812985461, "acc_stderr": 0.004724281487819373, "acc_norm": 0.4146584345747859, "acc_norm_stderr": 0.004916561213591286 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.03401052620104088, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.03401052620104088 }, "harness|ko_mmlu_management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266196, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266196 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.23243933588761176, "acc_stderr": 0.015104550008905699, "acc_norm": 0.23243933588761176, "acc_norm_stderr": 0.015104550008905699 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.26666666666666666, "acc_stderr": 0.038201699145179055, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.038201699145179055 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.23829787234042554, "acc_stderr": 0.027851252973889788, "acc_norm": 0.23829787234042554, "acc_norm_stderr": 0.027851252973889788 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.034106466140718564, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.034106466140718564 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.025218040373410622, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.025218040373410622 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.2556053811659193, "acc_stderr": 0.029275891003969927, "acc_norm": 0.2556053811659193, "acc_norm_stderr": 0.029275891003969927 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.0364129708131373, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.0364129708131373 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909281, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909281 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2474747474747475, "acc_stderr": 0.03074630074212451, "acc_norm": 0.2474747474747475, "acc_norm_stderr": 0.03074630074212451 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.13725490196078433, "acc_stderr": 0.034240846698915216, "acc_norm": 0.13725490196078433, "acc_norm_stderr": 0.034240846698915216 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.31092436974789917, "acc_stderr": 0.030066761582977934, "acc_norm": 0.31092436974789917, "acc_norm_stderr": 0.030066761582977934 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.28974358974358977, "acc_stderr": 0.02300062824368796, "acc_norm": 0.28974358974358977, "acc_norm_stderr": 0.02300062824368796 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.038935425188248475, "acc_norm": 
0.2037037037037037, "acc_norm_stderr": 0.038935425188248475 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114475, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114475 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.31290322580645163, "acc_stderr": 0.02637756702864586, "acc_norm": 0.31290322580645163, "acc_norm_stderr": 0.02637756702864586 }, "harness|ko_mmlu_marketing|5": { "acc": 0.24358974358974358, "acc_stderr": 0.028120966503914418, "acc_norm": 0.24358974358974358, "acc_norm_stderr": 0.028120966503914418 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827845 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2818181818181818, "acc_stderr": 0.043091187099464585, "acc_norm": 0.2818181818181818, "acc_norm_stderr": 0.043091187099464585 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348384, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348384 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.28901734104046245, "acc_stderr": 0.03456425745086998, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.03456425745086998 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.23544973544973544, "acc_stderr": 0.021851509822031708, "acc_norm": 0.23544973544973544, "acc_norm_stderr": 0.021851509822031708 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.22916666666666666, "acc_stderr": 0.035146974678623884, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.035146974678623884 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2543352601156069, "acc_stderr": 0.02344582627654555, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.02344582627654555 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.24539877300613497, "acc_stderr": 0.03380939813943354, "acc_norm": 0.24539877300613497, "acc_norm_stderr": 0.03380939813943354 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02492200116888632, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02492200116888632 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3316062176165803, "acc_stderr": 0.03397636541089116, "acc_norm": 0.3316062176165803, "acc_norm_stderr": 0.03397636541089116 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.21834862385321102, "acc_stderr": 0.01771260052872273, "acc_norm": 0.21834862385321102, "acc_norm_stderr": 
0.01771260052872273 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2581699346405229, "acc_stderr": 0.025058503316958157, "acc_norm": 0.2581699346405229, "acc_norm_stderr": 0.025058503316958157 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2231404958677686, "acc_stderr": 0.03800754475228733, "acc_norm": 0.2231404958677686, "acc_norm_stderr": 0.03800754475228733 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.27631578947368424, "acc_stderr": 0.03639057569952925, "acc_norm": 0.27631578947368424, "acc_norm_stderr": 0.03639057569952925 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.23039215686274508, "acc_stderr": 0.017035229258034038, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.017035229258034038 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24113475177304963, "acc_stderr": 0.025518731049537773, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.025518731049537773 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.16071428571428573, "acc_stderr": 0.03485946096475741, "acc_norm": 0.16071428571428573, "acc_norm_stderr": 0.03485946096475741 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.27314814814814814, "acc_stderr": 0.03038805130167812, "acc_norm": 0.27314814814814814, "acc_norm_stderr": 0.03038805130167812 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23128491620111732, "acc_stderr": 0.0141022236231526, "acc_norm": 0.23128491620111732, "acc_norm_stderr": 0.0141022236231526 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2536764705882353, "acc_stderr": 0.02643132987078953, "acc_norm": 0.2536764705882353, "acc_norm_stderr": 0.02643132987078953 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.23673469387755103, "acc_stderr": 0.027212835884073132, "acc_norm": 0.23673469387755103, "acc_norm_stderr": 0.027212835884073132 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.02875679962965834, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.02875679962965834 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26140808344198174, "acc_stderr": 0.011222528169771316, "acc_norm": 0.26140808344198174, "acc_norm_stderr": 0.011222528169771316 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2647058823529412, "acc_stderr": 0.030964517926923413, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.030964517926923413 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2350061199510404, "mc1_stderr": 0.014843061507731613, "mc2": 0.40043350315231013, "mc2_stderr": 0.01604778937263507 }, "harness|ko_commongen_v2|2": { "acc": 0.2892561983471074, "acc_stderr": 0.015588800386053555, "acc_norm": 0.31759149940968123, "acc_norm_stderr": 0.016005581876229306 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "beomi/KoAlpaca-Polyglot-5.8B", "model_sha": "cb1597cbaf4a98e52e6b767381a80893e4818477", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }