{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3651877133105802, "acc_stderr": 0.014070265519268802, "acc_norm": 0.4104095563139932, "acc_norm_stderr": 0.014374922192642662 }, "harness|ko_hellaswag|10": { "acc": 0.38309101772555265, "acc_stderr": 0.004851466623601446, "acc_norm": 0.4949213304122685, "acc_norm_stderr": 0.0049895240030924425 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5029239766081871, "acc_stderr": 0.03834759370936839, "acc_norm": 0.5029239766081871, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.47381864623243936, "acc_stderr": 0.017855434554041982, "acc_norm": 0.47381864623243936, "acc_norm_stderr": 0.017855434554041982 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.32592592592592595, "acc_stderr": 0.04049122041702506, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.04049122041702506 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4765957446808511, "acc_stderr": 0.03265019475033582, "acc_norm": 0.4765957446808511, "acc_norm_stderr": 0.03265019475033582 }, "harness|ko_mmlu_virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.484304932735426, "acc_stderr": 0.0335412657542081, "acc_norm": 0.484304932735426, "acc_norm_stderr": 0.0335412657542081 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5404040404040404, "acc_stderr": 0.035507024651313425, "acc_norm": 0.5404040404040404, "acc_norm_stderr": 0.035507024651313425 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.47478991596638653, "acc_stderr": 0.0324371805513741, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.0324371805513741 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4794871794871795, "acc_stderr": 0.025329663163489943, "acc_norm": 0.4794871794871795, "acc_norm_stderr": 0.025329663163489943 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.63, "acc_stderr": 0.048523658709390974, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709390974 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437055, "acc_norm": 0.5092592592592593, 
"acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406796, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406796 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4645161290322581, "acc_stderr": 0.028372287797962952, "acc_norm": 0.4645161290322581, "acc_norm_stderr": 0.028372287797962952 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6837606837606838, "acc_stderr": 0.03046365674734026, "acc_norm": 0.6837606837606838, "acc_norm_stderr": 0.03046365674734026 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.45660377358490567, "acc_stderr": 0.030656748696739428, "acc_norm": 0.45660377358490567, "acc_norm_stderr": 0.030656748696739428 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066475, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066475 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3583815028901734, "acc_stderr": 0.036563436533531585, "acc_norm": 0.3583815028901734, "acc_norm_stderr": 0.036563436533531585 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.35978835978835977, "acc_stderr": 0.024718075944129277, "acc_norm": 0.35978835978835977, "acc_norm_stderr": 0.024718075944129277 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456344, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456344 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5317919075144508, "acc_stderr": 0.026864624366756653, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.026864624366756653 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44171779141104295, "acc_stderr": 0.039015918258361836, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.039015918258361836 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4351851851851852, "acc_stderr": 0.027586006221607718, "acc_norm": 0.4351851851851852, "acc_norm_stderr": 0.027586006221607718 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.48704663212435234, "acc_stderr": 0.03607228061047749, "acc_norm": 0.48704663212435234, "acc_norm_stderr": 0.03607228061047749 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46788990825688076, "acc_stderr": 0.021393071222680814, "acc_norm": 0.46788990825688076, "acc_norm_stderr": 0.021393071222680814 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4477124183006536, "acc_stderr": 0.028472938478033526, "acc_norm": 0.4477124183006536, "acc_norm_stderr": 0.028472938478033526 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6776859504132231, "acc_stderr": 0.04266416363352168, "acc_norm": 0.6776859504132231, "acc_norm_stderr": 0.04266416363352168 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.03999309712777472, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.03999309712777472 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4019607843137255, "acc_stderr": 0.01983517648437538, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.01983517648437538 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.37943262411347517, "acc_stderr": 0.028947338851614105, "acc_norm": 0.37943262411347517, "acc_norm_stderr": 0.028947338851614105 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010213, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010213 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.03293377139415191, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.03293377139415191 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2100558659217877, "acc_stderr": 0.013623755371333519, "acc_norm": 0.2100558659217877, "acc_norm_stderr": 0.013623755371333519 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35294117647058826, "acc_stderr": 0.0290294228156814, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.0290294228156814 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4816326530612245, "acc_stderr": 0.03198761546763126, "acc_norm": 0.4816326530612245, "acc_norm_stderr": 0.03198761546763126 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6118143459915611, "acc_stderr": 0.03172295004332331, "acc_norm": 0.6118143459915611, "acc_norm_stderr": 0.03172295004332331 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31877444589308995, "acc_stderr": 0.011901895635786088, "acc_norm": 0.31877444589308995, "acc_norm_stderr": 0.011901895635786088 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.45098039215686275, "acc_stderr": 0.03492406104163613, "acc_norm": 0.45098039215686275, "acc_norm_stderr": 0.03492406104163613 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.42424242424242425, "acc_stderr": 0.038592681420702615, "acc_norm": 0.42424242424242425, "acc_norm_stderr": 0.038592681420702615 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2827417380660955, "mc1_stderr": 0.015764770836777305, "mc2": 0.4637619506541597, "mc2_stderr": 0.015446438806039912 }, "harness|ko_commongen_v2|2": { "acc": 0.45808736717827625, "acc_stderr": 0.01712985211791114, "acc_norm": 0.512396694214876, "acc_norm_stderr": 0.017185069732676528 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad0.3", "model_sha": "4053a441cc7724e204d047f88c2b1646a1d6aad2", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }