{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2960750853242321, "acc_stderr": 0.013340916085246263, "acc_norm": 0.3319112627986348, "acc_norm_stderr": 0.01376098820088054 }, "harness|ko_hellaswag|10": { "acc": 0.36367257518422624, "acc_stderr": 0.004800728138792386, "acc_norm": 0.4591714797849034, "acc_norm_stderr": 0.004973117975062484 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4152046783625731, "acc_stderr": 0.03779275945503201, "acc_norm": 0.4152046783625731, "acc_norm_stderr": 0.03779275945503201 }, "harness|ko_mmlu_management|5": { "acc": 0.42718446601941745, "acc_stderr": 0.04897957737781169, "acc_norm": 0.42718446601941745, "acc_norm_stderr": 0.04897957737781169 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.45849297573435505, "acc_stderr": 0.017818248603465568, "acc_norm": 0.45849297573435505, "acc_norm_stderr": 0.017818248603465568 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.37777777777777777, "acc_stderr": 0.04188307537595853, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.04188307537595853 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.030579442773610337, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.030579442773610337 }, "harness|ko_mmlu_virology|5": { "acc": 0.3373493975903614, "acc_stderr": 0.036807836907275814, "acc_norm": 0.3373493975903614, "acc_norm_stderr": 0.036807836907275814 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3890675241157556, "acc_stderr": 0.027690337536485372, "acc_norm": 0.3890675241157556, "acc_norm_stderr": 0.027690337536485372 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.39461883408071746, "acc_stderr": 0.03280400504755291, "acc_norm": 0.39461883408071746, "acc_norm_stderr": 0.03280400504755291 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3435114503816794, "acc_stderr": 0.041649760719448786, "acc_norm": 0.3435114503816794, "acc_norm_stderr": 0.041649760719448786 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3939393939393939, "acc_stderr": 0.03481285338232963, "acc_norm": 0.3939393939393939, "acc_norm_stderr": 0.03481285338232963 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.36551724137931035, "acc_stderr": 0.04013124195424386, "acc_norm": 0.36551724137931035, "acc_norm_stderr": 0.04013124195424386 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.030868682604121633, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121633 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.0242831405294673, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.0242831405294673 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 
0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3935483870967742, "acc_stderr": 0.027791878753132274, "acc_norm": 0.3935483870967742, "acc_norm_stderr": 0.027791878753132274 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6581196581196581, "acc_stderr": 0.03107502852650775, "acc_norm": 0.6581196581196581, "acc_norm_stderr": 0.03107502852650775 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3622641509433962, "acc_stderr": 0.029582245128384303, "acc_norm": 0.3622641509433962, "acc_norm_stderr": 0.029582245128384303 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114986, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114986 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.4129353233830846, "acc_stderr": 0.03481520803367348, "acc_norm": 0.4129353233830846, "acc_norm_stderr": 0.03481520803367348 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2774566473988439, "acc_stderr": 0.034140140070440354, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.034140140070440354 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.328042328042328, "acc_stderr": 0.02418049716437691, "acc_norm": 0.328042328042328, "acc_norm_stderr": 0.02418049716437691 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2708333333333333, "acc_stderr": 0.03716177437566017, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566017 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3670520231213873, "acc_stderr": 0.02595005433765408, "acc_norm": 0.3670520231213873, "acc_norm_stderr": 0.02595005433765408 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3619631901840491, "acc_stderr": 0.037757007291414416, "acc_norm": 0.3619631901840491, "acc_norm_stderr": 0.037757007291414416 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.38580246913580246, "acc_stderr": 0.027085401226132143, "acc_norm": 0.38580246913580246, "acc_norm_stderr": 0.027085401226132143 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.40414507772020725, "acc_stderr": 0.035415085788840193, "acc_norm": 0.40414507772020725, "acc_norm_stderr": 0.035415085788840193 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.043036840335373146, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.043036840335373146 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3871559633027523, "acc_stderr": 0.02088423199264345, "acc_norm": 0.3871559633027523, "acc_norm_stderr": 0.02088423199264345 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03670066451047181, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047181 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3790849673202614, "acc_stderr": 0.027780141207023334, "acc_norm": 0.3790849673202614, "acc_norm_stderr": 0.027780141207023334 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068382, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068382 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351585, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351585 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3480392156862745, "acc_stderr": 0.019270998708223977, "acc_norm": 0.3480392156862745, "acc_norm_stderr": 0.019270998708223977 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3191489361702128, "acc_stderr": 0.027807990141320196, "acc_norm": 0.3191489361702128, "acc_norm_stderr": 0.027807990141320196 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.04246624336697625, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.04246624336697625 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25, "acc_stderr": 0.029531221160930918, "acc_norm": 0.25, "acc_norm_stderr": 0.029531221160930918 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.23910614525139665, "acc_stderr": 0.014265554192331149, "acc_norm": 0.23910614525139665, "acc_norm_stderr": 0.014265554192331149 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2867647058823529, "acc_stderr": 0.027472274473233818, "acc_norm": 0.2867647058823529, "acc_norm_stderr": 0.027472274473233818 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.32653061224489793, "acc_stderr": 0.030021056238440327, "acc_norm": 0.32653061224489793, "acc_norm_stderr": 0.030021056238440327 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.47257383966244726, "acc_stderr": 0.03249822718301303, "acc_norm": 0.47257383966244726, "acc_norm_stderr": 0.03249822718301303 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2940026075619296, "acc_stderr": 0.011636062953698604, "acc_norm": 0.2940026075619296, "acc_norm_stderr": 0.011636062953698604 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3872549019607843, "acc_stderr": 0.03418931233833344, "acc_norm": 0.3872549019607843, "acc_norm_stderr": 0.03418931233833344 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.43636363636363634, "acc_stderr": 0.03872592983524753, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.03872592983524753 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29865361077111385, "mc1_stderr": 0.016021570613768545, "mc2": 0.4747810026483803, "mc2_stderr": 0.016087880887613513 }, "harness|ko_commongen_v2|2": { "acc": 0.3010625737898465, "acc_stderr": 0.015771113299945457, "acc_norm": 0.3140495867768595, "acc_norm_stderr": 0.015957332434295066 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4", "model_sha": "13e5692b7a084265617f75f81209dce34e414489", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }