|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.19112627986348124,
            "acc_stderr": 0.011490055292778596,
            "acc_norm": 0.24829351535836178,
            "acc_norm_stderr": 0.012624912868089764
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.2756423023302131,
            "acc_stderr": 0.0044592414745187915,
            "acc_norm": 0.29884485162318264,
            "acc_norm_stderr": 0.004568161710399566
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.27485380116959063,
            "acc_stderr": 0.03424042924691582,
            "acc_norm": 0.27485380116959063,
            "acc_norm_stderr": 0.03424042924691582
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.17475728155339806,
            "acc_stderr": 0.037601780060266196,
            "acc_norm": 0.17475728155339806,
            "acc_norm_stderr": 0.037601780060266196
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2669220945083014,
            "acc_stderr": 0.015818450894777555,
            "acc_norm": 0.2669220945083014,
            "acc_norm_stderr": 0.015818450894777555
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3111111111111111,
            "acc_stderr": 0.03999262876617722,
            "acc_norm": 0.3111111111111111,
            "acc_norm_stderr": 0.03999262876617722
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2936170212765957,
            "acc_stderr": 0.02977164271249123,
            "acc_norm": 0.2936170212765957,
            "acc_norm_stderr": 0.02977164271249123
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.27710843373493976,
            "acc_stderr": 0.03484331592680588,
            "acc_norm": 0.27710843373493976,
            "acc_norm_stderr": 0.03484331592680588
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.20257234726688103,
            "acc_stderr": 0.022827317491059686,
            "acc_norm": 0.20257234726688103,
            "acc_norm_stderr": 0.022827317491059686
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3632286995515695,
            "acc_stderr": 0.032277904428505,
            "acc_norm": 0.3632286995515695,
            "acc_norm_stderr": 0.032277904428505
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2824427480916031,
            "acc_stderr": 0.03948406125768361,
            "acc_norm": 0.2824427480916031,
            "acc_norm_stderr": 0.03948406125768361
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.21717171717171718,
            "acc_stderr": 0.029376616484945644,
            "acc_norm": 0.21717171717171718,
            "acc_norm_stderr": 0.029376616484945644
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2206896551724138,
            "acc_stderr": 0.03455930201924812,
            "acc_norm": 0.2206896551724138,
            "acc_norm_stderr": 0.03455930201924812
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.14705882352941177,
            "acc_stderr": 0.035240689515674474,
            "acc_norm": 0.14705882352941177,
            "acc_norm_stderr": 0.035240689515674474
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.226890756302521,
            "acc_stderr": 0.027205371538279476,
            "acc_norm": 0.226890756302521,
            "acc_norm_stderr": 0.027205371538279476
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2153846153846154,
            "acc_stderr": 0.020843034557462878,
            "acc_norm": 0.2153846153846154,
            "acc_norm_stderr": 0.020843034557462878
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.25,
            "acc_stderr": 0.04186091791394607,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04186091791394607
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.13793103448275862,
            "acc_stderr": 0.024261984301044565,
            "acc_norm": 0.13793103448275862,
            "acc_norm_stderr": 0.024261984301044565
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.20967741935483872,
            "acc_stderr": 0.023157879349083536,
            "acc_norm": 0.20967741935483872,
            "acc_norm_stderr": 0.023157879349083536
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2094017094017094,
            "acc_stderr": 0.026655699653922737,
            "acc_norm": 0.2094017094017094,
            "acc_norm_stderr": 0.026655699653922737
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2490566037735849,
            "acc_stderr": 0.026616482980501715,
            "acc_norm": 0.2490566037735849,
            "acc_norm_stderr": 0.026616482980501715
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03955932861795833,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03955932861795833
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389024,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24378109452736318,
            "acc_stderr": 0.03036049015401464,
            "acc_norm": 0.24378109452736318,
            "acc_norm_stderr": 0.03036049015401464
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2138728323699422,
            "acc_stderr": 0.03126511206173043,
            "acc_norm": 0.2138728323699422,
            "acc_norm_stderr": 0.03126511206173043
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.022418042891113946,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.022418042891113946
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.03476590104304134,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.03476590104304134
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.023618678310069363,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.023618678310069363
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.27607361963190186,
            "acc_stderr": 0.0351238528370505,
            "acc_norm": 0.27607361963190186,
            "acc_norm_stderr": 0.0351238528370505
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.02378858355165854,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.02378858355165854
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20725388601036268,
            "acc_stderr": 0.02925282329180363,
            "acc_norm": 0.20725388601036268,
            "acc_norm_stderr": 0.02925282329180363
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.042270544512322004,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.042270544512322004
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.21100917431192662,
            "acc_stderr": 0.017493922404112648,
            "acc_norm": 0.21100917431192662,
            "acc_norm_stderr": 0.017493922404112648
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.038095238095238106,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.038095238095238106
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.024288619466046116,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.024288619466046116
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.19008264462809918,
            "acc_stderr": 0.03581796951709282,
            "acc_norm": 0.19008264462809918,
            "acc_norm_stderr": 0.03581796951709282
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17763157894736842,
            "acc_stderr": 0.03110318238312338,
            "acc_norm": 0.17763157894736842,
            "acc_norm_stderr": 0.03110318238312338
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.017740899509177795,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.017740899509177795
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.026684564340460997,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.026684564340460997
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.042878587513404544,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.042878587513404544
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.35648148148148145,
            "acc_stderr": 0.032664783315272714,
            "acc_norm": 0.35648148148148145,
            "acc_norm_stderr": 0.032664783315272714
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23016759776536314,
            "acc_stderr": 0.014078339253425809,
            "acc_norm": 0.23016759776536314,
            "acc_norm_stderr": 0.014078339253425809
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.02767846864214471,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.02767846864214471
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3142857142857143,
            "acc_stderr": 0.029719329422417468,
            "acc_norm": 0.3142857142857143,
            "acc_norm_stderr": 0.029719329422417468
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2742616033755274,
            "acc_stderr": 0.02904133351059804,
            "acc_norm": 0.2742616033755274,
            "acc_norm_stderr": 0.02904133351059804
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24511082138200782,
            "acc_stderr": 0.010986307870045526,
            "acc_norm": 0.24511082138200782,
            "acc_norm_stderr": 0.010986307870045526
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.03149328104507955,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.03149328104507955
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03225078108306289,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03225078108306289
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29498164014687883,
            "mc1_stderr": 0.015964400965589674,
            "mc2": 0.49219803033147647,
            "mc2_stderr": 0.015947492879186672
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.0539906103286385,
            "acc_stderr": 0.007747151732014082,
            "acc_norm": 0.09859154929577464,
            "acc_norm_stderr": 0.010219175985280616
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AtAndDev/ShortKingv0.1",
        "model_sha": "6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}