{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.514505119453925,
            "acc_stderr": 0.014605241081370053,
            "acc_norm": 0.5750853242320819,
            "acc_norm_stderr": 0.014445698968520776
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.5127464648476399,
            "acc_stderr": 0.004988159744742496,
            "acc_norm": 0.7033459470225055,
            "acc_norm_stderr": 0.004558491550673699
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5614035087719298,
            "acc_stderr": 0.038057975055904594,
            "acc_norm": 0.5614035087719298,
            "acc_norm_stderr": 0.038057975055904594
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6796116504854369,
            "acc_stderr": 0.04620284082280042,
            "acc_norm": 0.6796116504854369,
            "acc_norm_stderr": 0.04620284082280042
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6500638569604087,
            "acc_stderr": 0.017055679797150426,
            "acc_norm": 0.6500638569604087,
            "acc_norm_stderr": 0.017055679797150426
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.043097329010363554,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.043097329010363554
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542126,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542126
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.43829787234042555,
            "acc_stderr": 0.032436186361081025,
            "acc_norm": 0.43829787234042555,
            "acc_norm_stderr": 0.032436186361081025
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4578313253012048,
            "acc_stderr": 0.03878626771002361,
            "acc_norm": 0.4578313253012048,
            "acc_norm_stderr": 0.03878626771002361
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6270096463022508,
            "acc_stderr": 0.027466610213140116,
            "acc_norm": 0.6270096463022508,
            "acc_norm_stderr": 0.027466610213140116
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5874439461883408,
            "acc_stderr": 0.03304062175449297,
            "acc_norm": 0.5874439461883408,
            "acc_norm_stderr": 0.03304062175449297
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.648854961832061,
            "acc_stderr": 0.0418644516301375,
            "acc_norm": 0.648854961832061,
            "acc_norm_stderr": 0.0418644516301375
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956913,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956913
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.702020202020202,
            "acc_stderr": 0.03258630383836556,
            "acc_norm": 0.702020202020202,
            "acc_norm_stderr": 0.03258630383836556
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482758,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482758
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.048786087144669955,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.048786087144669955
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6134453781512605,
            "acc_stderr": 0.0316314580755238,
            "acc_norm": 0.6134453781512605,
            "acc_norm_stderr": 0.0316314580755238
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5564102564102564,
            "acc_stderr": 0.025189149894764215,
            "acc_norm": 0.5564102564102564,
            "acc_norm_stderr": 0.025189149894764215
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.62,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6388888888888888,
            "acc_stderr": 0.04643454608906276,
            "acc_norm": 0.6388888888888888,
            "acc_norm_stderr": 0.04643454608906276
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5870967741935483,
            "acc_stderr": 0.02800913812540038,
            "acc_norm": 0.5870967741935483,
            "acc_norm_stderr": 0.02800913812540038
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.782051282051282,
            "acc_stderr": 0.027046857630716663,
            "acc_norm": 0.782051282051282,
            "acc_norm_stderr": 0.027046857630716663
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119898,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119898
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.04750185058907296,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.04750185058907296
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.02944316932303154,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.02944316932303154
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33774834437086093,
            "acc_stderr": 0.038615575462551684,
            "acc_norm": 0.33774834437086093,
            "acc_norm_stderr": 0.038615575462551684
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6716417910447762,
            "acc_stderr": 0.033206858897443244,
            "acc_norm": 0.6716417910447762,
            "acc_norm_stderr": 0.033206858897443244
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4797687861271676,
            "acc_stderr": 0.03809342081273958,
            "acc_norm": 0.4797687861271676,
            "acc_norm_stderr": 0.03809342081273958
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.41534391534391535,
            "acc_stderr": 0.025379524910778405,
            "acc_norm": 0.41534391534391535,
            "acc_norm_stderr": 0.025379524910778405
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5486111111111112,
            "acc_stderr": 0.04161402398403279,
            "acc_norm": 0.5486111111111112,
            "acc_norm_stderr": 0.04161402398403279
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5086705202312138,
            "acc_stderr": 0.026915047355369804,
            "acc_norm": 0.5086705202312138,
            "acc_norm_stderr": 0.026915047355369804
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5030674846625767,
            "acc_stderr": 0.03928297078179663,
            "acc_norm": 0.5030674846625767,
            "acc_norm_stderr": 0.03928297078179663
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.595679012345679,
            "acc_stderr": 0.027306625297327677,
            "acc_norm": 0.595679012345679,
            "acc_norm_stderr": 0.027306625297327677
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7305699481865285,
            "acc_stderr": 0.03201867122877794,
            "acc_norm": 0.7305699481865285,
            "acc_norm_stderr": 0.03201867122877794
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4649122807017544,
            "acc_stderr": 0.046920083813689104,
            "acc_norm": 0.4649122807017544,
            "acc_norm_stderr": 0.046920083813689104
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6770642201834862,
            "acc_stderr": 0.020048115923415325,
            "acc_norm": 0.6770642201834862,
            "acc_norm_stderr": 0.020048115923415325
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.043062412591271526,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.043062412591271526
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6143790849673203,
            "acc_stderr": 0.02787074527829027,
            "acc_norm": 0.6143790849673203,
            "acc_norm_stderr": 0.02787074527829027
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.56,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.56,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.71900826446281,
            "acc_stderr": 0.04103203830514511,
            "acc_norm": 0.71900826446281,
            "acc_norm_stderr": 0.04103203830514511
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5789473684210527,
            "acc_stderr": 0.040179012759817494,
            "acc_norm": 0.5789473684210527,
            "acc_norm_stderr": 0.040179012759817494
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5016339869281046,
            "acc_stderr": 0.020227726838150124,
            "acc_norm": 0.5016339869281046,
            "acc_norm_stderr": 0.020227726838150124
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199495,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199495
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973647,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973647
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5092592592592593,
            "acc_stderr": 0.034093869469927006,
            "acc_norm": 0.5092592592592593,
            "acc_norm_stderr": 0.034093869469927006
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.39106145251396646,
            "acc_stderr": 0.016320763763808383,
            "acc_norm": 0.39106145251396646,
            "acc_norm_stderr": 0.016320763763808383
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.65,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.030320243265004137,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.030320243265004137
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6081632653061224,
            "acc_stderr": 0.031251275910891656,
            "acc_norm": 0.6081632653061224,
            "acc_norm_stderr": 0.031251275910891656
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.759493670886076,
            "acc_stderr": 0.027820781981149678,
            "acc_norm": 0.759493670886076,
            "acc_norm_stderr": 0.027820781981149678
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4380704041720991,
            "acc_stderr": 0.01267190278256764,
            "acc_norm": 0.4380704041720991,
            "acc_norm_stderr": 0.01267190278256764
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6568627450980392,
            "acc_stderr": 0.03332139944668086,
            "acc_norm": 0.6568627450980392,
            "acc_norm_stderr": 0.03332139944668086
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6727272727272727,
            "acc_stderr": 0.03663974994391243,
            "acc_norm": 0.6727272727272727,
            "acc_norm_stderr": 0.03663974994391243
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.5079559363525091,
            "mc1_stderr": 0.017501285074551825,
            "mc2": 0.6849191804406951,
            "mc2_stderr": 0.015690245522535593
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.551357733175915,
            "acc_stderr": 0.017099430514725792,
            "acc_norm": 0.5631641086186541,
            "acc_norm_stderr": 0.017052633559856076
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/komt_DPOv3",
        "model_sha": "5ae1a1246cf3533ace0dfb1e18319b3762432ea4",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}