{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2713310580204778,
            "acc_stderr": 0.012993807727545792,
            "acc_norm": 0.32337883959044367,
            "acc_norm_stderr": 0.013669421630012123
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35222067317267475,
            "acc_stderr": 0.0047668609071715405,
            "acc_norm": 0.4458275243975304,
            "acc_norm_stderr": 0.004960408362133239
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2573099415204678,
            "acc_stderr": 0.03352799844161865,
            "acc_norm": 0.2573099415204678,
            "acc_norm_stderr": 0.03352799844161865
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1553398058252427,
            "acc_stderr": 0.03586594738573975,
            "acc_norm": 0.1553398058252427,
            "acc_norm_stderr": 0.03586594738573975
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24393358876117496,
            "acc_stderr": 0.015357212665829479,
            "acc_norm": 0.24393358876117496,
            "acc_norm_stderr": 0.015357212665829479
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.03502553170678318,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.03502553170678318
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.31063829787234043,
            "acc_stderr": 0.03025123757921317,
            "acc_norm": 0.31063829787234043,
            "acc_norm_stderr": 0.03025123757921317
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3132530120481928,
            "acc_stderr": 0.03610805018031023,
            "acc_norm": 0.3132530120481928,
            "acc_norm_stderr": 0.03610805018031023
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2540192926045016,
            "acc_stderr": 0.024723861504771686,
            "acc_norm": 0.2540192926045016,
            "acc_norm_stderr": 0.024723861504771686
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3542600896860987,
            "acc_stderr": 0.032100621541349864,
            "acc_norm": 0.3542600896860987,
            "acc_norm_stderr": 0.032100621541349864
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.26717557251908397,
            "acc_stderr": 0.038808483010823944,
            "acc_norm": 0.26717557251908397,
            "acc_norm_stderr": 0.038808483010823944
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.16161616161616163,
            "acc_stderr": 0.026225919863629293,
            "acc_norm": 0.16161616161616163,
            "acc_norm_stderr": 0.026225919863629293
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.27586206896551724,
            "acc_stderr": 0.03724563619774634,
            "acc_norm": 0.27586206896551724,
            "acc_norm_stderr": 0.03724563619774634
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.04440521906179326,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.04440521906179326
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24369747899159663,
            "acc_stderr": 0.02788682807838055,
            "acc_norm": 0.24369747899159663,
            "acc_norm_stderr": 0.02788682807838055
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2230769230769231,
            "acc_stderr": 0.021107730127244,
            "acc_norm": 0.2230769230769231,
            "acc_norm_stderr": 0.021107730127244
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.28703703703703703,
            "acc_stderr": 0.043733130409147614,
            "acc_norm": 0.28703703703703703,
            "acc_norm_stderr": 0.043733130409147614
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.1724137931034483,
            "acc_stderr": 0.026577672183036576,
            "acc_norm": 0.1724137931034483,
            "acc_norm_stderr": 0.026577672183036576
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.22903225806451613,
            "acc_stderr": 0.023904914311782655,
            "acc_norm": 0.22903225806451613,
            "acc_norm_stderr": 0.023904914311782655
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.26495726495726496,
            "acc_stderr": 0.028911208802749475,
            "acc_norm": 0.26495726495726496,
            "acc_norm_stderr": 0.028911208802749475
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2188679245283019,
            "acc_stderr": 0.025447863825108614,
            "acc_norm": 0.2188679245283019,
            "acc_norm_stderr": 0.025447863825108614
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145665,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145665
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.26490066225165565,
            "acc_stderr": 0.03603038545360384,
            "acc_norm": 0.26490066225165565,
            "acc_norm_stderr": 0.03603038545360384
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24875621890547264,
            "acc_stderr": 0.030567675938916707,
            "acc_norm": 0.24875621890547264,
            "acc_norm_stderr": 0.030567675938916707
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.18497109826589594,
            "acc_stderr": 0.0296056239817712,
            "acc_norm": 0.18497109826589594,
            "acc_norm_stderr": 0.0296056239817712
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24867724867724866,
            "acc_stderr": 0.022261817692400175,
            "acc_norm": 0.24867724867724866,
            "acc_norm_stderr": 0.022261817692400175
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.22916666666666666,
            "acc_stderr": 0.03514697467862388,
            "acc_norm": 0.22916666666666666,
            "acc_norm_stderr": 0.03514697467862388
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.22,
            "acc_stderr": 0.0416333199893227,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.0416333199893227
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.3,
            "acc_stderr": 0.04605661864718381,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04605661864718381
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.022497230190967554,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.022497230190967554
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2716049382716049,
            "acc_stderr": 0.02474862449053737,
            "acc_norm": 0.2716049382716049,
            "acc_norm_stderr": 0.02474862449053737
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.17098445595854922,
            "acc_stderr": 0.02717121368316455,
            "acc_norm": 0.17098445595854922,
            "acc_norm_stderr": 0.02717121368316455
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518754,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518754
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.20550458715596331,
            "acc_stderr": 0.01732435232501601,
            "acc_norm": 0.20550458715596331,
            "acc_norm_stderr": 0.01732435232501601
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.038932596106046734,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.038932596106046734
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.023152722439402307,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.023152722439402307
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.18,
            "acc_stderr": 0.03861229196653697,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.03861229196653697
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.35537190082644626,
            "acc_stderr": 0.04369236326573981,
            "acc_norm": 0.35537190082644626,
            "acc_norm_stderr": 0.04369236326573981
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.18421052631578946,
            "acc_stderr": 0.0315469804508223,
            "acc_norm": 0.18421052631578946,
            "acc_norm_stderr": 0.0315469804508223
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.017401816711427657,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.017401816711427657
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2872340425531915,
            "acc_stderr": 0.026992199173064356,
            "acc_norm": 0.2872340425531915,
            "acc_norm_stderr": 0.026992199173064356
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4375,
            "acc_stderr": 0.030134614954403924,
            "acc_norm": 0.4375,
            "acc_norm_stderr": 0.030134614954403924
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.20816326530612245,
            "acc_stderr": 0.0259911176728133,
            "acc_norm": 0.20816326530612245,
            "acc_norm_stderr": 0.0259911176728133
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.29535864978902954,
            "acc_stderr": 0.029696338713422896,
            "acc_norm": 0.29535864978902954,
            "acc_norm_stderr": 0.029696338713422896
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.22620599739243807,
            "acc_stderr": 0.010685470750077785,
            "acc_norm": 0.22620599739243807,
            "acc_norm_stderr": 0.010685470750077785
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25,
            "acc_stderr": 0.03039153369274154,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03039153369274154
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.23030303030303031,
            "acc_stderr": 0.03287666758603488,
            "acc_norm": 0.23030303030303031,
            "acc_norm_stderr": 0.03287666758603488
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2962056303549572,
            "mc1_stderr": 0.01598359510181139,
            "mc2": 0.43784783579631964,
            "mc2_stderr": 0.014867064946462295
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.25737898465171194,
            "acc_stderr": 0.015030899730346759,
            "acc_norm": 0.4025974025974026,
            "acc_norm_stderr": 0.01686102048640778
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-v1.51",
        "model_sha": "e0f88ee83e1c09208ce5c48b5c20eb4efdd41119",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}