|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.46245733788395904,
            "acc_stderr": 0.014570144495075574,
            "acc_norm": 0.5247440273037542,
            "acc_norm_stderr": 0.014593487694937743
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.45598486357299345,
            "acc_stderr": 0.004970410081009441,
            "acc_norm": 0.6325433180641307,
            "acc_norm_stderr": 0.0048112699754506005
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5672514619883041,
            "acc_stderr": 0.03799978644370607,
            "acc_norm": 0.5672514619883041,
            "acc_norm_stderr": 0.03799978644370607
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6213592233009708,
            "acc_stderr": 0.04802694698258974,
            "acc_norm": 0.6213592233009708,
            "acc_norm_stderr": 0.04802694698258974
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6526181353767561,
            "acc_stderr": 0.01702667174865573,
            "acc_norm": 0.6526181353767561,
            "acc_norm_stderr": 0.01702667174865573
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4765957446808511,
            "acc_stderr": 0.03265019475033583,
            "acc_norm": 0.4765957446808511,
            "acc_norm_stderr": 0.03265019475033583
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4578313253012048,
            "acc_stderr": 0.03878626771002361,
            "acc_norm": 0.4578313253012048,
            "acc_norm_stderr": 0.03878626771002361
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6109324758842444,
            "acc_stderr": 0.027690337536485376,
            "acc_norm": 0.6109324758842444,
            "acc_norm_stderr": 0.027690337536485376
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5650224215246636,
            "acc_stderr": 0.033272833702713445,
            "acc_norm": 0.5650224215246636,
            "acc_norm_stderr": 0.033272833702713445
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6183206106870229,
            "acc_stderr": 0.042607351576445594,
            "acc_norm": 0.6183206106870229,
            "acc_norm_stderr": 0.042607351576445594
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956909,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956909
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.702020202020202,
            "acc_stderr": 0.032586303838365555,
            "acc_norm": 0.702020202020202,
            "acc_norm_stderr": 0.032586303838365555
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482758,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482758
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006716,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006716
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.592436974789916,
            "acc_stderr": 0.03191863374478466,
            "acc_norm": 0.592436974789916,
            "acc_norm_stderr": 0.03191863374478466
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5820512820512821,
            "acc_stderr": 0.025007329882461203,
            "acc_norm": 0.5820512820512821,
            "acc_norm_stderr": 0.025007329882461203
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6203703703703703,
            "acc_stderr": 0.04691521224077742,
            "acc_norm": 0.6203703703703703,
            "acc_norm_stderr": 0.04691521224077742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.03438157967036544,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.03438157967036544
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5774193548387097,
            "acc_stderr": 0.02810096472427264,
            "acc_norm": 0.5774193548387097,
            "acc_norm_stderr": 0.02810096472427264
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.027236013946196673,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.027236013946196673
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5320754716981132,
            "acc_stderr": 0.03070948699255655,
            "acc_norm": 0.5320754716981132,
            "acc_norm_stderr": 0.03070948699255655
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.04709306978661895,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.04709306978661895
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.028317533496066482,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.028317533496066482
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.32450331125827814,
            "acc_stderr": 0.03822746937658754,
            "acc_norm": 0.32450331125827814,
            "acc_norm_stderr": 0.03822746937658754
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.746268656716418,
            "acc_stderr": 0.030769444967296014,
            "acc_norm": 0.746268656716418,
            "acc_norm_stderr": 0.030769444967296014
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4913294797687861,
            "acc_stderr": 0.03811890988940412,
            "acc_norm": 0.4913294797687861,
            "acc_norm_stderr": 0.03811890988940412
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.4312169312169312,
            "acc_stderr": 0.025506481698138215,
            "acc_norm": 0.4312169312169312,
            "acc_norm_stderr": 0.025506481698138215
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4861111111111111,
            "acc_stderr": 0.041795966175810016,
            "acc_norm": 0.4861111111111111,
            "acc_norm_stderr": 0.041795966175810016
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5838150289017341,
            "acc_stderr": 0.026538189104705474,
            "acc_norm": 0.5838150289017341,
            "acc_norm_stderr": 0.026538189104705474
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5153374233128835,
            "acc_stderr": 0.039265223787088445,
            "acc_norm": 0.5153374233128835,
            "acc_norm_stderr": 0.039265223787088445
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5895061728395061,
            "acc_stderr": 0.027371350925124768,
            "acc_norm": 0.5895061728395061,
            "acc_norm_stderr": 0.027371350925124768
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384741,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384741
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7305699481865285,
            "acc_stderr": 0.03201867122877794,
            "acc_norm": 0.7305699481865285,
            "acc_norm_stderr": 0.03201867122877794
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.046854730419077895,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.046854730419077895
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6623853211009174,
            "acc_stderr": 0.020275265986638914,
            "acc_norm": 0.6623853211009174,
            "acc_norm_stderr": 0.020275265986638914
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4126984126984127,
            "acc_stderr": 0.04403438954768177,
            "acc_norm": 0.4126984126984127,
            "acc_norm_stderr": 0.04403438954768177
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5588235294117647,
            "acc_stderr": 0.028431095444176643,
            "acc_norm": 0.5588235294117647,
            "acc_norm_stderr": 0.028431095444176643
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.61,
            "acc_stderr": 0.049020713000019756,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.049020713000019756
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.04065578140908705,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.04065578140908705
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.48026315789473684,
            "acc_stderr": 0.04065771002562605,
            "acc_norm": 0.48026315789473684,
            "acc_norm_stderr": 0.04065771002562605
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4918300653594771,
            "acc_stderr": 0.020225134343057265,
            "acc_norm": 0.4918300653594771,
            "acc_norm_stderr": 0.020225134343057265
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41843971631205673,
            "acc_stderr": 0.02942799403941999,
            "acc_norm": 0.41843971631205673,
            "acc_norm_stderr": 0.02942799403941999
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5185185185185185,
            "acc_stderr": 0.03407632093854054,
            "acc_norm": 0.5185185185185185,
            "acc_norm_stderr": 0.03407632093854054
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.30502793296089387,
            "acc_stderr": 0.015398723510916715,
            "acc_norm": 0.30502793296089387,
            "acc_norm_stderr": 0.015398723510916715
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5073529411764706,
            "acc_stderr": 0.030369552523902173,
            "acc_norm": 0.5073529411764706,
            "acc_norm_stderr": 0.030369552523902173
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5836734693877551,
            "acc_stderr": 0.03155782816556165,
            "acc_norm": 0.5836734693877551,
            "acc_norm_stderr": 0.03155782816556165
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7552742616033755,
            "acc_stderr": 0.02798569938703641,
            "acc_norm": 0.7552742616033755,
            "acc_norm_stderr": 0.02798569938703641
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4282920469361147,
            "acc_stderr": 0.012638223880313175,
            "acc_norm": 0.4282920469361147,
            "acc_norm_stderr": 0.012638223880313175
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6715686274509803,
            "acc_stderr": 0.03296245110172229,
            "acc_norm": 0.6715686274509803,
            "acc_norm_stderr": 0.03296245110172229
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7212121212121212,
            "acc_stderr": 0.03501438706296781,
            "acc_norm": 0.7212121212121212,
            "acc_norm_stderr": 0.03501438706296781
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.4749082007343941,
            "mc1_stderr": 0.017481446804103996,
            "mc2": 0.6332905645893946,
            "mc2_stderr": 0.015824976924307316
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5938606847697757,
            "acc_stderr": 0.0168847495031914,
            "acc_norm": 0.6103896103896104,
            "acc_norm_stderr": 0.01676616167189351
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "JaeyeonKang/CCK-v1.0.0-DPO",
        "model_sha": "e33c9c9dc96d5fe2ffdb910640925e02c236dae8",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}