{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.5059726962457338,
            "acc_stderr": 0.014610348300255793,
            "acc_norm": 0.5494880546075085,
            "acc_norm_stderr": 0.014539646098471627
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4880501892053376,
            "acc_stderr": 0.00498835614649901,
            "acc_norm": 0.6301533559051982,
            "acc_norm_stderr": 0.00481776358141023
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5263157894736842,
            "acc_stderr": 0.03829509868994727,
            "acc_norm": 0.5263157894736842,
            "acc_norm_stderr": 0.03829509868994727
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5048543689320388,
            "acc_stderr": 0.049505043821289195,
            "acc_norm": 0.5048543689320388,
            "acc_norm_stderr": 0.049505043821289195
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5683269476372924,
            "acc_stderr": 0.017712228939299798,
            "acc_norm": 0.5683269476372924,
            "acc_norm_stderr": 0.017712228939299798
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4888888888888889,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.4888888888888889,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847415,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847415
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.03208115750788684,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.03208115750788684
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4036144578313253,
            "acc_stderr": 0.038194861407583984,
            "acc_norm": 0.4036144578313253,
            "acc_norm_stderr": 0.038194861407583984
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5016077170418006,
            "acc_stderr": 0.02839794490780661,
            "acc_norm": 0.5016077170418006,
            "acc_norm_stderr": 0.02839794490780661
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5246636771300448,
            "acc_stderr": 0.033516951676526276,
            "acc_norm": 0.5246636771300448,
            "acc_norm_stderr": 0.033516951676526276
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.45038167938931295,
            "acc_stderr": 0.04363643698524779,
            "acc_norm": 0.45038167938931295,
            "acc_norm_stderr": 0.04363643698524779
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6161616161616161,
            "acc_stderr": 0.03464881675016336,
            "acc_norm": 0.6161616161616161,
            "acc_norm_stderr": 0.03464881675016336
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4068965517241379,
            "acc_stderr": 0.04093793981266237,
            "acc_norm": 0.4068965517241379,
            "acc_norm_stderr": 0.04093793981266237
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.48739495798319327,
            "acc_stderr": 0.03246816765752174,
            "acc_norm": 0.48739495798319327,
            "acc_norm_stderr": 0.03246816765752174
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4461538461538462,
            "acc_stderr": 0.025203571773028337,
            "acc_norm": 0.4461538461538462,
            "acc_norm_stderr": 0.025203571773028337
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.034304624161038716,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.034304624161038716
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45161290322580644,
            "acc_stderr": 0.02831050034856839,
            "acc_norm": 0.45161290322580644,
            "acc_norm_stderr": 0.02831050034856839
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6538461538461539,
            "acc_stderr": 0.031166957367235907,
            "acc_norm": 0.6538461538461539,
            "acc_norm_stderr": 0.031166957367235907
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5272727272727272,
            "acc_stderr": 0.04782001791380061,
            "acc_norm": 0.5272727272727272,
            "acc_norm_stderr": 0.04782001791380061
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.02708037281514566,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.02708037281514566
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3815028901734104,
            "acc_stderr": 0.037038511930995215,
            "acc_norm": 0.3815028901734104,
            "acc_norm_stderr": 0.037038511930995215
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.31216931216931215,
            "acc_stderr": 0.023865206836972613,
            "acc_norm": 0.31216931216931215,
            "acc_norm_stderr": 0.023865206836972613
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842424,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.64,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4884393063583815,
            "acc_stderr": 0.026911898686377927,
            "acc_norm": 0.4884393063583815,
            "acc_norm_stderr": 0.026911898686377927
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5460122699386503,
            "acc_stderr": 0.0391170190467718,
            "acc_norm": 0.5460122699386503,
            "acc_norm_stderr": 0.0391170190467718
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5699481865284974,
            "acc_stderr": 0.03572954333144808,
            "acc_norm": 0.5699481865284974,
            "acc_norm_stderr": 0.03572954333144808
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.04142439719489361,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.04142439719489361
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6091743119266055,
            "acc_stderr": 0.020920058346111076,
            "acc_norm": 0.6091743119266055,
            "acc_norm_stderr": 0.020920058346111076
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.41830065359477125,
            "acc_stderr": 0.0282451340243873,
            "acc_norm": 0.41830065359477125,
            "acc_norm_stderr": 0.0282451340243873
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.0436923632657398,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.0436923632657398
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3815789473684211,
            "acc_stderr": 0.03953173377749194,
            "acc_norm": 0.3815789473684211,
            "acc_norm_stderr": 0.03953173377749194
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.019751726508762626,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.019751726508762626
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.02826765748265014,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.02826765748265014
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3287037037037037,
            "acc_stderr": 0.03203614084670058,
            "acc_norm": 0.3287037037037037,
            "acc_norm_stderr": 0.03203614084670058
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.01442229220480885,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.01442229220480885
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.41911764705882354,
            "acc_stderr": 0.029972807170464626,
            "acc_norm": 0.41911764705882354,
            "acc_norm_stderr": 0.029972807170464626
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.42448979591836733,
            "acc_stderr": 0.031642094879429414,
            "acc_norm": 0.42448979591836733,
            "acc_norm_stderr": 0.031642094879429414
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6286919831223629,
            "acc_stderr": 0.03145068600744859,
            "acc_norm": 0.6286919831223629,
            "acc_norm_stderr": 0.03145068600744859
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35853976531942633,
            "acc_stderr": 0.012248487319682746,
            "acc_norm": 0.35853976531942633,
            "acc_norm_stderr": 0.012248487319682746
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5,
            "acc_stderr": 0.03509312031717982,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03509312031717982
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6,
            "acc_stderr": 0.03825460278380026,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.03825460278380026
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3659730722154223,
            "mc1_stderr": 0.01686294168408836,
            "mc2": 0.5166857407308614,
            "mc2_stderr": 0.01622317540419704
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4675324675324675,
            "acc_stderr": 0.017154073716682865,
            "acc_norm": 0.4982290436835891,
            "acc_norm_stderr": 0.017190246276231863
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v2",
        "model_sha": "f90b0c3f6f91a58616aef3a19bdd1dc3c242028a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}