{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4044368600682594,
            "acc_stderr": 0.014342036483436175,
            "acc_norm": 0.4726962457337884,
            "acc_norm_stderr": 0.014589589101985998
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4191396136227843,
            "acc_stderr": 0.004924098711864585,
            "acc_norm": 0.5668193586934873,
            "acc_norm_stderr": 0.0049450236570322765
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5964912280701754,
            "acc_stderr": 0.03762738699917057,
            "acc_norm": 0.5964912280701754,
            "acc_norm_stderr": 0.03762738699917057
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5922330097087378,
            "acc_stderr": 0.04865777570410769,
            "acc_norm": 0.5922330097087378,
            "acc_norm_stderr": 0.04865777570410769
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5683269476372924,
            "acc_stderr": 0.017712228939299798,
            "acc_norm": 0.5683269476372924,
            "acc_norm_stderr": 0.017712228939299798
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464245,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464245
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.04461960433384741,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.04461960433384741
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4595744680851064,
            "acc_stderr": 0.032579014820998356,
            "acc_norm": 0.4595744680851064,
            "acc_norm_stderr": 0.032579014820998356
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42168674698795183,
            "acc_stderr": 0.038444531817709175,
            "acc_norm": 0.42168674698795183,
            "acc_norm_stderr": 0.038444531817709175
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4983922829581994,
            "acc_stderr": 0.02839794490780661,
            "acc_norm": 0.4983922829581994,
            "acc_norm_stderr": 0.02839794490780661
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4798206278026906,
            "acc_stderr": 0.033530461674123,
            "acc_norm": 0.4798206278026906,
            "acc_norm_stderr": 0.033530461674123
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5267175572519084,
            "acc_stderr": 0.04379024936553893,
            "acc_norm": 0.5267175572519084,
            "acc_norm_stderr": 0.04379024936553893
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.03502975799413007,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.03502975799413007
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4068965517241379,
            "acc_stderr": 0.04093793981266237,
            "acc_norm": 0.4068965517241379,
            "acc_norm_stderr": 0.04093793981266237
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808778,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808778
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.453781512605042,
            "acc_stderr": 0.03233943468182088,
            "acc_norm": 0.453781512605042,
            "acc_norm_stderr": 0.03233943468182088
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.47435897435897434,
            "acc_stderr": 0.02531764972644865,
            "acc_norm": 0.47435897435897434,
            "acc_norm_stderr": 0.02531764972644865
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.04803752235190192,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.04803752235190192
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.03438157967036543,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.03438157967036543
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45806451612903226,
            "acc_stderr": 0.028343787250540636,
            "acc_norm": 0.45806451612903226,
            "acc_norm_stderr": 0.028343787250540636
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7521367521367521,
            "acc_stderr": 0.0282863240755644,
            "acc_norm": 0.7521367521367521,
            "acc_norm_stderr": 0.0282863240755644
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.43018867924528303,
            "acc_stderr": 0.030471445867183238,
            "acc_norm": 0.43018867924528303,
            "acc_norm_stderr": 0.030471445867183238
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.04750185058907296,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.04750185058907296
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145658,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145658
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943342,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943342
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5870646766169154,
            "acc_stderr": 0.03481520803367348,
            "acc_norm": 0.5870646766169154,
            "acc_norm_stderr": 0.03481520803367348
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4277456647398844,
            "acc_stderr": 0.03772446857518027,
            "acc_norm": 0.4277456647398844,
            "acc_norm_stderr": 0.03772446857518027
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.023517294335963286,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.023517294335963286
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.04122728707651282,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.04122728707651282
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411018,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411018
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.039277056007874414,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.039277056007874414
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49691358024691357,
            "acc_stderr": 0.027820214158594384,
            "acc_norm": 0.49691358024691357,
            "acc_norm_stderr": 0.027820214158594384
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5544041450777202,
            "acc_stderr": 0.03587014986075659,
            "acc_norm": 0.5544041450777202,
            "acc_norm_stderr": 0.03587014986075659
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.0414243971948936,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.0414243971948936
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5908256880733945,
            "acc_stderr": 0.021080670264433738,
            "acc_norm": 0.5908256880733945,
            "acc_norm_stderr": 0.021080670264433738
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.043062412591271526,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.043062412591271526
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.02845263998508801,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.02845263998508801
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5702479338842975,
            "acc_stderr": 0.04519082021319773,
            "acc_norm": 0.5702479338842975,
            "acc_norm_stderr": 0.04519082021319773
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4407894736842105,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.4407894736842105,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.019722058939618068,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.019722058939618068
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.028121636040639882,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.028121636040639882
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.26785714285714285,
            "acc_stderr": 0.04203277291467762,
            "acc_norm": 0.26785714285714285,
            "acc_norm_stderr": 0.04203277291467762
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.032757734861009996,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.032757734861009996
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26033519553072626,
            "acc_stderr": 0.014676252009319464,
            "acc_norm": 0.26033519553072626,
            "acc_norm_stderr": 0.014676252009319464
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4117647058823529,
            "acc_stderr": 0.029896163033125474,
            "acc_norm": 0.4117647058823529,
            "acc_norm_stderr": 0.029896163033125474
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5469387755102041,
            "acc_stderr": 0.031867859300041275,
            "acc_norm": 0.5469387755102041,
            "acc_norm_stderr": 0.031867859300041275
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6244725738396625,
            "acc_stderr": 0.03152256243091157,
            "acc_norm": 0.6244725738396625,
            "acc_norm_stderr": 0.03152256243091157
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3226857887874837,
            "acc_stderr": 0.011940264193195986,
            "acc_norm": 0.3226857887874837,
            "acc_norm_stderr": 0.011940264193195986
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.035077938347913236,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.035077938347913236
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5818181818181818,
            "acc_stderr": 0.03851716319398393,
            "acc_norm": 0.5818181818181818,
            "acc_norm_stderr": 0.03851716319398393
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2827417380660955,
            "mc1_stderr": 0.015764770836777305,
            "mc2": 0.442704104876821,
            "mc2_stderr": 0.015215337318397937
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4462809917355372,
            "acc_stderr": 0.017090852631668332,
            "acc_norm": 0.4887839433293979,
            "acc_norm_stderr": 0.017186028469489287
        }
    },
"versions": { |
|
"all": 0, |
|
"harness|ko_arc_challenge|25": 0, |
|
"harness|ko_hellaswag|10": 0, |
|
"harness|ko_mmlu_world_religions|5": 1, |
|
"harness|ko_mmlu_management|5": 1, |
|
"harness|ko_mmlu_miscellaneous|5": 1, |
|
"harness|ko_mmlu_anatomy|5": 1, |
|
"harness|ko_mmlu_abstract_algebra|5": 1, |
|
"harness|ko_mmlu_conceptual_physics|5": 1, |
|
"harness|ko_mmlu_virology|5": 1, |
|
"harness|ko_mmlu_philosophy|5": 1, |
|
"harness|ko_mmlu_human_aging|5": 1, |
|
"harness|ko_mmlu_human_sexuality|5": 1, |
|
"harness|ko_mmlu_medical_genetics|5": 1, |
|
"harness|ko_mmlu_high_school_geography|5": 1, |
|
"harness|ko_mmlu_electrical_engineering|5": 1, |
|
"harness|ko_mmlu_college_physics|5": 1, |
|
"harness|ko_mmlu_high_school_microeconomics|5": 1, |
|
"harness|ko_mmlu_high_school_macroeconomics|5": 1, |
|
"harness|ko_mmlu_computer_security|5": 1, |
|
"harness|ko_mmlu_global_facts|5": 1, |
|
"harness|ko_mmlu_jurisprudence|5": 1, |
|
"harness|ko_mmlu_high_school_chemistry|5": 1, |
|
"harness|ko_mmlu_high_school_biology|5": 1, |
|
"harness|ko_mmlu_marketing|5": 1, |
|
"harness|ko_mmlu_clinical_knowledge|5": 1, |
|
"harness|ko_mmlu_public_relations|5": 1, |
|
"harness|ko_mmlu_high_school_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_physics|5": 1, |
|
"harness|ko_mmlu_sociology|5": 1, |
|
"harness|ko_mmlu_college_medicine|5": 1, |
|
"harness|ko_mmlu_elementary_mathematics|5": 1, |
|
"harness|ko_mmlu_college_biology|5": 1, |
|
"harness|ko_mmlu_college_chemistry|5": 1, |
|
"harness|ko_mmlu_us_foreign_policy|5": 1, |
|
"harness|ko_mmlu_moral_disputes|5": 1, |
|
"harness|ko_mmlu_logical_fallacies|5": 1, |
|
"harness|ko_mmlu_prehistory|5": 1, |
|
"harness|ko_mmlu_college_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_government_and_politics|5": 1, |
|
"harness|ko_mmlu_econometrics|5": 1, |
|
"harness|ko_mmlu_high_school_psychology|5": 1, |
|
"harness|ko_mmlu_formal_logic|5": 1, |
|
"harness|ko_mmlu_nutrition|5": 1, |
|
"harness|ko_mmlu_business_ethics|5": 1, |
|
"harness|ko_mmlu_international_law|5": 1, |
|
"harness|ko_mmlu_astronomy|5": 1, |
|
"harness|ko_mmlu_professional_psychology|5": 1, |
|
"harness|ko_mmlu_professional_accounting|5": 1, |
|
"harness|ko_mmlu_machine_learning|5": 1, |
|
"harness|ko_mmlu_high_school_statistics|5": 1, |
|
"harness|ko_mmlu_moral_scenarios|5": 1, |
|
"harness|ko_mmlu_college_computer_science|5": 1, |
|
"harness|ko_mmlu_high_school_computer_science|5": 1, |
|
"harness|ko_mmlu_professional_medicine|5": 1, |
|
"harness|ko_mmlu_security_studies|5": 1, |
|
"harness|ko_mmlu_high_school_world_history|5": 1, |
|
"harness|ko_mmlu_professional_law|5": 1, |
|
"harness|ko_mmlu_high_school_us_history|5": 1, |
|
"harness|ko_mmlu_high_school_european_history|5": 1, |
|
"harness|ko_truthfulqa_mc|0": 0, |
|
"harness|ko_commongen_v2|2": 1 |
|
}, |
|
"config_general": { |
|
"model_name": "HY-KDPARK/llama-2-koen-13b-sft-v0.3", |
|
"model_sha": "5130b6ccb175caaddd0812cfc2f8b1fd3bfe4ae4", |
|
"model_dtype": "torch.float16", |
|
"lighteval_sha": "", |
|
"num_few_shot_default": 0, |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null |
|
} |
|
} |