{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4334470989761092,
            "acc_stderr": 0.014481376224558896,
            "acc_norm": 0.4684300341296928,
            "acc_norm_stderr": 0.014582236460866977
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4069906393148775,
            "acc_stderr": 0.004902690765066419,
            "acc_norm": 0.5370444134634534,
            "acc_norm_stderr": 0.004976067726432564
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5730994152046783,
            "acc_stderr": 0.03793620616529916,
            "acc_norm": 0.5730994152046783,
            "acc_norm_stderr": 0.03793620616529916
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6407766990291263,
            "acc_stderr": 0.04750458399041697,
            "acc_norm": 0.6407766990291263,
            "acc_norm_stderr": 0.04750458399041697
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.561941251596424,
            "acc_stderr": 0.017742232238257254,
            "acc_norm": 0.561941251596424,
            "acc_norm_stderr": 0.017742232238257254
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4222222222222222,
            "acc_stderr": 0.04266763404099582,
            "acc_norm": 0.4222222222222222,
            "acc_norm_stderr": 0.04266763404099582
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.46808510638297873,
            "acc_stderr": 0.03261936918467382,
            "acc_norm": 0.46808510638297873,
            "acc_norm_stderr": 0.03261936918467382
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42168674698795183,
            "acc_stderr": 0.03844453181770917,
            "acc_norm": 0.42168674698795183,
            "acc_norm_stderr": 0.03844453181770917
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4790996784565916,
            "acc_stderr": 0.028373270961069414,
            "acc_norm": 0.4790996784565916,
            "acc_norm_stderr": 0.028373270961069414
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.515695067264574,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.515695067264574,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.48,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6262626262626263,
            "acc_stderr": 0.03446897738659333,
            "acc_norm": 0.6262626262626263,
            "acc_norm_stderr": 0.03446897738659333
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4579831932773109,
            "acc_stderr": 0.03236361111951941,
            "acc_norm": 0.4579831932773109,
            "acc_norm_stderr": 0.03236361111951941
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.45897435897435895,
            "acc_stderr": 0.025265525491284295,
            "acc_norm": 0.45897435897435895,
            "acc_norm_stderr": 0.025265525491284295
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3694581280788177,
            "acc_stderr": 0.03395970381998574,
            "acc_norm": 0.3694581280788177,
            "acc_norm_stderr": 0.03395970381998574
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.46774193548387094,
            "acc_stderr": 0.02838474778881333,
            "acc_norm": 0.46774193548387094,
            "acc_norm_stderr": 0.02838474778881333
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7264957264957265,
            "acc_stderr": 0.029202540153431187,
            "acc_norm": 0.7264957264957265,
            "acc_norm_stderr": 0.029202540153431187
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.47924528301886793,
            "acc_stderr": 0.030746349975723463,
            "acc_norm": 0.47924528301886793,
            "acc_norm_stderr": 0.030746349975723463
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.0478833976870286,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.0478833976870286
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.02831753349606647,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.02831753349606647
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389023,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389023
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6318407960199005,
            "acc_stderr": 0.03410410565495302,
            "acc_norm": 0.6318407960199005,
            "acc_norm_stderr": 0.03410410565495302
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.03669072477416907,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.03669072477416907
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.02510742548113728,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.02510742548113728
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4027777777777778,
            "acc_stderr": 0.04101405519842426,
            "acc_norm": 0.4027777777777778,
            "acc_norm_stderr": 0.04101405519842426
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5260115606936416,
            "acc_stderr": 0.026882643434022895,
            "acc_norm": 0.5260115606936416,
            "acc_norm_stderr": 0.026882643434022895
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4601226993865031,
            "acc_stderr": 0.0391585729143697,
            "acc_norm": 0.4601226993865031,
            "acc_norm_stderr": 0.0391585729143697
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49382716049382713,
            "acc_stderr": 0.027818623962583302,
            "acc_norm": 0.49382716049382713,
            "acc_norm_stderr": 0.027818623962583302
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5492227979274611,
            "acc_stderr": 0.03590910952235524,
            "acc_norm": 0.5492227979274611,
            "acc_norm_stderr": 0.03590910952235524
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5211009174311927,
            "acc_stderr": 0.021418224754264643,
            "acc_norm": 0.5211009174311927,
            "acc_norm_stderr": 0.021418224754264643
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.042857142857142816,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.042857142857142816
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.48366013071895425,
            "acc_stderr": 0.028614624752805407,
            "acc_norm": 0.48366013071895425,
            "acc_norm_stderr": 0.028614624752805407
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6776859504132231,
            "acc_stderr": 0.04266416363352167,
            "acc_norm": 0.6776859504132231,
            "acc_norm_stderr": 0.04266416363352167
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.48026315789473684,
            "acc_stderr": 0.040657710025626057,
            "acc_norm": 0.48026315789473684,
            "acc_norm_stderr": 0.040657710025626057
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.019751726508762626,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.019751726508762626
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.026469036818590634,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.026469036818590634
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.04697113923010213,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.04697113923010213
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.032365852526021574,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.032365852526021574
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.18435754189944134,
            "acc_stderr": 0.012969152811883447,
            "acc_norm": 0.18435754189944134,
            "acc_norm_stderr": 0.012969152811883447
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3860294117647059,
            "acc_stderr": 0.029573269134411124,
            "acc_norm": 0.3860294117647059,
            "acc_norm_stderr": 0.029573269134411124
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.031680911612338825,
            "acc_norm": 0.5714285714285714,
            "acc_norm_stderr": 0.031680911612338825
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6582278481012658,
            "acc_stderr": 0.030874537537553617,
            "acc_norm": 0.6582278481012658,
            "acc_norm_stderr": 0.030874537537553617
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3363754889178618,
            "acc_stderr": 0.012067083079452225,
            "acc_norm": 0.3363754889178618,
            "acc_norm_stderr": 0.012067083079452225
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5098039215686274,
            "acc_stderr": 0.035086373586305716,
            "acc_norm": 0.5098039215686274,
            "acc_norm_stderr": 0.035086373586305716
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5818181818181818,
            "acc_stderr": 0.03851716319398393,
            "acc_norm": 0.5818181818181818,
            "acc_norm_stderr": 0.03851716319398393
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882473,
            "mc2": 0.4780289931552814,
            "mc2_stderr": 0.015543517557297408
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4757969303423849,
            "acc_stderr": 0.017170202466520748,
            "acc_norm": 0.5407319952774499,
            "acc_norm_stderr": 0.01713321827653767
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Minirecord/Mini_synatra_7b_03",
        "model_sha": "6cd5c432930e923b0d73453fb22f817726da99bc",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}