{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4069965870307167,
            "acc_stderr": 0.01435639941800912,
            "acc_norm": 0.4684300341296928,
            "acc_norm_stderr": 0.014582236460866977
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42471619199362676,
            "acc_stderr": 0.004932896472460567,
            "acc_norm": 0.569308902609042,
            "acc_norm_stderr": 0.004941609820763586
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5146198830409356,
            "acc_stderr": 0.038331852752130254,
            "acc_norm": 0.5146198830409356,
            "acc_norm_stderr": 0.038331852752130254
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5632183908045977,
            "acc_stderr": 0.017736470837800698,
            "acc_norm": 0.5632183908045977,
            "acc_norm_stderr": 0.017736470837800698
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4297872340425532,
            "acc_stderr": 0.03236214467715564,
            "acc_norm": 0.4297872340425532,
            "acc_norm_stderr": 0.03236214467715564
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.038515976837185335,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.038515976837185335
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5176848874598071,
            "acc_stderr": 0.02838032284907713,
            "acc_norm": 0.5176848874598071,
            "acc_norm_stderr": 0.02838032284907713
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5515695067264574,
            "acc_stderr": 0.033378837362550984,
            "acc_norm": 0.5515695067264574,
            "acc_norm_stderr": 0.033378837362550984
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48091603053435117,
            "acc_stderr": 0.04382094705550989,
            "acc_norm": 0.48091603053435117,
            "acc_norm_stderr": 0.04382094705550989
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5808080808080808,
            "acc_stderr": 0.035155207286704175,
            "acc_norm": 0.5808080808080808,
            "acc_norm_stderr": 0.035155207286704175
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.43448275862068964,
            "acc_stderr": 0.041307408795554966,
            "acc_norm": 0.43448275862068964,
            "acc_norm_stderr": 0.041307408795554966
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.49159663865546216,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.49159663865546216,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4564102564102564,
            "acc_stderr": 0.02525448542479961,
            "acc_norm": 0.4564102564102564,
            "acc_norm_stderr": 0.02525448542479961
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4236453201970443,
            "acc_stderr": 0.03476725747649038,
            "acc_norm": 0.4236453201970443,
            "acc_norm_stderr": 0.03476725747649038
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.02840609505765332,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.02840609505765332
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6410256410256411,
            "acc_stderr": 0.03142616993791924,
            "acc_norm": 0.6410256410256411,
            "acc_norm_stderr": 0.03142616993791924
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4641509433962264,
            "acc_stderr": 0.030693675018458006,
            "acc_norm": 0.4641509433962264,
            "acc_norm_stderr": 0.030693675018458006
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6069651741293532,
            "acc_stderr": 0.0345368246603156,
            "acc_norm": 0.6069651741293532,
            "acc_norm_stderr": 0.0345368246603156
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.43352601156069365,
            "acc_stderr": 0.03778621079092055,
            "acc_norm": 0.43352601156069365,
            "acc_norm_stderr": 0.03778621079092055
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.023068188848261117,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.023068188848261117
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3680555555555556,
            "acc_stderr": 0.040329990539607195,
            "acc_norm": 0.3680555555555556,
            "acc_norm_stderr": 0.040329990539607195
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411019,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411019
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5028901734104047,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.5028901734104047,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5153374233128835,
            "acc_stderr": 0.03926522378708843,
            "acc_norm": 0.5153374233128835,
            "acc_norm_stderr": 0.03926522378708843
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5030864197530864,
            "acc_stderr": 0.027820214158594377,
            "acc_norm": 0.5030864197530864,
            "acc_norm_stderr": 0.027820214158594377
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5751295336787565,
            "acc_stderr": 0.0356747133521254,
            "acc_norm": 0.5751295336787565,
            "acc_norm_stderr": 0.0356747133521254
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159395,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159395
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6036697247706422,
            "acc_stderr": 0.020971469947900525,
            "acc_norm": 0.6036697247706422,
            "acc_norm_stderr": 0.020971469947900525
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4150326797385621,
            "acc_stderr": 0.028213504177824093,
            "acc_norm": 0.4150326797385621,
            "acc_norm_stderr": 0.028213504177824093
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40789473684210525,
            "acc_stderr": 0.03999309712777471,
            "acc_norm": 0.40789473684210525,
            "acc_norm_stderr": 0.03999309712777471
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.37745098039215685,
            "acc_stderr": 0.019610851474880286,
            "acc_norm": 0.37745098039215685,
            "acc_norm_stderr": 0.019610851474880286
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.02812163604063989,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.02812163604063989
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.20535714285714285,
            "acc_stderr": 0.038342410214190714,
            "acc_norm": 0.20535714285714285,
            "acc_norm_stderr": 0.038342410214190714
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.032568505702936464,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.032568505702936464
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4522058823529412,
            "acc_stderr": 0.030233758551596445,
            "acc_norm": 0.4522058823529412,
            "acc_norm_stderr": 0.030233758551596445
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.49795918367346936,
            "acc_stderr": 0.0320089533497105,
            "acc_norm": 0.49795918367346936,
            "acc_norm_stderr": 0.0320089533497105
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6413502109704642,
            "acc_stderr": 0.031219569445301857,
            "acc_norm": 0.6413502109704642,
            "acc_norm_stderr": 0.031219569445301857
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35528031290743156,
            "acc_stderr": 0.012223623364044046,
            "acc_norm": 0.35528031290743156,
            "acc_norm_stderr": 0.012223623364044046
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4950980392156863,
            "acc_stderr": 0.03509143375606786,
            "acc_norm": 0.4950980392156863,
            "acc_norm_stderr": 0.03509143375606786
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.03902551007374448,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.03902551007374448
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26805385556915545,
            "mc1_stderr": 0.015506204722834557,
            "mc2": 0.4229553020954532,
            "mc2_stderr": 0.01482225107189349
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4805194805194805,
            "acc_stderr": 0.01717730199234255,
            "acc_norm": 0.5608028335301063,
            "acc_norm_stderr": 0.017062775744780705
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DopeorNope/COKALL-13B-v2",
        "model_sha": "5aa12e623e32ecb5d455cc2b6ce9c1f2b597c19f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}