leaderboard-test-results/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.42406143344709896,
            "acc_stderr": 0.0144418896274644,
            "acc_norm": 0.4761092150170648,
            "acc_norm_stderr": 0.014594701798071655
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4298944433379805,
            "acc_stderr": 0.00494049050824065,
            "acc_norm": 0.5763792073292173,
            "acc_norm_stderr": 0.004931219148182245
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5146198830409356,
            "acc_stderr": 0.038331852752130254,
            "acc_norm": 0.5146198830409356,
            "acc_norm_stderr": 0.038331852752130254
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.558109833971903,
            "acc_stderr": 0.01775880053421441,
            "acc_norm": 0.558109833971903,
            "acc_norm_stderr": 0.01775880053421441
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45185185185185184,
            "acc_stderr": 0.042992689054808624,
            "acc_norm": 0.45185185185185184,
            "acc_norm_stderr": 0.042992689054808624
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.43829787234042555,
            "acc_stderr": 0.03243618636108101,
            "acc_norm": 0.43829787234042555,
            "acc_norm_stderr": 0.03243618636108101
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.038515976837185335,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.038515976837185335
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5176848874598071,
            "acc_stderr": 0.02838032284907713,
            "acc_norm": 0.5176848874598071,
            "acc_norm_stderr": 0.02838032284907713
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5515695067264574,
            "acc_stderr": 0.033378837362550984,
            "acc_norm": 0.5515695067264574,
            "acc_norm_stderr": 0.033378837362550984
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4732824427480916,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.4732824427480916,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5808080808080808,
            "acc_stderr": 0.035155207286704175,
            "acc_norm": 0.5808080808080808,
            "acc_norm_stderr": 0.035155207286704175
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171452,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171452
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.49159663865546216,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.49159663865546216,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.025294608023986483,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.025294608023986483
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760626,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760626
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39901477832512317,
            "acc_stderr": 0.034454876862647144,
            "acc_norm": 0.39901477832512317,
            "acc_norm_stderr": 0.034454876862647144
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4612903225806452,
            "acc_stderr": 0.028358634859836918,
            "acc_norm": 0.4612903225806452,
            "acc_norm_stderr": 0.028358634859836918
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.03134250486245403,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.03134250486245403
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5727272727272728,
            "acc_stderr": 0.04738198703545483,
            "acc_norm": 0.5727272727272728,
            "acc_norm_stderr": 0.04738198703545483
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.027420019350945277,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.027420019350945277
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6019900497512438,
            "acc_stderr": 0.034611994290400135,
            "acc_norm": 0.6019900497512438,
            "acc_norm_stderr": 0.034611994290400135
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.42196531791907516,
            "acc_stderr": 0.0376574669386515,
            "acc_norm": 0.42196531791907516,
            "acc_norm_stderr": 0.0376574669386515
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.023809523809523846,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.023809523809523846
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04076663253918567,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04076663253918567
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5144508670520231,
            "acc_stderr": 0.02690784985628254,
            "acc_norm": 0.5144508670520231,
            "acc_norm_stderr": 0.02690784985628254
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5153374233128835,
            "acc_stderr": 0.039265223787088424,
            "acc_norm": 0.5153374233128835,
            "acc_norm_stderr": 0.039265223787088424
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5492227979274611,
            "acc_stderr": 0.035909109522355244,
            "acc_norm": 0.5492227979274611,
            "acc_norm_stderr": 0.035909109522355244
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436695,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436695
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.618348623853211,
            "acc_stderr": 0.02082814851702261,
            "acc_norm": 0.618348623853211,
            "acc_norm_stderr": 0.02082814851702261
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.04240799327574925,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.04240799327574925
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.41830065359477125,
            "acc_stderr": 0.028245134024387292,
            "acc_norm": 0.41830065359477125,
            "acc_norm_stderr": 0.028245134024387292
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3815789473684211,
            "acc_stderr": 0.03953173377749194,
            "acc_norm": 0.3815789473684211,
            "acc_norm_stderr": 0.03953173377749194
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.019794488900024106,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.019794488900024106
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199495,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199495
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.36574074074074076,
            "acc_stderr": 0.03284738857647207,
            "acc_norm": 0.36574074074074076,
            "acc_norm_stderr": 0.03284738857647207
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4227941176470588,
            "acc_stderr": 0.03000856284500347,
            "acc_norm": 0.4227941176470588,
            "acc_norm_stderr": 0.03000856284500347
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.49387755102040815,
            "acc_stderr": 0.032006820201639086,
            "acc_norm": 0.49387755102040815,
            "acc_norm_stderr": 0.032006820201639086
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6540084388185654,
            "acc_stderr": 0.03096481058878671,
            "acc_norm": 0.6540084388185654,
            "acc_norm_stderr": 0.03096481058878671
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.36897001303780963,
            "acc_stderr": 0.012323936650174859,
            "acc_norm": 0.36897001303780963,
            "acc_norm_stderr": 0.012323936650174859
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5245098039215687,
            "acc_stderr": 0.03505093194348798,
            "acc_norm": 0.5245098039215687,
            "acc_norm_stderr": 0.03505093194348798
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5333333333333333,
            "acc_stderr": 0.03895658065271847,
            "acc_norm": 0.5333333333333333,
            "acc_norm_stderr": 0.03895658065271847
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882466,
            "mc2": 0.46627654282840275,
            "mc2_stderr": 0.015286096744214328
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4651711924439197,
            "acc_stderr": 0.017148598015747422,
            "acc_norm": 0.5466351829988194,
            "acc_norm_stderr": 0.01711541822522687
        }
    },
"versions": { | |
"all": 0, | |
"harness|ko_arc_challenge|25": 0, | |
"harness|ko_hellaswag|10": 0, | |
"harness|ko_mmlu_world_religions|5": 1, | |
"harness|ko_mmlu_management|5": 1, | |
"harness|ko_mmlu_miscellaneous|5": 1, | |
"harness|ko_mmlu_anatomy|5": 1, | |
"harness|ko_mmlu_abstract_algebra|5": 1, | |
"harness|ko_mmlu_conceptual_physics|5": 1, | |
"harness|ko_mmlu_virology|5": 1, | |
"harness|ko_mmlu_philosophy|5": 1, | |
"harness|ko_mmlu_human_aging|5": 1, | |
"harness|ko_mmlu_human_sexuality|5": 1, | |
"harness|ko_mmlu_medical_genetics|5": 1, | |
"harness|ko_mmlu_high_school_geography|5": 1, | |
"harness|ko_mmlu_electrical_engineering|5": 1, | |
"harness|ko_mmlu_college_physics|5": 1, | |
"harness|ko_mmlu_high_school_microeconomics|5": 1, | |
"harness|ko_mmlu_high_school_macroeconomics|5": 1, | |
"harness|ko_mmlu_computer_security|5": 1, | |
"harness|ko_mmlu_global_facts|5": 1, | |
"harness|ko_mmlu_jurisprudence|5": 1, | |
"harness|ko_mmlu_high_school_chemistry|5": 1, | |
"harness|ko_mmlu_high_school_biology|5": 1, | |
"harness|ko_mmlu_marketing|5": 1, | |
"harness|ko_mmlu_clinical_knowledge|5": 1, | |
"harness|ko_mmlu_public_relations|5": 1, | |
"harness|ko_mmlu_high_school_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_physics|5": 1, | |
"harness|ko_mmlu_sociology|5": 1, | |
"harness|ko_mmlu_college_medicine|5": 1, | |
"harness|ko_mmlu_elementary_mathematics|5": 1, | |
"harness|ko_mmlu_college_biology|5": 1, | |
"harness|ko_mmlu_college_chemistry|5": 1, | |
"harness|ko_mmlu_us_foreign_policy|5": 1, | |
"harness|ko_mmlu_moral_disputes|5": 1, | |
"harness|ko_mmlu_logical_fallacies|5": 1, | |
"harness|ko_mmlu_prehistory|5": 1, | |
"harness|ko_mmlu_college_mathematics|5": 1, | |
"harness|ko_mmlu_high_school_government_and_politics|5": 1, | |
"harness|ko_mmlu_econometrics|5": 1, | |
"harness|ko_mmlu_high_school_psychology|5": 1, | |
"harness|ko_mmlu_formal_logic|5": 1, | |
"harness|ko_mmlu_nutrition|5": 1, | |
"harness|ko_mmlu_business_ethics|5": 1, | |
"harness|ko_mmlu_international_law|5": 1, | |
"harness|ko_mmlu_astronomy|5": 1, | |
"harness|ko_mmlu_professional_psychology|5": 1, | |
"harness|ko_mmlu_professional_accounting|5": 1, | |
"harness|ko_mmlu_machine_learning|5": 1, | |
"harness|ko_mmlu_high_school_statistics|5": 1, | |
"harness|ko_mmlu_moral_scenarios|5": 1, | |
"harness|ko_mmlu_college_computer_science|5": 1, | |
"harness|ko_mmlu_high_school_computer_science|5": 1, | |
"harness|ko_mmlu_professional_medicine|5": 1, | |
"harness|ko_mmlu_security_studies|5": 1, | |
"harness|ko_mmlu_high_school_world_history|5": 1, | |
"harness|ko_mmlu_professional_law|5": 1, | |
"harness|ko_mmlu_high_school_us_history|5": 1, | |
"harness|ko_mmlu_high_school_european_history|5": 1, | |
"harness|ko_truthfulqa_mc|0": 0, | |
"harness|ko_commongen_v2|2": 1 | |
}, | |
"config_general": { | |
"model_name": "MarkrAI/DopeorNope-maestro-v1.1-DPO-13b", | |
"model_sha": "8dc70bf0ccd7914ca6ebbe2e661f783e69172b95", | |
"model_dtype": "torch.float16", | |
"lighteval_sha": "", | |
"num_few_shot_default": 0, | |
"num_fewshot_seeds": 1, | |
"override_batch_size": 1, | |
"max_samples": null | |
} | |
} |