{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.197098976109215,
            "acc_stderr": 0.011625047669880621,
            "acc_norm": 0.26023890784982934,
            "acc_norm_stderr": 0.012821930225112568
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.24905397331208923,
            "acc_stderr": 0.004315812968431592,
            "acc_norm": 0.2517426807408883,
            "acc_norm_stderr": 0.0043312717177738545
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2046783625730994,
            "acc_stderr": 0.030944459778533214,
            "acc_norm": 0.2046783625730994,
            "acc_norm_stderr": 0.030944459778533214
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2912621359223301,
            "acc_stderr": 0.04498676320572922,
            "acc_norm": 0.2912621359223301,
            "acc_norm_stderr": 0.04498676320572922
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.28607918263090676,
            "acc_stderr": 0.01616087140512753,
            "acc_norm": 0.28607918263090676,
            "acc_norm_stderr": 0.01616087140512753
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.24444444444444444,
            "acc_stderr": 0.03712537833614865,
            "acc_norm": 0.24444444444444444,
            "acc_norm_stderr": 0.03712537833614865
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3276595744680851,
            "acc_stderr": 0.030683020843231004,
            "acc_norm": 0.3276595744680851,
            "acc_norm_stderr": 0.030683020843231004
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3373493975903614,
            "acc_stderr": 0.0368078369072758,
            "acc_norm": 0.3373493975903614,
            "acc_norm_stderr": 0.0368078369072758
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2829581993569132,
            "acc_stderr": 0.02558306248998484,
            "acc_norm": 0.2829581993569132,
            "acc_norm_stderr": 0.02558306248998484
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.03259625118416827,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.03259625118416827
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.03727673575596919,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.03727673575596919
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.21717171717171718,
            "acc_stderr": 0.02937661648494563,
            "acc_norm": 0.21717171717171718,
            "acc_norm_stderr": 0.02937661648494563
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.21379310344827587,
            "acc_stderr": 0.03416520447747549,
            "acc_norm": 0.21379310344827587,
            "acc_norm_stderr": 0.03416520447747549
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.22268907563025211,
            "acc_stderr": 0.02702543349888238,
            "acc_norm": 0.22268907563025211,
            "acc_norm_stderr": 0.02702543349888238
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.21794871794871795,
            "acc_stderr": 0.020932445774463168,
            "acc_norm": 0.21794871794871795,
            "acc_norm_stderr": 0.020932445774463168
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.044143436668549335,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.044143436668549335
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.27586206896551724,
            "acc_stderr": 0.03144712581678243,
            "acc_norm": 0.27586206896551724,
            "acc_norm_stderr": 0.03144712581678243
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.25483870967741934,
            "acc_stderr": 0.024790118459332208,
            "acc_norm": 0.25483870967741934,
            "acc_norm_stderr": 0.024790118459332208
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2692307692307692,
            "acc_stderr": 0.029058588303748845,
            "acc_norm": 0.2692307692307692,
            "acc_norm_stderr": 0.029058588303748845
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670713,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670713
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.36363636363636365,
            "acc_stderr": 0.046075820907199756,
            "acc_norm": 0.36363636363636365,
            "acc_norm_stderr": 0.046075820907199756
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2052980132450331,
            "acc_stderr": 0.03297986648473835,
            "acc_norm": 0.2052980132450331,
            "acc_norm_stderr": 0.03297986648473835
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.23880597014925373,
            "acc_stderr": 0.030147775935409224,
            "acc_norm": 0.23880597014925373,
            "acc_norm_stderr": 0.030147775935409224
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2138728323699422,
            "acc_stderr": 0.031265112061730424,
            "acc_norm": 0.2138728323699422,
            "acc_norm_stderr": 0.031265112061730424
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2566137566137566,
            "acc_stderr": 0.022494510767503154,
            "acc_norm": 0.2566137566137566,
            "acc_norm_stderr": 0.022494510767503154
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2361111111111111,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.2361111111111111,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.022698657167855713,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.022698657167855713
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943353,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943353
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2716049382716049,
            "acc_stderr": 0.02474862449053737,
            "acc_norm": 0.2716049382716049,
            "acc_norm_stderr": 0.02474862449053737
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.20725388601036268,
            "acc_stderr": 0.02925282329180363,
            "acc_norm": 0.20725388601036268,
            "acc_norm_stderr": 0.02925282329180363
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.22385321100917432,
            "acc_stderr": 0.01787121776779024,
            "acc_norm": 0.22385321100917432,
            "acc_norm_stderr": 0.01787121776779024
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.03567016675276862,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.03567016675276862
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.23202614379084968,
            "acc_stderr": 0.02417084087934101,
            "acc_norm": 0.23202614379084968,
            "acc_norm_stderr": 0.02417084087934101
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.256198347107438,
            "acc_stderr": 0.039849796533028704,
            "acc_norm": 0.256198347107438,
            "acc_norm_stderr": 0.039849796533028704
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.19736842105263158,
            "acc_stderr": 0.03238981601699397,
            "acc_norm": 0.19736842105263158,
            "acc_norm_stderr": 0.03238981601699397
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.26143790849673204,
            "acc_stderr": 0.017776947157528037,
            "acc_norm": 0.26143790849673204,
            "acc_norm_stderr": 0.017776947157528037
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02601199293090201,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02601199293090201
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.042878587513404565,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.042878587513404565
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.16203703703703703,
            "acc_stderr": 0.025130453652268455,
            "acc_norm": 0.16203703703703703,
            "acc_norm_stderr": 0.025130453652268455
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.15,
            "acc_stderr": 0.0358870281282637,
            "acc_norm": 0.15,
            "acc_norm_stderr": 0.0358870281282637
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.19,
            "acc_stderr": 0.039427724440366234,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.039427724440366234
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.22426470588235295,
            "acc_stderr": 0.02533684856333237,
            "acc_norm": 0.22426470588235295,
            "acc_norm_stderr": 0.02533684856333237
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.1673469387755102,
            "acc_stderr": 0.02389714476891452,
            "acc_norm": 0.1673469387755102,
            "acc_norm_stderr": 0.02389714476891452
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.25738396624472576,
            "acc_stderr": 0.0284588209914603,
            "acc_norm": 0.25738396624472576,
            "acc_norm_stderr": 0.0284588209914603
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2392438070404172,
            "acc_stderr": 0.010896123652676651,
            "acc_norm": 0.2392438070404172,
            "acc_norm_stderr": 0.010896123652676651
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24019607843137256,
            "acc_stderr": 0.02998373305591362,
            "acc_norm": 0.24019607843137256,
            "acc_norm_stderr": 0.02998373305591362
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24848484848484848,
            "acc_stderr": 0.03374402644139404,
            "acc_norm": 0.24848484848484848,
            "acc_norm_stderr": 0.03374402644139404
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24112607099143207,
            "mc1_stderr": 0.014974827279752329,
            "mc2": NaN,
            "mc2_stderr": NaN
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.09799291617473435,
            "acc_stderr": 0.010221558855214877,
            "acc_norm": 0.3305785123966942,
            "acc_norm_stderr": 0.0161734232988457
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "mssma/mssmako_Solar10.7B_dpo_v0.1",
        "model_sha": "a421b42cf5672ae4a918019ef5b294ef791e1e37",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}