{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4180887372013652,
            "acc_stderr": 0.014413988396996081,
            "acc_norm": 0.4684300341296928,
            "acc_norm_stderr": 0.014582236460866984
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.41276638119896436,
            "acc_stderr": 0.004913253031155693,
            "acc_norm": 0.5619398526190001,
            "acc_norm_stderr": 0.004951346338164479
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4269005847953216,
            "acc_stderr": 0.03793620616529917,
            "acc_norm": 0.4269005847953216,
            "acc_norm_stderr": 0.03793620616529917
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.34951456310679613,
            "acc_stderr": 0.04721188506097173,
            "acc_norm": 0.34951456310679613,
            "acc_norm_stderr": 0.04721188506097173
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3831417624521073,
            "acc_stderr": 0.017384774194885634,
            "acc_norm": 0.3831417624521073,
            "acc_norm_stderr": 0.017384774194885634
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3148936170212766,
            "acc_stderr": 0.03036358219723816,
            "acc_norm": 0.3148936170212766,
            "acc_norm_stderr": 0.03036358219723816
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.25301204819277107,
            "acc_stderr": 0.033844291552331346,
            "acc_norm": 0.25301204819277107,
            "acc_norm_stderr": 0.033844291552331346
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.41479099678456594,
            "acc_stderr": 0.02798268045975957,
            "acc_norm": 0.41479099678456594,
            "acc_norm_stderr": 0.02798268045975957
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3901345291479821,
            "acc_stderr": 0.03273766725459157,
            "acc_norm": 0.3901345291479821,
            "acc_norm_stderr": 0.03273766725459157
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3282442748091603,
            "acc_stderr": 0.041184385658062976,
            "acc_norm": 0.3282442748091603,
            "acc_norm_stderr": 0.041184385658062976
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4595959595959596,
            "acc_stderr": 0.035507024651313425,
            "acc_norm": 0.4595959595959596,
            "acc_norm_stderr": 0.035507024651313425
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083287,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083287
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.030388353551886845,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.030388353551886845
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3641025641025641,
            "acc_stderr": 0.02439667298509477,
            "acc_norm": 0.3641025641025641,
            "acc_norm_stderr": 0.02439667298509477
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.04766075165356461,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.04766075165356461
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3258064516129032,
            "acc_stderr": 0.026662010578567107,
            "acc_norm": 0.3258064516129032,
            "acc_norm_stderr": 0.026662010578567107
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5128205128205128,
            "acc_stderr": 0.032745319388423504,
            "acc_norm": 0.5128205128205128,
            "acc_norm_stderr": 0.032745319388423504
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.30943396226415093,
            "acc_stderr": 0.028450154794118627,
            "acc_norm": 0.30943396226415093,
            "acc_norm_stderr": 0.028450154794118627
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.33636363636363636,
            "acc_stderr": 0.04525393596302505,
            "acc_norm": 0.33636363636363636,
            "acc_norm_stderr": 0.04525393596302505
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.02730914058823018,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.02730914058823018
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.48258706467661694,
            "acc_stderr": 0.03533389234739245,
            "acc_norm": 0.48258706467661694,
            "acc_norm_stderr": 0.03533389234739245
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.035676037996391706,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.035676037996391706
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.02351729433596329,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.02351729433596329
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.18,
            "acc_stderr": 0.03861229196653694,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.03861229196653694
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.33815028901734107,
            "acc_stderr": 0.025469770149400172,
            "acc_norm": 0.33815028901734107,
            "acc_norm_stderr": 0.025469770149400172
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3374233128834356,
            "acc_stderr": 0.03714908409935573,
            "acc_norm": 0.3374233128834356,
            "acc_norm_stderr": 0.03714908409935573
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.026571483480719978,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.026571483480719978
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.45595854922279794,
            "acc_stderr": 0.03594413711272437,
            "acc_norm": 0.45595854922279794,
            "acc_norm_stderr": 0.03594413711272437
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.04185774424022056,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.04185774424022056
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3394495412844037,
            "acc_stderr": 0.02030210934266235,
            "acc_norm": 0.3394495412844037,
            "acc_norm_stderr": 0.02030210934266235
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.19047619047619047,
            "acc_stderr": 0.03512207412302052,
            "acc_norm": 0.19047619047619047,
            "acc_norm_stderr": 0.03512207412302052
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.026992544339297236,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.026992544339297236
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4793388429752066,
            "acc_stderr": 0.04560456086387235,
            "acc_norm": 0.4793388429752066,
            "acc_norm_stderr": 0.04560456086387235
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3618421052631579,
            "acc_stderr": 0.03910525752849726,
            "acc_norm": 0.3618421052631579,
            "acc_norm_stderr": 0.03910525752849726
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3284313725490196,
            "acc_stderr": 0.01899970738316267,
            "acc_norm": 0.3284313725490196,
            "acc_norm_stderr": 0.01899970738316267
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.026684564340460987,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.026684564340460987
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.043270409325787317,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.043270409325787317
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.029157522184605607,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.029157522184605607
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25139664804469275,
            "acc_stderr": 0.01450897945355399,
            "acc_norm": 0.25139664804469275,
            "acc_norm_stderr": 0.01450897945355399
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.20955882352941177,
            "acc_stderr": 0.02472311040767705,
            "acc_norm": 0.20955882352941177,
            "acc_norm_stderr": 0.02472311040767705
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2530612244897959,
            "acc_stderr": 0.027833023871399666,
            "acc_norm": 0.2530612244897959,
            "acc_norm_stderr": 0.027833023871399666
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.47257383966244726,
            "acc_stderr": 0.03249822718301303,
            "acc_norm": 0.47257383966244726,
            "acc_norm_stderr": 0.03249822718301303
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.29726205997392435,
            "acc_stderr": 0.011673346173086033,
            "acc_norm": 0.29726205997392435,
            "acc_norm_stderr": 0.011673346173086033
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.03283472056108567,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.03283472056108567
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.38181818181818183,
            "acc_stderr": 0.03793713171165635,
            "acc_norm": 0.38181818181818183,
            "acc_norm_stderr": 0.03793713171165635
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26193390452876375,
            "mc1_stderr": 0.015392118805015011,
            "mc2": 0.4460619018984922,
            "mc2_stderr": 0.015743940227327165
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4757969303423849,
            "acc_stderr": 0.017170202466520748,
            "acc_norm": 0.5702479338842975,
            "acc_norm_stderr": 0.017019847535972202
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "mssma/ko-solar-10.7b-v0.1b",
        "model_sha": "87eb36db3edbd506caee5893b552b5c97e7a653c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}