{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.24061433447098976,
            "acc_stderr": 0.012491468532390576,
            "acc_norm": 0.2883959044368601,
            "acc_norm_stderr": 0.013238394422428171
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.30720971917944634,
            "acc_stderr": 0.00460394243986156,
            "acc_norm": 0.3979286994622585,
            "acc_norm_stderr": 0.004884702412456093
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.40350877192982454,
            "acc_stderr": 0.03762738699917055,
            "acc_norm": 0.40350877192982454,
            "acc_norm_stderr": 0.03762738699917055
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.33980582524271846,
            "acc_stderr": 0.046897659372781335,
            "acc_norm": 0.33980582524271846,
            "acc_norm_stderr": 0.046897659372781335
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4163473818646232,
            "acc_stderr": 0.017627948030430298,
            "acc_norm": 0.4163473818646232,
            "acc_norm_stderr": 0.017627948030430298
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354544,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354544
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.31063829787234043,
            "acc_stderr": 0.03025123757921317,
            "acc_norm": 0.31063829787234043,
            "acc_norm_stderr": 0.03025123757921317
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3493975903614458,
            "acc_stderr": 0.03711725190740753,
            "acc_norm": 0.3493975903614458,
            "acc_norm_stderr": 0.03711725190740753
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4437299035369775,
            "acc_stderr": 0.02821768355665231,
            "acc_norm": 0.4437299035369775,
            "acc_norm_stderr": 0.02821768355665231
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.35874439461883406,
            "acc_stderr": 0.032190792004199956,
            "acc_norm": 0.35874439461883406,
            "acc_norm_stderr": 0.032190792004199956
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3816793893129771,
            "acc_stderr": 0.042607351576445594,
            "acc_norm": 0.3816793893129771,
            "acc_norm_stderr": 0.042607351576445594
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4696969696969697,
            "acc_stderr": 0.03555804051763929,
            "acc_norm": 0.4696969696969697,
            "acc_norm_stderr": 0.03555804051763929
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3448275862068966,
            "acc_stderr": 0.03960933549451207,
            "acc_norm": 0.3448275862068966,
            "acc_norm_stderr": 0.03960933549451207
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.031041941304059274,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.031041941304059274
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3769230769230769,
            "acc_stderr": 0.024570975364225995,
            "acc_norm": 0.3769230769230769,
            "acc_norm_stderr": 0.024570975364225995
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.046166311118017125,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.046166311118017125
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03255086769970103,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03255086769970103
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3935483870967742,
            "acc_stderr": 0.027791878753132274,
            "acc_norm": 0.3935483870967742,
            "acc_norm_stderr": 0.027791878753132274
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6196581196581197,
            "acc_stderr": 0.03180425204384099,
            "acc_norm": 0.6196581196581197,
            "acc_norm_stderr": 0.03180425204384099
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.37358490566037733,
            "acc_stderr": 0.029773082713319878,
            "acc_norm": 0.37358490566037733,
            "acc_norm_stderr": 0.029773082713319878
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4090909090909091,
            "acc_stderr": 0.047093069786618966,
            "acc_norm": 0.4090909090909091,
            "acc_norm_stderr": 0.047093069786618966
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.026719240783712163,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.026719240783712163
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.48258706467661694,
            "acc_stderr": 0.03533389234739245,
            "acc_norm": 0.48258706467661694,
            "acc_norm_stderr": 0.03533389234739245
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.03733626655383509,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.03733626655383509
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.023266512213730564,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.023266512213730564
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.03773809990686935,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.03773809990686935
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.430635838150289,
            "acc_stderr": 0.026658800273672387,
            "acc_norm": 0.430635838150289,
            "acc_norm_stderr": 0.026658800273672387
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2822085889570552,
            "acc_stderr": 0.03536117886664743,
            "acc_norm": 0.2822085889570552,
            "acc_norm_stderr": 0.03536117886664743
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.026869490744815247,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.026869490744815247
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.35233160621761656,
            "acc_stderr": 0.03447478286414357,
            "acc_norm": 0.35233160621761656,
            "acc_norm_stderr": 0.03447478286414357
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537316,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537316
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.42018348623853213,
            "acc_stderr": 0.021162420048273522,
            "acc_norm": 0.42018348623853213,
            "acc_norm_stderr": 0.021162420048273522
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23015873015873015,
            "acc_stderr": 0.037649508797906024,
            "acc_norm": 0.23015873015873015,
            "acc_norm_stderr": 0.037649508797906024
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4117647058823529,
            "acc_stderr": 0.02818059632825929,
            "acc_norm": 0.4117647058823529,
            "acc_norm_stderr": 0.02818059632825929
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5537190082644629,
            "acc_stderr": 0.0453793517794788,
            "acc_norm": 0.5537190082644629,
            "acc_norm_stderr": 0.0453793517794788
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3026315789473684,
            "acc_stderr": 0.037385206761196686,
            "acc_norm": 0.3026315789473684,
            "acc_norm_stderr": 0.037385206761196686
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3088235294117647,
            "acc_stderr": 0.018690850273595284,
            "acc_norm": 0.3088235294117647,
            "acc_norm_stderr": 0.018690850273595284
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.33687943262411346,
            "acc_stderr": 0.028195534873966727,
            "acc_norm": 0.33687943262411346,
            "acc_norm_stderr": 0.028195534873966727
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3125,
            "acc_stderr": 0.043994650575715215,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.043994650575715215
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.03167468706828977,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.03167468706828977
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2346368715083799,
            "acc_stderr": 0.014173044098303673,
            "acc_norm": 0.2346368715083799,
            "acc_norm_stderr": 0.014173044098303673
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.39338235294117646,
            "acc_stderr": 0.029674288281311183,
            "acc_norm": 0.39338235294117646,
            "acc_norm_stderr": 0.029674288281311183
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.34285714285714286,
            "acc_stderr": 0.03038726291954774,
            "acc_norm": 0.34285714285714286,
            "acc_norm_stderr": 0.03038726291954774
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4641350210970464,
            "acc_stderr": 0.03246338898055659,
            "acc_norm": 0.4641350210970464,
            "acc_norm_stderr": 0.03246338898055659
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2894393741851369,
            "acc_stderr": 0.011582659702210233,
            "acc_norm": 0.2894393741851369,
            "acc_norm_stderr": 0.011582659702210233
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.03283472056108567,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.03283472056108567
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2606060606060606,
            "acc_stderr": 0.034277431758165236,
            "acc_norm": 0.2606060606060606,
            "acc_norm_stderr": 0.034277431758165236
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.25091799265605874,
            "mc1_stderr": 0.015176985027707687,
            "mc2": 0.44719810330395326,
            "mc2_stderr": 0.0165562423178332
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.17119244391971664,
            "acc_stderr": 0.012950423337299044,
            "acc_norm": 0.2762691853600944,
            "acc_norm_stderr": 0.015373387500464464
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Edentns/DataVortexS-10.7B-v0.1",
        "model_sha": "9160dba1ce26ebcecd1f8ebca001375dc1f41b1f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}