{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.30119453924914674,
"acc_stderr": 0.013406741767847612,
"acc_norm": 0.33532423208191126,
"acc_norm_stderr": 0.013796182947785562
},
"harness|ko_hellaswag|10": {
"acc": 0.38707428799044014,
"acc_stderr": 0.004860854240821967,
"acc_norm": 0.5005974905397331,
"acc_norm_stderr": 0.004989777848791005
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.32748538011695905,
"acc_stderr": 0.035993357714560276,
"acc_norm": 0.32748538011695905,
"acc_norm_stderr": 0.035993357714560276
},
"harness|ko_mmlu_management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266196
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3001277139208174,
"acc_stderr": 0.016389249691317425,
"acc_norm": 0.3001277139208174,
"acc_norm_stderr": 0.016389249691317425
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996794,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996794
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2680851063829787,
"acc_stderr": 0.028957342788342343,
"acc_norm": 0.2680851063829787,
"acc_norm_stderr": 0.028957342788342343
},
"harness|ko_mmlu_virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.0332939411907353,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.0332939411907353
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3536977491961415,
"acc_stderr": 0.027155208103200868,
"acc_norm": 0.3536977491961415,
"acc_norm_stderr": 0.027155208103200868
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.19730941704035873,
"acc_stderr": 0.02670985334496796,
"acc_norm": 0.19730941704035873,
"acc_norm_stderr": 0.02670985334496796
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.24427480916030533,
"acc_stderr": 0.03768335959728743,
"acc_norm": 0.24427480916030533,
"acc_norm_stderr": 0.03768335959728743
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.30808080808080807,
"acc_stderr": 0.032894773300986155,
"acc_norm": 0.30808080808080807,
"acc_norm_stderr": 0.032894773300986155
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3310344827586207,
"acc_stderr": 0.03921545312467122,
"acc_norm": 0.3310344827586207,
"acc_norm_stderr": 0.03921545312467122
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006717,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.029472485833136112,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136112
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.28974358974358977,
"acc_stderr": 0.02300062824368797,
"acc_norm": 0.28974358974358977,
"acc_norm_stderr": 0.02300062824368797
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946336
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.30049261083743845,
"acc_stderr": 0.03225799476233485,
"acc_norm": 0.30049261083743845,
"acc_norm_stderr": 0.03225799476233485
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.25806451612903225,
"acc_stderr": 0.02489246917246283,
"acc_norm": 0.25806451612903225,
"acc_norm_stderr": 0.02489246917246283
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.32051282051282054,
"acc_stderr": 0.03057281131029961,
"acc_norm": 0.32051282051282054,
"acc_norm_stderr": 0.03057281131029961
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27547169811320754,
"acc_stderr": 0.027495663683724046,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.027495663683724046
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.24545454545454545,
"acc_stderr": 0.04122066502878285,
"acc_norm": 0.24545454545454545,
"acc_norm_stderr": 0.04122066502878285
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02671924078371217,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02671924078371217
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.037101857261199966,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.037101857261199966
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2835820895522388,
"acc_stderr": 0.03187187537919798,
"acc_norm": 0.2835820895522388,
"acc_norm_stderr": 0.03187187537919798
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.033687629322594316,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.033687629322594316
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2671957671957672,
"acc_stderr": 0.022789673145776568,
"acc_norm": 0.2671957671957672,
"acc_norm_stderr": 0.022789673145776568
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2361111111111111,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.2361111111111111,
"acc_norm_stderr": 0.03551446610810826
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.35260115606936415,
"acc_stderr": 0.02572280220089582,
"acc_norm": 0.35260115606936415,
"acc_norm_stderr": 0.02572280220089582
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3128834355828221,
"acc_stderr": 0.036429145782924055,
"acc_norm": 0.3128834355828221,
"acc_norm_stderr": 0.036429145782924055
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.30246913580246915,
"acc_stderr": 0.025557653981868045,
"acc_norm": 0.30246913580246915,
"acc_norm_stderr": 0.025557653981868045
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.2538860103626943,
"acc_stderr": 0.03141024780565319,
"acc_norm": 0.2538860103626943,
"acc_norm_stderr": 0.03141024780565319
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.04049339297748142,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.04049339297748142
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.26788990825688075,
"acc_stderr": 0.01898746225797865,
"acc_norm": 0.26788990825688075,
"acc_norm_stderr": 0.01898746225797865
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.03932537680392871,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.03932537680392871
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.02718449890994162,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.02718449890994162
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909281,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909281
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2231404958677686,
"acc_stderr": 0.03800754475228733,
"acc_norm": 0.2231404958677686,
"acc_norm_stderr": 0.03800754475228733
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.17105263157894737,
"acc_stderr": 0.030643607071677098,
"acc_norm": 0.17105263157894737,
"acc_norm_stderr": 0.030643607071677098
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.29248366013071897,
"acc_stderr": 0.01840341571010979,
"acc_norm": 0.29248366013071897,
"acc_norm_stderr": 0.01840341571010979
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.02624492034984301,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.02624492034984301
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.03894641120044792,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.03894641120044792
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.37962962962962965,
"acc_stderr": 0.03309682581119035,
"acc_norm": 0.37962962962962965,
"acc_norm_stderr": 0.03309682581119035
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.264804469273743,
"acc_stderr": 0.014756906483260664,
"acc_norm": 0.264804469273743,
"acc_norm_stderr": 0.014756906483260664
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3161764705882353,
"acc_stderr": 0.02824568739146292,
"acc_norm": 0.3161764705882353,
"acc_norm_stderr": 0.02824568739146292
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.33877551020408164,
"acc_stderr": 0.03029950656215418,
"acc_norm": 0.33877551020408164,
"acc_norm_stderr": 0.03029950656215418
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.379746835443038,
"acc_stderr": 0.031591887529658504,
"acc_norm": 0.379746835443038,
"acc_norm_stderr": 0.031591887529658504
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.31877444589308995,
"acc_stderr": 0.01190189563578609,
"acc_norm": 0.31877444589308995,
"acc_norm_stderr": 0.01190189563578609
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.03354092437591518,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.03354092437591518
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3393939393939394,
"acc_stderr": 0.03697442205031596,
"acc_norm": 0.3393939393939394,
"acc_norm_stderr": 0.03697442205031596
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24479804161566707,
"mc1_stderr": 0.015051869486715,
"mc2": 0.39860268740922694,
"mc2_stderr": 0.015473079108834439
},
"harness|ko_commongen_v2|2": {
"acc": 0.29279811097992914,
"acc_stderr": 0.015644823205401334,
"acc_norm": 0.33412042502951594,
"acc_norm_stderr": 0.016216763304239695
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3",
"model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}