{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.257679180887372,
            "acc_stderr": 0.0127807705627684,
            "acc_norm": 0.3003412969283277,
            "acc_norm_stderr": 0.01339590930995701
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3263294164509062,
            "acc_stderr": 0.004679111783653908,
            "acc_norm": 0.385381398127863,
            "acc_norm_stderr": 0.00485690647371939
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.38596491228070173,
            "acc_stderr": 0.03733756969066164,
            "acc_norm": 0.38596491228070173,
            "acc_norm_stderr": 0.03733756969066164
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.3786407766990291,
            "acc_stderr": 0.048026946982589726,
            "acc_norm": 0.3786407766990291,
            "acc_norm_stderr": 0.048026946982589726
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3959131545338442,
            "acc_stderr": 0.01748824700697927,
            "acc_norm": 0.3959131545338442,
            "acc_norm_stderr": 0.01748824700697927
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04072314811876837,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04072314811876837
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.31063829787234043,
            "acc_stderr": 0.030251237579213167,
            "acc_norm": 0.31063829787234043,
            "acc_norm_stderr": 0.030251237579213167
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3493975903614458,
            "acc_stderr": 0.0371172519074075,
            "acc_norm": 0.3493975903614458,
            "acc_norm_stderr": 0.0371172519074075
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3729903536977492,
            "acc_stderr": 0.0274666102131401,
            "acc_norm": 0.3729903536977492,
            "acc_norm_stderr": 0.0274666102131401
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3811659192825112,
            "acc_stderr": 0.032596251184168264,
            "acc_norm": 0.3811659192825112,
            "acc_norm_stderr": 0.032596251184168264
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.35877862595419846,
            "acc_stderr": 0.04206739313864908,
            "acc_norm": 0.35877862595419846,
            "acc_norm_stderr": 0.04206739313864908
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.0347327959083696,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.0347327959083696
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4068965517241379,
            "acc_stderr": 0.04093793981266237,
            "acc_norm": 0.4068965517241379,
            "acc_norm_stderr": 0.04093793981266237
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3319327731092437,
            "acc_stderr": 0.030588697013783667,
            "acc_norm": 0.3319327731092437,
            "acc_norm_stderr": 0.030588697013783667
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3282051282051282,
            "acc_stderr": 0.023807633198657262,
            "acc_norm": 0.3282051282051282,
            "acc_norm_stderr": 0.023807633198657262
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237101,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237101
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4166666666666667,
            "acc_stderr": 0.04766075165356461,
            "acc_norm": 0.4166666666666667,
            "acc_norm_stderr": 0.04766075165356461
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3251231527093596,
            "acc_stderr": 0.032957975663112704,
            "acc_norm": 0.3251231527093596,
            "acc_norm_stderr": 0.032957975663112704
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3387096774193548,
            "acc_stderr": 0.02692344605930284,
            "acc_norm": 0.3387096774193548,
            "acc_norm_stderr": 0.02692344605930284
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5470085470085471,
            "acc_stderr": 0.0326109987309862,
            "acc_norm": 0.5470085470085471,
            "acc_norm_stderr": 0.0326109987309862
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.35471698113207545,
            "acc_stderr": 0.029445175328199593,
            "acc_norm": 0.35471698113207545,
            "acc_norm_stderr": 0.029445175328199593
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2851851851851852,
            "acc_stderr": 0.027528599210340496,
            "acc_norm": 0.2851851851851852,
            "acc_norm_stderr": 0.027528599210340496
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.24503311258278146,
            "acc_stderr": 0.03511807571804724,
            "acc_norm": 0.24503311258278146,
            "acc_norm_stderr": 0.03511807571804724
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.44776119402985076,
            "acc_stderr": 0.03516184772952166,
            "acc_norm": 0.44776119402985076,
            "acc_norm_stderr": 0.03516184772952166
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3063583815028902,
            "acc_stderr": 0.03514942551267437,
            "acc_norm": 0.3063583815028902,
            "acc_norm_stderr": 0.03514942551267437
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2830687830687831,
            "acc_stderr": 0.023201392938194978,
            "acc_norm": 0.2830687830687831,
            "acc_norm_stderr": 0.023201392938194978
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2986111111111111,
            "acc_stderr": 0.03827052357950756,
            "acc_norm": 0.2986111111111111,
            "acc_norm_stderr": 0.03827052357950756
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3699421965317919,
            "acc_stderr": 0.025992472029306386,
            "acc_norm": 0.3699421965317919,
            "acc_norm_stderr": 0.025992472029306386
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3803680981595092,
            "acc_stderr": 0.038142698932618374,
            "acc_norm": 0.3803680981595092,
            "acc_norm_stderr": 0.038142698932618374
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3395061728395062,
            "acc_stderr": 0.026348564412011624,
            "acc_norm": 0.3395061728395062,
            "acc_norm_stderr": 0.026348564412011624
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.37823834196891193,
            "acc_stderr": 0.034998072761933396,
            "acc_norm": 0.37823834196891193,
            "acc_norm_stderr": 0.034998072761933396
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.04142439719489362,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.04142439719489362
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3339449541284404,
            "acc_stderr": 0.020220554196736403,
            "acc_norm": 0.3339449541284404,
            "acc_norm_stderr": 0.020220554196736403
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.03932537680392871,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.03932537680392871
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3562091503267974,
            "acc_stderr": 0.02742047766262925,
            "acc_norm": 0.3562091503267974,
            "acc_norm_stderr": 0.02742047766262925
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.512396694214876,
            "acc_stderr": 0.04562951548180765,
            "acc_norm": 0.512396694214876,
            "acc_norm_stderr": 0.04562951548180765
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3223684210526316,
            "acc_stderr": 0.03803510248351587,
            "acc_norm": 0.3223684210526316,
            "acc_norm_stderr": 0.03803510248351587
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.29248366013071897,
            "acc_stderr": 0.018403415710109797,
            "acc_norm": 0.29248366013071897,
            "acc_norm_stderr": 0.018403415710109797
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307857,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307857
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.031674687068289784,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.031674687068289784
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225608,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.26838235294117646,
            "acc_stderr": 0.026917481224377243,
            "acc_norm": 0.26838235294117646,
            "acc_norm_stderr": 0.026917481224377243
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3469387755102041,
            "acc_stderr": 0.030472526026726492,
            "acc_norm": 0.3469387755102041,
            "acc_norm_stderr": 0.030472526026726492
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3670886075949367,
            "acc_stderr": 0.03137624072561618,
            "acc_norm": 0.3670886075949367,
            "acc_norm_stderr": 0.03137624072561618
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28552803129074317,
            "acc_stderr": 0.011535751586665673,
            "acc_norm": 0.28552803129074317,
            "acc_norm_stderr": 0.011535751586665673
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.033644872860882996,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.033644872860882996
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.038592681420702615,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.038592681420702615
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2582619339045288,
            "mc1_stderr": 0.015321821688476196,
            "mc2": 0.41968593595047643,
            "mc2_stderr": 0.016254999867947123
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.31220657276995306,
            "acc_stderr": 0.015884928030374883,
            "acc_norm": 0.3403755868544601,
            "acc_norm_stderr": 0.016242870504270406
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA",
        "model_sha": "cbb72323bf2db6eb9ea591a4a882d02964d53eed",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}