{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3199658703071672,
            "acc_stderr": 0.013631345807016195,
            "acc_norm": 0.386518771331058,
            "acc_norm_stderr": 0.014230084761910474
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3990240987851026,
            "acc_stderr": 0.004886969266944273,
            "acc_norm": 0.528281218880701,
            "acc_norm_stderr": 0.004981793089848266
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.037867207062342145,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.037867207062342145
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.23300970873786409,
            "acc_stderr": 0.04185832598928315,
            "acc_norm": 0.23300970873786409,
            "acc_norm_stderr": 0.04185832598928315
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.36270753512132825,
            "acc_stderr": 0.017192708674602302,
            "acc_norm": 0.36270753512132825,
            "acc_norm_stderr": 0.017192708674602302
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.042446332383532286,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.042446332383532286
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.26382978723404255,
            "acc_stderr": 0.028809989854102967,
            "acc_norm": 0.26382978723404255,
            "acc_norm_stderr": 0.028809989854102967
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.25301204819277107,
            "acc_stderr": 0.03384429155233136,
            "acc_norm": 0.25301204819277107,
            "acc_norm_stderr": 0.03384429155233136
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3247588424437299,
            "acc_stderr": 0.026596782287697043,
            "acc_norm": 0.3247588424437299,
            "acc_norm_stderr": 0.026596782287697043
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.33183856502242154,
            "acc_stderr": 0.03160295143776679,
            "acc_norm": 0.33183856502242154,
            "acc_norm_stderr": 0.03160295143776679
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4198473282442748,
            "acc_stderr": 0.04328577215262971,
            "acc_norm": 0.4198473282442748,
            "acc_norm_stderr": 0.04328577215262971
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3383838383838384,
            "acc_stderr": 0.033711241426263035,
            "acc_norm": 0.3383838383838384,
            "acc_norm_stderr": 0.033711241426263035
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.36551724137931035,
            "acc_stderr": 0.04013124195424385,
            "acc_norm": 0.36551724137931035,
            "acc_norm_stderr": 0.04013124195424385
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.1568627450980392,
            "acc_stderr": 0.036186648199362466,
            "acc_norm": 0.1568627450980392,
            "acc_norm_stderr": 0.036186648199362466
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31512605042016806,
            "acc_stderr": 0.03017680828897434,
            "acc_norm": 0.31512605042016806,
            "acc_norm_stderr": 0.03017680828897434
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.24358974358974358,
            "acc_stderr": 0.0217637336841739,
            "acc_norm": 0.24358974358974358,
            "acc_norm_stderr": 0.0217637336841739
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.04489931073591312,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.04489931073591312
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.28078817733990147,
            "acc_stderr": 0.03161856335358611,
            "acc_norm": 0.28078817733990147,
            "acc_norm_stderr": 0.03161856335358611
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2903225806451613,
            "acc_stderr": 0.025822106119415895,
            "acc_norm": 0.2903225806451613,
            "acc_norm_stderr": 0.025822106119415895
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.4017094017094017,
            "acc_stderr": 0.03211693751051622,
            "acc_norm": 0.4017094017094017,
            "acc_norm_stderr": 0.03211693751051622
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3320754716981132,
            "acc_stderr": 0.028985455652334395,
            "acc_norm": 0.3320754716981132,
            "acc_norm_stderr": 0.028985455652334395
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.37272727272727274,
            "acc_stderr": 0.04631381319425463,
            "acc_norm": 0.37272727272727274,
            "acc_norm_stderr": 0.04631381319425463
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.02671924078371216,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.02671924078371216
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2582781456953642,
            "acc_stderr": 0.035737053147634576,
            "acc_norm": 0.2582781456953642,
            "acc_norm_stderr": 0.035737053147634576
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.31840796019900497,
            "acc_stderr": 0.03294118479054095,
            "acc_norm": 0.31840796019900497,
            "acc_norm_stderr": 0.03294118479054095
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2947976878612717,
            "acc_stderr": 0.03476599607516478,
            "acc_norm": 0.2947976878612717,
            "acc_norm_stderr": 0.03476599607516478
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2566137566137566,
            "acc_stderr": 0.022494510767503154,
            "acc_norm": 0.2566137566137566,
            "acc_norm_stderr": 0.022494510767503154
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.24305555555555555,
            "acc_stderr": 0.03586879280080343,
            "acc_norm": 0.24305555555555555,
            "acc_norm_stderr": 0.03586879280080343
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036846,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036846
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3265895953757225,
            "acc_stderr": 0.02524826477424284,
            "acc_norm": 0.3265895953757225,
            "acc_norm_stderr": 0.02524826477424284
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.26993865030674846,
            "acc_stderr": 0.03487825168497892,
            "acc_norm": 0.26993865030674846,
            "acc_norm_stderr": 0.03487825168497892
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.33641975308641975,
            "acc_stderr": 0.026289734945952926,
            "acc_norm": 0.33641975308641975,
            "acc_norm_stderr": 0.026289734945952926
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.30569948186528495,
            "acc_stderr": 0.033248379397581594,
            "acc_norm": 0.30569948186528495,
            "acc_norm_stderr": 0.033248379397581594
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436716,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436716
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3559633027522936,
            "acc_stderr": 0.020528559278244218,
            "acc_norm": 0.3559633027522936,
            "acc_norm_stderr": 0.020528559278244218
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1746031746031746,
            "acc_stderr": 0.03395490020856111,
            "acc_norm": 0.1746031746031746,
            "acc_norm_stderr": 0.03395490020856111
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.026787453111906532,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.026787453111906532
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.48760330578512395,
            "acc_stderr": 0.045629515481807666,
            "acc_norm": 0.48760330578512395,
            "acc_norm_stderr": 0.045629515481807666
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.30718954248366015,
            "acc_stderr": 0.018663359671463663,
            "acc_norm": 0.30718954248366015,
            "acc_norm_stderr": 0.018663359671463663
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307857,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307857
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.044642857142857116,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.044642857142857116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2175925925925926,
            "acc_stderr": 0.028139689444859672,
            "acc_norm": 0.2175925925925926,
            "acc_norm_stderr": 0.028139689444859672
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23910614525139665,
            "acc_stderr": 0.014265554192331165,
            "acc_norm": 0.23910614525139665,
            "acc_norm_stderr": 0.014265554192331165
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768078,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768078
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.25735294117647056,
            "acc_stderr": 0.026556519470041513,
            "acc_norm": 0.25735294117647056,
            "acc_norm_stderr": 0.026556519470041513
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.39591836734693875,
            "acc_stderr": 0.03130802899065685,
            "acc_norm": 0.39591836734693875,
            "acc_norm_stderr": 0.03130802899065685
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3924050632911392,
            "acc_stderr": 0.031784718745647283,
            "acc_norm": 0.3924050632911392,
            "acc_norm_stderr": 0.031784718745647283
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28552803129074317,
            "acc_stderr": 0.011535751586665668,
            "acc_norm": 0.28552803129074317,
            "acc_norm_stderr": 0.011535751586665668
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.03283472056108567,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.03283472056108567
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3393939393939394,
            "acc_stderr": 0.03697442205031596,
            "acc_norm": 0.3393939393939394,
            "acc_norm_stderr": 0.03697442205031596
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26193390452876375,
            "mc1_stderr": 0.01539211880501501,
            "mc2": 0.41139003440232036,
            "mc2_stderr": 0.015295007853319996
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3028169014084507,
            "acc_stderr": 0.01575065796584493,
            "acc_norm": 0.3532863849765258,
            "acc_norm_stderr": 0.016385310378526204
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "lcw99/llama2-ko-chang-instruct-chat",
        "model_sha": "53faa94566bdad4d1ff0611ca8c78a7aabda8cee",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}