{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2619453924914676,
            "acc_stderr": 0.012849054826858117,
            "acc_norm": 0.30802047781569963,
            "acc_norm_stderr": 0.01349142951729204
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.33957379008165706,
            "acc_stderr": 0.0047259676848064045,
            "acc_norm": 0.4195379406492731,
            "acc_norm_stderr": 0.004924748500639348
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.25146198830409355,
            "acc_stderr": 0.033275044238468436,
            "acc_norm": 0.25146198830409355,
            "acc_norm_stderr": 0.033275044238468436
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690877,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690877
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2784163473818646,
            "acc_stderr": 0.01602829518899247,
            "acc_norm": 0.2784163473818646,
            "acc_norm_stderr": 0.01602829518899247
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.23703703703703705,
            "acc_stderr": 0.03673731683969506,
            "acc_norm": 0.23703703703703705,
            "acc_norm_stderr": 0.03673731683969506
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.25957446808510637,
            "acc_stderr": 0.028659179374292326,
            "acc_norm": 0.25957446808510637,
            "acc_norm_stderr": 0.028659179374292326
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3192771084337349,
            "acc_stderr": 0.036293353299478595,
            "acc_norm": 0.3192771084337349,
            "acc_norm_stderr": 0.036293353299478595
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2733118971061093,
            "acc_stderr": 0.02531176597542612,
            "acc_norm": 0.2733118971061093,
            "acc_norm_stderr": 0.02531176597542612
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.29596412556053814,
            "acc_stderr": 0.030636591348699796,
            "acc_norm": 0.29596412556053814,
            "acc_norm_stderr": 0.030636591348699796
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.22900763358778625,
            "acc_stderr": 0.036853466317118506,
            "acc_norm": 0.22900763358778625,
            "acc_norm_stderr": 0.036853466317118506
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2676767676767677,
            "acc_stderr": 0.03154449888270287,
            "acc_norm": 0.2676767676767677,
            "acc_norm_stderr": 0.03154449888270287
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.1724137931034483,
            "acc_stderr": 0.031478307902595745,
            "acc_norm": 0.1724137931034483,
            "acc_norm_stderr": 0.031478307902595745
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31512605042016806,
            "acc_stderr": 0.030176808288974337,
            "acc_norm": 0.31512605042016806,
            "acc_norm_stderr": 0.030176808288974337
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2282051282051282,
            "acc_stderr": 0.021278393863586282,
            "acc_norm": 0.2282051282051282,
            "acc_norm_stderr": 0.021278393863586282
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.25,
            "acc_stderr": 0.04186091791394607,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04186091791394607
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2315270935960591,
            "acc_stderr": 0.02967833314144446,
            "acc_norm": 0.2315270935960591,
            "acc_norm_stderr": 0.02967833314144446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2838709677419355,
            "acc_stderr": 0.02564938106302925,
            "acc_norm": 0.2838709677419355,
            "acc_norm_stderr": 0.02564938106302925
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.24786324786324787,
            "acc_stderr": 0.028286324075564393,
            "acc_norm": 0.24786324786324787,
            "acc_norm_stderr": 0.028286324075564393
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670713,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670713
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2909090909090909,
            "acc_stderr": 0.04350271442923243,
            "acc_norm": 0.2909090909090909,
            "acc_norm_stderr": 0.04350271442923243
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.02696242432507383,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.02696242432507383
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23841059602649006,
            "acc_stderr": 0.034791855725996586,
            "acc_norm": 0.23841059602649006,
            "acc_norm_stderr": 0.034791855725996586
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.263681592039801,
            "acc_stderr": 0.031157150869355568,
            "acc_norm": 0.263681592039801,
            "acc_norm_stderr": 0.031157150869355568
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.03186209851641144,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.03186209851641144
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25132275132275134,
            "acc_stderr": 0.022340482339643898,
            "acc_norm": 0.25132275132275134,
            "acc_norm_stderr": 0.022340482339643898
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.22916666666666666,
            "acc_stderr": 0.035146974678623884,
            "acc_norm": 0.22916666666666666,
            "acc_norm_stderr": 0.035146974678623884
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.042295258468165044,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.042295258468165044
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.30057803468208094,
            "acc_stderr": 0.02468531686725781,
            "acc_norm": 0.30057803468208094,
            "acc_norm_stderr": 0.02468531686725781
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.26380368098159507,
            "acc_stderr": 0.03462419931615624,
            "acc_norm": 0.26380368098159507,
            "acc_norm_stderr": 0.03462419931615624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2623456790123457,
            "acc_stderr": 0.0244772228561351,
            "acc_norm": 0.2623456790123457,
            "acc_norm_stderr": 0.0244772228561351
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24352331606217617,
            "acc_stderr": 0.03097543638684542,
            "acc_norm": 0.24352331606217617,
            "acc_norm_stderr": 0.03097543638684542
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.21100917431192662,
            "acc_stderr": 0.01749392240411265,
            "acc_norm": 0.21100917431192662,
            "acc_norm_stderr": 0.01749392240411265
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.03932537680392871,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.03932537680392871
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.25163398692810457,
            "acc_stderr": 0.0248480182638752,
            "acc_norm": 0.25163398692810457,
            "acc_norm_stderr": 0.0248480182638752
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.2,
            "acc_stderr": 0.040201512610368445,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.040201512610368445
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.35537190082644626,
            "acc_stderr": 0.04369236326573981,
            "acc_norm": 0.35537190082644626,
            "acc_norm_stderr": 0.04369236326573981
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.17105263157894737,
            "acc_stderr": 0.030643607071677105,
            "acc_norm": 0.17105263157894737,
            "acc_norm_stderr": 0.030643607071677105
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2679738562091503,
            "acc_stderr": 0.017917974069594722,
            "acc_norm": 0.2679738562091503,
            "acc_norm_stderr": 0.017917974069594722
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02601199293090201,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02601199293090201
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.23214285714285715,
            "acc_stderr": 0.04007341809755806,
            "acc_norm": 0.23214285714285715,
            "acc_norm_stderr": 0.04007341809755806
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2175925925925926,
            "acc_stderr": 0.02813968944485966,
            "acc_norm": 0.2175925925925926,
            "acc_norm_stderr": 0.02813968944485966
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25921787709497207,
            "acc_stderr": 0.014655780837497717,
            "acc_norm": 0.25921787709497207,
            "acc_norm_stderr": 0.014655780837497717
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.15,
            "acc_stderr": 0.0358870281282637,
            "acc_norm": 0.15,
            "acc_norm_stderr": 0.0358870281282637
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2867647058823529,
            "acc_stderr": 0.027472274473233818,
            "acc_norm": 0.2867647058823529,
            "acc_norm_stderr": 0.027472274473233818
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3020408163265306,
            "acc_stderr": 0.029393609319879818,
            "acc_norm": 0.3020408163265306,
            "acc_norm_stderr": 0.029393609319879818
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.25738396624472576,
            "acc_stderr": 0.028458820991460288,
            "acc_norm": 0.25738396624472576,
            "acc_norm_stderr": 0.028458820991460288
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.23728813559322035,
            "acc_stderr": 0.010865436690780272,
            "acc_norm": 0.23728813559322035,
            "acc_norm_stderr": 0.010865436690780272
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.03058759135160425,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.03058759135160425
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2727272727272727,
            "acc_stderr": 0.0347769116216366,
            "acc_norm": 0.2727272727272727,
            "acc_norm_stderr": 0.0347769116216366
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.22888616891064872,
            "mc1_stderr": 0.014706994909055027,
            "mc2": 0.4031826036090223,
            "mc2_stderr": 0.0151985432197755
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2975206611570248,
            "acc_stderr": 0.01571774220508992,
            "acc_norm": 0.37662337662337664,
            "acc_norm_stderr": 0.016658799874051975
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "BM-K/polyglot-ko-1.3b-it-v1.3",
        "model_sha": "1df1840d994fed4d5806ca38746639407c9bb970",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}