{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.21245733788395904,
            "acc_stderr": 0.011953482906582947,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.012653835621466646
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.28291177056363276,
            "acc_stderr": 0.00449493402546234,
            "acc_norm": 0.3088030272854013,
            "acc_norm_stderr": 0.004610554974411238
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.19298245614035087,
            "acc_stderr": 0.030267457554898465,
            "acc_norm": 0.19298245614035087,
            "acc_norm_stderr": 0.030267457554898465
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.39805825242718446,
            "acc_stderr": 0.04846748253977239,
            "acc_norm": 0.39805825242718446,
            "acc_norm_stderr": 0.04846748253977239
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.210727969348659,
            "acc_stderr": 0.014583812465862546,
            "acc_norm": 0.210727969348659,
            "acc_norm_stderr": 0.014583812465862546
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2074074074074074,
            "acc_stderr": 0.035025531706783165,
            "acc_norm": 0.2074074074074074,
            "acc_norm_stderr": 0.035025531706783165
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2297872340425532,
            "acc_stderr": 0.02750175294441242,
            "acc_norm": 0.2297872340425532,
            "acc_norm_stderr": 0.02750175294441242
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.19879518072289157,
            "acc_stderr": 0.031069390260789406,
            "acc_norm": 0.19879518072289157,
            "acc_norm_stderr": 0.031069390260789406
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2347266881028939,
            "acc_stderr": 0.024071805887677048,
            "acc_norm": 0.2347266881028939,
            "acc_norm_stderr": 0.024071805887677048
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.13004484304932734,
            "acc_stderr": 0.02257451942417487,
            "acc_norm": 0.13004484304932734,
            "acc_norm_stderr": 0.02257451942417487
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.25190839694656486,
            "acc_stderr": 0.03807387116306086,
            "acc_norm": 0.25190839694656486,
            "acc_norm_stderr": 0.03807387116306086
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3484848484848485,
            "acc_stderr": 0.033948539651564025,
            "acc_norm": 0.3484848484848485,
            "acc_norm_stderr": 0.033948539651564025
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2482758620689655,
            "acc_stderr": 0.036001056927277716,
            "acc_norm": 0.2482758620689655,
            "acc_norm_stderr": 0.036001056927277716
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.35294117647058826,
            "acc_stderr": 0.04755129616062947,
            "acc_norm": 0.35294117647058826,
            "acc_norm_stderr": 0.04755129616062947
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31092436974789917,
            "acc_stderr": 0.030066761582977924,
            "acc_norm": 0.31092436974789917,
            "acc_norm_stderr": 0.030066761582977924
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.32051282051282054,
            "acc_stderr": 0.023661296393964273,
            "acc_norm": 0.32051282051282054,
            "acc_norm_stderr": 0.023661296393964273
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816507,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816507
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.28703703703703703,
            "acc_stderr": 0.043733130409147614,
            "acc_norm": 0.28703703703703703,
            "acc_norm_stderr": 0.043733130409147614
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.031785297106427496,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.031785297106427496
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3193548387096774,
            "acc_stderr": 0.026522709674667765,
            "acc_norm": 0.3193548387096774,
            "acc_norm_stderr": 0.026522709674667765
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3418803418803419,
            "acc_stderr": 0.031075028526507762,
            "acc_norm": 0.3418803418803419,
            "acc_norm_stderr": 0.031075028526507762
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3169811320754717,
            "acc_stderr": 0.028637235639800935,
            "acc_norm": 0.3169811320754717,
            "acc_norm_stderr": 0.028637235639800935
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.03895091015724137,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.03895091015724137
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.02684205787383371,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.02684205787383371
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3509933774834437,
            "acc_stderr": 0.03896981964257374,
            "acc_norm": 0.3509933774834437,
            "acc_norm_stderr": 0.03896981964257374
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.26865671641791045,
            "acc_stderr": 0.03134328358208954,
            "acc_norm": 0.26865671641791045,
            "acc_norm_stderr": 0.03134328358208954
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.033450369167889925,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.033450369167889925
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.23809523809523808,
            "acc_stderr": 0.02193587808118476,
            "acc_norm": 0.23809523809523808,
            "acc_norm_stderr": 0.02193587808118476
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2916666666666667,
            "acc_stderr": 0.038009680605548574,
            "acc_norm": 0.2916666666666667,
            "acc_norm_stderr": 0.038009680605548574
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.023618678310069374,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.023618678310069374
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.25153374233128833,
            "acc_stderr": 0.034089978868575295,
            "acc_norm": 0.25153374233128833,
            "acc_norm_stderr": 0.034089978868575295
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.22530864197530864,
            "acc_stderr": 0.02324620264781975,
            "acc_norm": 0.22530864197530864,
            "acc_norm_stderr": 0.02324620264781975
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.26424870466321243,
            "acc_stderr": 0.03182155050916647,
            "acc_norm": 0.26424870466321243,
            "acc_norm_stderr": 0.03182155050916647
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.040493392977481404,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.040493392977481404
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3119266055045872,
            "acc_stderr": 0.01986296797670724,
            "acc_norm": 0.3119266055045872,
            "acc_norm_stderr": 0.01986296797670724
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.04306241259127153,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.04306241259127153
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.2973856209150327,
            "acc_stderr": 0.026173908506718576,
            "acc_norm": 0.2973856209150327,
            "acc_norm_stderr": 0.026173908506718576
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.24793388429752067,
            "acc_stderr": 0.039418975265163046,
            "acc_norm": 0.24793388429752067,
            "acc_norm_stderr": 0.039418975265163046
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.037827289808654706,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.037827289808654706
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2565359477124183,
            "acc_stderr": 0.017667841612378995,
            "acc_norm": 0.2565359477124183,
            "acc_norm_stderr": 0.017667841612378995
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.25177304964539005,
            "acc_stderr": 0.025892151156709405,
            "acc_norm": 0.25177304964539005,
            "acc_norm_stderr": 0.025892151156709405
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.16071428571428573,
            "acc_stderr": 0.034859460964757394,
            "acc_norm": 0.16071428571428573,
            "acc_norm_stderr": 0.034859460964757394
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.03400603625538272,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.03400603625538272
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27262569832402234,
            "acc_stderr": 0.014893391735249608,
            "acc_norm": 0.27262569832402234,
            "acc_norm_stderr": 0.014893391735249608
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.22,
            "acc_stderr": 0.04163331998932267,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.04163331998932267
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.45588235294117646,
            "acc_stderr": 0.03025437257397669,
            "acc_norm": 0.45588235294117646,
            "acc_norm_stderr": 0.03025437257397669
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.20408163265306123,
            "acc_stderr": 0.025801283475090496,
            "acc_norm": 0.20408163265306123,
            "acc_norm_stderr": 0.025801283475090496
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2911392405063291,
            "acc_stderr": 0.029571601065753374,
            "acc_norm": 0.2911392405063291,
            "acc_norm_stderr": 0.029571601065753374
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25097783572359844,
            "acc_stderr": 0.011073730299187217,
            "acc_norm": 0.25097783572359844,
            "acc_norm_stderr": 0.011073730299187217
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.030964517926923413,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.030964517926923413
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2909090909090909,
            "acc_stderr": 0.03546563019624336,
            "acc_norm": 0.2909090909090909,
            "acc_norm_stderr": 0.03546563019624336
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27906976744186046,
            "mc1_stderr": 0.01570210709062789,
            "mc2": 0.4684529912207382,
            "mc2_stderr": 0.016283298202620658
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2255017709563164,
            "acc_stderr": 0.014368122149532174,
            "acc_norm": 0.2939787485242031,
            "acc_norm_stderr": 0.015663242569091115
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "FINGU-AI/FinguAI-Chat-v1",
        "model_sha": "3557829049749742bdb0bfaf23de2d07ecf928f2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}