{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.39505119453924914,
            "acc_stderr": 0.014285898292938162,
            "acc_norm": 0.4590443686006826,
            "acc_norm_stderr": 0.014562291073601229
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4246166102370046,
            "acc_stderr": 0.004932745013072717,
            "acc_norm": 0.5680143397729536,
            "acc_norm_stderr": 0.004943400892881046
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.47368421052631576,
            "acc_stderr": 0.038295098689947286,
            "acc_norm": 0.47368421052631576,
            "acc_norm_stderr": 0.038295098689947286
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5825242718446602,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.5825242718446602,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5325670498084292,
            "acc_stderr": 0.017841995750520867,
            "acc_norm": 0.5325670498084292,
            "acc_norm_stderr": 0.017841995750520867
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.043163785995113245,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.043163785995113245
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.39759036144578314,
            "acc_stderr": 0.038099730845402184,
            "acc_norm": 0.39759036144578314,
            "acc_norm_stderr": 0.038099730845402184
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4887459807073955,
            "acc_stderr": 0.028390897396863533,
            "acc_norm": 0.4887459807073955,
            "acc_norm_stderr": 0.028390897396863533
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.47533632286995514,
            "acc_stderr": 0.03351695167652628,
            "acc_norm": 0.47533632286995514,
            "acc_norm_stderr": 0.03351695167652628
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.043389203057924,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.043389203057924
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5656565656565656,
            "acc_stderr": 0.03531505879359183,
            "acc_norm": 0.5656565656565656,
            "acc_norm_stderr": 0.03531505879359183
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3931034482758621,
            "acc_stderr": 0.040703290137070705,
            "acc_norm": 0.3931034482758621,
            "acc_norm_stderr": 0.040703290137070705
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179963,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179963
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.41596638655462187,
            "acc_stderr": 0.03201650100739615,
            "acc_norm": 0.41596638655462187,
            "acc_norm_stderr": 0.03201650100739615
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.45897435897435895,
            "acc_stderr": 0.025265525491284295,
            "acc_norm": 0.45897435897435895,
            "acc_norm_stderr": 0.025265525491284295
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.47,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760628,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760628
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.034139638059062345,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.034139638059062345
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45483870967741935,
            "acc_stderr": 0.028327743091561063,
            "acc_norm": 0.45483870967741935,
            "acc_norm_stderr": 0.028327743091561063
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6153846153846154,
            "acc_stderr": 0.03187195347942466,
            "acc_norm": 0.6153846153846154,
            "acc_norm_stderr": 0.03187195347942466
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4528301886792453,
            "acc_stderr": 0.03063562795796182,
            "acc_norm": 0.4528301886792453,
            "acc_norm_stderr": 0.03063562795796182
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.03734535676787198,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.03734535676787198
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5472636815920398,
            "acc_stderr": 0.03519702717576915,
            "acc_norm": 0.5472636815920398,
            "acc_norm_stderr": 0.03519702717576915
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3988439306358382,
            "acc_stderr": 0.03733626655383509,
            "acc_norm": 0.3988439306358382,
            "acc_norm_stderr": 0.03733626655383509
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.26455026455026454,
            "acc_stderr": 0.022717467897708617,
            "acc_norm": 0.26455026455026454,
            "acc_norm_stderr": 0.022717467897708617
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.375,
            "acc_stderr": 0.04048439222695598,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04048439222695598
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4682080924855491,
            "acc_stderr": 0.02686462436675665,
            "acc_norm": 0.4682080924855491,
            "acc_norm_stderr": 0.02686462436675665
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4662576687116564,
            "acc_stderr": 0.03919415545048409,
            "acc_norm": 0.4662576687116564,
            "acc_norm_stderr": 0.03919415545048409
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.41358024691358025,
            "acc_stderr": 0.027402042040269955,
            "acc_norm": 0.41358024691358025,
            "acc_norm_stderr": 0.027402042040269955
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.03597524411734578,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.03597524411734578
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.04185774424022058,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.04185774424022058
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5504587155963303,
            "acc_stderr": 0.021327881417823387,
            "acc_norm": 0.5504587155963303,
            "acc_norm_stderr": 0.021327881417823387
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.041349130183033156,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.041349130183033156
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.02843109544417664,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.02843109544417664
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6033057851239669,
            "acc_stderr": 0.044658697805310094,
            "acc_norm": 0.6033057851239669,
            "acc_norm_stderr": 0.044658697805310094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.375,
            "acc_stderr": 0.039397364351956274,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.039397364351956274
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.35130718954248363,
            "acc_stderr": 0.019312676065786575,
            "acc_norm": 0.35130718954248363,
            "acc_norm_stderr": 0.019312676065786575
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.02764012054516993,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.02764012054516993
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.26785714285714285,
            "acc_stderr": 0.04203277291467763,
            "acc_norm": 0.26785714285714285,
            "acc_norm_stderr": 0.04203277291467763
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.0321495214780275,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.0321495214780275
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4485294117647059,
            "acc_stderr": 0.0302114796091216,
            "acc_norm": 0.4485294117647059,
            "acc_norm_stderr": 0.0302114796091216
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3836734693877551,
            "acc_stderr": 0.031130880396235926,
            "acc_norm": 0.3836734693877551,
            "acc_norm_stderr": 0.031130880396235926
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5822784810126582,
            "acc_stderr": 0.032103530322412685,
            "acc_norm": 0.5822784810126582,
            "acc_norm_stderr": 0.032103530322412685
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.30964797913950454,
            "acc_stderr": 0.011808598262503316,
            "acc_norm": 0.30964797913950454,
            "acc_norm_stderr": 0.011808598262503316
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.03508637358630573,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.03508637358630573
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.038835659779569286,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.038835659779569286
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26438188494492043,
            "mc1_stderr": 0.015438211119522505,
            "mc2": 0.4142296152328429,
            "mc2_stderr": 0.014852594216061029
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5053128689492326,
            "acc_stderr": 0.01718938362722971,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.016876941165045612
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "PracticeLLM/Custom-KoLLM-13B-v7",
        "model_sha": "1fa610cc17b0a5c51c0637c98b9ac671df98c27c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}