|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.39505119453924914,
            "acc_stderr": 0.014285898292938172,
            "acc_norm": 0.44112627986348124,
            "acc_norm_stderr": 0.014509747749064664
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38637721569408484,
            "acc_stderr": 0.0048592361915797905,
            "acc_norm": 0.49960167297351127,
            "acc_norm_stderr": 0.0049897798280438485
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5146198830409356,
            "acc_stderr": 0.03833185275213025,
            "acc_norm": 0.5146198830409356,
            "acc_norm_stderr": 0.03833185275213025
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6213592233009708,
            "acc_stderr": 0.04802694698258975,
            "acc_norm": 0.6213592233009708,
            "acc_norm_stderr": 0.04802694698258975
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5159642401021711,
            "acc_stderr": 0.01787084750608174,
            "acc_norm": 0.5159642401021711,
            "acc_norm_stderr": 0.01787084750608174
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45185185185185184,
            "acc_stderr": 0.042992689054808624,
            "acc_norm": 0.45185185185185184,
            "acc_norm_stderr": 0.042992689054808624
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3872340425531915,
            "acc_stderr": 0.03184389265339526,
            "acc_norm": 0.3872340425531915,
            "acc_norm_stderr": 0.03184389265339526
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.40963855421686746,
            "acc_stderr": 0.03828401115079021,
            "acc_norm": 0.40963855421686746,
            "acc_norm_stderr": 0.03828401115079021
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5273311897106109,
            "acc_stderr": 0.02835563356832818,
            "acc_norm": 0.5273311897106109,
            "acc_norm_stderr": 0.02835563356832818
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5067264573991032,
            "acc_stderr": 0.033554765962343545,
            "acc_norm": 0.5067264573991032,
            "acc_norm_stderr": 0.033554765962343545
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.45038167938931295,
            "acc_stderr": 0.04363643698524779,
            "acc_norm": 0.45038167938931295,
            "acc_norm_stderr": 0.04363643698524779
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6313131313131313,
            "acc_stderr": 0.03437305501980619,
            "acc_norm": 0.6313131313131313,
            "acc_norm_stderr": 0.03437305501980619
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482758,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482758
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5,
            "acc_stderr": 0.032478490123081544,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.032478490123081544
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4717948717948718,
            "acc_stderr": 0.025310639254933903,
            "acc_norm": 0.4717948717948718,
            "acc_norm_stderr": 0.025310639254933903
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.46798029556650245,
            "acc_stderr": 0.035107665979592154,
            "acc_norm": 0.46798029556650245,
            "acc_norm_stderr": 0.035107665979592154
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47419354838709676,
            "acc_stderr": 0.028406095057653326,
            "acc_norm": 0.47419354838709676,
            "acc_norm_stderr": 0.028406095057653326
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7136752136752137,
            "acc_stderr": 0.029614323690456648,
            "acc_norm": 0.7136752136752137,
            "acc_norm_stderr": 0.029614323690456648
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4867924528301887,
            "acc_stderr": 0.030762134874500482,
            "acc_norm": 0.4867924528301887,
            "acc_norm_stderr": 0.030762134874500482
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5181818181818182,
            "acc_stderr": 0.04785964010794915,
            "acc_norm": 0.5181818181818182,
            "acc_norm_stderr": 0.04785964010794915
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.028742040903948492,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.028742040903948492
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.40397350993377484,
            "acc_stderr": 0.0400648568536534,
            "acc_norm": 0.40397350993377484,
            "acc_norm_stderr": 0.0400648568536534
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6567164179104478,
            "acc_stderr": 0.03357379665433431,
            "acc_norm": 0.6567164179104478,
            "acc_norm_stderr": 0.03357379665433431
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4508670520231214,
            "acc_stderr": 0.03794012674697028,
            "acc_norm": 0.4508670520231214,
            "acc_norm_stderr": 0.03794012674697028
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.025010749116137588,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.025010749116137588
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3958333333333333,
            "acc_stderr": 0.04089465449325582,
            "acc_norm": 0.3958333333333333,
            "acc_norm_stderr": 0.04089465449325582
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.523121387283237,
            "acc_stderr": 0.026890297881303118,
            "acc_norm": 0.523121387283237,
            "acc_norm_stderr": 0.026890297881303118
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5521472392638037,
            "acc_stderr": 0.03906947479456607,
            "acc_norm": 0.5521472392638037,
            "acc_norm_stderr": 0.03906947479456607
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4845679012345679,
            "acc_stderr": 0.02780749004427621,
            "acc_norm": 0.4845679012345679,
            "acc_norm_stderr": 0.02780749004427621
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5854922279792746,
            "acc_stderr": 0.035553003195576686,
            "acc_norm": 0.5854922279792746,
            "acc_norm_stderr": 0.035553003195576686
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.0433913832257986,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.0433913832257986
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5302752293577981,
            "acc_stderr": 0.021397988604936965,
            "acc_norm": 0.5302752293577981,
            "acc_norm_stderr": 0.021397988604936965
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04285714285714281,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04285714285714281
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5163398692810458,
            "acc_stderr": 0.028614624752805434,
            "acc_norm": 0.5163398692810458,
            "acc_norm_stderr": 0.028614624752805434
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6942148760330579,
            "acc_stderr": 0.04205953933884123,
            "acc_norm": 0.6942148760330579,
            "acc_norm_stderr": 0.04205953933884123
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4868421052631579,
            "acc_stderr": 0.04067533136309174,
            "acc_norm": 0.4868421052631579,
            "acc_norm_stderr": 0.04067533136309174
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.01983517648437538,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.01983517648437538
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.35815602836879434,
            "acc_stderr": 0.028602085862759415,
            "acc_norm": 0.35815602836879434,
            "acc_norm_stderr": 0.028602085862759415
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.04327040932578728,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.04327040932578728
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.41203703703703703,
            "acc_stderr": 0.03356787758160835,
            "acc_norm": 0.41203703703703703,
            "acc_norm_stderr": 0.03356787758160835
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26145251396648045,
            "acc_stderr": 0.014696599650364555,
            "acc_norm": 0.26145251396648045,
            "acc_norm_stderr": 0.014696599650364555
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4007352941176471,
            "acc_stderr": 0.02976826352893311,
            "acc_norm": 0.4007352941176471,
            "acc_norm_stderr": 0.02976826352893311
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6122448979591837,
            "acc_stderr": 0.031192230726795656,
            "acc_norm": 0.6122448979591837,
            "acc_norm_stderr": 0.031192230726795656
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6413502109704642,
            "acc_stderr": 0.03121956944530186,
            "acc_norm": 0.6413502109704642,
            "acc_norm_stderr": 0.03121956944530186
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3539765319426336,
            "acc_stderr": 0.01221350473173165,
            "acc_norm": 0.3539765319426336,
            "acc_norm_stderr": 0.01221350473173165
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5,
            "acc_stderr": 0.03509312031717982,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03509312031717982
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5878787878787879,
            "acc_stderr": 0.03843566993588717,
            "acc_norm": 0.5878787878787879,
            "acc_norm_stderr": 0.03843566993588717
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.26560587515299877,
            "mc1_stderr": 0.015461027627253595,
            "mc2": 0.41198238256398484,
            "mc2_stderr": 0.015155918602262708
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4002361275088548,
            "acc_stderr": 0.016844693510505045,
            "acc_norm": 0.46162927981109797,
            "acc_norm_stderr": 0.01713966022184556
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Changgil/K2S3-Mistral-7b-v1.48",
        "model_sha": "ac358c944bceb2129fb45398c7722321df5f55eb",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}