{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3097269624573379,
            "acc_stderr": 0.01351205841523836,
            "acc_norm": 0.38310580204778155,
            "acc_norm_stderr": 0.014206472661672877
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35331607249551883,
            "acc_stderr": 0.004770229206838901,
            "acc_norm": 0.4451304521011751,
            "acc_norm_stderr": 0.004959645263390238
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.03826882417660369,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.03826882417660369
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.46601941747572817,
            "acc_stderr": 0.0493929144727348,
            "acc_norm": 0.46601941747572817,
            "acc_norm_stderr": 0.0493929144727348
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.49169859514687103,
            "acc_stderr": 0.017877498991072008,
            "acc_norm": 0.49169859514687103,
            "acc_norm_stderr": 0.017877498991072008
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37777777777777777,
            "acc_stderr": 0.04188307537595853,
            "acc_norm": 0.37777777777777777,
            "acc_norm_stderr": 0.04188307537595853
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4,
            "acc_stderr": 0.03202563076101735,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03202563076101735
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3674698795180723,
            "acc_stderr": 0.03753267402120574,
            "acc_norm": 0.3674698795180723,
            "acc_norm_stderr": 0.03753267402120574
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5080385852090032,
            "acc_stderr": 0.028394421370984538,
            "acc_norm": 0.5080385852090032,
            "acc_norm_stderr": 0.028394421370984538
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.42152466367713004,
            "acc_stderr": 0.03314190222110658,
            "acc_norm": 0.42152466367713004,
            "acc_norm_stderr": 0.03314190222110658
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5038167938931297,
            "acc_stderr": 0.04385162325601553,
            "acc_norm": 0.5038167938931297,
            "acc_norm_stderr": 0.04385162325601553
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.03521224908841586,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.03521224908841586
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.04164188720169377,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.04164188720169377
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201942,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201942
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.03214536859788639,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.03214536859788639
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.45897435897435895,
            "acc_stderr": 0.025265525491284295,
            "acc_norm": 0.45897435897435895,
            "acc_norm_stderr": 0.025265525491284295
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.04826217294139894,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.04826217294139894
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.37438423645320196,
            "acc_stderr": 0.03405155380561952,
            "acc_norm": 0.37438423645320196,
            "acc_norm_stderr": 0.03405155380561952
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.44193548387096776,
            "acc_stderr": 0.02825155790684974,
            "acc_norm": 0.44193548387096776,
            "acc_norm_stderr": 0.02825155790684974
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6965811965811965,
            "acc_stderr": 0.03011821010694265,
            "acc_norm": 0.6965811965811965,
            "acc_norm_stderr": 0.03011821010694265
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5132075471698113,
            "acc_stderr": 0.030762134874500476,
            "acc_norm": 0.5132075471698113,
            "acc_norm_stderr": 0.030762134874500476
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04769300568972743,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04769300568972743
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.02803792996911499,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.02803792996911499
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5323383084577115,
            "acc_stderr": 0.03528131472933607,
            "acc_norm": 0.5323383084577115,
            "acc_norm_stderr": 0.03528131472933607
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.037585177754049466,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.037585177754049466
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.023973861998992062,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.023973861998992062
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237101,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237101
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49421965317919075,
            "acc_stderr": 0.026917296179149116,
            "acc_norm": 0.49421965317919075,
            "acc_norm_stderr": 0.026917296179149116
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.39263803680981596,
            "acc_stderr": 0.03836740907831029,
            "acc_norm": 0.39263803680981596,
            "acc_norm_stderr": 0.03836740907831029
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.45987654320987653,
            "acc_stderr": 0.027731022753539274,
            "acc_norm": 0.45987654320987653,
            "acc_norm_stderr": 0.027731022753539274
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.538860103626943,
            "acc_stderr": 0.035975244117345775,
            "acc_norm": 0.538860103626943,
            "acc_norm_stderr": 0.035975244117345775
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.041857744240220575,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.041857744240220575
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5064220183486239,
            "acc_stderr": 0.021435554820013077,
            "acc_norm": 0.5064220183486239,
            "acc_norm_stderr": 0.021435554820013077
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.043435254289490965,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.043435254289490965
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5,
            "acc_stderr": 0.028629916715693413,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.028629916715693413
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.043913262867240704,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.043913262867240704
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4276315789473684,
            "acc_stderr": 0.040260970832965585,
            "acc_norm": 0.4276315789473684,
            "acc_norm_stderr": 0.040260970832965585
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.36764705882352944,
            "acc_stderr": 0.01950629169395486,
            "acc_norm": 0.36764705882352944,
            "acc_norm_stderr": 0.01950629169395486
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2978723404255319,
            "acc_stderr": 0.027281608344469414,
            "acc_norm": 0.2978723404255319,
            "acc_norm_stderr": 0.027281608344469414
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973646,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973646
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4398148148148148,
            "acc_stderr": 0.033851779760448106,
            "acc_norm": 0.4398148148148148,
            "acc_norm_stderr": 0.033851779760448106
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3039106145251397,
            "acc_stderr": 0.01538284558758452,
            "acc_norm": 0.3039106145251397,
            "acc_norm_stderr": 0.01538284558758452
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4338235294117647,
            "acc_stderr": 0.030105636570016636,
            "acc_norm": 0.4338235294117647,
            "acc_norm_stderr": 0.030105636570016636
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5346938775510204,
            "acc_stderr": 0.03193207024425314,
            "acc_norm": 0.5346938775510204,
            "acc_norm_stderr": 0.03193207024425314
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5864978902953587,
            "acc_stderr": 0.03205649904851858,
            "acc_norm": 0.5864978902953587,
            "acc_norm_stderr": 0.03205649904851858
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3011734028683181,
            "acc_stderr": 0.01171714875164844,
            "acc_norm": 0.3011734028683181,
            "acc_norm_stderr": 0.01171714875164844
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.03465868196380757,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.03465868196380757
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.509090909090909,
            "acc_stderr": 0.0390369864774844,
            "acc_norm": 0.509090909090909,
            "acc_norm_stderr": 0.0390369864774844
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.33047735618115054,
            "mc1_stderr": 0.016466769613698293,
            "mc2": 0.5058685155948915,
            "mc2_stderr": 0.01583111147395693
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.29043683589138136,
            "acc_stderr": 0.01560760256981463,
            "acc_norm": 0.38134592680047225,
            "acc_norm_stderr": 0.01669930176882808
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "maywell/Synatra_TbST02M_IN01",
        "model_sha": "ba8eef9720471e65dc86e856d2a3812da8b53527",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}