{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.5093856655290102,
            "acc_stderr": 0.014608816322065,
            "acc_norm": 0.5691126279863481,
            "acc_norm_stderr": 0.014471133392642471
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.48526190001991637,
            "acc_stderr": 0.004987613263678177,
            "acc_norm": 0.6581358295160327,
            "acc_norm_stderr": 0.004733649274814523
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.672514619883041,
            "acc_stderr": 0.035993357714560276,
            "acc_norm": 0.672514619883041,
            "acc_norm_stderr": 0.035993357714560276
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6601941747572816,
            "acc_stderr": 0.046897659372781335,
            "acc_norm": 0.6601941747572816,
            "acc_norm_stderr": 0.046897659372781335
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6475095785440613,
            "acc_stderr": 0.017084150244081376,
            "acc_norm": 0.6475095785440613,
            "acc_norm_stderr": 0.017084150244081376
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.43703703703703706,
            "acc_stderr": 0.04284958639753398,
            "acc_norm": 0.43703703703703706,
            "acc_norm_stderr": 0.04284958639753398
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4553191489361702,
            "acc_stderr": 0.03255525359340354,
            "acc_norm": 0.4553191489361702,
            "acc_norm_stderr": 0.03255525359340354
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4759036144578313,
            "acc_stderr": 0.03887971849597264,
            "acc_norm": 0.4759036144578313,
            "acc_norm_stderr": 0.03887971849597264
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5916398713826366,
            "acc_stderr": 0.02791705074848463,
            "acc_norm": 0.5916398713826366,
            "acc_norm_stderr": 0.02791705074848463
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5695067264573991,
            "acc_stderr": 0.033231973029429394,
            "acc_norm": 0.5695067264573991,
            "acc_norm_stderr": 0.033231973029429394
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5419847328244275,
            "acc_stderr": 0.04369802690578757,
            "acc_norm": 0.5419847328244275,
            "acc_norm_stderr": 0.04369802690578757
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985905,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985905
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7575757575757576,
            "acc_stderr": 0.030532892233932036,
            "acc_norm": 0.7575757575757576,
            "acc_norm_stderr": 0.030532892233932036
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4689655172413793,
            "acc_stderr": 0.04158632762097828,
            "acc_norm": 0.4689655172413793,
            "acc_norm_stderr": 0.04158632762097828
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201942,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201942
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6722689075630253,
            "acc_stderr": 0.03048991141767323,
            "acc_norm": 0.6722689075630253,
            "acc_norm_stderr": 0.03048991141767323
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5923076923076923,
            "acc_stderr": 0.02491524398598785,
            "acc_norm": 0.5923076923076923,
            "acc_norm_stderr": 0.02491524398598785
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6759259259259259,
            "acc_stderr": 0.045245960070300476,
            "acc_norm": 0.6759259259259259,
            "acc_norm_stderr": 0.045245960070300476
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.03486731727419872,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.03486731727419872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5838709677419355,
            "acc_stderr": 0.02804098138076153,
            "acc_norm": 0.5838709677419355,
            "acc_norm_stderr": 0.02804098138076153
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.027236013946196673,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.027236013946196673
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5547169811320755,
            "acc_stderr": 0.030588052974270658,
            "acc_norm": 0.5547169811320755,
            "acc_norm_stderr": 0.030588052974270658
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.04607582090719976,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.04607582090719976
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.02911661760608301,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.02911661760608301
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6417910447761194,
            "acc_stderr": 0.03390393042268815,
            "acc_norm": 0.6417910447761194,
            "acc_norm_stderr": 0.03390393042268815
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.45664739884393063,
            "acc_stderr": 0.03798106566014498,
            "acc_norm": 0.45664739884393063,
            "acc_norm_stderr": 0.03798106566014498
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.4576719576719577,
            "acc_stderr": 0.025658868862058325,
            "acc_norm": 0.4576719576719577,
            "acc_norm_stderr": 0.025658868862058325
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.041553199555931467,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.041553199555931467
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5953757225433526,
            "acc_stderr": 0.026424816594009852,
            "acc_norm": 0.5953757225433526,
            "acc_norm_stderr": 0.026424816594009852
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4662576687116564,
            "acc_stderr": 0.039194155450484096,
            "acc_norm": 0.4662576687116564,
            "acc_norm_stderr": 0.039194155450484096
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6111111111111112,
            "acc_stderr": 0.02712511551316685,
            "acc_norm": 0.6111111111111112,
            "acc_norm_stderr": 0.02712511551316685
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7202072538860104,
            "acc_stderr": 0.03239637046735704,
            "acc_norm": 0.7202072538860104,
            "acc_norm_stderr": 0.03239637046735704
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4649122807017544,
            "acc_stderr": 0.046920083813689104,
            "acc_norm": 0.4649122807017544,
            "acc_norm_stderr": 0.046920083813689104
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6770642201834862,
            "acc_stderr": 0.020048115923415332,
            "acc_norm": 0.6770642201834862,
            "acc_norm_stderr": 0.020048115923415332
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.46825396825396826,
            "acc_stderr": 0.04463112720677172,
            "acc_norm": 0.46825396825396826,
            "acc_norm_stderr": 0.04463112720677172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5718954248366013,
            "acc_stderr": 0.028332397483664274,
            "acc_norm": 0.5718954248366013,
            "acc_norm_stderr": 0.028332397483664274
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.62,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212094,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212094
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5986842105263158,
            "acc_stderr": 0.039889037033362836,
            "acc_norm": 0.5986842105263158,
            "acc_norm_stderr": 0.039889037033362836
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.49836601307189543,
            "acc_stderr": 0.020227726838150124,
            "acc_norm": 0.49836601307189543,
            "acc_norm_stderr": 0.020227726838150124
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.029275532159704725,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.029275532159704725
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.39285714285714285,
            "acc_stderr": 0.04635550135609976,
            "acc_norm": 0.39285714285714285,
            "acc_norm_stderr": 0.04635550135609976
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.03395322726375797,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.03395322726375797
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.3039106145251397,
            "acc_stderr": 0.015382845587584517,
            "acc_norm": 0.3039106145251397,
            "acc_norm_stderr": 0.015382845587584517
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5257352941176471,
            "acc_stderr": 0.030332578094555033,
            "acc_norm": 0.5257352941176471,
            "acc_norm_stderr": 0.030332578094555033
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5265306122448979,
            "acc_stderr": 0.03196412734523272,
            "acc_norm": 0.5265306122448979,
            "acc_norm_stderr": 0.03196412734523272
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.729957805907173,
            "acc_stderr": 0.028900721906293426,
            "acc_norm": 0.729957805907173,
            "acc_norm_stderr": 0.028900721906293426
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4028683181225554,
            "acc_stderr": 0.012526955577118007,
            "acc_norm": 0.4028683181225554,
            "acc_norm_stderr": 0.012526955577118007
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6862745098039216,
            "acc_stderr": 0.03256685484460388,
            "acc_norm": 0.6862745098039216,
            "acc_norm_stderr": 0.03256685484460388
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7090909090909091,
            "acc_stderr": 0.03546563019624336,
            "acc_norm": 0.7090909090909091,
            "acc_norm_stderr": 0.03546563019624336
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.41982864137086906,
            "mc1_stderr": 0.017277030301775766,
            "mc2": 0.5876649986857919,
            "mc2_stderr": 0.016374992952232537
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5348288075560803,
            "acc_stderr": 0.017148598015747422,
            "acc_norm": 0.5430932703659976,
            "acc_norm_stderr": 0.017126389093086777
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.0",
        "model_sha": "d42328a3af59fe42ef67d45ddf256538fa5d5d0c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}