{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.48378839590443684,
            "acc_stderr": 0.014603708567414948,
            "acc_norm": 0.5298634812286689,
            "acc_norm_stderr": 0.014585305840007102
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.47161919936267677,
            "acc_stderr": 0.004981736689518744,
            "acc_norm": 0.6479784903405696,
            "acc_norm_stderr": 0.00476624553960663
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6374269005847953,
            "acc_stderr": 0.0368713061556206,
            "acc_norm": 0.6374269005847953,
            "acc_norm_stderr": 0.0368713061556206
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6116504854368932,
            "acc_stderr": 0.0482572933735639,
            "acc_norm": 0.6116504854368932,
            "acc_norm_stderr": 0.0482572933735639
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6641123882503193,
            "acc_stderr": 0.016889407235171683,
            "acc_norm": 0.6641123882503193,
            "acc_norm_stderr": 0.016889407235171683
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.043097329010363554,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.043097329010363554
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.502127659574468,
            "acc_stderr": 0.03268572658667493,
            "acc_norm": 0.502127659574468,
            "acc_norm_stderr": 0.03268572658667493
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4819277108433735,
            "acc_stderr": 0.03889951252827216,
            "acc_norm": 0.4819277108433735,
            "acc_norm_stderr": 0.03889951252827216
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6495176848874598,
            "acc_stderr": 0.027098652621301747,
            "acc_norm": 0.6495176848874598,
            "acc_norm_stderr": 0.027098652621301747
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6412556053811659,
            "acc_stderr": 0.032190792004199956,
            "acc_norm": 0.6412556053811659,
            "acc_norm_stderr": 0.032190792004199956
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6793893129770993,
            "acc_stderr": 0.040933292298342784,
            "acc_norm": 0.6793893129770993,
            "acc_norm_stderr": 0.040933292298342784
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7323232323232324,
            "acc_stderr": 0.03154449888270286,
            "acc_norm": 0.7323232323232324,
            "acc_norm_stderr": 0.03154449888270286
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5379310344827586,
            "acc_stderr": 0.04154659671707548,
            "acc_norm": 0.5379310344827586,
            "acc_norm_stderr": 0.04154659671707548
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5882352941176471,
            "acc_stderr": 0.031968769891957786,
            "acc_norm": 0.5882352941176471,
            "acc_norm_stderr": 0.031968769891957786
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5282051282051282,
            "acc_stderr": 0.025310639254933855,
            "acc_norm": 0.5282051282051282,
            "acc_norm_stderr": 0.025310639254933855
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6481481481481481,
            "acc_stderr": 0.04616631111801713,
            "acc_norm": 0.6481481481481481,
            "acc_norm_stderr": 0.04616631111801713
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4039408866995074,
            "acc_stderr": 0.0345245390382204,
            "acc_norm": 0.4039408866995074,
            "acc_norm_stderr": 0.0345245390382204
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5709677419354838,
            "acc_stderr": 0.028156036538233193,
            "acc_norm": 0.5709677419354838,
            "acc_norm_stderr": 0.028156036538233193
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7905982905982906,
            "acc_stderr": 0.02665569965392276,
            "acc_norm": 0.7905982905982906,
            "acc_norm_stderr": 0.02665569965392276
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5509433962264151,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.5509433962264151,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6454545454545455,
            "acc_stderr": 0.04582004841505418,
            "acc_norm": 0.6454545454545455,
            "acc_norm_stderr": 0.04582004841505418
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.337037037037037,
            "acc_stderr": 0.028820884666253255,
            "acc_norm": 0.337037037037037,
            "acc_norm_stderr": 0.028820884666253255
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3509933774834437,
            "acc_stderr": 0.03896981964257375,
            "acc_norm": 0.3509933774834437,
            "acc_norm_stderr": 0.03896981964257375
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.736318407960199,
            "acc_stderr": 0.03115715086935557,
            "acc_norm": 0.736318407960199,
            "acc_norm_stderr": 0.03115715086935557
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4913294797687861,
            "acc_stderr": 0.03811890988940412,
            "acc_norm": 0.4913294797687861,
            "acc_norm_stderr": 0.03811890988940412
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.38095238095238093,
            "acc_stderr": 0.025010749116137602,
            "acc_norm": 0.38095238095238093,
            "acc_norm_stderr": 0.025010749116137602
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5555555555555556,
            "acc_stderr": 0.04155319955593146,
            "acc_norm": 0.5555555555555556,
            "acc_norm_stderr": 0.04155319955593146
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.78,
            "acc_stderr": 0.04163331998932262,
            "acc_norm": 0.78,
            "acc_norm_stderr": 0.04163331998932262
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.630057803468208,
            "acc_stderr": 0.02599247202930639,
            "acc_norm": 0.630057803468208,
            "acc_norm_stderr": 0.02599247202930639
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5337423312883436,
            "acc_stderr": 0.039194155450484096,
            "acc_norm": 0.5337423312883436,
            "acc_norm_stderr": 0.039194155450484096
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6203703703703703,
            "acc_stderr": 0.02700252103451647,
            "acc_norm": 0.6203703703703703,
            "acc_norm_stderr": 0.02700252103451647
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.694300518134715,
            "acc_stderr": 0.033248379397581594,
            "acc_norm": 0.694300518134715,
            "acc_norm_stderr": 0.033248379397581594
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4473684210526316,
            "acc_stderr": 0.04677473004491199,
            "acc_norm": 0.4473684210526316,
            "acc_norm_stderr": 0.04677473004491199
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.671559633027523,
            "acc_stderr": 0.020135902797298395,
            "acc_norm": 0.671559633027523,
            "acc_norm_stderr": 0.020135902797298395
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.0442626668137991,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.0442626668137991
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6013071895424836,
            "acc_stderr": 0.028036092273891765,
            "acc_norm": 0.6013071895424836,
            "acc_norm_stderr": 0.028036092273891765
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.71900826446281,
            "acc_stderr": 0.04103203830514511,
            "acc_norm": 0.71900826446281,
            "acc_norm_stderr": 0.04103203830514511
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5986842105263158,
            "acc_stderr": 0.03988903703336284,
            "acc_norm": 0.5986842105263158,
            "acc_norm_stderr": 0.03988903703336284
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5375816993464052,
            "acc_stderr": 0.02017061497496977,
            "acc_norm": 0.5375816993464052,
            "acc_norm_stderr": 0.02017061497496977
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.029275532159704732,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.029275532159704732
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.38392857142857145,
            "acc_stderr": 0.046161430750285455,
            "acc_norm": 0.38392857142857145,
            "acc_norm_stderr": 0.046161430750285455
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4675925925925926,
            "acc_stderr": 0.03402801581358966,
            "acc_norm": 0.4675925925925926,
            "acc_norm_stderr": 0.03402801581358966
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.014355911964767864,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.014355911964767864
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.72,
            "acc_stderr": 0.045126085985421296,
            "acc_norm": 0.72,
            "acc_norm_stderr": 0.045126085985421296
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4632352941176471,
            "acc_stderr": 0.030290619180485687,
            "acc_norm": 0.4632352941176471,
            "acc_norm_stderr": 0.030290619180485687
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.7142857142857143,
            "acc_stderr": 0.028920583220675568,
            "acc_norm": 0.7142857142857143,
            "acc_norm_stderr": 0.028920583220675568
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7552742616033755,
            "acc_stderr": 0.027985699387036423,
            "acc_norm": 0.7552742616033755,
            "acc_norm_stderr": 0.027985699387036423
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3956975228161669,
            "acc_stderr": 0.012489290735449018,
            "acc_norm": 0.3956975228161669,
            "acc_norm_stderr": 0.012489290735449018
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.7352941176470589,
            "acc_stderr": 0.03096451792692341,
            "acc_norm": 0.7352941176470589,
            "acc_norm_stderr": 0.03096451792692341
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7151515151515152,
            "acc_stderr": 0.0352439084451178,
            "acc_norm": 0.7151515151515152,
            "acc_norm_stderr": 0.0352439084451178
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.379436964504284,
            "mc1_stderr": 0.016987039266142995,
            "mc2": 0.5387447145181132,
            "mc2_stderr": 0.01572553347000412
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.6044864226682408,
            "acc_stderr": 0.016810815902206042,
            "acc_norm": 0.6174734356552538,
            "acc_norm_stderr": 0.01670916538722883
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.3",
        "model_sha": "d09ba5c290b169397b8b4939fed230605c17d38b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}