{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3703071672354949,
"acc_stderr": 0.01411129875167495,
"acc_norm": 0.4308873720136519,
"acc_norm_stderr": 0.01447113339264247
},
"harness|ko_hellaswag|10": {
"acc": 0.41455885281816374,
"acc_stderr": 0.004916388962142332,
"acc_norm": 0.5623381796454889,
"acc_norm_stderr": 0.004950848456984546
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4853801169590643,
"acc_stderr": 0.038331852752130205,
"acc_norm": 0.4853801169590643,
"acc_norm_stderr": 0.038331852752130205
},
"harness|ko_mmlu_management|5": {
"acc": 0.3786407766990291,
"acc_stderr": 0.04802694698258975,
"acc_norm": 0.3786407766990291,
"acc_norm_stderr": 0.04802694698258975
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4878671775223499,
"acc_stderr": 0.017874698667491345,
"acc_norm": 0.4878671775223499,
"acc_norm_stderr": 0.017874698667491345
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.42962962962962964,
"acc_stderr": 0.04276349494376599,
"acc_norm": 0.42962962962962964,
"acc_norm_stderr": 0.04276349494376599
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3829787234042553,
"acc_stderr": 0.03177821250236922,
"acc_norm": 0.3829787234042553,
"acc_norm_stderr": 0.03177821250236922
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42771084337349397,
"acc_stderr": 0.038515976837185335,
"acc_norm": 0.42771084337349397,
"acc_norm_stderr": 0.038515976837185335
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4662379421221865,
"acc_stderr": 0.02833327710956278,
"acc_norm": 0.4662379421221865,
"acc_norm_stderr": 0.02833327710956278
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.49327354260089684,
"acc_stderr": 0.033554765962343545,
"acc_norm": 0.49327354260089684,
"acc_norm_stderr": 0.033554765962343545
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.40458015267175573,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.40458015267175573,
"acc_norm_stderr": 0.043046937953806645
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.4898989898989899,
"acc_stderr": 0.035616254886737454,
"acc_norm": 0.4898989898989899,
"acc_norm_stderr": 0.035616254886737454
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3724137931034483,
"acc_stderr": 0.0402873153294756,
"acc_norm": 0.3724137931034483,
"acc_norm_stderr": 0.0402873153294756
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3865546218487395,
"acc_stderr": 0.03163145807552379,
"acc_norm": 0.3865546218487395,
"acc_norm_stderr": 0.03163145807552379
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3974358974358974,
"acc_stderr": 0.024811920017903836,
"acc_norm": 0.3974358974358974,
"acc_norm_stderr": 0.024811920017903836
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.04832853553437055,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.04832853553437055
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3793103448275862,
"acc_stderr": 0.034139638059062345,
"acc_norm": 0.3793103448275862,
"acc_norm_stderr": 0.034139638059062345
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4290322580645161,
"acc_stderr": 0.028156036538233217,
"acc_norm": 0.4290322580645161,
"acc_norm_stderr": 0.028156036538233217
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6153846153846154,
"acc_stderr": 0.03187195347942466,
"acc_norm": 0.6153846153846154,
"acc_norm_stderr": 0.03187195347942466
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4226415094339623,
"acc_stderr": 0.03040233144576954,
"acc_norm": 0.4226415094339623,
"acc_norm_stderr": 0.03040233144576954
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.43636363636363634,
"acc_stderr": 0.04750185058907297,
"acc_norm": 0.43636363636363634,
"acc_norm_stderr": 0.04750185058907297
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25555555555555554,
"acc_stderr": 0.026593939101844058,
"acc_norm": 0.25555555555555554,
"acc_norm_stderr": 0.026593939101844058
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.03631329803969653,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.4975124378109453,
"acc_stderr": 0.03535490150137289,
"acc_norm": 0.4975124378109453,
"acc_norm_stderr": 0.03535490150137289
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3468208092485549,
"acc_stderr": 0.036291466701596636,
"acc_norm": 0.3468208092485549,
"acc_norm_stderr": 0.036291466701596636
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2804232804232804,
"acc_stderr": 0.02313528797432563,
"acc_norm": 0.2804232804232804,
"acc_norm_stderr": 0.02313528797432563
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3819444444444444,
"acc_stderr": 0.040629907841466674,
"acc_norm": 0.3819444444444444,
"acc_norm_stderr": 0.040629907841466674
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237101,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237101
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.44508670520231214,
"acc_stderr": 0.02675625512966377,
"acc_norm": 0.44508670520231214,
"acc_norm_stderr": 0.02675625512966377
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.39263803680981596,
"acc_stderr": 0.03836740907831027,
"acc_norm": 0.39263803680981596,
"acc_norm_stderr": 0.03836740907831027
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.44135802469135804,
"acc_stderr": 0.02762873715566877,
"acc_norm": 0.44135802469135804,
"acc_norm_stderr": 0.02762873715566877
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.46113989637305697,
"acc_stderr": 0.035975244117345775,
"acc_norm": 0.46113989637305697,
"acc_norm_stderr": 0.035975244117345775
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03947152782669415
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.48440366972477067,
"acc_stderr": 0.02142689153920805,
"acc_norm": 0.48440366972477067,
"acc_norm_stderr": 0.02142689153920805
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.039325376803928704,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.039325376803928704
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3954248366013072,
"acc_stderr": 0.027996723180631455,
"acc_norm": 0.3954248366013072,
"acc_norm_stderr": 0.027996723180631455
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5537190082644629,
"acc_stderr": 0.0453793517794788,
"acc_norm": 0.5537190082644629,
"acc_norm_stderr": 0.0453793517794788
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.375,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.375,
"acc_norm_stderr": 0.039397364351956274
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3464052287581699,
"acc_stderr": 0.019249785691717217,
"acc_norm": 0.3464052287581699,
"acc_norm_stderr": 0.019249785691717217
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.026684564340460997,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.026684564340460997
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.04157751539865629,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.30092592592592593,
"acc_stderr": 0.031280390843298825,
"acc_norm": 0.30092592592592593,
"acc_norm_stderr": 0.031280390843298825
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3639705882352941,
"acc_stderr": 0.029227192460032025,
"acc_norm": 0.3639705882352941,
"acc_norm_stderr": 0.029227192460032025
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.45714285714285713,
"acc_stderr": 0.03189141832421397,
"acc_norm": 0.45714285714285713,
"acc_norm_stderr": 0.03189141832421397
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5316455696202531,
"acc_stderr": 0.032481974005110756,
"acc_norm": 0.5316455696202531,
"acc_norm_stderr": 0.032481974005110756
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3011734028683181,
"acc_stderr": 0.011717148751648431,
"acc_norm": 0.3011734028683181,
"acc_norm_stderr": 0.011717148751648431
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.39705882352941174,
"acc_stderr": 0.03434131164719128,
"acc_norm": 0.39705882352941174,
"acc_norm_stderr": 0.03434131164719128
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.4666666666666667,
"acc_stderr": 0.03895658065271846,
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.03895658065271846
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.27906976744186046,
"mc1_stderr": 0.015702107090627884,
"mc2": 0.43136545246089486,
"mc2_stderr": 0.014881985381415318
},
"harness|ko_commongen_v2|2": {
"acc": 0.4935064935064935,
"acc_stderr": 0.01718890435907731,
"acc_norm": 0.5678866587957497,
"acc_norm_stderr": 0.017031170198851742
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "HY-KDPARK/llama-2-koen-13b-dpo-v0.4",
"model_sha": "a3cd8b7790f43c87f36f7e7289a1a210102dd26f",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}