{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.507679180887372,
"acc_stderr": 0.01460966744089257,
"acc_norm": 0.5563139931740614,
"acc_norm_stderr": 0.014518421825670435
},
"harness|ko_hellaswag|10": {
"acc": 0.5033857797251543,
"acc_stderr": 0.004989667009372646,
"acc_norm": 0.6352320254929297,
"acc_norm_stderr": 0.004803812631994954
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.52046783625731,
"acc_stderr": 0.0383161053282193,
"acc_norm": 0.52046783625731,
"acc_norm_stderr": 0.0383161053282193
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.049505043821289195,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.049505043821289195
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.565772669220945,
"acc_stderr": 0.01772458938967779,
"acc_norm": 0.565772669220945,
"acc_norm_stderr": 0.01772458938967779
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750573,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750573
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.39574468085106385,
"acc_stderr": 0.031967586978353627,
"acc_norm": 0.39574468085106385,
"acc_norm_stderr": 0.031967586978353627
},
"harness|ko_mmlu_virology|5": {
"acc": 0.39759036144578314,
"acc_stderr": 0.038099730845402184,
"acc_norm": 0.39759036144578314,
"acc_norm_stderr": 0.038099730845402184
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5016077170418006,
"acc_stderr": 0.02839794490780661,
"acc_norm": 0.5016077170418006,
"acc_norm_stderr": 0.02839794490780661
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5112107623318386,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.5112107623318386,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.46564885496183206,
"acc_stderr": 0.043749285605997376,
"acc_norm": 0.46564885496183206,
"acc_norm_stderr": 0.043749285605997376
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.03502975799413008,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.03502975799413008
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.42758620689655175,
"acc_stderr": 0.041227371113703316,
"acc_norm": 0.42758620689655175,
"acc_norm_stderr": 0.041227371113703316
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5,
"acc_stderr": 0.032478490123081544,
"acc_norm": 0.5,
"acc_norm_stderr": 0.032478490123081544
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4461538461538462,
"acc_stderr": 0.025203571773028337,
"acc_norm": 0.4461538461538462,
"acc_norm_stderr": 0.025203571773028337
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.4537037037037037,
"acc_stderr": 0.04812917324536823,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.04812917324536823
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509567,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509567
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.44193548387096776,
"acc_stderr": 0.02825155790684973,
"acc_norm": 0.44193548387096776,
"acc_norm_stderr": 0.02825155790684973
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6538461538461539,
"acc_stderr": 0.031166957367235903,
"acc_norm": 0.6538461538461539,
"acc_norm_stderr": 0.031166957367235903
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.42641509433962266,
"acc_stderr": 0.03043779434298305,
"acc_norm": 0.42641509433962266,
"acc_norm_stderr": 0.03043779434298305
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5671641791044776,
"acc_stderr": 0.03503490923673282,
"acc_norm": 0.5671641791044776,
"acc_norm_stderr": 0.03503490923673282
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4161849710982659,
"acc_stderr": 0.03758517775404947,
"acc_norm": 0.4161849710982659,
"acc_norm_stderr": 0.03758517775404947
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.023636975996101813,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.023636975996101813
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4375,
"acc_stderr": 0.04148415739394154,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04148415739394154
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.48554913294797686,
"acc_stderr": 0.02690784985628254,
"acc_norm": 0.48554913294797686,
"acc_norm_stderr": 0.02690784985628254
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5276073619631901,
"acc_stderr": 0.03922378290610991,
"acc_norm": 0.5276073619631901,
"acc_norm_stderr": 0.03922378290610991
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4876543209876543,
"acc_stderr": 0.027812262269327235,
"acc_norm": 0.4876543209876543,
"acc_norm_stderr": 0.027812262269327235
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5906735751295337,
"acc_stderr": 0.03548608168860806,
"acc_norm": 0.5906735751295337,
"acc_norm_stderr": 0.03548608168860806
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.0409698513984367,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.0409698513984367
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6128440366972477,
"acc_stderr": 0.02088423199264345,
"acc_norm": 0.6128440366972477,
"acc_norm_stderr": 0.02088423199264345
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.04163453031302859
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.028180596328259287,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.028180596328259287
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.03860731599316092,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.03860731599316092
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.40032679738562094,
"acc_stderr": 0.019821843688271775,
"acc_norm": 0.40032679738562094,
"acc_norm_stderr": 0.019821843688271775
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.32978723404255317,
"acc_stderr": 0.028045946942042398,
"acc_norm": 0.32978723404255317,
"acc_norm_stderr": 0.028045946942042398
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697624,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697624
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631295
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.25251396648044694,
"acc_stderr": 0.014530330201468633,
"acc_norm": 0.25251396648044694,
"acc_norm_stderr": 0.014530330201468633
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.4,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4007352941176471,
"acc_stderr": 0.029768263528933102,
"acc_norm": 0.4007352941176471,
"acc_norm_stderr": 0.029768263528933102
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4163265306122449,
"acc_stderr": 0.03155782816556164,
"acc_norm": 0.4163265306122449,
"acc_norm_stderr": 0.03155782816556164
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6244725738396625,
"acc_stderr": 0.03152256243091156,
"acc_norm": 0.6244725738396625,
"acc_norm_stderr": 0.03152256243091156
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.36114732724902215,
"acc_stderr": 0.01226793547751903,
"acc_norm": 0.36114732724902215,
"acc_norm_stderr": 0.01226793547751903
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.03503235296367993,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.03503235296367993
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.03851716319398393,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.03851716319398393
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3684210526315789,
"mc1_stderr": 0.016886551261046046,
"mc2": 0.515036547042135,
"mc2_stderr": 0.01642149706217717
},
"harness|ko_commongen_v2|2": {
"acc": 0.4734356552538371,
"acc_stderr": 0.017166075717577747,
"acc_norm": 0.4923258559622196,
"acc_norm_stderr": 0.017188329219654276
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DopeorNope/COKAL-DPO_test-v2-13b",
"model_sha": "f0547cda863e13012fcbcf468e8e8381ee5d2ecb",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}