results/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3/result_2024-01-30 00:13:23.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.36006825938566556,
"acc_stderr": 0.01402751681458519,
"acc_norm": 0.4112627986348123,
"acc_norm_stderr": 0.01437944106852208
},
"harness|ko_hellaswag|10": {
"acc": 0.3995220075682135,
"acc_stderr": 0.0048879912259502875,
"acc_norm": 0.5293766182035451,
"acc_norm_stderr": 0.004981161746388227
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.038342347441649924,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.038342347441649924
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.04825729337356389,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.04825729337356389
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5134099616858238,
"acc_stderr": 0.017873531736510385,
"acc_norm": 0.5134099616858238,
"acc_norm_stderr": 0.017873531736510385
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.5037037037037037,
"acc_stderr": 0.04319223625811331,
"acc_norm": 0.5037037037037037,
"acc_norm_stderr": 0.04319223625811331
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4,
"acc_stderr": 0.03202563076101736,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03202563076101736
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3674698795180723,
"acc_stderr": 0.03753267402120574,
"acc_norm": 0.3674698795180723,
"acc_norm_stderr": 0.03753267402120574
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5112540192926045,
"acc_stderr": 0.028390897396863526,
"acc_norm": 0.5112540192926045,
"acc_norm_stderr": 0.028390897396863526
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.43946188340807174,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.43946188340807174,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.44274809160305345,
"acc_stderr": 0.0435644720266507,
"acc_norm": 0.44274809160305345,
"acc_norm_stderr": 0.0435644720266507
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237103,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237103
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03521224908841586,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03521224908841586
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4827586206896552,
"acc_stderr": 0.04164188720169377,
"acc_norm": 0.4827586206896552,
"acc_norm_stderr": 0.04164188720169377
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179963,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179963
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4789915966386555,
"acc_stderr": 0.03244980849990029,
"acc_norm": 0.4789915966386555,
"acc_norm_stderr": 0.03244980849990029
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4205128205128205,
"acc_stderr": 0.025028610276710855,
"acc_norm": 0.4205128205128205,
"acc_norm_stderr": 0.025028610276710855
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35960591133004927,
"acc_stderr": 0.03376458246509567,
"acc_norm": 0.35960591133004927,
"acc_norm_stderr": 0.03376458246509567
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4290322580645161,
"acc_stderr": 0.02815603653823321,
"acc_norm": 0.4290322580645161,
"acc_norm_stderr": 0.02815603653823321
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6623931623931624,
"acc_stderr": 0.030980296992618558,
"acc_norm": 0.6623931623931624,
"acc_norm_stderr": 0.030980296992618558
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.43018867924528303,
"acc_stderr": 0.030471445867183235,
"acc_norm": 0.43018867924528303,
"acc_norm_stderr": 0.030471445867183235
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5,
"acc_stderr": 0.04789131426105757,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04789131426105757
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.28888888888888886,
"acc_stderr": 0.02763490726417854,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.02763490726417854
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.527363184079602,
"acc_stderr": 0.03530235517334682,
"acc_norm": 0.527363184079602,
"acc_norm_stderr": 0.03530235517334682
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.36416184971098264,
"acc_stderr": 0.03669072477416908,
"acc_norm": 0.36416184971098264,
"acc_norm_stderr": 0.03669072477416908
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.023973861998992072,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.023973861998992072
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3819444444444444,
"acc_stderr": 0.040629907841466674,
"acc_norm": 0.3819444444444444,
"acc_norm_stderr": 0.040629907841466674
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.041633319989322695,
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4797687861271676,
"acc_stderr": 0.026897049996382875,
"acc_norm": 0.4797687861271676,
"acc_norm_stderr": 0.026897049996382875
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4110429447852761,
"acc_stderr": 0.038656978537853624,
"acc_norm": 0.4110429447852761,
"acc_norm_stderr": 0.038656978537853624
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.027801656212323667,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.027801656212323667
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.49222797927461137,
"acc_stderr": 0.03608003225569654,
"acc_norm": 0.49222797927461137,
"acc_norm_stderr": 0.03608003225569654
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.35964912280701755,
"acc_stderr": 0.04514496132873632,
"acc_norm": 0.35964912280701755,
"acc_norm_stderr": 0.04514496132873632
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5486238532110091,
"acc_stderr": 0.021335714711268796,
"acc_norm": 0.5486238532110091,
"acc_norm_stderr": 0.021335714711268796
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488795,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488795
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.42810457516339867,
"acc_stderr": 0.028332397483664274,
"acc_norm": 0.42810457516339867,
"acc_norm_stderr": 0.028332397483664274
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.04040311062490436,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.04040311062490436
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.019751726508762637,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.019751726508762637
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2872340425531915,
"acc_stderr": 0.026992199173064352,
"acc_norm": 0.2872340425531915,
"acc_norm_stderr": 0.026992199173064352
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.0420327729146776,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.0420327729146776
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.030546745264953195,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.030546745264953195
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.293854748603352,
"acc_stderr": 0.01523507577671961,
"acc_norm": 0.293854748603352,
"acc_norm_stderr": 0.01523507577671961
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3235294117647059,
"acc_stderr": 0.028418208619406797,
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.028418208619406797
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3510204081632653,
"acc_stderr": 0.030555316755573637,
"acc_norm": 0.3510204081632653,
"acc_norm_stderr": 0.030555316755573637
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5527426160337553,
"acc_stderr": 0.03236564251614192,
"acc_norm": 0.5527426160337553,
"acc_norm_stderr": 0.03236564251614192
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3155149934810952,
"acc_stderr": 0.011869184843058636,
"acc_norm": 0.3155149934810952,
"acc_norm_stderr": 0.011869184843058636
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4950980392156863,
"acc_stderr": 0.03509143375606786,
"acc_norm": 0.4950980392156863,
"acc_norm_stderr": 0.03509143375606786
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5757575757575758,
"acc_stderr": 0.038592681420702636,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.038592681420702636
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.31946144430844553,
"mc1_stderr": 0.0163226441829605,
"mc2": 0.45470433345322675,
"mc2_stderr": 0.015415678576305275
},
"harness|ko_commongen_v2|2": {
"acc": 0.5926800472255017,
"acc_stderr": 0.01689245669519127,
"acc_norm": 0.6257378984651711,
"acc_norm_stderr": 0.016637917789798742
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3",
"model_sha": "65ff17e2f574d64c727ff839dc37b04147752960",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}