{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3438566552901024,
"acc_stderr": 0.01388064457015621,
"acc_norm": 0.38822525597269625,
"acc_norm_stderr": 0.014241614207414046
},
"harness|ko_hellaswag|10": {
"acc": 0.39494124676359293,
"acc_stderr": 0.004878390226591714,
"acc_norm": 0.5264887472615017,
"acc_norm_stderr": 0.004982774293927772
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.49707602339181284,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.49707602339181284,
"acc_norm_stderr": 0.03834759370936839
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.048257293373563895,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.048257293373563895
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5134099616858238,
"acc_stderr": 0.017873531736510385,
"acc_norm": 0.5134099616858238,
"acc_norm_stderr": 0.017873531736510385
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4962962962962963,
"acc_stderr": 0.043192236258113303,
"acc_norm": 0.4962962962962963,
"acc_norm_stderr": 0.043192236258113303
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621502,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621502
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.41702127659574467,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.41702127659574467,
"acc_norm_stderr": 0.03223276266711712
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3674698795180723,
"acc_stderr": 0.03753267402120574,
"acc_norm": 0.3674698795180723,
"acc_norm_stderr": 0.03753267402120574
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5048231511254019,
"acc_stderr": 0.028396770444111298,
"acc_norm": 0.5048231511254019,
"acc_norm_stderr": 0.028396770444111298
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.43946188340807174,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.43946188340807174,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.45038167938931295,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.45038167938931295,
"acc_norm_stderr": 0.04363643698524779
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.42,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.42,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5656565656565656,
"acc_stderr": 0.03531505879359182,
"acc_norm": 0.5656565656565656,
"acc_norm_stderr": 0.03531505879359182
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.45517241379310347,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.45517241379310347,
"acc_norm_stderr": 0.04149886942192117
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.17647058823529413,
"acc_stderr": 0.03793281185307809,
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.03793281185307809
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.47478991596638653,
"acc_stderr": 0.0324371805513741,
"acc_norm": 0.47478991596638653,
"acc_norm_stderr": 0.0324371805513741
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4205128205128205,
"acc_stderr": 0.025028610276710855,
"acc_norm": 0.4205128205128205,
"acc_norm_stderr": 0.025028610276710855
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.35467980295566504,
"acc_stderr": 0.03366124489051448,
"acc_norm": 0.35467980295566504,
"acc_norm_stderr": 0.03366124489051448
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4161290322580645,
"acc_stderr": 0.02804098138076155,
"acc_norm": 0.4161290322580645,
"acc_norm_stderr": 0.02804098138076155
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6623931623931624,
"acc_stderr": 0.030980296992618554,
"acc_norm": 0.6623931623931624,
"acc_norm_stderr": 0.030980296992618554
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.44150943396226416,
"acc_stderr": 0.030561590426731837,
"acc_norm": 0.44150943396226416,
"acc_norm_stderr": 0.030561590426731837
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4727272727272727,
"acc_stderr": 0.04782001791380063,
"acc_norm": 0.4727272727272727,
"acc_norm_stderr": 0.04782001791380063
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.027309140588230186,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.027309140588230186
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.038227469376587525,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.038227469376587525
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5373134328358209,
"acc_stderr": 0.03525675167467974,
"acc_norm": 0.5373134328358209,
"acc_norm_stderr": 0.03525675167467974
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.37572254335260113,
"acc_stderr": 0.03692820767264867,
"acc_norm": 0.37572254335260113,
"acc_norm_stderr": 0.03692820767264867
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.30423280423280424,
"acc_stderr": 0.023695415009463087,
"acc_norm": 0.30423280423280424,
"acc_norm_stderr": 0.023695415009463087
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3819444444444444,
"acc_stderr": 0.040629907841466674,
"acc_norm": 0.3819444444444444,
"acc_norm_stderr": 0.040629907841466674
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4797687861271676,
"acc_stderr": 0.026897049996382875,
"acc_norm": 0.4797687861271676,
"acc_norm_stderr": 0.026897049996382875
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4110429447852761,
"acc_stderr": 0.038656978537853624,
"acc_norm": 0.4110429447852761,
"acc_norm_stderr": 0.038656978537853624
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.49382716049382713,
"acc_stderr": 0.027818623962583302,
"acc_norm": 0.49382716049382713,
"acc_norm_stderr": 0.027818623962583302
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5077720207253886,
"acc_stderr": 0.03608003225569654,
"acc_norm": 0.5077720207253886,
"acc_norm_stderr": 0.03608003225569654
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.3508771929824561,
"acc_stderr": 0.044895393502706986,
"acc_norm": 0.3508771929824561,
"acc_norm_stderr": 0.044895393502706986
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5486238532110091,
"acc_stderr": 0.0213357147112688,
"acc_norm": 0.5486238532110091,
"acc_norm_stderr": 0.0213357147112688
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235173,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235173
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.028431095444176647,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.028431095444176647
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6198347107438017,
"acc_stderr": 0.04431324501968431,
"acc_norm": 0.6198347107438017,
"acc_norm_stderr": 0.04431324501968431
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40131578947368424,
"acc_stderr": 0.039889037033362836,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.039889037033362836
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4035947712418301,
"acc_stderr": 0.01984828016840116,
"acc_norm": 0.4035947712418301,
"acc_norm_stderr": 0.01984828016840116
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2907801418439716,
"acc_stderr": 0.027090664368353178,
"acc_norm": 0.2907801418439716,
"acc_norm_stderr": 0.027090664368353178
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697623,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697623
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.30092592592592593,
"acc_stderr": 0.03128039084329882,
"acc_norm": 0.30092592592592593,
"acc_norm_stderr": 0.03128039084329882
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2759776536312849,
"acc_stderr": 0.014950103002475342,
"acc_norm": 0.2759776536312849,
"acc_norm_stderr": 0.014950103002475342
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3088235294117647,
"acc_stderr": 0.028064998167040094,
"acc_norm": 0.3088235294117647,
"acc_norm_stderr": 0.028064998167040094
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.363265306122449,
"acc_stderr": 0.030789051139030806,
"acc_norm": 0.363265306122449,
"acc_norm_stderr": 0.030789051139030806
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5569620253164557,
"acc_stderr": 0.032335327775334835,
"acc_norm": 0.5569620253164557,
"acc_norm_stderr": 0.032335327775334835
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.32333767926988266,
"acc_stderr": 0.011946565758447198,
"acc_norm": 0.32333767926988266,
"acc_norm_stderr": 0.011946565758447198
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4852941176470588,
"acc_stderr": 0.03507793834791324,
"acc_norm": 0.4852941176470588,
"acc_norm_stderr": 0.03507793834791324
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5575757575757576,
"acc_stderr": 0.03878372113711275,
"acc_norm": 0.5575757575757576,
"acc_norm_stderr": 0.03878372113711275
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3011015911872705,
"mc1_stderr": 0.016058999026100626,
"mc2": 0.44231342468769663,
"mc2_stderr": 0.015175444883335621
},
"harness|ko_commongen_v2|2": {
"acc": 0.5938606847697757,
"acc_stderr": 0.016884749503191396,
"acc_norm": 0.6304604486422668,
"acc_norm_stderr": 0.016594883405685438
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-2",
"model_sha": "59b2795fd27a846c4f26c6e71fb5fb72574d0ff9",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}