{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.35665529010238906,
"acc_stderr": 0.013998056902620192,
"acc_norm": 0.40187713310580203,
"acc_norm_stderr": 0.014327268614578278
},
"harness|ko_hellaswag|10": {
"acc": 0.3957379008165704,
"acc_stderr": 0.004880092083408037,
"acc_norm": 0.5309699263095001,
"acc_norm_stderr": 0.004980200451851671
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.03820042586602966,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.03820042586602966
},
"harness|ko_mmlu_management|5": {
"acc": 0.5922330097087378,
"acc_stderr": 0.048657775704107696,
"acc_norm": 0.5922330097087378,
"acc_norm_stderr": 0.048657775704107696
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.01776925058353325,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.01776925058353325
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4888888888888889,
"acc_stderr": 0.04318275491977978,
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.04318275491977978
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.42127659574468085,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.42127659574468085,
"acc_norm_stderr": 0.03227834510146268
},
"harness|ko_mmlu_virology|5": {
"acc": 0.39759036144578314,
"acc_stderr": 0.03809973084540219,
"acc_norm": 0.39759036144578314,
"acc_norm_stderr": 0.03809973084540219
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5466237942122186,
"acc_stderr": 0.028274359854894245,
"acc_norm": 0.5466237942122186,
"acc_norm_stderr": 0.028274359854894245
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4484304932735426,
"acc_stderr": 0.033378837362550984,
"acc_norm": 0.4484304932735426,
"acc_norm_stderr": 0.033378837362550984
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5190839694656488,
"acc_stderr": 0.043820947055509867,
"acc_norm": 0.5190839694656488,
"acc_norm_stderr": 0.043820947055509867
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.03502975799413007,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.03502975799413007
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.45517241379310347,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.45517241379310347,
"acc_norm_stderr": 0.04149886942192117
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.44537815126050423,
"acc_stderr": 0.032284106267163895,
"acc_norm": 0.44537815126050423,
"acc_norm_stderr": 0.032284106267163895
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4205128205128205,
"acc_stderr": 0.025028610276710855,
"acc_norm": 0.4205128205128205,
"acc_norm_stderr": 0.025028610276710855
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.37438423645320196,
"acc_stderr": 0.03405155380561952,
"acc_norm": 0.37438423645320196,
"acc_norm_stderr": 0.03405155380561952
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4290322580645161,
"acc_stderr": 0.02815603653823321,
"acc_norm": 0.4290322580645161,
"acc_norm_stderr": 0.02815603653823321
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.688034188034188,
"acc_stderr": 0.030351527323344958,
"acc_norm": 0.688034188034188,
"acc_norm_stderr": 0.030351527323344958
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4716981132075472,
"acc_stderr": 0.030723535249006107,
"acc_norm": 0.4716981132075472,
"acc_norm_stderr": 0.030723535249006107
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4909090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.4909090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2851851851851852,
"acc_stderr": 0.027528599210340492,
"acc_norm": 0.2851851851851852,
"acc_norm_stderr": 0.027528599210340492
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2847682119205298,
"acc_stderr": 0.03684881521389023,
"acc_norm": 0.2847682119205298,
"acc_norm_stderr": 0.03684881521389023
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5671641791044776,
"acc_stderr": 0.03503490923673282,
"acc_norm": 0.5671641791044776,
"acc_norm_stderr": 0.03503490923673282
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3872832369942196,
"acc_stderr": 0.03714325906302064,
"acc_norm": 0.3872832369942196,
"acc_norm_stderr": 0.03714325906302064
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.335978835978836,
"acc_stderr": 0.024326310529149138,
"acc_norm": 0.335978835978836,
"acc_norm_stderr": 0.024326310529149138
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04076663253918567,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04076663253918567
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.56,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.56,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5115606936416185,
"acc_stderr": 0.02691189868637792,
"acc_norm": 0.5115606936416185,
"acc_norm_stderr": 0.02691189868637792
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.50920245398773,
"acc_stderr": 0.03927705600787443,
"acc_norm": 0.50920245398773,
"acc_norm_stderr": 0.03927705600787443
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.027777777777777797,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.027777777777777797
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5544041450777202,
"acc_stderr": 0.035870149860756595,
"acc_norm": 0.5544041450777202,
"acc_norm_stderr": 0.035870149860756595
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.042270544512322004,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.042270544512322004
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5724770642201835,
"acc_stderr": 0.02121091020430043,
"acc_norm": 0.5724770642201835,
"acc_norm_stderr": 0.02121091020430043
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235173,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235173
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.43790849673202614,
"acc_stderr": 0.02840830202033269,
"acc_norm": 0.43790849673202614,
"acc_norm_stderr": 0.02840830202033269
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6776859504132231,
"acc_stderr": 0.04266416363352167,
"acc_norm": 0.6776859504132231,
"acc_norm_stderr": 0.04266416363352167
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.04046336883978251,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.04046336883978251
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.42810457516339867,
"acc_stderr": 0.0200176292142131,
"acc_norm": 0.42810457516339867,
"acc_norm_stderr": 0.0200176292142131
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.30851063829787234,
"acc_stderr": 0.02755336616510137,
"acc_norm": 0.30851063829787234,
"acc_norm_stderr": 0.02755336616510137
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.33035714285714285,
"acc_stderr": 0.04464285714285712,
"acc_norm": 0.33035714285714285,
"acc_norm_stderr": 0.04464285714285712
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03114144782353604,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03114144782353604
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24134078212290502,
"acc_stderr": 0.014310999547961441,
"acc_norm": 0.24134078212290502,
"acc_norm_stderr": 0.014310999547961441
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3639705882352941,
"acc_stderr": 0.02922719246003203,
"acc_norm": 0.3639705882352941,
"acc_norm_stderr": 0.02922719246003203
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3795918367346939,
"acc_stderr": 0.03106721126287248,
"acc_norm": 0.3795918367346939,
"acc_norm_stderr": 0.03106721126287248
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6118143459915611,
"acc_stderr": 0.031722950043323296,
"acc_norm": 0.6118143459915611,
"acc_norm_stderr": 0.031722950043323296
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3259452411994785,
"acc_stderr": 0.011971507294982779,
"acc_norm": 0.3259452411994785,
"acc_norm_stderr": 0.011971507294982779
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5392156862745098,
"acc_stderr": 0.03498501649369527,
"acc_norm": 0.5392156862745098,
"acc_norm_stderr": 0.03498501649369527
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5333333333333333,
"acc_stderr": 0.03895658065271846,
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.03895658065271846
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2864137086903305,
"mc1_stderr": 0.015826142439502342,
"mc2": 0.42702483854882867,
"mc2_stderr": 0.015004259698872787
},
"harness|ko_commongen_v2|2": {
"acc": 0.5360094451003542,
"acc_stderr": 0.017145715365486664,
"acc_norm": 0.602125147579693,
"acc_norm_stderr": 0.01682795905473339
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.8",
"model_sha": "061f243c89e813b852608a835754731687ee3dac",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}