{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4598976109215017,
"acc_stderr": 0.01456431885692485,
"acc_norm": 0.5204778156996587,
"acc_norm_stderr": 0.014599131353035004
},
"harness|ko_hellaswag|10": {
"acc": 0.4569806811392153,
"acc_stderr": 0.004971278309204199,
"acc_norm": 0.6292571200955985,
"acc_norm_stderr": 0.004820166002253078
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6374269005847953,
"acc_stderr": 0.0368713061556206,
"acc_norm": 0.6374269005847953,
"acc_norm_stderr": 0.0368713061556206
},
"harness|ko_mmlu_management|5": {
"acc": 0.6213592233009708,
"acc_stderr": 0.04802694698258973,
"acc_norm": 0.6213592233009708,
"acc_norm_stderr": 0.04802694698258973
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.665389527458493,
"acc_stderr": 0.016873468641592157,
"acc_norm": 0.665389527458493,
"acc_norm_stderr": 0.016873468641592157
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4888888888888889,
"acc_stderr": 0.04318275491977976,
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.04318275491977976
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.48936170212765956,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.48936170212765956,
"acc_norm_stderr": 0.03267862331014063
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4578313253012048,
"acc_stderr": 0.03878626771002361,
"acc_norm": 0.4578313253012048,
"acc_norm_stderr": 0.03878626771002361
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6109324758842444,
"acc_stderr": 0.027690337536485376,
"acc_norm": 0.6109324758842444,
"acc_norm_stderr": 0.027690337536485376
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.6367713004484304,
"acc_stderr": 0.032277904428505,
"acc_norm": 0.6367713004484304,
"acc_norm_stderr": 0.032277904428505
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6564885496183206,
"acc_stderr": 0.041649760719448786,
"acc_norm": 0.6564885496183206,
"acc_norm_stderr": 0.041649760719448786
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956914,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956914
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7323232323232324,
"acc_stderr": 0.03154449888270285,
"acc_norm": 0.7323232323232324,
"acc_norm_stderr": 0.03154449888270285
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.04165774775728763,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.04165774775728763
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5672268907563025,
"acc_stderr": 0.032183581077426124,
"acc_norm": 0.5672268907563025,
"acc_norm_stderr": 0.032183581077426124
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5,
"acc_stderr": 0.02535100632816969,
"acc_norm": 0.5,
"acc_norm_stderr": 0.02535100632816969
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6296296296296297,
"acc_stderr": 0.04668408033024931,
"acc_norm": 0.6296296296296297,
"acc_norm_stderr": 0.04668408033024931
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4039408866995074,
"acc_stderr": 0.0345245390382204,
"acc_norm": 0.4039408866995074,
"acc_norm_stderr": 0.0345245390382204
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5548387096774193,
"acc_stderr": 0.028272410186214906,
"acc_norm": 0.5548387096774193,
"acc_norm_stderr": 0.028272410186214906
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.8034188034188035,
"acc_stderr": 0.026035386098951292,
"acc_norm": 0.8034188034188035,
"acc_norm_stderr": 0.026035386098951292
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5433962264150943,
"acc_stderr": 0.030656748696739435,
"acc_norm": 0.5433962264150943,
"acc_norm_stderr": 0.030656748696739435
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5818181818181818,
"acc_stderr": 0.04724577405731572,
"acc_norm": 0.5818181818181818,
"acc_norm_stderr": 0.04724577405731572
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.028317533496066482,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.028317533496066482
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7412935323383084,
"acc_stderr": 0.03096590312357303,
"acc_norm": 0.7412935323383084,
"acc_norm_stderr": 0.03096590312357303
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4624277456647399,
"acc_stderr": 0.0380168510452446,
"acc_norm": 0.4624277456647399,
"acc_norm_stderr": 0.0380168510452446
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3862433862433862,
"acc_stderr": 0.025075981767601688,
"acc_norm": 0.3862433862433862,
"acc_norm_stderr": 0.025075981767601688
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5416666666666666,
"acc_stderr": 0.04166666666666666,
"acc_norm": 0.5416666666666666,
"acc_norm_stderr": 0.04166666666666666
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.6242774566473989,
"acc_stderr": 0.02607431485165708,
"acc_norm": 0.6242774566473989,
"acc_norm_stderr": 0.02607431485165708
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5644171779141104,
"acc_stderr": 0.03895632464138937,
"acc_norm": 0.5644171779141104,
"acc_norm_stderr": 0.03895632464138937
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6327160493827161,
"acc_stderr": 0.026822801759507898,
"acc_norm": 0.6327160493827161,
"acc_norm_stderr": 0.026822801759507898
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.694300518134715,
"acc_stderr": 0.03324837939758159,
"acc_norm": 0.694300518134715,
"acc_norm_stderr": 0.03324837939758159
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.40350877192982454,
"acc_stderr": 0.04615186962583704,
"acc_norm": 0.40350877192982454,
"acc_norm_stderr": 0.04615186962583704
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6623853211009174,
"acc_stderr": 0.020275265986638903,
"acc_norm": 0.6623853211009174,
"acc_norm_stderr": 0.020275265986638903
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04360314860077459,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04360314860077459
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5784313725490197,
"acc_stderr": 0.02827549015679145,
"acc_norm": 0.5784313725490197,
"acc_norm_stderr": 0.02827549015679145
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7355371900826446,
"acc_stderr": 0.04026187527591207,
"acc_norm": 0.7355371900826446,
"acc_norm_stderr": 0.04026187527591207
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5592105263157895,
"acc_stderr": 0.04040311062490436,
"acc_norm": 0.5592105263157895,
"acc_norm_stderr": 0.04040311062490436
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5261437908496732,
"acc_stderr": 0.020200164564804588,
"acc_norm": 0.5261437908496732,
"acc_norm_stderr": 0.020200164564804588
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.38652482269503546,
"acc_stderr": 0.02904919034254345,
"acc_norm": 0.38652482269503546,
"acc_norm_stderr": 0.02904919034254345
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.375,
"acc_stderr": 0.04595091388086298,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04595091388086298
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4398148148148148,
"acc_stderr": 0.033851779760448106,
"acc_norm": 0.4398148148148148,
"acc_norm_stderr": 0.033851779760448106
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.21340782122905028,
"acc_stderr": 0.013702859932196094,
"acc_norm": 0.21340782122905028,
"acc_norm_stderr": 0.013702859932196094
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4485294117647059,
"acc_stderr": 0.030211479609121596,
"acc_norm": 0.4485294117647059,
"acc_norm_stderr": 0.030211479609121596
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.6408163265306123,
"acc_stderr": 0.03071356045510849,
"acc_norm": 0.6408163265306123,
"acc_norm_stderr": 0.03071356045510849
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7468354430379747,
"acc_stderr": 0.028304657943035303,
"acc_norm": 0.7468354430379747,
"acc_norm_stderr": 0.028304657943035303
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.38657105606258146,
"acc_stderr": 0.012437288868088727,
"acc_norm": 0.38657105606258146,
"acc_norm_stderr": 0.012437288868088727
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6911764705882353,
"acc_stderr": 0.03242661719827218,
"acc_norm": 0.6911764705882353,
"acc_norm_stderr": 0.03242661719827218
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.703030303030303,
"acc_stderr": 0.03567969772268047,
"acc_norm": 0.703030303030303,
"acc_norm_stderr": 0.03567969772268047
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.3488372093023256,
"mc1_stderr": 0.016684419859986865,
"mc2": 0.504220057699691,
"mc2_stderr": 0.015580354868697804
},
"harness|ko_commongen_v2|2": {
"acc": 0.4887839433293979,
"acc_stderr": 0.017186028469489287,
"acc_norm": 0.500590318772137,
"acc_norm_stderr": 0.017190342123448586
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.4",
"model_sha": "5e53c971e2dd66918e597b865d6d64bbd8bd9d35",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}