results/Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO/result_2024-01-05 04:59:11.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.47440273037542663,
"acc_stderr": 0.014592230885298964,
"acc_norm": 0.5119453924914675,
"acc_norm_stderr": 0.014607220340597167
},
"harness|ko_hellaswag|10": {
"acc": 0.45180242979486157,
"acc_stderr": 0.004966544724452223,
"acc_norm": 0.6199960167297351,
"acc_norm_stderr": 0.004843954338451447
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6198830409356725,
"acc_stderr": 0.03722965741385539,
"acc_norm": 0.6198830409356725,
"acc_norm_stderr": 0.03722965741385539
},
"harness|ko_mmlu_management|5": {
"acc": 0.6990291262135923,
"acc_stderr": 0.04541609446503948,
"acc_norm": 0.6990291262135923,
"acc_norm_stderr": 0.04541609446503948
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6564495530012772,
"acc_stderr": 0.016982145632652473,
"acc_norm": 0.6564495530012772,
"acc_norm_stderr": 0.016982145632652473
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4962962962962963,
"acc_stderr": 0.043192236258113303,
"acc_norm": 0.4962962962962963,
"acc_norm_stderr": 0.043192236258113303
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.5106382978723404,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.5106382978723404,
"acc_norm_stderr": 0.03267862331014063
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333047,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333047
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6077170418006431,
"acc_stderr": 0.02773125864701199,
"acc_norm": 0.6077170418006431,
"acc_norm_stderr": 0.02773125864701199
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5695067264573991,
"acc_stderr": 0.033231973029429394,
"acc_norm": 0.5695067264573991,
"acc_norm_stderr": 0.033231973029429394
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5801526717557252,
"acc_stderr": 0.04328577215262972,
"acc_norm": 0.5801526717557252,
"acc_norm_stderr": 0.04328577215262972
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465918,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465918
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.03173071239071724,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.03173071239071724
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.5586206896551724,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.5586206896551724,
"acc_norm_stderr": 0.04137931034482758
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107224,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107224
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.031041941304059288,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.031041941304059288
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5666666666666667,
"acc_stderr": 0.0251246535258851,
"acc_norm": 0.5666666666666667,
"acc_norm_stderr": 0.0251246535258851
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.6,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6574074074074074,
"acc_stderr": 0.045879047413018105,
"acc_norm": 0.6574074074074074,
"acc_norm_stderr": 0.045879047413018105
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.4039408866995074,
"acc_stderr": 0.0345245390382204,
"acc_norm": 0.4039408866995074,
"acc_norm_stderr": 0.0345245390382204
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5903225806451613,
"acc_stderr": 0.027976054915347368,
"acc_norm": 0.5903225806451613,
"acc_norm_stderr": 0.027976054915347368
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7948717948717948,
"acc_stderr": 0.026453508054040342,
"acc_norm": 0.7948717948717948,
"acc_norm_stderr": 0.026453508054040342
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5509433962264151,
"acc_stderr": 0.030612730713641092,
"acc_norm": 0.5509433962264151,
"acc_norm_stderr": 0.030612730713641092
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.04709306978661895,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.04709306978661895
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3592592592592593,
"acc_stderr": 0.029252905927251976,
"acc_norm": 0.3592592592592593,
"acc_norm_stderr": 0.029252905927251976
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.4304635761589404,
"acc_stderr": 0.04042809961395634,
"acc_norm": 0.4304635761589404,
"acc_norm_stderr": 0.04042809961395634
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6517412935323383,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.6517412935323383,
"acc_norm_stderr": 0.033687874661154596
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5202312138728323,
"acc_stderr": 0.03809342081273956,
"acc_norm": 0.5202312138728323,
"acc_norm_stderr": 0.03809342081273956
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.02568056464005688,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.02568056464005688
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5416666666666666,
"acc_stderr": 0.04166666666666665,
"acc_norm": 0.5416666666666666,
"acc_norm_stderr": 0.04166666666666665
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206824,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206824
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.6127167630057804,
"acc_stderr": 0.026226158605124655,
"acc_norm": 0.6127167630057804,
"acc_norm_stderr": 0.026226158605124655
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.03926522378708843,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.03926522378708843
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6141975308641975,
"acc_stderr": 0.027085401226132143,
"acc_norm": 0.6141975308641975,
"acc_norm_stderr": 0.027085401226132143
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7357512953367875,
"acc_stderr": 0.03182155050916648,
"acc_norm": 0.7357512953367875,
"acc_norm_stderr": 0.03182155050916648
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.046970851366478626
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.691743119266055,
"acc_stderr": 0.019798366698367254,
"acc_norm": 0.691743119266055,
"acc_norm_stderr": 0.019798366698367254
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.0439025926537756,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.0439025926537756
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5686274509803921,
"acc_stderr": 0.028358956313423545,
"acc_norm": 0.5686274509803921,
"acc_norm_stderr": 0.028358956313423545
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.61,
"acc_stderr": 0.049020713000019756,
"acc_norm": 0.61,
"acc_norm_stderr": 0.049020713000019756
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.040655781409087044,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.040655781409087044
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5986842105263158,
"acc_stderr": 0.039889037033362836,
"acc_norm": 0.5986842105263158,
"acc_norm_stderr": 0.039889037033362836
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5147058823529411,
"acc_stderr": 0.020219083895133924,
"acc_norm": 0.5147058823529411,
"acc_norm_stderr": 0.020219083895133924
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.40425531914893614,
"acc_stderr": 0.02927553215970473,
"acc_norm": 0.40425531914893614,
"acc_norm_stderr": 0.02927553215970473
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5138888888888888,
"acc_stderr": 0.03408655867977748,
"acc_norm": 0.5138888888888888,
"acc_norm_stderr": 0.03408655867977748
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23128491620111732,
"acc_stderr": 0.014102223623152594,
"acc_norm": 0.23128491620111732,
"acc_norm_stderr": 0.014102223623152594
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.48,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5220588235294118,
"acc_stderr": 0.03034326422421352,
"acc_norm": 0.5220588235294118,
"acc_norm_stderr": 0.03034326422421352
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5877551020408164,
"acc_stderr": 0.03151236044674269,
"acc_norm": 0.5877551020408164,
"acc_norm_stderr": 0.03151236044674269
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7721518987341772,
"acc_stderr": 0.02730348459906941,
"acc_norm": 0.7721518987341772,
"acc_norm_stderr": 0.02730348459906941
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.408735332464146,
"acc_stderr": 0.01255570134670339,
"acc_norm": 0.408735332464146,
"acc_norm_stderr": 0.01255570134670339
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.03354092437591519,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.03354092437591519
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6787878787878788,
"acc_stderr": 0.036462049632538136,
"acc_norm": 0.6787878787878788,
"acc_norm_stderr": 0.036462049632538136
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.35495716034271724,
"mc1_stderr": 0.016750862381375905,
"mc2": 0.5241101036055598,
"mc2_stderr": 0.015594380871291398
},
"harness|ko_commongen_v2|2": {
"acc": 0.5537190082644629,
"acc_stderr": 0.017090852631668332,
"acc_norm": 0.6044864226682408,
"acc_norm_stderr": 0.01681081590220604
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO",
"model_sha": "ab271d7e826dec3cbca538446d2c4f61af6fa30f",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}