{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.48378839590443684,
"acc_stderr": 0.014603708567414947,
"acc_norm": 0.5255972696245734,
"acc_norm_stderr": 0.014592230885298962
},
"harness|ko_hellaswag|10": {
"acc": 0.4849631547500498,
"acc_stderr": 0.004987524454849698,
"acc_norm": 0.6667994423421629,
"acc_norm_stderr": 0.004703942346762255
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6081871345029239,
"acc_stderr": 0.037439798259263996,
"acc_norm": 0.6081871345029239,
"acc_norm_stderr": 0.037439798259263996
},
"harness|ko_mmlu_management|5": {
"acc": 0.7087378640776699,
"acc_stderr": 0.044986763205729224,
"acc_norm": 0.7087378640776699,
"acc_norm_stderr": 0.044986763205729224
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6309067688378033,
"acc_stderr": 0.01725628310912463,
"acc_norm": 0.6309067688378033,
"acc_norm_stderr": 0.01725628310912463
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.04153948404742401,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.04153948404742401
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036846
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.43829787234042555,
"acc_stderr": 0.032436186361081025,
"acc_norm": 0.43829787234042555,
"acc_norm_stderr": 0.032436186361081025
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5691318327974276,
"acc_stderr": 0.028125340983972714,
"acc_norm": 0.5691318327974276,
"acc_norm_stderr": 0.028125340983972714
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.515695067264574,
"acc_stderr": 0.0335412657542081,
"acc_norm": 0.515695067264574,
"acc_norm_stderr": 0.0335412657542081
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.549618320610687,
"acc_stderr": 0.04363643698524779,
"acc_norm": 0.549618320610687,
"acc_norm_stderr": 0.04363643698524779
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.44,
"acc_stderr": 0.0498887651569859,
"acc_norm": 0.44,
"acc_norm_stderr": 0.0498887651569859
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6868686868686869,
"acc_stderr": 0.033042050878136525,
"acc_norm": 0.6868686868686869,
"acc_norm_stderr": 0.033042050878136525
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.47586206896551725,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.47586206896551725,
"acc_norm_stderr": 0.0416180850350153
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201943,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201943
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6134453781512605,
"acc_stderr": 0.03163145807552378,
"acc_norm": 0.6134453781512605,
"acc_norm_stderr": 0.03163145807552378
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5282051282051282,
"acc_stderr": 0.02531063925493386,
"acc_norm": 0.5282051282051282,
"acc_norm_stderr": 0.02531063925493386
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.047323326159788126,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.047323326159788126
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.43842364532019706,
"acc_stderr": 0.03491207857486519,
"acc_norm": 0.43842364532019706,
"acc_norm_stderr": 0.03491207857486519
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.567741935483871,
"acc_stderr": 0.028181739720019406,
"acc_norm": 0.567741935483871,
"acc_norm_stderr": 0.028181739720019406
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7350427350427351,
"acc_stderr": 0.028911208802749472,
"acc_norm": 0.7350427350427351,
"acc_norm_stderr": 0.028911208802749472
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5207547169811321,
"acc_stderr": 0.03074634997572347,
"acc_norm": 0.5207547169811321,
"acc_norm_stderr": 0.03074634997572347
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5636363636363636,
"acc_stderr": 0.04750185058907296,
"acc_norm": 0.5636363636363636,
"acc_norm_stderr": 0.04750185058907296
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.35555555555555557,
"acc_stderr": 0.029185714949857406,
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.029185714949857406
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6169154228855721,
"acc_stderr": 0.0343751933733825,
"acc_norm": 0.6169154228855721,
"acc_norm_stderr": 0.0343751933733825
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.45664739884393063,
"acc_stderr": 0.03798106566014498,
"acc_norm": 0.45664739884393063,
"acc_norm_stderr": 0.03798106566014498
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.025634258115554965,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.025634258115554965
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5138888888888888,
"acc_stderr": 0.041795966175810016,
"acc_norm": 0.5138888888888888,
"acc_norm_stderr": 0.041795966175810016
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.71,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.546242774566474,
"acc_stderr": 0.026803720583206184,
"acc_norm": 0.546242774566474,
"acc_norm_stderr": 0.026803720583206184
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.44785276073619634,
"acc_stderr": 0.03906947479456601,
"acc_norm": 0.44785276073619634,
"acc_norm_stderr": 0.03906947479456601
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5617283950617284,
"acc_stderr": 0.027607914087400477,
"acc_norm": 0.5617283950617284,
"acc_norm_stderr": 0.027607914087400477
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6839378238341969,
"acc_stderr": 0.033553973696861736,
"acc_norm": 0.6839378238341969,
"acc_norm_stderr": 0.033553973696861736
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.046970851366478626,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.046970851366478626
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6293577981651376,
"acc_stderr": 0.02070745816435298,
"acc_norm": 0.6293577981651376,
"acc_norm_stderr": 0.02070745816435298
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4126984126984127,
"acc_stderr": 0.04403438954768176,
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.04403438954768176
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5326797385620915,
"acc_stderr": 0.02856869975222587,
"acc_norm": 0.5326797385620915,
"acc_norm_stderr": 0.02856869975222587
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.52,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.52,
"acc_norm_stderr": 0.05021167315686779
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.628099173553719,
"acc_stderr": 0.044120158066245044,
"acc_norm": 0.628099173553719,
"acc_norm_stderr": 0.044120158066245044
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5855263157894737,
"acc_stderr": 0.04008973785779206,
"acc_norm": 0.5855263157894737,
"acc_norm_stderr": 0.04008973785779206
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.48366013071895425,
"acc_stderr": 0.020217030653186453,
"acc_norm": 0.48366013071895425,
"acc_norm_stderr": 0.020217030653186453
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.375886524822695,
"acc_stderr": 0.028893955412115886,
"acc_norm": 0.375886524822695,
"acc_norm_stderr": 0.028893955412115886
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833585,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833585
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4861111111111111,
"acc_stderr": 0.03408655867977749,
"acc_norm": 0.4861111111111111,
"acc_norm_stderr": 0.03408655867977749
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2871508379888268,
"acc_stderr": 0.01513160884996376,
"acc_norm": 0.2871508379888268,
"acc_norm_stderr": 0.01513160884996376
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.49264705882352944,
"acc_stderr": 0.030369552523902173,
"acc_norm": 0.49264705882352944,
"acc_norm_stderr": 0.030369552523902173
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5265306122448979,
"acc_stderr": 0.03196412734523272,
"acc_norm": 0.5265306122448979,
"acc_norm_stderr": 0.03196412734523272
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7552742616033755,
"acc_stderr": 0.027985699387036416,
"acc_norm": 0.7552742616033755,
"acc_norm_stderr": 0.027985699387036416
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4061277705345502,
"acc_stderr": 0.012543154588412927,
"acc_norm": 0.4061277705345502,
"acc_norm_stderr": 0.012543154588412927
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6323529411764706,
"acc_stderr": 0.03384132045674118,
"acc_norm": 0.6323529411764706,
"acc_norm_stderr": 0.03384132045674118
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6787878787878788,
"acc_stderr": 0.03646204963253812,
"acc_norm": 0.6787878787878788,
"acc_norm_stderr": 0.03646204963253812
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.4112607099143207,
"mc1_stderr": 0.017225627083660856,
"mc2": 0.5927070170654897,
"mc2_stderr": 0.0162035946763131
},
"harness|ko_commongen_v2|2": {
"acc": 0.5962219598583235,
"acc_stderr": 0.016869031540298625,
"acc_norm": 0.6103896103896104,
"acc_norm_stderr": 0.0167661616718935
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.8",
"model_sha": "91c4b182d2c6d514b5f0205001e7ca4e37cfbe60",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}