results/Edentns/DataVortexS-10.7B-dpo-v1.6/result_2024-01-28 09:26:01.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.48464163822525597,
"acc_stderr": 0.014604496129394908,
"acc_norm": 0.53839590443686,
"acc_norm_stderr": 0.014568245550296358
},
"harness|ko_hellaswag|10": {
"acc": 0.4939255128460466,
"acc_stderr": 0.004989413158034799,
"acc_norm": 0.6790479984066919,
"acc_norm_stderr": 0.0046588829290995165
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.038110796698335316,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.038110796698335316
},
"harness|ko_mmlu_management|5": {
"acc": 0.6310679611650486,
"acc_stderr": 0.0477761518115674,
"acc_norm": 0.6310679611650486,
"acc_norm_stderr": 0.0477761518115674
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6781609195402298,
"acc_stderr": 0.016706381415057904,
"acc_norm": 0.6781609195402298,
"acc_norm_stderr": 0.016706381415057904
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.04188307537595853,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.04188307537595853
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.451063829787234,
"acc_stderr": 0.032529096196131965,
"acc_norm": 0.451063829787234,
"acc_norm_stderr": 0.032529096196131965
},
"harness|ko_mmlu_virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6077170418006431,
"acc_stderr": 0.027731258647011994,
"acc_norm": 0.6077170418006431,
"acc_norm_stderr": 0.027731258647011994
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5964125560538116,
"acc_stderr": 0.03292802819330314,
"acc_norm": 0.5964125560538116,
"acc_norm_stderr": 0.03292802819330314
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6106870229007634,
"acc_stderr": 0.04276486542814591,
"acc_norm": 0.6106870229007634,
"acc_norm_stderr": 0.04276486542814591
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956914,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956914
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.7121212121212122,
"acc_stderr": 0.03225883512300993,
"acc_norm": 0.7121212121212122,
"acc_norm_stderr": 0.03225883512300993
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4206896551724138,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.4206896551724138,
"acc_norm_stderr": 0.0411391498118926
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.048971049527263666
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6218487394957983,
"acc_stderr": 0.031499305777849054,
"acc_norm": 0.6218487394957983,
"acc_norm_stderr": 0.031499305777849054
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5666666666666667,
"acc_stderr": 0.0251246535258851,
"acc_norm": 0.5666666666666667,
"acc_norm_stderr": 0.0251246535258851
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5740740740740741,
"acc_stderr": 0.0478034362693679,
"acc_norm": 0.5740740740740741,
"acc_norm_stderr": 0.0478034362693679
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.03430462416103872,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.03430462416103872
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5741935483870968,
"acc_stderr": 0.028129112709165908,
"acc_norm": 0.5741935483870968,
"acc_norm_stderr": 0.028129112709165908
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.02934311479809446,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.02934311479809446
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5547169811320755,
"acc_stderr": 0.030588052974270658,
"acc_norm": 0.5547169811320755,
"acc_norm_stderr": 0.030588052974270658
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6090909090909091,
"acc_stderr": 0.046737523336702384,
"acc_norm": 0.6090909090909091,
"acc_norm_stderr": 0.046737523336702384
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.35555555555555557,
"acc_stderr": 0.02918571494985741,
"acc_norm": 0.35555555555555557,
"acc_norm_stderr": 0.02918571494985741
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.038615575462551684,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.038615575462551684
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6517412935323383,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.6517412935323383,
"acc_norm_stderr": 0.033687874661154596
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5317919075144508,
"acc_stderr": 0.03804749744364764,
"acc_norm": 0.5317919075144508,
"acc_norm_stderr": 0.03804749744364764
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4021164021164021,
"acc_stderr": 0.02525303255499769,
"acc_norm": 0.4021164021164021,
"acc_norm_stderr": 0.02525303255499769
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5,
"acc_stderr": 0.04181210050035455,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04181210050035455
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5317919075144508,
"acc_stderr": 0.026864624366756656,
"acc_norm": 0.5317919075144508,
"acc_norm_stderr": 0.026864624366756656
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5337423312883436,
"acc_stderr": 0.039194155450484096,
"acc_norm": 0.5337423312883436,
"acc_norm_stderr": 0.039194155450484096
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.5617283950617284,
"acc_stderr": 0.027607914087400473,
"acc_norm": 0.5617283950617284,
"acc_norm_stderr": 0.027607914087400473
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.7150259067357513,
"acc_stderr": 0.032577140777096614,
"acc_norm": 0.7150259067357513,
"acc_norm_stderr": 0.032577140777096614
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.046774730044912,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.046774730044912
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.673394495412844,
"acc_stderr": 0.020106990889937306,
"acc_norm": 0.673394495412844,
"acc_norm_stderr": 0.020106990889937306
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.043435254289490965,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.043435254289490965
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.565359477124183,
"acc_stderr": 0.028384256704883034,
"acc_norm": 0.565359477124183,
"acc_norm_stderr": 0.028384256704883034
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6694214876033058,
"acc_stderr": 0.042943408452120926,
"acc_norm": 0.6694214876033058,
"acc_norm_stderr": 0.042943408452120926
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.04063302731486671
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.49673202614379086,
"acc_stderr": 0.020227402794434867,
"acc_norm": 0.49673202614379086,
"acc_norm_stderr": 0.020227402794434867
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3900709219858156,
"acc_stderr": 0.029097675599463926,
"acc_norm": 0.3900709219858156,
"acc_norm_stderr": 0.029097675599463926
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.375,
"acc_stderr": 0.04595091388086298,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04595091388086298
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5277777777777778,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.5277777777777778,
"acc_norm_stderr": 0.0340470532865388
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.37988826815642457,
"acc_stderr": 0.016232826818678502,
"acc_norm": 0.37988826815642457,
"acc_norm_stderr": 0.016232826818678502
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.39,
"acc_stderr": 0.049020713000019756,
"acc_norm": 0.39,
"acc_norm_stderr": 0.049020713000019756
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5367647058823529,
"acc_stderr": 0.030290619180485694,
"acc_norm": 0.5367647058823529,
"acc_norm_stderr": 0.030290619180485694
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5510204081632653,
"acc_stderr": 0.03184213866687579,
"acc_norm": 0.5510204081632653,
"acc_norm_stderr": 0.03184213866687579
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7468354430379747,
"acc_stderr": 0.028304657943035307,
"acc_norm": 0.7468354430379747,
"acc_norm_stderr": 0.028304657943035307
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.40547588005215124,
"acc_stderr": 0.012539960672377209,
"acc_norm": 0.40547588005215124,
"acc_norm_stderr": 0.012539960672377209
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6225490196078431,
"acc_stderr": 0.03402272044340703,
"acc_norm": 0.6225490196078431,
"acc_norm_stderr": 0.03402272044340703
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.696969696969697,
"acc_stderr": 0.03588624800091708,
"acc_norm": 0.696969696969697,
"acc_norm_stderr": 0.03588624800091708
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.47368421052631576,
"mc1_stderr": 0.017479241161975526,
"mc2": 0.6460211319664573,
"mc2_stderr": 0.016128307064999306
},
"harness|ko_commongen_v2|2": {
"acc": 0.5430932703659976,
"acc_stderr": 0.01712638909308678,
"acc_norm": 0.5737898465171193,
"acc_norm_stderr": 0.017002122609489252
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Edentns/DataVortexS-10.7B-dpo-v1.6",
"model_sha": "c6e33d98405845bd570a434319e443c3f6677a0f",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
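
A minimal sketch of how a file like this can be consumed downstream: it loads the JSON, filters the "harness|<task>|<num_fewshot>" keys visible above for the ko_mmlu subtasks, and prints their macro-average accuracy alongside the TruthfulQA mc2 score. The RESULT_PATH value is an assumption for illustration (the file's own name, read from a local copy), not a canonical location, and this is not part of the official leaderboard tooling.

import json

# Hypothetical local path to this results file; adjust to wherever
# the JSON was downloaded. Not an official or canonical location.
RESULT_PATH = "result_2024-01-28 09:26:01.json"

def main() -> None:
    with open(RESULT_PATH, encoding="utf-8") as f:
        data = json.load(f)

    results = data["results"]

    # Keys follow the "harness|<task>|<num_fewshot>" pattern used in this
    # file, e.g. "harness|ko_mmlu_world_religions|5".
    mmlu_accs = [
        metrics["acc"]
        for key, metrics in results.items()
        if key.split("|")[1].startswith("ko_mmlu_")
    ]

    print(f"ko_mmlu subtasks: {len(mmlu_accs)}")
    print(f"ko_mmlu macro-average acc: {sum(mmlu_accs) / len(mmlu_accs):.4f}")

    # TruthfulQA reports mc1/mc2 instead of acc/acc_norm.
    truthfulqa = results["harness|ko_truthfulqa_mc|0"]
    print(f"ko_truthfulqa mc2: {truthfulqa['mc2']:.4f}")

if __name__ == "__main__":
    main()

Note that this computes an unweighted mean over the 58 ko_mmlu subtask accuracies; a leaderboard may instead weight by per-subtask question counts, which are not recorded in this file.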