{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3643344709897611,
"acc_stderr": 0.014063260279882417,
"acc_norm": 0.4283276450511945,
"acc_norm_stderr": 0.014460496367599019
},
"harness|ko_hellaswag|10": {
"acc": 0.4122684724158534,
"acc_stderr": 0.004912370023913013,
"acc_norm": 0.5438159729137622,
"acc_norm_stderr": 0.004970585328297623
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.52046783625731,
"acc_stderr": 0.0383161053282193,
"acc_norm": 0.52046783625731,
"acc_norm_stderr": 0.0383161053282193
},
"harness|ko_mmlu_management|5": {
"acc": 0.6116504854368932,
"acc_stderr": 0.04825729337356389,
"acc_norm": 0.6116504854368932,
"acc_norm_stderr": 0.04825729337356389
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5798212005108557,
"acc_stderr": 0.017650651363078022,
"acc_norm": 0.5798212005108557,
"acc_norm_stderr": 0.017650651363078022
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04316378599511324,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04316378599511324
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.42127659574468085,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.42127659574468085,
"acc_norm_stderr": 0.03227834510146268
},
"harness|ko_mmlu_virology|5": {
"acc": 0.39759036144578314,
"acc_stderr": 0.03809973084540219,
"acc_norm": 0.39759036144578314,
"acc_norm_stderr": 0.03809973084540219
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5176848874598071,
"acc_stderr": 0.02838032284907713,
"acc_norm": 0.5176848874598071,
"acc_norm_stderr": 0.02838032284907713
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5112107623318386,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.5112107623318386,
"acc_norm_stderr": 0.033549366530984746
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553894,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.45,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.45,
"acc_norm_stderr": 0.04999999999999999
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5959595959595959,
"acc_stderr": 0.03496130972056128,
"acc_norm": 0.5959595959595959,
"acc_norm_stderr": 0.03496130972056128
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4482758620689655,
"acc_stderr": 0.04144311810878151,
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.04144311810878151
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643945,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643945
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.44537815126050423,
"acc_stderr": 0.0322841062671639,
"acc_norm": 0.44537815126050423,
"acc_norm_stderr": 0.0322841062671639
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4512820512820513,
"acc_stderr": 0.02523038123893483,
"acc_norm": 0.4512820512820513,
"acc_norm_stderr": 0.02523038123893483
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695238,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695238
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39408866995073893,
"acc_stderr": 0.03438157967036543,
"acc_norm": 0.39408866995073893,
"acc_norm_stderr": 0.03438157967036543
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4935483870967742,
"acc_stderr": 0.02844163823354051,
"acc_norm": 0.4935483870967742,
"acc_norm_stderr": 0.02844163823354051
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6794871794871795,
"acc_stderr": 0.030572811310299604,
"acc_norm": 0.6794871794871795,
"acc_norm_stderr": 0.030572811310299604
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4830188679245283,
"acc_stderr": 0.030755120364119905,
"acc_norm": 0.4830188679245283,
"acc_norm_stderr": 0.030755120364119905
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.31851851851851853,
"acc_stderr": 0.028406533090608463,
"acc_norm": 0.31851851851851853,
"acc_norm_stderr": 0.028406533090608463
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6368159203980099,
"acc_stderr": 0.03400598505599015,
"acc_norm": 0.6368159203980099,
"acc_norm_stderr": 0.03400598505599015
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3815028901734104,
"acc_stderr": 0.03703851193099521,
"acc_norm": 0.3815028901734104,
"acc_norm_stderr": 0.03703851193099521
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.02441923496681906,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.02441923496681906
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.04122728707651283,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.04122728707651283
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.026918645383239004,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.026918645383239004
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.03926522378708843,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.03926522378708843
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4783950617283951,
"acc_stderr": 0.027794760105008736,
"acc_norm": 0.4783950617283951,
"acc_norm_stderr": 0.027794760105008736
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5751295336787565,
"acc_stderr": 0.035674713352125395,
"acc_norm": 0.5751295336787565,
"acc_norm_stderr": 0.035674713352125395
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.34210526315789475,
"acc_stderr": 0.04462917535336938,
"acc_norm": 0.34210526315789475,
"acc_norm_stderr": 0.04462917535336938
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6018348623853211,
"acc_stderr": 0.02098798942265426,
"acc_norm": 0.6018348623853211,
"acc_norm_stderr": 0.02098798942265426
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235172,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4738562091503268,
"acc_stderr": 0.028590752958852394,
"acc_norm": 0.4738562091503268,
"acc_norm_stderr": 0.028590752958852394
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5131578947368421,
"acc_stderr": 0.04067533136309172,
"acc_norm": 0.5131578947368421,
"acc_norm_stderr": 0.04067533136309172
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.4068627450980392,
"acc_stderr": 0.019873802005061173,
"acc_norm": 0.4068627450980392,
"acc_norm_stderr": 0.019873802005061173
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3191489361702128,
"acc_stderr": 0.0278079901413202,
"acc_norm": 0.3191489361702128,
"acc_norm_stderr": 0.0278079901413202
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.044328040552915185,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.044328040552915185
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3287037037037037,
"acc_stderr": 0.032036140846700596,
"acc_norm": 0.3287037037037037,
"acc_norm_stderr": 0.032036140846700596
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2748603351955307,
"acc_stderr": 0.014931316703220504,
"acc_norm": 0.2748603351955307,
"acc_norm_stderr": 0.014931316703220504
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.0290294228156814,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.0290294228156814
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5102040816326531,
"acc_stderr": 0.03200255347893782,
"acc_norm": 0.5102040816326531,
"acc_norm_stderr": 0.03200255347893782
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6497890295358649,
"acc_stderr": 0.031052391937584346,
"acc_norm": 0.6497890295358649,
"acc_norm_stderr": 0.031052391937584346
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.34028683181225555,
"acc_stderr": 0.0121012176102238,
"acc_norm": 0.34028683181225555,
"acc_norm_stderr": 0.0121012176102238
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5588235294117647,
"acc_stderr": 0.034849415144292316,
"acc_norm": 0.5588235294117647,
"acc_norm_stderr": 0.034849415144292316
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6121212121212121,
"acc_stderr": 0.0380491365397101,
"acc_norm": 0.6121212121212121,
"acc_norm_stderr": 0.0380491365397101
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.28518971848225216,
"mc1_stderr": 0.015805827874454895,
"mc2": 0.4432499193765067,
"mc2_stderr": 0.01527417237825953
},
"harness|ko_commongen_v2|2": {
"acc": 0.5548996458087367,
"acc_stderr": 0.017086417431005467,
"acc_norm": 0.6269185360094451,
"acc_norm_stderr": 0.01662731827513745
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Minirecord/minyi_dpo_6b",
"model_sha": "61066958700e4fda47e6381d3cbc4b2736373868",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}