results/Dongwookss/zephyr_tuning_v1/result_2024-06-18 02:38:53.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3122866894197952,
"acc_stderr": 0.013542598541688065,
"acc_norm": 0.3626279863481229,
"acc_norm_stderr": 0.014049106564955017
},
"harness|ko_hellaswag|10": {
"acc": 0.34644493128858794,
"acc_stderr": 0.004748645133281563,
"acc_norm": 0.4420434176458873,
"acc_norm_stderr": 0.00495614704610896
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4093567251461988,
"acc_stderr": 0.03771283107626544,
"acc_norm": 0.4093567251461988,
"acc_norm_stderr": 0.03771283107626544
},
"harness|ko_mmlu_management|5": {
"acc": 0.5728155339805825,
"acc_stderr": 0.04897957737781168,
"acc_norm": 0.5728155339805825,
"acc_norm_stderr": 0.04897957737781168
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.43039591315453385,
"acc_stderr": 0.017705868776292377,
"acc_norm": 0.43039591315453385,
"acc_norm_stderr": 0.017705868776292377
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.040943762699967946,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.040943762699967946
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.32340425531914896,
"acc_stderr": 0.030579442773610337,
"acc_norm": 0.32340425531914896,
"acc_norm_stderr": 0.030579442773610337
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3674698795180723,
"acc_stderr": 0.03753267402120574,
"acc_norm": 0.3674698795180723,
"acc_norm_stderr": 0.03753267402120574
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.43729903536977494,
"acc_stderr": 0.028173917761762878,
"acc_norm": 0.43729903536977494,
"acc_norm_stderr": 0.028173917761762878
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3721973094170404,
"acc_stderr": 0.03244305283008732,
"acc_norm": 0.3721973094170404,
"acc_norm_stderr": 0.03244305283008732
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4580152671755725,
"acc_stderr": 0.04369802690578756,
"acc_norm": 0.4580152671755725,
"acc_norm_stderr": 0.04369802690578756
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5252525252525253,
"acc_stderr": 0.03557806245087314,
"acc_norm": 0.5252525252525253,
"acc_norm_stderr": 0.03557806245087314
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4206896551724138,
"acc_stderr": 0.0411391498118926,
"acc_norm": 0.4206896551724138,
"acc_norm_stderr": 0.0411391498118926
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.29411764705882354,
"acc_stderr": 0.04533838195929776,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.04533838195929776
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4957983193277311,
"acc_stderr": 0.03247734334448111,
"acc_norm": 0.4957983193277311,
"acc_norm_stderr": 0.03247734334448111
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.45384615384615384,
"acc_stderr": 0.02524277098712617,
"acc_norm": 0.45384615384615384,
"acc_norm_stderr": 0.02524277098712617
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39901477832512317,
"acc_stderr": 0.03445487686264715,
"acc_norm": 0.39901477832512317,
"acc_norm_stderr": 0.03445487686264715
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.44516129032258067,
"acc_stderr": 0.028272410186214906,
"acc_norm": 0.44516129032258067,
"acc_norm_stderr": 0.028272410186214906
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6324786324786325,
"acc_stderr": 0.031585391577456365,
"acc_norm": 0.6324786324786325,
"acc_norm_stderr": 0.031585391577456365
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4188679245283019,
"acc_stderr": 0.030365050829115215,
"acc_norm": 0.4188679245283019,
"acc_norm_stderr": 0.030365050829115215
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.04769300568972743,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.04769300568972743
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02874204090394849,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.02874204090394849
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2781456953642384,
"acc_stderr": 0.036586032627637426,
"acc_norm": 0.2781456953642384,
"acc_norm_stderr": 0.036586032627637426
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5124378109452736,
"acc_stderr": 0.035344398485395806,
"acc_norm": 0.5124378109452736,
"acc_norm_stderr": 0.035344398485395806
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3872832369942196,
"acc_stderr": 0.03714325906302065,
"acc_norm": 0.3872832369942196,
"acc_norm_stderr": 0.03714325906302065
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.36507936507936506,
"acc_stderr": 0.024796060602699968,
"acc_norm": 0.36507936507936506,
"acc_norm_stderr": 0.024796060602699968
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554859,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03800968060554859
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.51,
"acc_stderr": 0.050241839379569095,
"acc_norm": 0.51,
"acc_norm_stderr": 0.050241839379569095
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.026636539741116082,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.026636539741116082
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4294478527607362,
"acc_stderr": 0.03889066619112722,
"acc_norm": 0.4294478527607362,
"acc_norm_stderr": 0.03889066619112722
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4382716049382716,
"acc_stderr": 0.027607914087400466,
"acc_norm": 0.4382716049382716,
"acc_norm_stderr": 0.027607914087400466
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.48704663212435234,
"acc_stderr": 0.03607228061047749,
"acc_norm": 0.48704663212435234,
"acc_norm_stderr": 0.03607228061047749
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537317,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537317
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.47155963302752296,
"acc_stderr": 0.02140261569734804,
"acc_norm": 0.47155963302752296,
"acc_norm_stderr": 0.02140261569734804
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.47058823529411764,
"acc_stderr": 0.028580341065138282,
"acc_norm": 0.47058823529411764,
"acc_norm_stderr": 0.028580341065138282
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5785123966942148,
"acc_stderr": 0.04507732278775088,
"acc_norm": 0.5785123966942148,
"acc_norm_stderr": 0.04507732278775088
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.040403110624904356,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.040403110624904356
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.018771683893528183,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.018771683893528183
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.027187127011503803,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.027187127011503803
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.044328040552915206,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.044328040552915206
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4351851851851852,
"acc_stderr": 0.033812000056435254,
"acc_norm": 0.4351851851851852,
"acc_norm_stderr": 0.033812000056435254
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.3195530726256983,
"acc_stderr": 0.015595520294147413,
"acc_norm": 0.3195530726256983,
"acc_norm_stderr": 0.015595520294147413
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.43014705882352944,
"acc_stderr": 0.030074971917302875,
"acc_norm": 0.43014705882352944,
"acc_norm_stderr": 0.030074971917302875
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.46938775510204084,
"acc_stderr": 0.031949171367580624,
"acc_norm": 0.46938775510204084,
"acc_norm_stderr": 0.031949171367580624
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4472573839662447,
"acc_stderr": 0.03236564251614192,
"acc_norm": 0.4472573839662447,
"acc_norm_stderr": 0.03236564251614192
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2842242503259452,
"acc_stderr": 0.011519880596516076,
"acc_norm": 0.2842242503259452,
"acc_norm_stderr": 0.011519880596516076
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.28921568627450983,
"acc_stderr": 0.03182231867647553,
"acc_norm": 0.28921568627450983,
"acc_norm_stderr": 0.03182231867647553
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.296969696969697,
"acc_stderr": 0.0356796977226805,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.0356796977226805
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2876376988984088,
"mc1_stderr": 0.015846315101394816,
"mc2": 0.4650159734520416,
"mc2_stderr": 0.015707129691814502
},
"harness|ko_commongen_v2|2": {
"acc": 0.3530106257378985,
"acc_stderr": 0.016430745982427136,
"acc_norm": 0.3624557260920897,
"acc_norm_stderr": 0.01652713124045371
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Dongwookss/zephyr_tuning_v1",
"model_sha": "0757d3285a25a82393dceb2cf9dc35b57e2217fe",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}