{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.20392491467576793,
"acc_stderr": 0.011774262478702256,
"acc_norm": 0.25853242320819114,
"acc_norm_stderr": 0.012794553754288666
},
"harness|ko_hellaswag|10": {
"acc": 0.2531368253335989,
"acc_stderr": 0.004339200363454499,
"acc_norm": 0.253734315873332,
"acc_norm_stderr": 0.004342580277662754
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.38596491228070173,
"acc_stderr": 0.03733756969066164,
"acc_norm": 0.38596491228070173,
"acc_norm_stderr": 0.03733756969066164
},
"harness|ko_mmlu_management|5": {
"acc": 0.2815533980582524,
"acc_stderr": 0.04453254836326468,
"acc_norm": 0.2815533980582524,
"acc_norm_stderr": 0.04453254836326468
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.30268199233716475,
"acc_stderr": 0.016428781581749364,
"acc_norm": 0.30268199233716475,
"acc_norm_stderr": 0.016428781581749364
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.04188307537595852,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.04188307537595852
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.24680851063829787,
"acc_stderr": 0.02818544130123409,
"acc_norm": 0.24680851063829787,
"acc_norm_stderr": 0.02818544130123409
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3072289156626506,
"acc_stderr": 0.035915667978246635,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.035915667978246635
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3536977491961415,
"acc_stderr": 0.027155208103200854,
"acc_norm": 0.3536977491961415,
"acc_norm_stderr": 0.027155208103200854
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2600896860986547,
"acc_stderr": 0.02944249558585747,
"acc_norm": 0.2600896860986547,
"acc_norm_stderr": 0.02944249558585747
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.35877862595419846,
"acc_stderr": 0.04206739313864908,
"acc_norm": 0.35877862595419846,
"acc_norm_stderr": 0.04206739313864908
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2878787878787879,
"acc_stderr": 0.03225883512300993,
"acc_norm": 0.2878787878787879,
"acc_norm_stderr": 0.03225883512300993
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3586206896551724,
"acc_stderr": 0.039966295748767186,
"acc_norm": 0.3586206896551724,
"acc_norm_stderr": 0.039966295748767186
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.029472485833136084,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136084
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.022421273612923703,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923703
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.044531975073749834,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.044531975073749834
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2955665024630542,
"acc_stderr": 0.032104944337514575,
"acc_norm": 0.2955665024630542,
"acc_norm_stderr": 0.032104944337514575
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3,
"acc_stderr": 0.026069362295335137,
"acc_norm": 0.3,
"acc_norm_stderr": 0.026069362295335137
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2948717948717949,
"acc_stderr": 0.02987257770889114,
"acc_norm": 0.2948717948717949,
"acc_norm_stderr": 0.02987257770889114
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.26037735849056604,
"acc_stderr": 0.027008766090708076,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708076
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03955932861795833,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03955932861795833
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.29259259259259257,
"acc_stderr": 0.02773896963217609,
"acc_norm": 0.29259259259259257,
"acc_norm_stderr": 0.02773896963217609
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2736318407960199,
"acc_stderr": 0.03152439186555402,
"acc_norm": 0.2736318407960199,
"acc_norm_stderr": 0.03152439186555402
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2832369942196532,
"acc_stderr": 0.03435568056047875,
"acc_norm": 0.2832369942196532,
"acc_norm_stderr": 0.03435568056047875
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.291005291005291,
"acc_stderr": 0.02339382650048488,
"acc_norm": 0.291005291005291,
"acc_norm_stderr": 0.02339382650048488
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.22916666666666666,
"acc_stderr": 0.035146974678623884,
"acc_norm": 0.22916666666666666,
"acc_norm_stderr": 0.035146974678623884
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.33815028901734107,
"acc_stderr": 0.025469770149400175,
"acc_norm": 0.33815028901734107,
"acc_norm_stderr": 0.025469770149400175
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3128834355828221,
"acc_stderr": 0.03642914578292404,
"acc_norm": 0.3128834355828221,
"acc_norm_stderr": 0.03642914578292404
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2839506172839506,
"acc_stderr": 0.02508947852376513,
"acc_norm": 0.2839506172839506,
"acc_norm_stderr": 0.02508947852376513
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.24352331606217617,
"acc_stderr": 0.030975436386845436,
"acc_norm": 0.24352331606217617,
"acc_norm_stderr": 0.030975436386845436
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.04096985139843671,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.04096985139843671
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.29357798165137616,
"acc_stderr": 0.019525151122639663,
"acc_norm": 0.29357798165137616,
"acc_norm_stderr": 0.019525151122639663
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.15873015873015872,
"acc_stderr": 0.03268454013011743,
"acc_norm": 0.15873015873015872,
"acc_norm_stderr": 0.03268454013011743
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3006535947712418,
"acc_stderr": 0.026256053835718964,
"acc_norm": 0.3006535947712418,
"acc_norm_stderr": 0.026256053835718964
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909281,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909281
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.4214876033057851,
"acc_stderr": 0.045077322787750944,
"acc_norm": 0.4214876033057851,
"acc_norm_stderr": 0.045077322787750944
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.40131578947368424,
"acc_stderr": 0.039889037033362836,
"acc_norm": 0.40131578947368424,
"acc_norm_stderr": 0.039889037033362836
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2875816993464052,
"acc_stderr": 0.018311653053648222,
"acc_norm": 0.2875816993464052,
"acc_norm_stderr": 0.018311653053648222
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24113475177304963,
"acc_stderr": 0.025518731049537786,
"acc_norm": 0.24113475177304963,
"acc_norm_stderr": 0.025518731049537786
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.30357142857142855,
"acc_stderr": 0.04364226155841044,
"acc_norm": 0.30357142857142855,
"acc_norm_stderr": 0.04364226155841044
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3425925925925926,
"acc_stderr": 0.03236585252602158,
"acc_norm": 0.3425925925925926,
"acc_norm_stderr": 0.03236585252602158
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2670391061452514,
"acc_stderr": 0.01479650262256255,
"acc_norm": 0.2670391061452514,
"acc_norm_stderr": 0.01479650262256255
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.19117647058823528,
"acc_stderr": 0.023886881922440355,
"acc_norm": 0.19117647058823528,
"acc_norm_stderr": 0.023886881922440355
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.24897959183673468,
"acc_stderr": 0.027682979522960224,
"acc_norm": 0.24897959183673468,
"acc_norm_stderr": 0.027682979522960224
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.27848101265822783,
"acc_stderr": 0.029178682304842555,
"acc_norm": 0.27848101265822783,
"acc_norm_stderr": 0.029178682304842555
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2627118644067797,
"acc_stderr": 0.011240545514995664,
"acc_norm": 0.2627118644067797,
"acc_norm_stderr": 0.011240545514995664
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.03317505930009179,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.03317505930009179
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24969400244798043,
"mc1_stderr": 0.015152286907148125,
"mc2": 0.506116595507612,
"mc2_stderr": 0.017054384753311957
},
"harness|ko_commongen_v2|2": {
"acc": 0.09917355371900827,
"acc_stderr": 0.010276218268084948,
"acc_norm": 0.3659976387249115,
"acc_norm_stderr": 0.01656148966489569
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "4yo1/llama3-pre1-pre2-ds-lora3",
"model_sha": "1a0c007ab818dd0b388e73fe894f1b3a0ebe592d",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}