results/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT/result_2024-03-07 22:11:58.json
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.42150170648464164,
            "acc_stderr": 0.01443019706932602,
            "acc_norm": 0.4761092150170648,
            "acc_norm_stderr": 0.014594701798071657
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.44682334196375223,
            "acc_stderr": 0.00496148138002378,
            "acc_norm": 0.6104361680940051,
            "acc_norm_stderr": 0.004866547422355566
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6257309941520468,
            "acc_stderr": 0.03711601185389481,
            "acc_norm": 0.6257309941520468,
            "acc_norm_stderr": 0.03711601185389481
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6310679611650486,
            "acc_stderr": 0.0477761518115674,
            "acc_norm": 0.6310679611650486,
            "acc_norm_stderr": 0.0477761518115674
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.016857391247472545,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.016857391247472545
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.43703703703703706,
            "acc_stderr": 0.04284958639753398,
            "acc_norm": 0.43703703703703706,
            "acc_norm_stderr": 0.04284958639753398
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.48936170212765956,
            "acc_stderr": 0.03267862331014063,
            "acc_norm": 0.48936170212765956,
            "acc_norm_stderr": 0.03267862331014063
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4879518072289157,
            "acc_stderr": 0.038913644958358196,
            "acc_norm": 0.4879518072289157,
            "acc_norm_stderr": 0.038913644958358196
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5916398713826366,
            "acc_stderr": 0.02791705074848463,
            "acc_norm": 0.5916398713826366,
            "acc_norm_stderr": 0.02791705074848463
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5964125560538116,
            "acc_stderr": 0.032928028193303135,
            "acc_norm": 0.5964125560538116,
            "acc_norm_stderr": 0.032928028193303135
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5648854961832062,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.5648854961832062,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.44,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6919191919191919,
            "acc_stderr": 0.03289477330098615,
            "acc_norm": 0.6919191919191919,
            "acc_norm_stderr": 0.03289477330098615
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.42758620689655175,
            "acc_stderr": 0.041227371113703316,
            "acc_norm": 0.42758620689655175,
            "acc_norm_stderr": 0.041227371113703316
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.044405219061793275,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.044405219061793275
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5546218487394958,
            "acc_stderr": 0.03228410626716391,
            "acc_norm": 0.5546218487394958,
            "acc_norm_stderr": 0.03228410626716391
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5205128205128206,
            "acc_stderr": 0.02532966316348994,
            "acc_norm": 0.5205128205128206,
            "acc_norm_stderr": 0.02532966316348994
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6296296296296297,
            "acc_stderr": 0.04668408033024931,
            "acc_norm": 0.6296296296296297,
            "acc_norm_stderr": 0.04668408033024931
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4039408866995074,
            "acc_stderr": 0.0345245390382204,
            "acc_norm": 0.4039408866995074,
            "acc_norm_stderr": 0.0345245390382204
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5548387096774193,
            "acc_stderr": 0.028272410186214906,
            "acc_norm": 0.5548387096774193,
            "acc_norm_stderr": 0.028272410186214906
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8034188034188035,
            "acc_stderr": 0.02603538609895129,
            "acc_norm": 0.8034188034188035,
            "acc_norm_stderr": 0.02603538609895129
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5547169811320755,
            "acc_stderr": 0.03058805297427066,
            "acc_norm": 0.5547169811320755,
            "acc_norm_stderr": 0.03058805297427066
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.04750185058907296,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.04750185058907296
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.02840653309060846,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.02840653309060846
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31788079470198677,
            "acc_stderr": 0.038020397601079024,
            "acc_norm": 0.31788079470198677,
            "acc_norm_stderr": 0.038020397601079024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6915422885572139,
            "acc_stderr": 0.032658195885126966,
            "acc_norm": 0.6915422885572139,
            "acc_norm_stderr": 0.032658195885126966
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4624277456647399,
            "acc_stderr": 0.0380168510452446,
            "acc_norm": 0.4624277456647399,
            "acc_norm_stderr": 0.0380168510452446
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3968253968253968,
            "acc_stderr": 0.02519710107424649,
            "acc_norm": 0.3968253968253968,
            "acc_norm_stderr": 0.02519710107424649
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4305555555555556,
            "acc_stderr": 0.04140685639111502,
            "acc_norm": 0.4305555555555556,
            "acc_norm_stderr": 0.04140685639111502
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.76,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5433526011560693,
            "acc_stderr": 0.026817718130348923,
            "acc_norm": 0.5433526011560693,
            "acc_norm_stderr": 0.026817718130348923
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.03927705600787443,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.03927705600787443
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6172839506172839,
            "acc_stderr": 0.027044538138402595,
            "acc_norm": 0.6172839506172839,
            "acc_norm_stderr": 0.027044538138402595
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.0479372485441102,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.0479372485441102
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6580310880829016,
            "acc_stderr": 0.03423465100104284,
            "acc_norm": 0.6580310880829016,
            "acc_norm_stderr": 0.03423465100104284
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.43859649122807015,
            "acc_stderr": 0.04668000738510455,
            "acc_norm": 0.43859649122807015,
            "acc_norm_stderr": 0.04668000738510455
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.636697247706422,
            "acc_stderr": 0.020620603919625804,
            "acc_norm": 0.636697247706422,
            "acc_norm_stderr": 0.020620603919625804
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.0416345303130286,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.0416345303130286
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5784313725490197,
            "acc_stderr": 0.028275490156791455,
            "acc_norm": 0.5784313725490197,
            "acc_norm_stderr": 0.028275490156791455
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7272727272727273,
            "acc_stderr": 0.04065578140908705,
            "acc_norm": 0.7272727272727273,
            "acc_norm_stderr": 0.04065578140908705
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5328947368421053,
            "acc_stderr": 0.040601270352363966,
            "acc_norm": 0.5328947368421053,
            "acc_norm_stderr": 0.040601270352363966
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.46895424836601307,
            "acc_stderr": 0.020188804456361887,
            "acc_norm": 0.46895424836601307,
            "acc_norm_stderr": 0.020188804456361887
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3617021276595745,
            "acc_stderr": 0.028663820147199495,
            "acc_norm": 0.3617021276595745,
            "acc_norm_stderr": 0.028663820147199495
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.38392857142857145,
            "acc_stderr": 0.04616143075028546,
            "acc_norm": 0.38392857142857145,
            "acc_norm_stderr": 0.04616143075028546
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.033812000056435254,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.033812000056435254
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2212290502793296,
            "acc_stderr": 0.013882164598887288,
            "acc_norm": 0.2212290502793296,
            "acc_norm_stderr": 0.013882164598887288
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.7,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.7,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.48161764705882354,
            "acc_stderr": 0.030352303395351964,
            "acc_norm": 0.48161764705882354,
            "acc_norm_stderr": 0.030352303395351964
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5346938775510204,
            "acc_stderr": 0.03193207024425314,
            "acc_norm": 0.5346938775510204,
            "acc_norm_stderr": 0.03193207024425314
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6877637130801688,
            "acc_stderr": 0.030165137867847018,
            "acc_norm": 0.6877637130801688,
            "acc_norm_stderr": 0.030165137867847018
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3741851368970013,
            "acc_stderr": 0.012359335618172065,
            "acc_norm": 0.3741851368970013,
            "acc_norm_stderr": 0.012359335618172065
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5833333333333334,
            "acc_stderr": 0.03460228327239171,
            "acc_norm": 0.5833333333333334,
            "acc_norm_stderr": 0.03460228327239171
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6121212121212121,
            "acc_stderr": 0.0380491365397101,
            "acc_norm": 0.6121212121212121,
            "acc_norm_stderr": 0.0380491365397101
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.23745410036719705,
            "mc1_stderr": 0.014896277441041852,
            "mc2": 0.371641156065405,
            "mc2_stderr": 0.014765639375053739
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4887839433293979,
            "acc_stderr": 0.017186028469489283,
            "acc_norm": 0.5336481700118064,
            "acc_norm_stderr": 0.017151384117131865
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT",
        "model_sha": "145c81e2b96a7a498dae1de112ec0062c812dfc8",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
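For reference, below is a minimal sketch of how a results file in this layout can be inspected, assuming it has been downloaded locally (the filename `result.json` and the benchmark grouping are assumptions; the metric keys `acc_norm` and `mc2` come straight from the JSON above):

```python
import json

# Load the results file; "result.json" is a hypothetical local filename.
with open("result.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Take one headline score per task: acc_norm where it exists (the ARC,
# HellaSwag, MMLU, and CommonGen entries), otherwise mc2 (TruthfulQA).
scores = {task: m.get("acc_norm", m.get("mc2")) for task, m in results.items()}

# Macro-average over the ko_mmlu_* subtasks (57 of them in this file).
mmlu_scores = [v for k, v in scores.items() if "|ko_mmlu_" in k]
print(f"ko_mmlu macro average: {sum(mmlu_scores) / len(mmlu_scores):.4f}")

# Single-task scores can be read off directly by their full key.
print("ko_arc_challenge acc_norm:", scores["harness|ko_arc_challenge|25"])
```

Note that each task key encodes the few-shot count after the last `|` (e.g. 25-shot for ko_arc_challenge, 0-shot for ko_truthfulqa_mc), so splitting on `|` recovers the task name and shot count if a different grouping is needed.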