{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.43600682593856654,
            "acc_stderr": 0.014491225699230916,
            "acc_norm": 0.47525597269624575,
            "acc_norm_stderr": 0.01459348769493774
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4387572196773551,
            "acc_stderr": 0.004952209831856584,
            "acc_norm": 0.5827524397530373,
            "acc_norm_stderr": 0.004920967192255291
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49707602339181284,
            "acc_stderr": 0.03834759370936839,
            "acc_norm": 0.49707602339181284,
            "acc_norm_stderr": 0.03834759370936839
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5242718446601942,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.5242718446601942,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5478927203065134,
            "acc_stderr": 0.017797751493865636,
            "acc_norm": 0.5478927203065134,
            "acc_norm_stderr": 0.017797751493865636
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4888888888888889,
            "acc_stderr": 0.04318275491977976,
            "acc_norm": 0.4888888888888889,
            "acc_norm_stderr": 0.04318275491977976
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4,
            "acc_stderr": 0.03202563076101736,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.03202563076101736
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4036144578313253,
            "acc_stderr": 0.03819486140758398,
            "acc_norm": 0.4036144578313253,
            "acc_norm_stderr": 0.03819486140758398
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5112540192926045,
            "acc_stderr": 0.028390897396863533,
            "acc_norm": 0.5112540192926045,
            "acc_norm_stderr": 0.028390897396863533
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5291479820627802,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.5291479820627802,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5909090909090909,
            "acc_stderr": 0.03502975799413008,
            "acc_norm": 0.5909090909090909,
            "acc_norm_stderr": 0.03502975799413008
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.04043461861916747,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.04043461861916747
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.49159663865546216,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.49159663865546216,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.45384615384615384,
            "acc_stderr": 0.025242770987126167,
            "acc_norm": 0.45384615384615384,
            "acc_norm_stderr": 0.025242770987126167
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.04820403072760627,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.04820403072760627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.034381579670365446,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.034381579670365446
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.46774193548387094,
            "acc_stderr": 0.028384747788813326,
            "acc_norm": 0.46774193548387094,
            "acc_norm_stderr": 0.028384747788813326
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6495726495726496,
            "acc_stderr": 0.03125610824421881,
            "acc_norm": 0.6495726495726496,
            "acc_norm_stderr": 0.03125610824421881
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.45660377358490567,
            "acc_stderr": 0.03065674869673943,
            "acc_norm": 0.45660377358490567,
            "acc_norm_stderr": 0.03065674869673943
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2518518518518518,
            "acc_stderr": 0.026466117538959916,
            "acc_norm": 0.2518518518518518,
            "acc_norm_stderr": 0.026466117538959916
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2847682119205298,
            "acc_stderr": 0.03684881521389024,
            "acc_norm": 0.2847682119205298,
            "acc_norm_stderr": 0.03684881521389024
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5771144278606966,
            "acc_stderr": 0.034932317774212816,
            "acc_norm": 0.5771144278606966,
            "acc_norm_stderr": 0.034932317774212816
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.41040462427745666,
            "acc_stderr": 0.037507570448955384,
            "acc_norm": 0.41040462427745666,
            "acc_norm_stderr": 0.037507570448955384
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.02306818884826111,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.02306818884826111
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3541666666666667,
            "acc_stderr": 0.039994111357535424,
            "acc_norm": 0.3541666666666667,
            "acc_norm_stderr": 0.039994111357535424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5086705202312138,
            "acc_stderr": 0.026915047355369804,
            "acc_norm": 0.5086705202312138,
            "acc_norm_stderr": 0.026915047355369804
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5214723926380368,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.5214723926380368,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4876543209876543,
            "acc_stderr": 0.027812262269327242,
            "acc_norm": 0.4876543209876543,
            "acc_norm_stderr": 0.027812262269327242
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5595854922279793,
            "acc_stderr": 0.035827245300360945,
            "acc_norm": 0.5595854922279793,
            "acc_norm_stderr": 0.035827245300360945
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5779816513761468,
            "acc_stderr": 0.021174991407763178,
            "acc_norm": 0.5779816513761468,
            "acc_norm_stderr": 0.021174991407763178
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.04073524322147127,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.04073524322147127
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3758169934640523,
            "acc_stderr": 0.027732834353363954,
            "acc_norm": 0.3758169934640523,
            "acc_norm_stderr": 0.027732834353363954
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.049888765156985884,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.049888765156985884
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.039777499346220734,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.039777499346220734
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.019751726508762626,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.019751726508762626
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3546099290780142,
            "acc_stderr": 0.02853865002887864,
            "acc_norm": 0.3546099290780142,
            "acc_norm_stderr": 0.02853865002887864
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.30357142857142855,
            "acc_stderr": 0.04364226155841044,
            "acc_norm": 0.30357142857142855,
            "acc_norm_stderr": 0.04364226155841044
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.03141554629402543,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.03141554629402543
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.48,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.48,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.43014705882352944,
            "acc_stderr": 0.030074971917302875,
            "acc_norm": 0.43014705882352944,
            "acc_norm_stderr": 0.030074971917302875
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37142857142857144,
            "acc_stderr": 0.03093285879278984,
            "acc_norm": 0.37142857142857144,
            "acc_norm_stderr": 0.03093285879278984
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6540084388185654,
            "acc_stderr": 0.03096481058878671,
            "acc_norm": 0.6540084388185654,
            "acc_norm_stderr": 0.03096481058878671
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3520208604954368,
            "acc_stderr": 0.012198140605353593,
            "acc_norm": 0.3520208604954368,
            "acc_norm_stderr": 0.012198140605353593
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.03859268142070264,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.03859268142070264
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3684210526315789,
            "mc1_stderr": 0.016886551261046046,
            "mc2": 0.5190921371587374,
            "mc2_stderr": 0.015978390538660552
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4734356552538371,
            "acc_stderr": 0.017166075717577747,
            "acc_norm": 0.538370720188902,
            "acc_norm_stderr": 0.017139660221845557
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "daekeun-ml/Llama-2-ko-DPO-13B",
        "model_sha": "dba5dd11263b1b42fa7d904d627f41d47330317b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}