{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4880546075085324,
            "acc_stderr": 0.014607220340597171,
            "acc_norm": 0.5435153583617748,
            "acc_norm_stderr": 0.01455594976049644
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4584744074885481,
            "acc_stderr": 0.0049725431277678695,
            "acc_norm": 0.6344353714399522,
            "acc_norm_stderr": 0.0048060390390089434
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.695906432748538,
            "acc_stderr": 0.03528211258245233,
            "acc_norm": 0.695906432748538,
            "acc_norm_stderr": 0.03528211258245233
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6990291262135923,
            "acc_stderr": 0.045416094465039476,
            "acc_norm": 0.6990291262135923,
            "acc_norm_stderr": 0.045416094465039476
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6934865900383141,
            "acc_stderr": 0.01648695289304152,
            "acc_norm": 0.6934865900383141,
            "acc_norm_stderr": 0.01648695289304152
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4595744680851064,
            "acc_stderr": 0.032579014820998356,
            "acc_norm": 0.4595744680851064,
            "acc_norm_stderr": 0.032579014820998356
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.463855421686747,
            "acc_stderr": 0.03882310850890594,
            "acc_norm": 0.463855421686747,
            "acc_norm_stderr": 0.03882310850890594
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5691318327974276,
            "acc_stderr": 0.028125340983972714,
            "acc_norm": 0.5691318327974276,
            "acc_norm_stderr": 0.028125340983972714
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5560538116591929,
            "acc_stderr": 0.03334625674242728,
            "acc_norm": 0.5560538116591929,
            "acc_norm_stderr": 0.03334625674242728
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5648854961832062,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.5648854961832062,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956913,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956913
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6919191919191919,
            "acc_stderr": 0.03289477330098615,
            "acc_norm": 0.6919191919191919,
            "acc_norm_stderr": 0.03289477330098615
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.45517241379310347,
            "acc_stderr": 0.04149886942192117,
            "acc_norm": 0.45517241379310347,
            "acc_norm_stderr": 0.04149886942192117
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.04440521906179327,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.04440521906179327
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4579831932773109,
            "acc_stderr": 0.032363611119519416,
            "acc_norm": 0.4579831932773109,
            "acc_norm_stderr": 0.032363611119519416
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.558974358974359,
            "acc_stderr": 0.02517404838400073,
            "acc_norm": 0.558974358974359,
            "acc_norm_stderr": 0.02517404838400073
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6203703703703703,
            "acc_stderr": 0.04691521224077742,
            "acc_norm": 0.6203703703703703,
            "acc_norm_stderr": 0.04691521224077742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4187192118226601,
            "acc_stderr": 0.03471192860518468,
            "acc_norm": 0.4187192118226601,
            "acc_norm_stderr": 0.03471192860518468
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5935483870967742,
            "acc_stderr": 0.027941727346256304,
            "acc_norm": 0.5935483870967742,
            "acc_norm_stderr": 0.027941727346256304
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.027236013946196697,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.027236013946196697
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5169811320754717,
            "acc_stderr": 0.030755120364119898,
            "acc_norm": 0.5169811320754717,
            "acc_norm_stderr": 0.030755120364119898
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.02944316932303154,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.02944316932303154
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6616915422885572,
            "acc_stderr": 0.033455630703391914,
            "acc_norm": 0.6616915422885572,
            "acc_norm_stderr": 0.033455630703391914
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.45664739884393063,
            "acc_stderr": 0.03798106566014498,
            "acc_norm": 0.45664739884393063,
            "acc_norm_stderr": 0.03798106566014498
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3439153439153439,
            "acc_stderr": 0.024464426625596433,
            "acc_norm": 0.3439153439153439,
            "acc_norm_stderr": 0.024464426625596433
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5069444444444444,
            "acc_stderr": 0.04180806750294938,
            "acc_norm": 0.5069444444444444,
            "acc_norm_stderr": 0.04180806750294938
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.79,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.79,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5404624277456648,
            "acc_stderr": 0.026830805998952233,
            "acc_norm": 0.5404624277456648,
            "acc_norm_stderr": 0.026830805998952233
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5276073619631901,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.5276073619631901,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5370370370370371,
            "acc_stderr": 0.027744313443376536,
            "acc_norm": 0.5370370370370371,
            "acc_norm_stderr": 0.027744313443376536
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6632124352331606,
            "acc_stderr": 0.03410780251836184,
            "acc_norm": 0.6632124352331606,
            "acc_norm_stderr": 0.03410780251836184
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.35964912280701755,
            "acc_stderr": 0.04514496132873633,
            "acc_norm": 0.35964912280701755,
            "acc_norm_stderr": 0.04514496132873633
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6385321100917432,
            "acc_stderr": 0.02059808200993737,
            "acc_norm": 0.6385321100917432,
            "acc_norm_stderr": 0.02059808200993737
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.04190596438871136,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.04190596438871136
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5,
            "acc_stderr": 0.028629916715693413,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.028629916715693413
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.6,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.04294340845212093,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.04294340845212093
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5723684210526315,
            "acc_stderr": 0.040260970832965634,
            "acc_norm": 0.5723684210526315,
            "acc_norm_stderr": 0.040260970832965634
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.49019607843137253,
            "acc_stderr": 0.02022394600507429,
            "acc_norm": 0.49019607843137253,
            "acc_norm_stderr": 0.02022394600507429
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.34397163120567376,
            "acc_stderr": 0.02833801742861132,
            "acc_norm": 0.34397163120567376,
            "acc_norm_stderr": 0.02833801742861132
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.0432704093257873,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.0432704093257873
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.03256850570293646,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.03256850570293646
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2223463687150838,
            "acc_stderr": 0.013907189208156881,
            "acc_norm": 0.2223463687150838,
            "acc_norm_stderr": 0.013907189208156881
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.65,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.40441176470588236,
            "acc_stderr": 0.029812630701569746,
            "acc_norm": 0.40441176470588236,
            "acc_norm_stderr": 0.029812630701569746
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5714285714285714,
            "acc_stderr": 0.03168091161233882,
            "acc_norm": 0.5714285714285714,
            "acc_norm_stderr": 0.03168091161233882
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7215189873417721,
            "acc_stderr": 0.029178682304842548,
            "acc_norm": 0.7215189873417721,
            "acc_norm_stderr": 0.029178682304842548
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3617992177314211,
            "acc_stderr": 0.012272736233262936,
            "acc_norm": 0.3617992177314211,
            "acc_norm_stderr": 0.012272736233262936
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6421568627450981,
            "acc_stderr": 0.03364487286088298,
            "acc_norm": 0.6421568627450981,
            "acc_norm_stderr": 0.03364487286088298
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6060606060606061,
            "acc_stderr": 0.0381549430868893,
            "acc_norm": 0.6060606060606061,
            "acc_norm_stderr": 0.0381549430868893
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.34761321909424725,
            "mc1_stderr": 0.016670769188897306,
            "mc2": 0.5385013086039373,
            "mc2_stderr": 0.015618614265978098
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.6103896103896104,
            "acc_stderr": 0.016766161671893525,
            "acc_norm": 0.6115702479338843,
            "acc_norm_stderr": 0.01675692157106943
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.1",
        "model_sha": "0c1209f805eebdc65d8c8c71c398bb156f6f8d86",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}