Record fields: results (dict), versions (dict), config_general (dict)
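Each record below carries these three fields for one evaluated model. As a rough illustration only (the layout is assumed from the rows that follow, and the literal numbers are copied from the first record), a minimal Python sketch of how such a record might be read and summarized:

```python
# Sketch under assumed layout: "results" is keyed by "harness|<task>|<num_fewshot>",
# and "config_general" holds model metadata such as model_name and model_dtype.
from statistics import mean

record = {
    "results": {
        "harness|ko_arc_challenge|25": {"acc": 0.29266211604095566, "acc_norm": 0.3438566552901024},
        "harness|ko_hellaswag|10": {"acc": 0.39016132244572793, "acc_norm": 0.5065723959370644},
        "harness|ko_mmlu_world_religions|5": {"acc": 0.2982456140350877, "acc_norm": 0.2982456140350877},
        "harness|ko_mmlu_management|5": {"acc": 0.17475728155339806, "acc_norm": 0.17475728155339806},
    },
    "config_general": {"model_name": "ingeol/ppo_test", "model_dtype": "torch.float16"},
}

# Macro-average acc_norm over the ko_mmlu subtasks present in this record.
mmlu_scores = [
    v["acc_norm"] for k, v in record["results"].items() if k.startswith("harness|ko_mmlu_")
]
print(record["config_general"]["model_name"], round(mean(mmlu_scores), 4))
```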
{ "harness|ko_arc_challenge|25": { "acc": 0.29266211604095566, "acc_stderr": 0.013295916103619404, "acc_norm": 0.3438566552901024, "acc_norm_stderr": 0.013880644570156213 }, "harness|ko_hellaswag|10": { "acc": 0.39016132244572793, "acc_stderr": 0.004867893927258242, "acc_norm": 0.5065723959370644, "acc_norm_stderr": 0.004989350311751651 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2982456140350877, "acc_stderr": 0.03508771929824564, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.03508771929824564 }, "harness|ko_mmlu_management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266196, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266196 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2515964240102171, "acc_stderr": 0.015517322365529631, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.015517322365529631 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.20851063829787234, "acc_stderr": 0.026556982117838756, "acc_norm": 0.20851063829787234, "acc_norm_stderr": 0.026556982117838756 }, "harness|ko_mmlu_virology|5": { "acc": 0.18072289156626506, "acc_stderr": 0.029955737855810138, "acc_norm": 0.18072289156626506, "acc_norm_stderr": 0.029955737855810138 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3086816720257235, "acc_stderr": 0.026236965881153266, "acc_norm": 0.3086816720257235, "acc_norm_stderr": 0.026236965881153266 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.17488789237668162, "acc_stderr": 0.025495284626444972, "acc_norm": 0.17488789237668162, "acc_norm_stderr": 0.025495284626444972 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.03880848301082396, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082396 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03191178226713549, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713549 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3103448275862069, "acc_stderr": 0.03855289616378949, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.03855289616378949 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237656, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237656 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.226890756302521, "acc_stderr": 0.02720537153827948, "acc_norm": 0.226890756302521, "acc_norm_stderr": 0.02720537153827948 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.22564102564102564, "acc_stderr": 0.02119363252514854, "acc_norm": 0.22564102564102564, "acc_norm_stderr": 0.02119363252514854 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252627, "acc_norm": 0.23148148148148148, 
"acc_norm_stderr": 0.04077494709252627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2512315270935961, "acc_stderr": 0.030516530732694436, "acc_norm": 0.2512315270935961, "acc_norm_stderr": 0.030516530732694436 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.27419354838709675, "acc_stderr": 0.025378139970885203, "acc_norm": 0.27419354838709675, "acc_norm_stderr": 0.025378139970885203 }, "harness|ko_mmlu_marketing|5": { "acc": 0.24358974358974358, "acc_stderr": 0.028120966503914407, "acc_norm": 0.24358974358974358, "acc_norm_stderr": 0.028120966503914407 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.25660377358490566, "acc_stderr": 0.026880647889051996, "acc_norm": 0.25660377358490566, "acc_norm_stderr": 0.026880647889051996 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.19090909090909092, "acc_stderr": 0.03764425585984927, "acc_norm": 0.19090909090909092, "acc_norm_stderr": 0.03764425585984927 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.036030385453603826, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603826 }, "harness|ko_mmlu_sociology|5": { "acc": 0.21890547263681592, "acc_stderr": 0.029239174636647, "acc_norm": 0.21890547263681592, "acc_norm_stderr": 0.029239174636647 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2543352601156069, "acc_stderr": 0.0332055644308557, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.0332055644308557 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2328042328042328, "acc_stderr": 0.02176596167215452, "acc_norm": 0.2328042328042328, "acc_norm_stderr": 0.02176596167215452 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.03623089915724148, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.03623089915724148 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30864197530864196, "acc_stderr": 0.02570264026060376, "acc_norm": 0.30864197530864196, "acc_norm_stderr": 0.02570264026060376 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.03051611137147602, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.03051611137147602 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748142, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23669724770642203, "acc_stderr": 0.018224078117299078, "acc_norm": 0.23669724770642203, "acc_norm_stderr": 0.018224078117299078 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.0339549002085611, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.0339549002085611 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.23202614379084968, "acc_stderr": 0.024170840879341016, "acc_norm": 0.23202614379084968, "acc_norm_stderr": 0.024170840879341016 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322674, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322674 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2892561983471074, "acc_stderr": 0.041391127276354626, "acc_norm": 0.2892561983471074, "acc_norm_stderr": 0.041391127276354626 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3026315789473684, "acc_stderr": 0.037385206761196665, "acc_norm": 0.3026315789473684, "acc_norm_stderr": 0.037385206761196665 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.018120224251484577, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.018120224251484577 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.025645553622266736, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266736 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03894641120044793, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03894641120044793 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402545, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402545 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25921787709497207, "acc_stderr": 0.01465578083749772, "acc_norm": 0.25921787709497207, "acc_norm_stderr": 0.01465578083749772 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3272058823529412, "acc_stderr": 0.028501452860396563, "acc_norm": 0.3272058823529412, "acc_norm_stderr": 0.028501452860396563 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24897959183673468, "acc_stderr": 0.027682979522960234, "acc_norm": 0.24897959183673468, "acc_norm_stderr": 0.027682979522960234 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658335, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658335 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2816166883963494, "acc_stderr": 0.011487783272786696, "acc_norm": 0.2816166883963494, "acc_norm_stderr": 0.011487783272786696 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.03166009679399813, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.03166009679399813 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.033464098810559534, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.033464098810559534 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27050183598531213, "mc1_stderr": 0.015550778332842883, "mc2": 0.4208363898748992, "mc2_stderr": 0.014946599322770709 }, "harness|ko_commongen_v2|2": { "acc": 0.6314553990610329, "acc_stderr": 0.016536804306154545, "acc_norm": 0.6936619718309859, "acc_norm_stderr": 0.0158019112867147 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "ingeol/ppo_test", "model_sha": "af05b472a278a7bcad3de754828b78e7c284923a", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.29266211604095566, "acc_stderr": 0.013295916103619404, "acc_norm": 0.3438566552901024, "acc_norm_stderr": 0.013880644570156213 }, "harness|ko_hellaswag|10": { "acc": 0.39026090420235016, "acc_stderr": 0.004868117598481941, "acc_norm": 0.5064728141804421, "acc_norm_stderr": 0.00498936327695524 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2982456140350877, "acc_stderr": 0.03508771929824564, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.03508771929824564 }, "harness|ko_mmlu_management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.25287356321839083, "acc_stderr": 0.015543377313719681, "acc_norm": 0.25287356321839083, "acc_norm_stderr": 0.015543377313719681 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.35555555555555557, "acc_stderr": 0.04135176749720386, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.04135176749720386 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.20425531914893616, "acc_stderr": 0.026355158413349407, "acc_norm": 0.20425531914893616, "acc_norm_stderr": 0.026355158413349407 }, "harness|ko_mmlu_virology|5": { "acc": 0.18072289156626506, "acc_stderr": 0.02995573785581014, "acc_norm": 0.18072289156626506, "acc_norm_stderr": 0.02995573785581014 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3279742765273312, "acc_stderr": 0.026664410886937613, "acc_norm": 0.3279742765273312, "acc_norm_stderr": 0.026664410886937613 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.16591928251121077, "acc_stderr": 0.024967553196547157, "acc_norm": 0.16591928251121077, "acc_norm_stderr": 0.024967553196547157 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.03880848301082396, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082396 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03191178226713549, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713549 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.038312260488503336, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.038312260488503336 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.027025433498882378, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882378 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2282051282051282, "acc_stderr": 0.02127839386358628, "acc_norm": 0.2282051282051282, "acc_norm_stderr": 0.02127839386358628 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252627, "acc_norm": 
0.23148148148148148, "acc_norm_stderr": 0.04077494709252627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.24630541871921183, "acc_stderr": 0.030315099285617732, "acc_norm": 0.24630541871921183, "acc_norm_stderr": 0.030315099285617732 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2709677419354839, "acc_stderr": 0.025284416114900156, "acc_norm": 0.2709677419354839, "acc_norm_stderr": 0.025284416114900156 }, "harness|ko_mmlu_marketing|5": { "acc": 0.23931623931623933, "acc_stderr": 0.027951826808924333, "acc_norm": 0.23931623931623933, "acc_norm_stderr": 0.027951826808924333 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2641509433962264, "acc_stderr": 0.027134291628741695, "acc_norm": 0.2641509433962264, "acc_norm_stderr": 0.027134291628741695 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.19090909090909092, "acc_stderr": 0.03764425585984927, "acc_norm": 0.19090909090909092, "acc_norm_stderr": 0.03764425585984927 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969654, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969654 }, "harness|ko_mmlu_sociology|5": { "acc": 0.21890547263681592, "acc_stderr": 0.029239174636647, "acc_norm": 0.21890547263681592, "acc_norm_stderr": 0.029239174636647 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.03345036916788991, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.03345036916788991 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.23809523809523808, "acc_stderr": 0.021935878081184766, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.021935878081184766 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.03942772444036624, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036624 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.023357365785874037, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.023357365785874037 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.03623089915724148, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.03623089915724148 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30864197530864196, "acc_stderr": 0.02570264026060376, "acc_norm": 0.30864197530864196, "acc_norm_stderr": 0.02570264026060376 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.03051611137147602, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.03051611137147602 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23302752293577983, "acc_stderr": 0.0181256691808615, "acc_norm": 0.23302752293577983, "acc_norm_stderr": 0.0181256691808615 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333338, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333338 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.02473998135511359, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.02473998135511359 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322674, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322674 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2975206611570248, "acc_stderr": 0.04173349148083499, "acc_norm": 0.2975206611570248, "acc_norm_stderr": 0.04173349148083499 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3026315789473684, "acc_stderr": 0.03738520676119667, "acc_norm": 0.3026315789473684, "acc_norm_stderr": 0.03738520676119667 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2761437908496732, "acc_stderr": 0.018087276935663133, "acc_norm": 0.2761437908496732, "acc_norm_stderr": 0.018087276935663133 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.025645553622266736, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266736 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.03894641120044793, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03894641120044793 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402545, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402545 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2659217877094972, "acc_stderr": 0.014776765066438895, "acc_norm": 0.2659217877094972, "acc_norm_stderr": 0.014776765066438895 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3272058823529412, "acc_stderr": 0.028501452860396563, "acc_norm": 0.3272058823529412, "acc_norm_stderr": 0.028501452860396563 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24489795918367346, "acc_stderr": 0.027529637440174934, "acc_norm": 0.24489795918367346, "acc_norm_stderr": 0.027529637440174934 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.27835723598435463, "acc_stderr": 0.011446990197380982, "acc_norm": 0.27835723598435463, "acc_norm_stderr": 0.011446990197380982 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.03132179803083292, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.03132179803083292 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27050183598531213, "mc1_stderr": 0.015550778332842883, "mc2": 0.4208363898748992, "mc2_stderr": 0.014946599322770709 }, "harness|ko_commongen_v2|2": { "acc": 0.6314553990610329, "acc_stderr": 0.016536804306154545, "acc_norm": 0.6936619718309859, "acc_norm_stderr": 0.0158019112867147 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "ingeol/ppo_test", "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.2713310580204778, "acc_stderr": 0.012993807727545789, "acc_norm": 0.310580204778157, "acc_norm_stderr": 0.013522292098053057 }, "harness|ko_hellaswag|10": { "acc": 0.36456881099382593, "acc_stderr": 0.004803253812881045, "acc_norm": 0.46564429396534557, "acc_norm_stderr": 0.004977988452502642 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03218093795602357, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03218093795602357 }, "harness|ko_mmlu_management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.04498676320572924, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.04498676320572924 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.23499361430395913, "acc_stderr": 0.015162024152278445, "acc_norm": 0.23499361430395913, "acc_norm_stderr": 0.015162024152278445 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.1829787234042553, "acc_stderr": 0.02527604100044997, "acc_norm": 0.1829787234042553, "acc_norm_stderr": 0.02527604100044997 }, "harness|ko_mmlu_virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.0357160923005348, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.0357160923005348 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.27009646302250806, "acc_stderr": 0.0252180403734106, "acc_norm": 0.27009646302250806, "acc_norm_stderr": 0.0252180403734106 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.27802690582959644, "acc_stderr": 0.030069584874494033, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.030069584874494033 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.03727673575596918, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.03727673575596918 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3181818181818182, "acc_stderr": 0.033184773338453315, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.033184773338453315 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2827586206896552, "acc_stderr": 0.03752833958003336, "acc_norm": 0.2827586206896552, "acc_norm_stderr": 0.03752833958003336 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793254, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793254 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.030868682604121622, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121622 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.358974358974359, "acc_stderr": 0.024321738484602364, "acc_norm": 0.358974358974359, "acc_norm_stderr": 0.024321738484602364 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243838, "acc_norm": 0.24074074074074073, 
"acc_norm_stderr": 0.04133119440243838 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03010833071801162, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03010833071801162 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3096774193548387, "acc_stderr": 0.026302774983517414, "acc_norm": 0.3096774193548387, "acc_norm_stderr": 0.026302774983517414 }, "harness|ko_mmlu_marketing|5": { "acc": 0.21794871794871795, "acc_stderr": 0.02704685763071668, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.02704685763071668 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2528301886792453, "acc_stderr": 0.026749899771241238, "acc_norm": 0.2528301886792453, "acc_norm_stderr": 0.026749899771241238 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 0.04461272175910507, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.04461272175910507 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.03368762932259431, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.03368762932259431 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948365, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948365 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.23699421965317918, "acc_stderr": 0.022894082489925992, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.022894082489925992 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.0332201579577674, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.0332201579577674 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 0.024569223600460845, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.024569223600460845 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3160621761658031, "acc_stderr": 0.033553973696861736, "acc_norm": 0.3160621761658031, "acc_norm_stderr": 0.033553973696861736 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.0404933929774814, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.0404933929774814 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3247706422018349, "acc_stderr": 0.02007772910931033, "acc_norm": 0.3247706422018349, "acc_norm_stderr": 0.02007772910931033 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.025738854797818737, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.025738854797818737 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.03941897526516302, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.03941897526516302 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351586, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351586 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.22712418300653595, "acc_stderr": 0.016949853279212376, "acc_norm": 0.22712418300653595, "acc_norm_stderr": 0.016949853279212376 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2375886524822695, "acc_stderr": 0.025389512552729906, "acc_norm": 0.2375886524822695, "acc_norm_stderr": 0.025389512552729906 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4522058823529412, "acc_stderr": 0.030233758551596452, "acc_norm": 0.4522058823529412, "acc_norm_stderr": 0.030233758551596452 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3020408163265306, "acc_stderr": 0.029393609319879808, "acc_norm": 0.3020408163265306, "acc_norm_stderr": 0.029393609319879808 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.25097783572359844, "acc_stderr": 0.011073730299187224, "acc_norm": 0.25097783572359844, "acc_norm_stderr": 0.011073730299187224 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.28886168910648713, "mc1_stderr": 0.015866346401384308, "mc2": 0.4382110452098873, "mc2_stderr": 0.015112522165835224 }, "harness|ko_commongen_v2|2": { "acc": 0.2828638497652582, "acc_stderr": 0.01543919852423632, "acc_norm": 0.36032863849765256, "acc_norm_stderr": 0.016457469695705117 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", "model_sha": "79acd9e76f6a5f1e814294761b11c31fc24b9e64", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.2841296928327645, "acc_stderr": 0.013179442447653887, "acc_norm": 0.32593856655290104, "acc_norm_stderr": 0.013697432466693237 }, "harness|ko_hellaswag|10": { "acc": 0.3791077474606652, "acc_stderr": 0.004841734453506664, "acc_norm": 0.4759012148974308, "acc_norm_stderr": 0.004983982396187361 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.19298245614035087, "acc_stderr": 0.030267457554898465, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.030267457554898465 }, "harness|ko_mmlu_management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2503192848020434, "acc_stderr": 0.015491088951494597, "acc_norm": 0.2503192848020434, "acc_norm_stderr": 0.015491088951494597 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.0391545063041425, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.0391545063041425 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.251063829787234, "acc_stderr": 0.028346963777162452, "acc_norm": 0.251063829787234, "acc_norm_stderr": 0.028346963777162452 }, "harness|ko_mmlu_virology|5": { "acc": 0.25301204819277107, "acc_stderr": 0.033844291552331346, "acc_norm": 0.25301204819277107, "acc_norm_stderr": 0.033844291552331346 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.26366559485530544, "acc_stderr": 0.02502553850053234, "acc_norm": 0.26366559485530544, "acc_norm_stderr": 0.02502553850053234 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.28699551569506726, "acc_stderr": 0.030360379710291936, "acc_norm": 0.28699551569506726, "acc_norm_stderr": 0.030360379710291936 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2900763358778626, "acc_stderr": 0.03980066246467765, "acc_norm": 0.2900763358778626, "acc_norm_stderr": 0.03980066246467765 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.03154449888270286, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.03154449888270286 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.037800192304380135, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.037800192304380135 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617746, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617746 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.027553614467863783, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.027553614467863783 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2564102564102564, "acc_stderr": 0.02213908110397153, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.02213908110397153 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 
0.04186091791394607 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733545, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733545 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.24193548387096775, "acc_stderr": 0.024362599693031096, "acc_norm": 0.24193548387096775, "acc_norm_stderr": 0.024362599693031096 }, "harness|ko_mmlu_marketing|5": { "acc": 0.21794871794871795, "acc_stderr": 0.027046857630716677, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.027046857630716677 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.24150943396226415, "acc_stderr": 0.026341480371118352, "acc_norm": 0.24150943396226415, "acc_norm_stderr": 0.026341480371118352 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3181818181818182, "acc_stderr": 0.04461272175910508, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.04461272175910508 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473836, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473836 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2328042328042328, "acc_stderr": 0.021765961672154534, "acc_norm": 0.2328042328042328, "acc_norm_stderr": 0.021765961672154534 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.25617283950617287, "acc_stderr": 0.0242885336377261, "acc_norm": 0.25617283950617287, "acc_norm_stderr": 0.0242885336377261 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2538860103626943, "acc_stderr": 0.03141024780565319, "acc_norm": 0.2538860103626943, "acc_norm_stderr": 0.03141024780565319 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.18421052631578946, "acc_stderr": 0.03646758875075566, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.03646758875075566 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.30091743119266057, "acc_stderr": 0.019664751366802114, "acc_norm": 0.30091743119266057, "acc_norm_stderr": 0.019664751366802114 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04006168083848878, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04006168083848878 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.22875816993464052, "acc_stderr": 0.02405102973991225, "acc_norm": 0.22875816993464052, "acc_norm_stderr": 0.02405102973991225 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2231404958677686, "acc_stderr": 0.03800754475228733, "acc_norm": 0.2231404958677686, "acc_norm_stderr": 0.03800754475228733 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.0355418036802569, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.0355418036802569 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.24019607843137256, "acc_stderr": 0.017282760695167418, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.017282760695167418 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2127659574468085, "acc_stderr": 0.024414612974307703, "acc_norm": 0.2127659574468085, "acc_norm_stderr": 0.024414612974307703 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.26851851851851855, "acc_stderr": 0.03022522616001237, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.03022522616001237 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27150837988826815, "acc_stderr": 0.014874252168095278, "acc_norm": 0.27150837988826815, "acc_norm_stderr": 0.014874252168095278 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3602941176470588, "acc_stderr": 0.029163128570670736, "acc_norm": 0.3602941176470588, "acc_norm_stderr": 0.029163128570670736 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2571428571428571, "acc_stderr": 0.027979823538744546, "acc_norm": 0.2571428571428571, "acc_norm_stderr": 0.027979823538744546 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2470664928292047, "acc_stderr": 0.011015752255279338, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279338 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.03019028245350194, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.03019028245350194 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2766217870257038, "mc1_stderr": 0.015659605755326905, "mc2": 0.43178124206391555, "mc2_stderr": 0.01588615796057271 }, "harness|ko_commongen_v2|2": { "acc": 0.20187793427230047, "acc_stderr": 0.013759869182275584, "acc_norm": 0.2323943661971831, "acc_norm_stderr": 0.014478284105610294 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", "model_sha": "acc7ed3105114ba922fe4b408807b57e39ec0cff", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.2713310580204778, "acc_stderr": 0.012993807727545787, "acc_norm": 0.31399317406143346, "acc_norm_stderr": 0.013562691224726293 }, "harness|ko_hellaswag|10": { "acc": 0.36656044612626965, "acc_stderr": 0.004808802114592829, "acc_norm": 0.46564429396534557, "acc_norm_stderr": 0.0049779884525026396 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2222222222222222, "acc_stderr": 0.031885780176863984, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.031885780176863984 }, "harness|ko_mmlu_management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384493, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384493 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.24648786717752236, "acc_stderr": 0.015411308769686938, "acc_norm": 0.24648786717752236, "acc_norm_stderr": 0.015411308769686938 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.25925925925925924, "acc_stderr": 0.03785714465066652, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.03785714465066652 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.03078373675774566, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.03078373675774566 }, "harness|ko_mmlu_virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.26688102893890675, "acc_stderr": 0.025122637608816622, "acc_norm": 0.26688102893890675, "acc_norm_stderr": 0.025122637608816622 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.33183856502242154, "acc_stderr": 0.03160295143776679, "acc_norm": 0.33183856502242154, "acc_norm_stderr": 0.03160295143776679 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.038808483010823944, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.038808483010823944 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29797979797979796, "acc_stderr": 0.03258630383836556, "acc_norm": 0.29797979797979796, "acc_norm_stderr": 0.03258630383836556 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727772, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727772 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.038739587141493524, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.038739587141493524 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23949579831932774, "acc_stderr": 0.027722065493361283, "acc_norm": 0.23949579831932774, "acc_norm_stderr": 0.027722065493361283 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2076923076923077, "acc_stderr": 0.020567539567246804, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.020567539567246804 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.04414343666854932, "acc_norm": 
0.2962962962962963, "acc_norm_stderr": 0.04414343666854932 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.030712730070982592, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.030712730070982592 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.24193548387096775, "acc_stderr": 0.024362599693031083, "acc_norm": 0.24193548387096775, "acc_norm_stderr": 0.024362599693031083 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2264957264957265, "acc_stderr": 0.027421007295392923, "acc_norm": 0.2264957264957265, "acc_norm_stderr": 0.027421007295392923 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.25660377358490566, "acc_stderr": 0.026880647889052, "acc_norm": 0.25660377358490566, "acc_norm_stderr": 0.026880647889052 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.3, "acc_stderr": 0.04389311454644286, "acc_norm": 0.3, "acc_norm_stderr": 0.04389311454644286 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844054, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844054 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473835, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473835 }, "harness|ko_mmlu_sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2138728323699422, "acc_stderr": 0.03126511206173041, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.03126511206173041 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948368, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948368 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2514450867052023, "acc_stderr": 0.023357365785874037, "acc_norm": 0.2514450867052023, "acc_norm_stderr": 0.023357365785874037 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.24539877300613497, "acc_stderr": 0.03380939813943354, "acc_norm": 0.24539877300613497, "acc_norm_stderr": 0.03380939813943354 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02438366553103545, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02438366553103545 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.030516111371476005, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.030516111371476005 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.038351539543994194, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.038351539543994194 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.25688073394495414, "acc_stderr": 0.018732492928342472, "acc_norm": 0.25688073394495414, "acc_norm_stderr": 0.018732492928342472 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147127, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147127 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.02473998135511359, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.02473998135511359 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.19, "acc_stderr": 0.03942772444036624, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036624 }, "harness|ko_mmlu_international_law|5": { "acc": 0.23140495867768596, "acc_stderr": 0.03849856098794088, "acc_norm": 0.23140495867768596, "acc_norm_stderr": 0.03849856098794088 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.24342105263157895, "acc_stderr": 0.034923496688842384, "acc_norm": 0.24342105263157895, "acc_norm_stderr": 0.034923496688842384 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.23039215686274508, "acc_stderr": 0.01703522925803404, "acc_norm": 0.23039215686274508, "acc_norm_stderr": 0.01703522925803404 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.22695035460992907, "acc_stderr": 0.02498710636564297, "acc_norm": 0.22695035460992907, "acc_norm_stderr": 0.02498710636564297 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.030546745264953167, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.030546745264953167 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2681564245810056, "acc_stderr": 0.014816119635316994, "acc_norm": 0.2681564245810056, "acc_norm_stderr": 0.014816119635316994 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.17, "acc_stderr": 0.037752516806863715, "acc_norm": 0.17, "acc_norm_stderr": 0.037752516806863715 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3014705882352941, "acc_stderr": 0.027875982114273168, "acc_norm": 0.3014705882352941, "acc_norm_stderr": 0.027875982114273168 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.23673469387755103, "acc_stderr": 0.027212835884073163, "acc_norm": 0.23673469387755103, "acc_norm_stderr": 0.027212835884073163 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.22362869198312235, "acc_stderr": 0.027123298205229972, "acc_norm": 0.22362869198312235, "acc_norm_stderr": 0.027123298205229972 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24185136897001303, "acc_stderr": 0.010936550813827054, "acc_norm": 0.24185136897001303, "acc_norm_stderr": 0.010936550813827054 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.28431372549019607, "acc_stderr": 0.031660096793998116, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.031660096793998116 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.01552856663708731, "mc2": 0.42575853795337826, "mc2_stderr": 0.016210145327267837 }, "harness|ko_commongen_v2|2": { "acc": 0.1983568075117371, "acc_stderr": 0.013669396132574575, "acc_norm": 0.22535211267605634, "acc_norm_stderr": 0.014322479434188889 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", "model_sha": "ab9bbba26729005519ac0cc01b349be5e2ad95fe", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.2713310580204778, "acc_stderr": 0.012993807727545787, "acc_norm": 0.310580204778157, "acc_norm_stderr": 0.013522292098053055 }, "harness|ko_hellaswag|10": { "acc": 0.3331009759012149, "acc_stderr": 0.004703590558552501, "acc_norm": 0.41127265484963155, "acc_norm_stderr": 0.004910588449330016 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.038316105328219316, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.045821241601615506, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.045821241601615506 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.36909323116219667, "acc_stderr": 0.017256283109124613, "acc_norm": 0.36909323116219667, "acc_norm_stderr": 0.017256283109124613 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3037037037037037, "acc_stderr": 0.03972552884785138, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.03972552884785138 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3574468085106383, "acc_stderr": 0.03132941789476425, "acc_norm": 0.3574468085106383, "acc_norm_stderr": 0.03132941789476425 }, "harness|ko_mmlu_virology|5": { "acc": 0.25903614457831325, "acc_stderr": 0.03410646614071857, "acc_norm": 0.25903614457831325, "acc_norm_stderr": 0.03410646614071857 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3279742765273312, "acc_stderr": 0.02666441088693762, "acc_norm": 0.3279742765273312, "acc_norm_stderr": 0.02666441088693762 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.032190792004199956 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.366412213740458, "acc_stderr": 0.04225875451969638, "acc_norm": 0.366412213740458, "acc_norm_stderr": 0.04225875451969638 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3282828282828283, "acc_stderr": 0.03345678422756777, "acc_norm": 0.3282828282828283, "acc_norm_stderr": 0.03345678422756777 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.039966295748767186, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.039966295748767186 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416545, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416545 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.030388353551886845, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.030388353551886845 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.30512820512820515, "acc_stderr": 0.023346335293325887, "acc_norm": 0.30512820512820515, "acc_norm_stderr": 0.023346335293325887 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.39814814814814814, "acc_stderr": 0.04732332615978814, "acc_norm": 0.39814814814814814, 
"acc_norm_stderr": 0.04732332615978814 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.33497536945812806, "acc_stderr": 0.033208527423483104, "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.033208527423483104 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.32903225806451614, "acc_stderr": 0.02672949906834997, "acc_norm": 0.32903225806451614, "acc_norm_stderr": 0.02672949906834997 }, "harness|ko_mmlu_marketing|5": { "acc": 0.49145299145299143, "acc_stderr": 0.032751303000970296, "acc_norm": 0.49145299145299143, "acc_norm_stderr": 0.032751303000970296 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.028254200344438676, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.028254200344438676 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.35454545454545455, "acc_stderr": 0.04582004841505415, "acc_norm": 0.35454545454545455, "acc_norm_stderr": 0.04582004841505415 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945273, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945273 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2119205298013245, "acc_stderr": 0.033367670865679766, "acc_norm": 0.2119205298013245, "acc_norm_stderr": 0.033367670865679766 }, "harness|ko_mmlu_sociology|5": { "acc": 0.4577114427860697, "acc_stderr": 0.03522865864099597, "acc_norm": 0.4577114427860697, "acc_norm_stderr": 0.03522865864099597 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.03345036916788991, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.03345036916788991 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2751322751322751, "acc_stderr": 0.02300008685906864, "acc_norm": 0.2751322751322751, "acc_norm_stderr": 0.02300008685906864 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.49, "acc_stderr": 0.05024183937956913, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.37283236994219654, "acc_stderr": 0.026033890613576288, "acc_norm": 0.37283236994219654, "acc_norm_stderr": 0.026033890613576288 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664742, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664742 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.36728395061728397, "acc_stderr": 0.02682280175950789, "acc_norm": 0.36728395061728397, "acc_norm_stderr": 0.02682280175950789 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.35751295336787564, "acc_stderr": 0.03458816042181007, "acc_norm": 0.35751295336787564, "acc_norm_stderr": 0.03458816042181007 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.28807339449541286, "acc_stderr": 0.019416445892636018, "acc_norm": 0.28807339449541286, "acc_norm_stderr": 0.019416445892636018 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.04073524322147126, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147126 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.38235294117647056, "acc_stderr": 0.027826109307283683, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.027826109307283683 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5206611570247934, "acc_stderr": 0.04560456086387235, "acc_norm": 0.5206611570247934, "acc_norm_stderr": 0.04560456086387235 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351586, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351586 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3088235294117647, "acc_stderr": 0.018690850273595284, "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.018690850273595284 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2872340425531915, "acc_stderr": 0.026992199173064356, "acc_norm": 0.2872340425531915, "acc_norm_stderr": 0.026992199173064356 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.37962962962962965, "acc_stderr": 0.03309682581119035, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.03309682581119035 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22426470588235295, "acc_stderr": 0.025336848563332386, "acc_norm": 0.22426470588235295, "acc_norm_stderr": 0.025336848563332386 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39591836734693875, "acc_stderr": 0.03130802899065685, "acc_norm": 0.39591836734693875, "acc_norm_stderr": 0.03130802899065685 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.35864978902953587, "acc_stderr": 0.031219569445301854, "acc_norm": 0.35864978902953587, "acc_norm_stderr": 0.031219569445301854 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2685788787483703, "acc_stderr": 0.011320056629121734, "acc_norm": 0.2685788787483703, "acc_norm_stderr": 0.011320056629121734 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3088235294117647, "acc_stderr": 0.03242661719827218, "acc_norm": 0.3088235294117647, "acc_norm_stderr": 0.03242661719827218 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3393939393939394, "acc_stderr": 0.036974422050315967, "acc_norm": 0.3393939393939394, "acc_norm_stderr": 0.036974422050315967 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2729498164014688, "mc1_stderr": 0.015594753632006516, "mc2": 0.4392204501367092, "mc2_stderr": 0.01533439619345391 }, "harness|ko_commongen_v2|2": { "acc": 0.4413145539906103, "acc_stderr": 0.01702131167184747, "acc_norm": 0.5, "acc_norm_stderr": 0.017139779254776524 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "yeen214/test_llama2_7b", "model_sha": "69a4886f51ed752216cdd7f41a584d14240126f9", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
{ "harness|ko_arc_challenge|25": { "acc": 0.21416382252559726, "acc_stderr": 0.011988383205966496, "acc_norm": 0.257679180887372, "acc_norm_stderr": 0.012780770562768409 }, "harness|ko_hellaswag|10": { "acc": 0.2524397530372436, "acc_stderr": 0.004335243434486834, "acc_norm": 0.25323640709022105, "acc_norm_stderr": 0.004339764434219064 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.30994152046783624, "acc_stderr": 0.03546976959393163, "acc_norm": 0.30994152046783624, "acc_norm_stderr": 0.03546976959393163 }, "harness|ko_mmlu_management|5": { "acc": 0.22330097087378642, "acc_stderr": 0.04123553189891431, "acc_norm": 0.22330097087378642, "acc_norm_stderr": 0.04123553189891431 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.24265644955300128, "acc_stderr": 0.015329888940899894, "acc_norm": 0.24265644955300128, "acc_norm_stderr": 0.015329888940899894 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.03915450630414251, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.03915450630414251 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2170212765957447, "acc_stderr": 0.026947483121496228, "acc_norm": 0.2170212765957447, "acc_norm_stderr": 0.026947483121496228 }, "harness|ko_mmlu_virology|5": { "acc": 0.2289156626506024, "acc_stderr": 0.03270745277352477, "acc_norm": 0.2289156626506024, "acc_norm_stderr": 0.03270745277352477 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2829581993569132, "acc_stderr": 0.025583062489984824, "acc_norm": 0.2829581993569132, "acc_norm_stderr": 0.025583062489984824 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.21973094170403587, "acc_stderr": 0.027790177064383602, "acc_norm": 0.21973094170403587, "acc_norm_stderr": 0.027790177064383602 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.20610687022900764, "acc_stderr": 0.03547771004159462, "acc_norm": 0.20610687022900764, "acc_norm_stderr": 0.03547771004159462 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.25252525252525254, "acc_stderr": 0.030954055470365914, "acc_norm": 0.25252525252525254, "acc_norm_stderr": 0.030954055470365914 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.03780019230438014, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.03780019230438014 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993177, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993177 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.027025433498882392, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.027025433498882392 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2564102564102564, "acc_stderr": 0.022139081103971545, "acc_norm": 0.2564102564102564, "acc_norm_stderr": 0.022139081103971545 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.04414343666854932, "acc_norm": 0.2962962962962963, 
"acc_norm_stderr": 0.04414343666854932 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.22167487684729065, "acc_stderr": 0.029225575892489614, "acc_norm": 0.22167487684729065, "acc_norm_stderr": 0.029225575892489614 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.02468597928623996, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.02468597928623996 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.029343114798094472, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.029343114798094472 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.24528301886792453, "acc_stderr": 0.026480357179895702, "acc_norm": 0.24528301886792453, "acc_norm_stderr": 0.026480357179895702 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.17272727272727273, "acc_stderr": 0.03620691833929219, "acc_norm": 0.17272727272727273, "acc_norm_stderr": 0.03620691833929219 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.03511807571804724, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804724 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.02992941540834838, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.02992941540834838 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.03368762932259431, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.03368762932259431 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02141168439369418, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02141168439369418 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03745554791462457, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03745554791462457 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2543352601156069, "acc_stderr": 0.02344582627654554, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.02344582627654554 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.0332201579577674, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.0332201579577674 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2623456790123457, "acc_stderr": 0.02447722285613511, "acc_norm": 0.2623456790123457, "acc_norm_stderr": 0.02447722285613511 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.30569948186528495, "acc_stderr": 0.033248379397581594, "acc_norm": 0.30569948186528495, "acc_norm_stderr": 0.033248379397581594 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.21834862385321102, "acc_stderr": 0.017712600528722727, "acc_norm": 0.21834862385321102, "acc_norm_stderr": 0.017712600528722727 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.037649508797906045, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.037649508797906045 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351298, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351298 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.03984979653302871 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.25, "acc_stderr": 0.03523807393012047, "acc_norm": 0.25, "acc_norm_stderr": 0.03523807393012047 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2630718954248366, "acc_stderr": 0.017812676542320657, "acc_norm": 0.2630718954248366, "acc_norm_stderr": 0.017812676542320657 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.02601199293090203, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02601199293090203 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.031280390843298825, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.031280390843298825 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2536312849162011, "acc_stderr": 0.014551553659369916, "acc_norm": 0.2536312849162011, "acc_norm_stderr": 0.014551553659369916 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.19852941176470587, "acc_stderr": 0.024231013370541087, "acc_norm": 0.19852941176470587, "acc_norm_stderr": 0.024231013370541087 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.24897959183673468, "acc_stderr": 0.02768297952296023, "acc_norm": 0.24897959183673468, "acc_norm_stderr": 0.02768297952296023 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2911392405063291, "acc_stderr": 0.029571601065753374, "acc_norm": 0.2911392405063291, "acc_norm_stderr": 0.029571601065753374 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.25358539765319427, "acc_stderr": 0.011111715336101138, "acc_norm": 0.25358539765319427, "acc_norm_stderr": 0.011111715336101138 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.03019028245350195, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.03019028245350195 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23623011015911874, "mc1_stderr": 0.014869755015871112, "mc2": 0.49817574202268433, "mc2_stderr": 0.016860322660870557 }, "harness|ko_commongen_v2|2": { "acc": 0.09859154929577464, "acc_stderr": 0.010219175985280587, "acc_norm": 0.3955399061032864, "acc_norm_stderr": 0.016761550511163865 } }
{ "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }
{ "model_name": "yeen214/test_llama2_ko_7b", "model_sha": "45901e1d6ccb22f5ed8aec3f9dd366823fdd1c33", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null }
