|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.43600682593856654,
            "acc_stderr": 0.014491225699230914,
            "acc_norm": 0.4778156996587031,
            "acc_norm_stderr": 0.014597001927076133
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4395538737303326,
            "acc_stderr": 0.004953184534223989,
            "acc_norm": 0.5835490938060147,
            "acc_norm_stderr": 0.0049196263806455115
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.49122807017543857,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.49122807017543857,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5145631067961165,
            "acc_stderr": 0.04948637324026637,
            "acc_norm": 0.5145631067961165,
            "acc_norm_stderr": 0.04948637324026637
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5478927203065134,
            "acc_stderr": 0.017797751493865636,
            "acc_norm": 0.5478927203065134,
            "acc_norm_stderr": 0.017797751493865636
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.043163785995113245,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.043163785995113245
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39574468085106385,
            "acc_stderr": 0.03196758697835362,
            "acc_norm": 0.39574468085106385,
            "acc_norm_stderr": 0.03196758697835362
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4036144578313253,
            "acc_stderr": 0.03819486140758398,
            "acc_norm": 0.4036144578313253,
            "acc_norm_stderr": 0.03819486140758398
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5176848874598071,
            "acc_stderr": 0.02838032284907713,
            "acc_norm": 0.5176848874598071,
            "acc_norm_stderr": 0.02838032284907713
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5336322869955157,
            "acc_stderr": 0.033481800170603065,
            "acc_norm": 0.5336322869955157,
            "acc_norm_stderr": 0.033481800170603065
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578756,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578756
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5959595959595959,
            "acc_stderr": 0.03496130972056126,
            "acc_norm": 0.5959595959595959,
            "acc_norm_stderr": 0.03496130972056126
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.04043461861916747,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.04043461861916747
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4789915966386555,
            "acc_stderr": 0.03244980849990029,
            "acc_norm": 0.4789915966386555,
            "acc_norm_stderr": 0.03244980849990029
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4461538461538462,
            "acc_stderr": 0.02520357177302834,
            "acc_norm": 0.4461538461538462,
            "acc_norm_stderr": 0.02520357177302834
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.048129173245368216,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.048129173245368216
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.034304624161038716,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.034304624161038716
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4612903225806452,
            "acc_stderr": 0.028358634859836928,
            "acc_norm": 0.4612903225806452,
            "acc_norm_stderr": 0.028358634859836928
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6367521367521367,
            "acc_stderr": 0.03150712523091265,
            "acc_norm": 0.6367521367521367,
            "acc_norm_stderr": 0.03150712523091265
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4490566037735849,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.4490566037735849,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5636363636363636,
            "acc_stderr": 0.04750185058907296,
            "acc_norm": 0.5636363636363636,
            "acc_norm_stderr": 0.04750185058907296
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2518518518518518,
            "acc_stderr": 0.026466117538959916,
            "acc_norm": 0.2518518518518518,
            "acc_norm_stderr": 0.026466117538959916
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.036586032627637426,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.036586032627637426
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5870646766169154,
            "acc_stderr": 0.03481520803367348,
            "acc_norm": 0.5870646766169154,
            "acc_norm_stderr": 0.03481520803367348
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4046242774566474,
            "acc_stderr": 0.0374246119388725,
            "acc_norm": 0.4046242774566474,
            "acc_norm_stderr": 0.0374246119388725
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2751322751322751,
            "acc_stderr": 0.02300008685906864,
            "acc_norm": 0.2751322751322751,
            "acc_norm_stderr": 0.02300008685906864
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.040166600304512336,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.040166600304512336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5144508670520231,
            "acc_stderr": 0.026907849856282532,
            "acc_norm": 0.5144508670520231,
            "acc_norm_stderr": 0.026907849856282532
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5214723926380368,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.5214723926380368,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49691358024691357,
            "acc_stderr": 0.02782021415859437,
            "acc_norm": 0.49691358024691357,
            "acc_norm_stderr": 0.02782021415859437
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5647668393782384,
            "acc_stderr": 0.03578038165008586,
            "acc_norm": 0.5647668393782384,
            "acc_norm_stderr": 0.03578038165008586
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5908256880733945,
            "acc_stderr": 0.021080670264433738,
            "acc_norm": 0.5908256880733945,
            "acc_norm_stderr": 0.021080670264433738
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.027956046165424513,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.027956046165424513
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6446280991735537,
            "acc_stderr": 0.0436923632657398,
            "acc_norm": 0.6446280991735537,
            "acc_norm_stderr": 0.0436923632657398
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.01983517648437538,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.01983517648437538
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3475177304964539,
            "acc_stderr": 0.02840662780959095,
            "acc_norm": 0.3475177304964539,
            "acc_norm_stderr": 0.02840662780959095
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.041577515398656284,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.041577515398656284
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.031674687068289784,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.031674687068289784
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4338235294117647,
            "acc_stderr": 0.03010563657001664,
            "acc_norm": 0.4338235294117647,
            "acc_norm_stderr": 0.03010563657001664
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.39183673469387753,
            "acc_stderr": 0.03125127591089165,
            "acc_norm": 0.39183673469387753,
            "acc_norm_stderr": 0.03125127591089165
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6540084388185654,
            "acc_stderr": 0.03096481058878671,
            "acc_norm": 0.6540084388185654,
            "acc_norm_stderr": 0.03096481058878671
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35528031290743156,
            "acc_stderr": 0.012223623364044043,
            "acc_norm": 0.35528031290743156,
            "acc_norm_stderr": 0.012223623364044043
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.035077938347913236,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.035077938347913236
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.038592681420702636,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.038592681420702636
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.36964504283965727,
            "mc1_stderr": 0.0168981807069739,
            "mc2": 0.5205477409426235,
            "mc2_stderr": 0.01592635844376339
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4817001180637544,
            "acc_stderr": 0.017178836639177752,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.017119172208061504
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4",
        "model_sha": "784a5488ff350bcd9fde9d7aff59a0b9988acc2a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}