|
{
  "results": {
    "harness|ko_arc_challenge|25": {
      "acc": 0.36177474402730375,
      "acc_stderr": 0.014041957945038076,
      "acc_norm": 0.41723549488054607,
      "acc_norm_stderr": 0.014409825518403084
    },
    "harness|ko_hellaswag|10": {
      "acc": 0.386476797450707,
      "acc_stderr": 0.00485946798415526,
      "acc_norm": 0.4965146385182235,
      "acc_norm_stderr": 0.00498966018079217
    },
    "harness|ko_mmlu_world_religions|5": {
      "acc": 0.5146198830409356,
      "acc_stderr": 0.038331852752130254,
      "acc_norm": 0.5146198830409356,
      "acc_norm_stderr": 0.038331852752130254
    },
    "harness|ko_mmlu_management|5": {
      "acc": 0.5242718446601942,
      "acc_stderr": 0.049449010929737795,
      "acc_norm": 0.5242718446601942,
      "acc_norm_stderr": 0.049449010929737795
    },
    "harness|ko_mmlu_miscellaneous|5": {
      "acc": 0.4674329501915709,
      "acc_stderr": 0.017841995750520857,
      "acc_norm": 0.4674329501915709,
      "acc_norm_stderr": 0.017841995750520857
    },
    "harness|ko_mmlu_anatomy|5": {
      "acc": 0.362962962962963,
      "acc_stderr": 0.04153948404742398,
      "acc_norm": 0.362962962962963,
      "acc_norm_stderr": 0.04153948404742398
    },
    "harness|ko_mmlu_abstract_algebra|5": {
      "acc": 0.29,
      "acc_stderr": 0.04560480215720684,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.04560480215720684
    },
    "harness|ko_mmlu_conceptual_physics|5": {
      "acc": 0.4425531914893617,
      "acc_stderr": 0.03246956919789958,
      "acc_norm": 0.4425531914893617,
      "acc_norm_stderr": 0.03246956919789958
    },
    "harness|ko_mmlu_virology|5": {
      "acc": 0.42771084337349397,
      "acc_stderr": 0.03851597683718533,
      "acc_norm": 0.42771084337349397,
      "acc_norm_stderr": 0.03851597683718533
    },
    "harness|ko_mmlu_philosophy|5": {
      "acc": 0.45980707395498394,
      "acc_stderr": 0.028306190403305696,
      "acc_norm": 0.45980707395498394,
      "acc_norm_stderr": 0.028306190403305696
    },
    "harness|ko_mmlu_human_aging|5": {
      "acc": 0.45739910313901344,
      "acc_stderr": 0.033435777055830646,
      "acc_norm": 0.45739910313901344,
      "acc_norm_stderr": 0.033435777055830646
    },
    "harness|ko_mmlu_human_sexuality|5": {
      "acc": 0.46564885496183206,
      "acc_stderr": 0.04374928560599738,
      "acc_norm": 0.46564885496183206,
      "acc_norm_stderr": 0.04374928560599738
    },
    "harness|ko_mmlu_medical_genetics|5": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284,
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "harness|ko_mmlu_high_school_geography|5": {
      "acc": 0.5151515151515151,
      "acc_stderr": 0.0356071651653106,
      "acc_norm": 0.5151515151515151,
      "acc_norm_stderr": 0.0356071651653106
    },
    "harness|ko_mmlu_electrical_engineering|5": {
      "acc": 0.4689655172413793,
      "acc_stderr": 0.04158632762097828,
      "acc_norm": 0.4689655172413793,
      "acc_norm_stderr": 0.04158632762097828
    },
    "harness|ko_mmlu_college_physics|5": {
      "acc": 0.2549019607843137,
      "acc_stderr": 0.04336432707993177,
      "acc_norm": 0.2549019607843137,
      "acc_norm_stderr": 0.04336432707993177
    },
    "harness|ko_mmlu_high_school_microeconomics|5": {
      "acc": 0.48739495798319327,
      "acc_stderr": 0.03246816765752174,
      "acc_norm": 0.48739495798319327,
      "acc_norm_stderr": 0.03246816765752174
    },
    "harness|ko_mmlu_high_school_macroeconomics|5": {
      "acc": 0.4666666666666667,
      "acc_stderr": 0.02529460802398648,
      "acc_norm": 0.4666666666666667,
      "acc_norm_stderr": 0.02529460802398648
    },
    "harness|ko_mmlu_computer_security|5": {
      "acc": 0.61,
      "acc_stderr": 0.04902071300001974,
      "acc_norm": 0.61,
      "acc_norm_stderr": 0.04902071300001974
    },
    "harness|ko_mmlu_global_facts|5": {
      "acc": 0.29,
      "acc_stderr": 0.045604802157206845,
      "acc_norm": 0.29,
      "acc_norm_stderr": 0.045604802157206845
    },
    "harness|ko_mmlu_jurisprudence|5": {
      "acc": 0.4722222222222222,
      "acc_stderr": 0.048262172941398944,
      "acc_norm": 0.4722222222222222,
      "acc_norm_stderr": 0.048262172941398944
    },
    "harness|ko_mmlu_high_school_chemistry|5": {
      "acc": 0.41379310344827586,
      "acc_stderr": 0.03465304488406796,
      "acc_norm": 0.41379310344827586,
      "acc_norm_stderr": 0.03465304488406796
    },
    "harness|ko_mmlu_high_school_biology|5": {
      "acc": 0.47096774193548385,
      "acc_stderr": 0.028396016402761008,
      "acc_norm": 0.47096774193548385,
      "acc_norm_stderr": 0.028396016402761008
    },
    "harness|ko_mmlu_marketing|5": {
      "acc": 0.7136752136752137,
      "acc_stderr": 0.029614323690456648,
      "acc_norm": 0.7136752136752137,
      "acc_norm_stderr": 0.029614323690456648
    },
    "harness|ko_mmlu_clinical_knowledge|5": {
      "acc": 0.47924528301886793,
      "acc_stderr": 0.030746349975723463,
      "acc_norm": 0.47924528301886793,
      "acc_norm_stderr": 0.030746349975723463
    },
    "harness|ko_mmlu_public_relations|5": {
      "acc": 0.4818181818181818,
      "acc_stderr": 0.04785964010794917,
      "acc_norm": 0.4818181818181818,
      "acc_norm_stderr": 0.04785964010794917
    },
    "harness|ko_mmlu_high_school_mathematics|5": {
      "acc": 0.34444444444444444,
      "acc_stderr": 0.02897264888484427,
      "acc_norm": 0.34444444444444444,
      "acc_norm_stderr": 0.02897264888484427
    },
    "harness|ko_mmlu_high_school_physics|5": {
      "acc": 0.2847682119205298,
      "acc_stderr": 0.03684881521389023,
      "acc_norm": 0.2847682119205298,
      "acc_norm_stderr": 0.03684881521389023
    },
    "harness|ko_mmlu_sociology|5": {
      "acc": 0.6169154228855721,
      "acc_stderr": 0.034375193373382504,
      "acc_norm": 0.6169154228855721,
      "acc_norm_stderr": 0.034375193373382504
    },
    "harness|ko_mmlu_college_medicine|5": {
      "acc": 0.35260115606936415,
      "acc_stderr": 0.036430371689585496,
      "acc_norm": 0.35260115606936415,
      "acc_norm_stderr": 0.036430371689585496
    },
    "harness|ko_mmlu_elementary_mathematics|5": {
      "acc": 0.35978835978835977,
      "acc_stderr": 0.024718075944129277,
      "acc_norm": 0.35978835978835977,
      "acc_norm_stderr": 0.024718075944129277
    },
    "harness|ko_mmlu_college_biology|5": {
      "acc": 0.3263888888888889,
      "acc_stderr": 0.03921067198982266,
      "acc_norm": 0.3263888888888889,
      "acc_norm_stderr": 0.03921067198982266
    },
    "harness|ko_mmlu_college_chemistry|5": {
      "acc": 0.38,
      "acc_stderr": 0.048783173121456316,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.048783173121456316
    },
    "harness|ko_mmlu_us_foreign_policy|5": {
      "acc": 0.61,
      "acc_stderr": 0.04902071300001975,
      "acc_norm": 0.61,
      "acc_norm_stderr": 0.04902071300001975
    },
    "harness|ko_mmlu_moral_disputes|5": {
      "acc": 0.5086705202312138,
      "acc_stderr": 0.0269150473553698,
      "acc_norm": 0.5086705202312138,
      "acc_norm_stderr": 0.0269150473553698
    },
    "harness|ko_mmlu_logical_fallacies|5": {
      "acc": 0.43558282208588955,
      "acc_stderr": 0.03895632464138937,
      "acc_norm": 0.43558282208588955,
      "acc_norm_stderr": 0.03895632464138937
    },
    "harness|ko_mmlu_prehistory|5": {
      "acc": 0.4228395061728395,
      "acc_stderr": 0.027487472980871598,
      "acc_norm": 0.4228395061728395,
      "acc_norm_stderr": 0.027487472980871598
    },
    "harness|ko_mmlu_college_mathematics|5": {
      "acc": 0.38,
      "acc_stderr": 0.048783173121456316,
      "acc_norm": 0.38,
      "acc_norm_stderr": 0.048783173121456316
    },
    "harness|ko_mmlu_high_school_government_and_politics|5": {
      "acc": 0.5233160621761658,
      "acc_stderr": 0.036045136724422014,
      "acc_norm": 0.5233160621761658,
      "acc_norm_stderr": 0.036045136724422014
    },
    "harness|ko_mmlu_econometrics|5": {
      "acc": 0.2719298245614035,
      "acc_stderr": 0.04185774424022056,
      "acc_norm": 0.2719298245614035,
      "acc_norm_stderr": 0.04185774424022056
    },
    "harness|ko_mmlu_high_school_psychology|5": {
      "acc": 0.47706422018348627,
      "acc_stderr": 0.021414757058175506,
      "acc_norm": 0.47706422018348627,
      "acc_norm_stderr": 0.021414757058175506
    },
    "harness|ko_mmlu_formal_logic|5": {
      "acc": 0.4126984126984127,
      "acc_stderr": 0.04403438954768177,
      "acc_norm": 0.4126984126984127,
      "acc_norm_stderr": 0.04403438954768177
    },
    "harness|ko_mmlu_nutrition|5": {
      "acc": 0.46405228758169936,
      "acc_stderr": 0.028555827516528787,
      "acc_norm": 0.46405228758169936,
      "acc_norm_stderr": 0.028555827516528787
    },
    "harness|ko_mmlu_business_ethics|5": {
      "acc": 0.46,
      "acc_stderr": 0.05009082659620332,
      "acc_norm": 0.46,
      "acc_norm_stderr": 0.05009082659620332
    },
    "harness|ko_mmlu_international_law|5": {
      "acc": 0.6694214876033058,
      "acc_stderr": 0.04294340845212094,
      "acc_norm": 0.6694214876033058,
      "acc_norm_stderr": 0.04294340845212094
    },
    "harness|ko_mmlu_astronomy|5": {
      "acc": 0.40789473684210525,
      "acc_stderr": 0.03999309712777472,
      "acc_norm": 0.40789473684210525,
      "acc_norm_stderr": 0.03999309712777472
    },
    "harness|ko_mmlu_professional_psychology|5": {
      "acc": 0.39052287581699346,
      "acc_stderr": 0.019737008998094593,
      "acc_norm": 0.39052287581699346,
      "acc_norm_stderr": 0.019737008998094593
    },
    "harness|ko_mmlu_professional_accounting|5": {
      "acc": 0.3546099290780142,
      "acc_stderr": 0.02853865002887864,
      "acc_norm": 0.3546099290780142,
      "acc_norm_stderr": 0.02853865002887864
    },
    "harness|ko_mmlu_machine_learning|5": {
      "acc": 0.42857142857142855,
      "acc_stderr": 0.04697113923010213,
      "acc_norm": 0.42857142857142855,
      "acc_norm_stderr": 0.04697113923010213
    },
    "harness|ko_mmlu_high_school_statistics|5": {
      "acc": 0.4074074074074074,
      "acc_stderr": 0.033509916046960436,
      "acc_norm": 0.4074074074074074,
      "acc_norm_stderr": 0.033509916046960436
    },
    "harness|ko_mmlu_moral_scenarios|5": {
      "acc": 0.23575418994413408,
      "acc_stderr": 0.014196375686290804,
      "acc_norm": 0.23575418994413408,
      "acc_norm_stderr": 0.014196375686290804
    },
    "harness|ko_mmlu_college_computer_science|5": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284,
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "harness|ko_mmlu_high_school_computer_science|5": {
      "acc": 0.62,
      "acc_stderr": 0.048783173121456316,
      "acc_norm": 0.62,
      "acc_norm_stderr": 0.048783173121456316
    },
    "harness|ko_mmlu_professional_medicine|5": {
      "acc": 0.3786764705882353,
      "acc_stderr": 0.02946513363977613,
      "acc_norm": 0.3786764705882353,
      "acc_norm_stderr": 0.02946513363977613
    },
    "harness|ko_mmlu_security_studies|5": {
      "acc": 0.5020408163265306,
      "acc_stderr": 0.0320089533497105,
      "acc_norm": 0.5020408163265306,
      "acc_norm_stderr": 0.0320089533497105
    },
    "harness|ko_mmlu_high_school_world_history|5": {
      "acc": 0.6160337552742616,
      "acc_stderr": 0.031658678064106674,
      "acc_norm": 0.6160337552742616,
      "acc_norm_stderr": 0.031658678064106674
    },
    "harness|ko_mmlu_professional_law|5": {
      "acc": 0.3116036505867014,
      "acc_stderr": 0.011829039182849646,
      "acc_norm": 0.3116036505867014,
      "acc_norm_stderr": 0.011829039182849646
    },
    "harness|ko_mmlu_high_school_us_history|5": {
      "acc": 0.45588235294117646,
      "acc_stderr": 0.03495624522015474,
      "acc_norm": 0.45588235294117646,
      "acc_norm_stderr": 0.03495624522015474
    },
    "harness|ko_mmlu_high_school_european_history|5": {
      "acc": 0.43636363636363634,
      "acc_stderr": 0.03872592983524754,
      "acc_norm": 0.43636363636363634,
      "acc_norm_stderr": 0.03872592983524754
    },
    "harness|ko_truthfulqa_mc|0": {
      "mc1": 0.28886168910648713,
      "mc1_stderr": 0.015866346401384308,
      "mc2": 0.4667008752277657,
      "mc2_stderr": 0.015432114393165898
    },
    "harness|ko_commongen_v2|2": {
      "acc": 0.4722550177095632,
      "acc_stderr": 0.01716386797945601,
      "acc_norm": 0.5277449822904369,
      "acc_norm_stderr": 0.017163867979456016
    }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "MNCLLM/Mistral-7B-orca-platy-over1k",
    "model_sha": "65fda49b7459f17a98b8d1c5136001698f647919",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}