{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4138225255972696,
            "acc_stderr": 0.014392730009221009,
            "acc_norm": 0.4803754266211604,
            "acc_norm_stderr": 0.014600132075947098
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4291973710416252,
            "acc_stderr": 0.004939500404882179,
            "acc_norm": 0.5743875721967735,
            "acc_norm_stderr": 0.004934250390879774
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5087719298245614,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.5087719298245614,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458933,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458933
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5530012771392082,
            "acc_stderr": 0.017779225233394223,
            "acc_norm": 0.5530012771392082,
            "acc_norm_stderr": 0.017779225233394223
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376598,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376598
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.03789134424611548,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.03789134424611548
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5112540192926045,
            "acc_stderr": 0.028390897396863533,
            "acc_norm": 0.5112540192926045,
            "acc_norm_stderr": 0.028390897396863533
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5291479820627802,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.5291479820627802,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.45038167938931295,
            "acc_stderr": 0.04363643698524779,
            "acc_norm": 0.45038167938931295,
            "acc_norm_stderr": 0.04363643698524779
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5808080808080808,
            "acc_stderr": 0.035155207286704175,
            "acc_norm": 0.5808080808080808,
            "acc_norm_stderr": 0.035155207286704175
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3724137931034483,
            "acc_stderr": 0.0402873153294756,
            "acc_norm": 0.3724137931034483,
            "acc_norm_stderr": 0.0402873153294756
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.48739495798319327,
            "acc_stderr": 0.03246816765752174,
            "acc_norm": 0.48739495798319327,
            "acc_norm_stderr": 0.03246816765752174
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.44358974358974357,
            "acc_stderr": 0.02518914989476419,
            "acc_norm": 0.44358974358974357,
            "acc_norm_stderr": 0.02518914989476419
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4039408866995074,
            "acc_stderr": 0.03452453903822039,
            "acc_norm": 0.4039408866995074,
            "acc_norm_stderr": 0.03452453903822039
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4774193548387097,
            "acc_stderr": 0.028414985019707868,
            "acc_norm": 0.4774193548387097,
            "acc_norm_stderr": 0.028414985019707868
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6623931623931624,
            "acc_stderr": 0.030980296992618558,
            "acc_norm": 0.6623931623931624,
            "acc_norm_stderr": 0.030980296992618558
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4490566037735849,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.4490566037735849,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5545454545454546,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.5545454545454546,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145658,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145658
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31125827814569534,
            "acc_stderr": 0.03780445850526733,
            "acc_norm": 0.31125827814569534,
            "acc_norm_stderr": 0.03780445850526733
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.42196531791907516,
            "acc_stderr": 0.0376574669386515,
            "acc_norm": 0.42196531791907516,
            "acc_norm_stderr": 0.0376574669386515
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29894179894179895,
            "acc_stderr": 0.023577604791655816,
            "acc_norm": 0.29894179894179895,
            "acc_norm_stderr": 0.023577604791655816
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.375,
            "acc_stderr": 0.04048439222695598,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.04048439222695598
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.61,
            "acc_stderr": 0.049020713000019756,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.049020713000019756
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5086705202312138,
            "acc_stderr": 0.0269150473553698,
            "acc_norm": 0.5086705202312138,
            "acc_norm_stderr": 0.0269150473553698
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5398773006134969,
            "acc_stderr": 0.03915857291436972,
            "acc_norm": 0.5398773006134969,
            "acc_norm_stderr": 0.03915857291436972
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.45987654320987653,
            "acc_stderr": 0.02773102275353928,
            "acc_norm": 0.45987654320987653,
            "acc_norm_stderr": 0.02773102275353928
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5906735751295337,
            "acc_stderr": 0.03548608168860806,
            "acc_norm": 0.5906735751295337,
            "acc_norm_stderr": 0.03548608168860806
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.32456140350877194,
            "acc_stderr": 0.04404556157374768,
            "acc_norm": 0.32456140350877194,
            "acc_norm_stderr": 0.04404556157374768
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6256880733944954,
            "acc_stderr": 0.020748959408988334,
            "acc_norm": 0.6256880733944954,
            "acc_norm_stderr": 0.020748959408988334
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.042163702135578345,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.042163702135578345
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.42810457516339867,
            "acc_stderr": 0.028332397483664274,
            "acc_norm": 0.42810457516339867,
            "acc_norm_stderr": 0.028332397483664274
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3684210526315789,
            "acc_stderr": 0.03925523381052932,
            "acc_norm": 0.3684210526315789,
            "acc_norm_stderr": 0.03925523381052932
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.019559646809215923,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.019559646809215923
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3723404255319149,
            "acc_stderr": 0.028838921471251458,
            "acc_norm": 0.3723404255319149,
            "acc_norm_stderr": 0.028838921471251458
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.038946411200447915,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.038946411200447915
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.47058823529411764,
            "acc_stderr": 0.030320243265004137,
            "acc_norm": 0.47058823529411764,
            "acc_norm_stderr": 0.030320243265004137
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4448979591836735,
            "acc_stderr": 0.031814251181977865,
            "acc_norm": 0.4448979591836735,
            "acc_norm_stderr": 0.031814251181977865
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6244725738396625,
            "acc_stderr": 0.03152256243091157,
            "acc_norm": 0.6244725738396625,
            "acc_norm_stderr": 0.03152256243091157
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.34876140808344197,
            "acc_stderr": 0.012172035157127115,
            "acc_norm": 0.34876140808344197,
            "acc_norm_stderr": 0.012172035157127115
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5343137254901961,
            "acc_stderr": 0.03501038327635897,
            "acc_norm": 0.5343137254901961,
            "acc_norm_stderr": 0.03501038327635897
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5333333333333333,
            "acc_stderr": 0.03895658065271846,
            "acc_norm": 0.5333333333333333,
            "acc_norm_stderr": 0.03895658065271846
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2876376988984088,
            "mc1_stderr": 0.015846315101394816,
            "mc2": 0.46238471252084135,
            "mc2_stderr": 0.015296846959143042
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.46635182998819363,
            "acc_stderr": 0.017151384117131865,
            "acc_norm": 0.5324675324675324,
            "acc_norm_stderr": 0.017154073716682868
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DopeorNope/Dear_My_best_Friend-SFT-v2-13B",
        "model_sha": "ef7f609ba5694a3740f8a95e1c7699a1d42abb1f",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}