|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.6510238907849829,
            "acc_stderr": 0.013928933461382501,
            "acc_norm": 0.7005119453924915,
            "acc_norm_stderr": 0.013385021637313565
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.41366261700856405,
            "acc_stderr": 0.0049148293849834756,
            "acc_norm": 0.5400318661621191,
            "acc_norm_stderr": 0.004973762948302803
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6140350877192983,
            "acc_stderr": 0.03733756969066164,
            "acc_norm": 0.6140350877192983,
            "acc_norm_stderr": 0.03733756969066164
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6213592233009708,
            "acc_stderr": 0.04802694698258974,
            "acc_norm": 0.6213592233009708,
            "acc_norm_stderr": 0.04802694698258974
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6832694763729247,
            "acc_stderr": 0.016635566427712585,
            "acc_norm": 0.6832694763729247,
            "acc_norm_stderr": 0.016635566427712585
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.043163785995113245,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.043163785995113245
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4553191489361702,
            "acc_stderr": 0.03255525359340355,
            "acc_norm": 0.4553191489361702,
            "acc_norm_stderr": 0.03255525359340355
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5060240963855421,
            "acc_stderr": 0.038922121953330446,
            "acc_norm": 0.5060240963855421,
            "acc_norm_stderr": 0.038922121953330446
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6045016077170418,
            "acc_stderr": 0.027770918531427834,
            "acc_norm": 0.6045016077170418,
            "acc_norm_stderr": 0.027770918531427834
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.57847533632287,
            "acc_stderr": 0.03314190222110658,
            "acc_norm": 0.57847533632287,
            "acc_norm_stderr": 0.03314190222110658
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5877862595419847,
            "acc_stderr": 0.04317171194870255,
            "acc_norm": 0.5877862595419847,
            "acc_norm_stderr": 0.04317171194870255
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6616161616161617,
            "acc_stderr": 0.03371124142626303,
            "acc_norm": 0.6616161616161617,
            "acc_norm_stderr": 0.03371124142626303
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.496551724137931,
            "acc_stderr": 0.041665675771015785,
            "acc_norm": 0.496551724137931,
            "acc_norm_stderr": 0.041665675771015785
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201943,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201943
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6302521008403361,
            "acc_stderr": 0.03135709599613591,
            "acc_norm": 0.6302521008403361,
            "acc_norm_stderr": 0.03135709599613591
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5615384615384615,
            "acc_stderr": 0.02515826601686861,
            "acc_norm": 0.5615384615384615,
            "acc_norm_stderr": 0.02515826601686861
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5740740740740741,
            "acc_stderr": 0.0478034362693679,
            "acc_norm": 0.5740740740740741,
            "acc_norm_stderr": 0.0478034362693679
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4729064039408867,
            "acc_stderr": 0.03512819077876106,
            "acc_norm": 0.4729064039408867,
            "acc_norm_stderr": 0.03512819077876106
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5806451612903226,
            "acc_stderr": 0.02807158890109183,
            "acc_norm": 0.5806451612903226,
            "acc_norm_stderr": 0.02807158890109183
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7991452991452992,
            "acc_stderr": 0.026246772946890474,
            "acc_norm": 0.7991452991452992,
            "acc_norm_stderr": 0.026246772946890474
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.539622641509434,
            "acc_stderr": 0.03067609659938917,
            "acc_norm": 0.539622641509434,
            "acc_norm_stderr": 0.03067609659938917
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6181818181818182,
            "acc_stderr": 0.046534298079135075,
            "acc_norm": 0.6181818181818182,
            "acc_norm_stderr": 0.046534298079135075
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.02889774874113114,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.02889774874113114
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7014925373134329,
            "acc_stderr": 0.03235743789355043,
            "acc_norm": 0.7014925373134329,
            "acc_norm_stderr": 0.03235743789355043
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.038124005659748335,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.038124005659748335
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.41005291005291006,
            "acc_stderr": 0.025331202438944423,
            "acc_norm": 0.41005291005291006,
            "acc_norm_stderr": 0.025331202438944423
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4930555555555556,
            "acc_stderr": 0.04180806750294938,
            "acc_norm": 0.4930555555555556,
            "acc_norm_stderr": 0.04180806750294938
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.81,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.81,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5867052023121387,
            "acc_stderr": 0.026511261369409244,
            "acc_norm": 0.5867052023121387,
            "acc_norm_stderr": 0.026511261369409244
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.588957055214724,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.588957055214724,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.595679012345679,
            "acc_stderr": 0.027306625297327677,
            "acc_norm": 0.595679012345679,
            "acc_norm_stderr": 0.027306625297327677
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6787564766839378,
            "acc_stderr": 0.033699508685490674,
            "acc_norm": 0.6787564766839378,
            "acc_norm_stderr": 0.033699508685490674
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.04644602091222317,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.04644602091222317
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7247706422018348,
            "acc_stderr": 0.019149093743155196,
            "acc_norm": 0.7247706422018348,
            "acc_norm_stderr": 0.019149093743155196
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.04360314860077459,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.04360314860077459
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5718954248366013,
            "acc_stderr": 0.028332397483664274,
            "acc_norm": 0.5718954248366013,
            "acc_norm_stderr": 0.028332397483664274
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7520661157024794,
            "acc_stderr": 0.03941897526516304,
            "acc_norm": 0.7520661157024794,
            "acc_norm_stderr": 0.03941897526516304
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5592105263157895,
            "acc_stderr": 0.04040311062490435,
            "acc_norm": 0.5592105263157895,
            "acc_norm_stderr": 0.04040311062490435
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5081699346405228,
            "acc_stderr": 0.02022513434305728,
            "acc_norm": 0.5081699346405228,
            "acc_norm_stderr": 0.02022513434305728
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.375886524822695,
            "acc_stderr": 0.02889395541211589,
            "acc_norm": 0.375886524822695,
            "acc_norm_stderr": 0.02889395541211589
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.04547960999764376,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.04547960999764376
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.03407632093854051,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.03407632093854051
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27932960893854747,
            "acc_stderr": 0.015005762446786157,
            "acc_norm": 0.27932960893854747,
            "acc_norm_stderr": 0.015005762446786157
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5257352941176471,
            "acc_stderr": 0.030332578094555026,
            "acc_norm": 0.5257352941176471,
            "acc_norm_stderr": 0.030332578094555026
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.6,
            "acc_stderr": 0.03136250240935894,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.03136250240935894
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6751054852320675,
            "acc_stderr": 0.030486039389105293,
            "acc_norm": 0.6751054852320675,
            "acc_norm_stderr": 0.030486039389105293
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.4256844850065189,
            "acc_stderr": 0.01262839355181194,
            "acc_norm": 0.4256844850065189,
            "acc_norm_stderr": 0.01262839355181194
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6029411764705882,
            "acc_stderr": 0.03434131164719129,
            "acc_norm": 0.6029411764705882,
            "acc_norm_stderr": 0.03434131164719129
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6060606060606061,
            "acc_stderr": 0.0381549430868893,
            "acc_norm": 0.6060606060606061,
            "acc_norm_stderr": 0.0381549430868893
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.5667074663402693,
            "mc1_stderr": 0.017347024450107492,
            "mc2": 0.6802475288433785,
            "mc2_stderr": 0.014647532570120409
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4510035419126328,
            "acc_stderr": 0.01710761885954934,
            "acc_norm": 0.4946871310507674,
            "acc_norm_stderr": 0.017189383627229684
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Deepnoid/deep-solar-v3.0",
        "model_sha": "24c9e5607891194ceb7512534666d354c899152a",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}