{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.6399317406143344,
            "acc_stderr": 0.014027516814585188,
            "acc_norm": 0.7013651877133106,
            "acc_norm_stderr": 0.013374078615068752
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.398725353515236,
            "acc_stderr": 0.0048863535635718545,
            "acc_norm": 0.5217088229436367,
            "acc_norm_stderr": 0.004985076094464756
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6140350877192983,
            "acc_stderr": 0.03733756969066164,
            "acc_norm": 0.6140350877192983,
            "acc_norm_stderr": 0.03733756969066164
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5922330097087378,
            "acc_stderr": 0.048657775704107696,
            "acc_norm": 0.5922330097087378,
            "acc_norm_stderr": 0.048657775704107696
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.6283524904214559,
            "acc_stderr": 0.017280802522133185,
            "acc_norm": 0.6283524904214559,
            "acc_norm_stderr": 0.017280802522133185
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4,
            "acc_stderr": 0.04232073695151589,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04232073695151589
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695236,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695236
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.43829787234042555,
            "acc_stderr": 0.03243618636108102,
            "acc_norm": 0.43829787234042555,
            "acc_norm_stderr": 0.03243618636108102
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5240963855421686,
            "acc_stderr": 0.03887971849597264,
            "acc_norm": 0.5240963855421686,
            "acc_norm_stderr": 0.03887971849597264
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.572347266881029,
            "acc_stderr": 0.02809924077580957,
            "acc_norm": 0.572347266881029,
            "acc_norm_stderr": 0.02809924077580957
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6053811659192825,
            "acc_stderr": 0.03280400504755291,
            "acc_norm": 0.6053811659192825,
            "acc_norm_stderr": 0.03280400504755291
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5267175572519084,
            "acc_stderr": 0.04379024936553894,
            "acc_norm": 0.5267175572519084,
            "acc_norm_stderr": 0.04379024936553894
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.034273086529999344,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.034273086529999344
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.503448275862069,
            "acc_stderr": 0.04166567577101579,
            "acc_norm": 0.503448275862069,
            "acc_norm_stderr": 0.04166567577101579
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04690650298201943,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04690650298201943
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5252100840336135,
            "acc_stderr": 0.03243718055137411,
            "acc_norm": 0.5252100840336135,
            "acc_norm_stderr": 0.03243718055137411
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5564102564102564,
            "acc_stderr": 0.025189149894764215,
            "acc_norm": 0.5564102564102564,
            "acc_norm_stderr": 0.025189149894764215
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.61,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.048262172941398944,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.048262172941398944
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4630541871921182,
            "acc_stderr": 0.035083705204426656,
            "acc_norm": 0.4630541871921182,
            "acc_norm_stderr": 0.035083705204426656
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.5806451612903226,
            "acc_stderr": 0.028071588901091845,
            "acc_norm": 0.5806451612903226,
            "acc_norm_stderr": 0.028071588901091845
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7948717948717948,
            "acc_stderr": 0.02645350805404032,
            "acc_norm": 0.7948717948717948,
            "acc_norm_stderr": 0.02645350805404032
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5207547169811321,
            "acc_stderr": 0.030746349975723463,
            "acc_norm": 0.5207547169811321,
            "acc_norm_stderr": 0.030746349975723463
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5727272727272728,
            "acc_stderr": 0.047381987035454834,
            "acc_norm": 0.5727272727272728,
            "acc_norm_stderr": 0.047381987035454834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.028317533496066465,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.028317533496066465
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943342,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943342
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.6616915422885572,
            "acc_stderr": 0.03345563070339191,
            "acc_norm": 0.6616915422885572,
            "acc_norm_stderr": 0.03345563070339191
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4913294797687861,
            "acc_stderr": 0.038118909889404105,
            "acc_norm": 0.4913294797687861,
            "acc_norm_stderr": 0.038118909889404105
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3544973544973545,
            "acc_stderr": 0.024636830602841997,
            "acc_norm": 0.3544973544973545,
            "acc_norm_stderr": 0.024636830602841997
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.4513888888888889,
            "acc_stderr": 0.04161402398403279,
            "acc_norm": 0.4513888888888889,
            "acc_norm_stderr": 0.04161402398403279
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5086705202312138,
            "acc_stderr": 0.0269150473553698,
            "acc_norm": 0.5086705202312138,
            "acc_norm_stderr": 0.0269150473553698
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5644171779141104,
            "acc_stderr": 0.03895632464138937,
            "acc_norm": 0.5644171779141104,
            "acc_norm_stderr": 0.03895632464138937
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5864197530864198,
            "acc_stderr": 0.027402042040269966,
            "acc_norm": 0.5864197530864198,
            "acc_norm_stderr": 0.027402042040269966
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6476683937823834,
            "acc_stderr": 0.034474782864143565,
            "acc_norm": 0.6476683937823834,
            "acc_norm_stderr": 0.034474782864143565
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2982456140350877,
            "acc_stderr": 0.04303684033537316,
            "acc_norm": 0.2982456140350877,
            "acc_norm_stderr": 0.04303684033537316
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6935779816513762,
            "acc_stderr": 0.019765517220458523,
            "acc_norm": 0.6935779816513762,
            "acc_norm_stderr": 0.019765517220458523
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.04240799327574924,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.04240799327574924
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5326797385620915,
            "acc_stderr": 0.028568699752225875,
            "acc_norm": 0.5326797385620915,
            "acc_norm_stderr": 0.028568699752225875
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6611570247933884,
            "acc_stderr": 0.04320767807536671,
            "acc_norm": 0.6611570247933884,
            "acc_norm_stderr": 0.04320767807536671
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5592105263157895,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.5592105263157895,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.02016552331390791,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.02016552331390791
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.4219858156028369,
            "acc_stderr": 0.029462189233370583,
            "acc_norm": 0.4219858156028369,
            "acc_norm_stderr": 0.029462189233370583
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.33035714285714285,
            "acc_stderr": 0.044642857142857116,
            "acc_norm": 0.33035714285714285,
            "acc_norm_stderr": 0.044642857142857116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.033384734032074016,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.033384734032074016
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24804469273743016,
            "acc_stderr": 0.014444157808261446,
            "acc_norm": 0.24804469273743016,
            "acc_norm_stderr": 0.014444157808261446
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4485294117647059,
            "acc_stderr": 0.030211479609121603,
            "acc_norm": 0.4485294117647059,
            "acc_norm_stderr": 0.030211479609121603
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5795918367346938,
            "acc_stderr": 0.031601069934496004,
            "acc_norm": 0.5795918367346938,
            "acc_norm_stderr": 0.031601069934496004
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6160337552742616,
            "acc_stderr": 0.03165867806410668,
            "acc_norm": 0.6160337552742616,
            "acc_norm_stderr": 0.03165867806410668
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3898305084745763,
            "acc_stderr": 0.01245638661908259,
            "acc_norm": 0.3898305084745763,
            "acc_norm_stderr": 0.01245638661908259
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5147058823529411,
            "acc_stderr": 0.03507793834791324,
            "acc_norm": 0.5147058823529411,
            "acc_norm_stderr": 0.03507793834791324
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5151515151515151,
            "acc_stderr": 0.03902551007374449,
            "acc_norm": 0.5151515151515151,
            "acc_norm_stderr": 0.03902551007374449
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.5410036719706243,
            "mc1_stderr": 0.0174445444476612,
            "mc2": 0.6463838547258014,
            "mc2_stderr": 0.014895266557719184
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.44510035419126326,
            "acc_stderr": 0.017086417431005464,
            "acc_norm": 0.4887839433293979,
            "acc_norm_stderr": 0.017186028469489287
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Deepnoid/deep-solar-v2.0.3",
        "model_sha": "4b6d2432b8447af0fbce21df215925a0ac985cdc",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}