|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3395904436860068,
            "acc_stderr": 0.013839039762820169,
            "acc_norm": 0.39590443686006827,
            "acc_norm_stderr": 0.014291228393536588
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38856801433977295,
            "acc_stderr": 0.004864286176731823,
            "acc_norm": 0.5073690499900418,
            "acc_norm_stderr": 0.004989239462835233
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.391812865497076,
            "acc_stderr": 0.037439798259263996,
            "acc_norm": 0.391812865497076,
            "acc_norm_stderr": 0.037439798259263996
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.27184466019417475,
            "acc_stderr": 0.044052680241409216,
            "acc_norm": 0.27184466019417475,
            "acc_norm_stderr": 0.044052680241409216
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3946360153256705,
            "acc_stderr": 0.017478464305911545,
            "acc_norm": 0.3946360153256705,
            "acc_norm_stderr": 0.017478464305911545
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.26382978723404255,
            "acc_stderr": 0.028809989854102956,
            "acc_norm": 0.26382978723404255,
            "acc_norm_stderr": 0.028809989854102956
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.30120481927710846,
            "acc_stderr": 0.03571609230053481,
            "acc_norm": 0.30120481927710846,
            "acc_norm_stderr": 0.03571609230053481
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4115755627009646,
            "acc_stderr": 0.027950481494401266,
            "acc_norm": 0.4115755627009646,
            "acc_norm_stderr": 0.027950481494401266
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3632286995515695,
            "acc_stderr": 0.032277904428505,
            "acc_norm": 0.3632286995515695,
            "acc_norm_stderr": 0.032277904428505
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.043389203057924,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.043389203057924
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.35353535353535354,
            "acc_stderr": 0.03406086723547153,
            "acc_norm": 0.35353535353535354,
            "acc_norm_stderr": 0.03406086723547153
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.31724137931034485,
            "acc_stderr": 0.03878352372138621,
            "acc_norm": 0.31724137931034485,
            "acc_norm_stderr": 0.03878352372138621
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.13725490196078433,
            "acc_stderr": 0.03424084669891523,
            "acc_norm": 0.13725490196078433,
            "acc_norm_stderr": 0.03424084669891523
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.33613445378151263,
            "acc_stderr": 0.030684737115135367,
            "acc_norm": 0.33613445378151263,
            "acc_norm_stderr": 0.030684737115135367
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.258974358974359,
            "acc_stderr": 0.02221110681006167,
            "acc_norm": 0.258974358974359,
            "acc_norm_stderr": 0.02221110681006167
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542128,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542128
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.04732332615978814,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.04732332615978814
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2561576354679803,
            "acc_stderr": 0.0307127300709826,
            "acc_norm": 0.2561576354679803,
            "acc_norm_stderr": 0.0307127300709826
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3258064516129032,
            "acc_stderr": 0.026662010578567104,
            "acc_norm": 0.3258064516129032,
            "acc_norm_stderr": 0.026662010578567104
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5512820512820513,
            "acc_stderr": 0.032583346493868806,
            "acc_norm": 0.5512820512820513,
            "acc_norm_stderr": 0.032583346493868806
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.35094339622641507,
            "acc_stderr": 0.029373646253234686,
            "acc_norm": 0.35094339622641507,
            "acc_norm_stderr": 0.029373646253234686
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.39090909090909093,
            "acc_stderr": 0.046737523336702384,
            "acc_norm": 0.39090909090909093,
            "acc_norm_stderr": 0.046737523336702384
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.026962424325073828,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.026962424325073828
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.1986754966887417,
            "acc_stderr": 0.032578473844367746,
            "acc_norm": 0.1986754966887417,
            "acc_norm_stderr": 0.032578473844367746
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4427860696517413,
            "acc_stderr": 0.03512310964123936,
            "acc_norm": 0.4427860696517413,
            "acc_norm_stderr": 0.03512310964123936
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.0356760379963917,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.0356760379963917
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.022860838309232072,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.022860838309232072
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2361111111111111,
            "acc_stderr": 0.03551446610810826,
            "acc_norm": 0.2361111111111111,
            "acc_norm_stderr": 0.03551446610810826
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.16,
            "acc_stderr": 0.03684529491774709,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.03684529491774709
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.42,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3901734104046243,
            "acc_stderr": 0.026261677607806642,
            "acc_norm": 0.3901734104046243,
            "acc_norm_stderr": 0.026261677607806642
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.34355828220858897,
            "acc_stderr": 0.03731133519673893,
            "acc_norm": 0.34355828220858897,
            "acc_norm_stderr": 0.03731133519673893
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.39197530864197533,
            "acc_stderr": 0.02716368603827123,
            "acc_norm": 0.39197530864197533,
            "acc_norm_stderr": 0.02716368603827123
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.32642487046632124,
            "acc_stderr": 0.033840286211432945,
            "acc_norm": 0.32642487046632124,
            "acc_norm_stderr": 0.033840286211432945
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.04142439719489361,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.04142439719489361
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3743119266055046,
            "acc_stderr": 0.02074895940898831,
            "acc_norm": 0.3743119266055046,
            "acc_norm_stderr": 0.02074895940898831
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.23015873015873015,
            "acc_stderr": 0.03764950879790604,
            "acc_norm": 0.23015873015873015,
            "acc_norm_stderr": 0.03764950879790604
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.028275490156791434,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.028275490156791434
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5785123966942148,
            "acc_stderr": 0.045077322787750874,
            "acc_norm": 0.5785123966942148,
            "acc_norm_stderr": 0.045077322787750874
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.40131578947368424,
            "acc_stderr": 0.039889037033362836,
            "acc_norm": 0.40131578947368424,
            "acc_norm_stderr": 0.039889037033362836
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.369281045751634,
            "acc_stderr": 0.019524316744866346,
            "acc_norm": 0.369281045751634,
            "acc_norm_stderr": 0.019524316744866346
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.30141843971631205,
            "acc_stderr": 0.02737412888263115,
            "acc_norm": 0.30141843971631205,
            "acc_norm_stderr": 0.02737412888263115
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697624,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697624
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.27314814814814814,
            "acc_stderr": 0.030388051301678116,
            "acc_norm": 0.27314814814814814,
            "acc_norm_stderr": 0.030388051301678116
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2446927374301676,
            "acc_stderr": 0.014378169884098424,
            "acc_norm": 0.2446927374301676,
            "acc_norm_stderr": 0.014378169884098424
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.31985294117647056,
            "acc_stderr": 0.02833295951403124,
            "acc_norm": 0.31985294117647056,
            "acc_norm_stderr": 0.02833295951403124
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.37551020408163266,
            "acc_stderr": 0.03100120903989484,
            "acc_norm": 0.37551020408163266,
            "acc_norm_stderr": 0.03100120903989484
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5232067510548524,
            "acc_stderr": 0.032512152011410174,
            "acc_norm": 0.5232067510548524,
            "acc_norm_stderr": 0.032512152011410174
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2985658409387223,
            "acc_stderr": 0.011688060141794208,
            "acc_norm": 0.2985658409387223,
            "acc_norm_stderr": 0.011688060141794208
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.03410785338904719,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.03410785338904719
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3939393939393939,
            "acc_stderr": 0.0381549430868893,
            "acc_norm": 0.3939393939393939,
            "acc_norm_stderr": 0.0381549430868893
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24969400244798043,
            "mc1_stderr": 0.015152286907148125,
            "mc2": 0.38092210327853554,
            "mc2_stderr": 0.014881931344043989
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.47417840375586856,
            "acc_stderr": 0.017116907933735912,
            "acc_norm": 0.5586854460093896,
            "acc_norm_stderr": 0.017021311671847467
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "FINDA-FIT/llama-p",
        "model_sha": "e54c345988c60cdafe797a2f15e916801ee4ab7b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
} |
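
For reference, below is a minimal sketch of how this results file could be read to pull out headline numbers. The filename "results.json" and the choice to summarise the ko_mmlu subtasks with an unweighted mean of "acc" are assumptions for illustration, not something specified by the harness output itself.

# Minimal sketch (assumptions: the JSON above is saved as "results.json";
# ko_mmlu subtasks are summarised with an unweighted mean of "acc").
import json
from statistics import mean

with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Per-task headline metrics as stored in the "results" block.
arc = results["harness|ko_arc_challenge|25"]["acc_norm"]
hellaswag = results["harness|ko_hellaswag|10"]["acc_norm"]
truthfulqa = results["harness|ko_truthfulqa_mc|0"]["mc2"]
commongen = results["harness|ko_commongen_v2|2"]["acc_norm"]

# One possible aggregate over the ko_mmlu subtasks.
mmlu = mean(v["acc"] for k, v in results.items() if "ko_mmlu" in k)

print("model:", data["config_general"]["model_name"])
for name, score in [
    ("ko-ARC (acc_norm)", arc),
    ("ko-HellaSwag (acc_norm)", hellaswag),
    ("ko-MMLU (mean acc)", mmlu),
    ("ko-TruthfulQA (mc2)", truthfulqa),
    ("ko-CommonGen v2 (acc_norm)", commongen),
]:
    print(f"{name:>28}: {score:.4f}")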