{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3447098976109215, "acc_stderr": 0.01388881628678211, "acc_norm": 0.3984641638225256, "acc_norm_stderr": 0.014306946052735565 }, "harness|ko_hellaswag|10": { "acc": 0.3717386974706234, "acc_stderr": 0.004822814501358899, "acc_norm": 0.4774945230033858, "acc_norm_stderr": 0.004984724235115118 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5321637426900585, "acc_stderr": 0.038268824176603704, "acc_norm": 0.5321637426900585, "acc_norm_stderr": 0.038268824176603704 }, "harness|ko_mmlu_management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128921, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128921 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4559386973180077, "acc_stderr": 0.017810403925435366, "acc_norm": 0.4559386973180077, "acc_norm_stderr": 0.017810403925435366 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.04024778401977111, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.04024778401977111 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.031967586978353627, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.031967586978353627 }, "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.038367221765980515, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.038367221765980515 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.47266881028938906, "acc_stderr": 0.02835563356832818, "acc_norm": 0.47266881028938906, "acc_norm_stderr": 0.02835563356832818 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.47533632286995514, "acc_stderr": 0.033516951676526276, "acc_norm": 0.47533632286995514, "acc_norm_stderr": 0.033516951676526276 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.043841400240780176, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.049888765156985905, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985905 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.035212249088415866, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.035212249088415866 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878151, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878151 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307809, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307809 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.47478991596638653, "acc_stderr": 0.03243718055137411, "acc_norm": 0.47478991596638653, "acc_norm_stderr": 0.03243718055137411 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4307692307692308, "acc_stderr": 0.025106820660539746, "acc_norm": 0.4307692307692308, "acc_norm_stderr": 0.025106820660539746 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.21, "acc_stderr": 0.04093601807403325, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403325 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5462962962962963, "acc_stderr": 0.04812917324536823, "acc_norm": 
0.5462962962962963, "acc_norm_stderr": 0.04812917324536823 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47096774193548385, "acc_stderr": 0.028396016402761005, "acc_norm": 0.47096774193548385, "acc_norm_stderr": 0.028396016402761005 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7051282051282052, "acc_stderr": 0.02987257770889118, "acc_norm": 0.7051282051282052, "acc_norm_stderr": 0.02987257770889118 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4830188679245283, "acc_stderr": 0.030755120364119898, "acc_norm": 0.4830188679245283, "acc_norm_stderr": 0.030755120364119898 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.0478833976870286, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.0478833976870286 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.02866120111652459, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.02866120111652459 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6417910447761194, "acc_stderr": 0.03390393042268814, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268814 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.0376574669386515, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.0376574669386515 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3544973544973545, "acc_stderr": 0.024636830602842, "acc_norm": 0.3544973544973545, "acc_norm_stderr": 0.024636830602842 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456344, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456344 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.476878612716763, "acc_stderr": 0.026890297881303128, "acc_norm": 0.476878612716763, "acc_norm_stderr": 0.026890297881303128 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4785276073619632, "acc_stderr": 0.03924746876751129, "acc_norm": 0.4785276073619632, "acc_norm_stderr": 0.03924746876751129 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.41358024691358025, "acc_stderr": 0.027402042040269952, "acc_norm": 0.41358024691358025, "acc_norm_stderr": 0.027402042040269952 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5181347150259067, "acc_stderr": 0.036060650018329185, "acc_norm": 0.5181347150259067, "acc_norm_stderr": 0.036060650018329185 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4935779816513762, "acc_stderr": 0.021435554820013077, "acc_norm": 0.4935779816513762, "acc_norm_stderr": 0.021435554820013077 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4803921568627451, "acc_stderr": 0.028607893699576066, "acc_norm": 0.4803921568627451, "acc_norm_stderr": 0.028607893699576066 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.04320767807536669, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.04320767807536669 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.03977749934622074, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.03977749934622074 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3660130718954248, "acc_stderr": 0.01948802574552966, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.01948802574552966 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.02788913930053479, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.02788913930053479 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.033509916046960436, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.033509916046960436 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.32625698324022345, "acc_stderr": 0.01568044151888918, "acc_norm": 0.32625698324022345, "acc_norm_stderr": 0.01568044151888918 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.40808823529411764, "acc_stderr": 0.029855261393483927, "acc_norm": 0.40808823529411764, "acc_norm_stderr": 0.029855261393483927 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5183673469387755, "acc_stderr": 0.031987615467631264, "acc_norm": 0.5183673469387755, "acc_norm_stderr": 0.031987615467631264 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6033755274261603, "acc_stderr": 0.03184399873811224, "acc_norm": 0.6033755274261603, "acc_norm_stderr": 0.03184399873811224 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.35528031290743156, "acc_stderr": 0.012223623364044043, "acc_norm": 0.35528031290743156, "acc_norm_stderr": 0.012223623364044043 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5147058823529411, "acc_stderr": 0.035077938347913236, "acc_norm": 0.5147058823529411, "acc_norm_stderr": 0.035077938347913236 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4484848484848485, "acc_stderr": 0.03883565977956929, "acc_norm": 0.4484848484848485, "acc_norm_stderr": 0.03883565977956929 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.32068543451652387, "mc1_stderr": 0.016339170373280906, "mc2": 0.4733613258729537, "mc2_stderr": 0.015571052806018785 }, "harness|ko_commongen_v2|2": { "acc": 0.3624557260920897, "acc_stderr": 0.01652713124045372, "acc_norm": 0.45336481700118064, "acc_norm_stderr": 0.01711541822522686 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Loyola/Mistral-7b-ITmodel", "model_sha": "19d9919d9624af34763c8263e3ca64d3a038c596", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }