{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.36860068259385664, "acc_stderr": 0.014097810678042192, "acc_norm": 0.42235494880546076, "acc_norm_stderr": 0.014434138713379981 }, "harness|ko_hellaswag|10": { "acc": 0.4082852021509659, "acc_stderr": 0.004905119039849461, "acc_norm": 0.5435172276438957, "acc_norm_stderr": 0.004970846697552308 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5555555555555556, "acc_stderr": 0.03811079669833531, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.03811079669833531 }, "harness|ko_mmlu_management|5": { "acc": 0.4563106796116505, "acc_stderr": 0.049318019942204146, "acc_norm": 0.4563106796116505, "acc_norm_stderr": 0.049318019942204146 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5057471264367817, "acc_stderr": 0.017878782326129224, "acc_norm": 0.5057471264367817, "acc_norm_stderr": 0.017878782326129224 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.04256193767901407, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.04256193767901407 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.03078373675774564, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.03078373675774564 }, "harness|ko_mmlu_virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699122, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699122 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5080385852090032, "acc_stderr": 0.028394421370984538, "acc_norm": 0.5080385852090032, "acc_norm_stderr": 0.028394421370984538 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.37668161434977576, "acc_stderr": 0.03252113489929187, "acc_norm": 0.37668161434977576, "acc_norm_stderr": 0.03252113489929187 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.04384140024078016, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.04384140024078016 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5505050505050505, "acc_stderr": 0.035441324919479704, "acc_norm": 0.5505050505050505, "acc_norm_stderr": 0.035441324919479704 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.45517241379310347, "acc_stderr": 0.04149886942192117, "acc_norm": 0.45517241379310347, "acc_norm_stderr": 0.04149886942192117 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.03221943636566197, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.03221943636566197 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3923076923076923, "acc_stderr": 0.02475600038213094, "acc_norm": 0.3923076923076923, "acc_norm_stderr": 0.02475600038213094 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, 
"acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.432258064516129, "acc_stderr": 0.028181739720019416, "acc_norm": 0.432258064516129, "acc_norm_stderr": 0.028181739720019416 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6495726495726496, "acc_stderr": 0.0312561082442188, "acc_norm": 0.6495726495726496, "acc_norm_stderr": 0.0312561082442188 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4679245283018868, "acc_stderr": 0.030709486992556545, "acc_norm": 0.4679245283018868, "acc_norm_stderr": 0.030709486992556545 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.44545454545454544, "acc_stderr": 0.04760548821460325, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.04760548821460325 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2518518518518518, "acc_stderr": 0.026466117538959916, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.026466117538959916 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5572139303482587, "acc_stderr": 0.03512310964123937, "acc_norm": 0.5572139303482587, "acc_norm_stderr": 0.03512310964123937 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.022789673145776575, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.022789673145776575 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4508670520231214, "acc_stderr": 0.026788811931562757, "acc_norm": 0.4508670520231214, "acc_norm_stderr": 0.026788811931562757 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4110429447852761, "acc_stderr": 0.038656978537853624, "acc_norm": 0.4110429447852761, "acc_norm_stderr": 0.038656978537853624 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.46296296296296297, "acc_stderr": 0.02774431344337654, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.02774431344337654 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.43005181347150256, "acc_stderr": 0.03572954333144807, "acc_norm": 0.43005181347150256, "acc_norm_stderr": 0.03572954333144807 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.48256880733944957, "acc_stderr": 0.021424291871853147, "acc_norm": 0.48256880733944957, "acc_norm_stderr": 0.021424291871853147 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604674, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604674 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.42483660130718953, "acc_stderr": 0.02830457667314112, "acc_norm": 0.42483660130718953, "acc_norm_stderr": 0.02830457667314112 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5619834710743802, "acc_stderr": 0.045291468044357915, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.045291468044357915 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.039993097127774734, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.039993097127774734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.32189542483660133, "acc_stderr": 0.018901015322093085, "acc_norm": 0.32189542483660133, "acc_norm_stderr": 0.018901015322093085 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3120567375886525, "acc_stderr": 0.02764012054516994, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.02764012054516994 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.038946411200447915, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.038946411200447915 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03179876342176851, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03179876342176851 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2426470588235294, "acc_stderr": 0.026040662474201285, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.026040662474201285 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.46938775510204084, "acc_stderr": 0.031949171367580624, "acc_norm": 0.46938775510204084, "acc_norm_stderr": 0.031949171367580624 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5063291139240507, "acc_stderr": 0.03254462010767859, "acc_norm": 0.5063291139240507, "acc_norm_stderr": 0.03254462010767859 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.30247718383311606, "acc_stderr": 0.0117315242341657, "acc_norm": 0.30247718383311606, "acc_norm_stderr": 0.0117315242341657 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.38235294117647056, "acc_stderr": 0.03410785338904719, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.03410785338904719 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4727272727272727, "acc_stderr": 0.03898531605579419, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.03898531605579419 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2533659730722154, "mc1_stderr": 0.015225899340826824, "mc2": 0.40933802446057865, "mc2_stderr": 0.014937193336867839 }, "harness|ko_commongen_v2|2": { "acc": 0.4557260920897285, "acc_stderr": 0.017122829143292648, "acc_norm": 0.5147579693034239, "acc_norm_stderr": 0.01718286443499856 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DopeorNope/COLA3_13B", "model_sha": "7725e7a1c6f8f022c7c4ec0286dd9f7fada126bd", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }