{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2167235494880546, "acc_stderr": 0.012040156713481192, "acc_norm": 0.25597269624573377, "acc_norm_stderr": 0.012753013241244513 }, "harness|ko_hellaswag|10": { "acc": 0.2819159529974109, "acc_stderr": 0.004490130691020431, "acc_norm": 0.3150766779525991, "acc_norm_stderr": 0.004635970060392421 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2046783625730994, "acc_stderr": 0.03094445977853321, "acc_norm": 0.2046783625730994, "acc_norm_stderr": 0.03094445977853321 }, "harness|ko_mmlu_management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.26436781609195403, "acc_stderr": 0.01576998484069053, "acc_norm": 0.26436781609195403, "acc_norm_stderr": 0.01576998484069053 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.1925925925925926, "acc_stderr": 0.03406542058502652, "acc_norm": 0.1925925925925926, "acc_norm_stderr": 0.03406542058502652 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2851063829787234, "acc_stderr": 0.029513196625539355, "acc_norm": 0.2851063829787234, "acc_norm_stderr": 0.029513196625539355 }, "harness|ko_mmlu_virology|5": { "acc": 0.35542168674698793, "acc_stderr": 0.03726214354322416, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.03726214354322416 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2829581993569132, "acc_stderr": 0.025583062489984838, "acc_norm": 0.2829581993569132, "acc_norm_stderr": 0.025583062489984838 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.33183856502242154, "acc_stderr": 0.031602951437766785, "acc_norm": 0.33183856502242154, "acc_norm_stderr": 0.031602951437766785 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.1919191919191919, "acc_stderr": 0.02805779167298901, "acc_norm": 0.1919191919191919, "acc_norm_stderr": 0.02805779167298901 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.0395058186117996, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.0395058186117996 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23109243697478993, "acc_stderr": 0.02738140692786898, "acc_norm": 0.23109243697478993, "acc_norm_stderr": 0.02738140692786898 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2205128205128205, "acc_stderr": 0.021020672680827912, "acc_norm": 0.2205128205128205, "acc_norm_stderr": 0.021020672680827912 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 
0.04186091791394607 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.16748768472906403, "acc_stderr": 0.026273086047535414, "acc_norm": 0.16748768472906403, "acc_norm_stderr": 0.026273086047535414 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.20967741935483872, "acc_stderr": 0.02315787934908353, "acc_norm": 0.20967741935483872, "acc_norm_stderr": 0.02315787934908353 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267406, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267406 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2, "acc_stderr": 0.02461829819586651, "acc_norm": 0.2, "acc_norm_stderr": 0.02461829819586651 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.025787874220959312, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.025787874220959312 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23178807947019867, "acc_stderr": 0.034454062719870546, "acc_norm": 0.23178807947019867, "acc_norm_stderr": 0.034454062719870546 }, "harness|ko_mmlu_sociology|5": { "acc": 0.21393034825870647, "acc_stderr": 0.028996909693328903, "acc_norm": 0.21393034825870647, "acc_norm_stderr": 0.028996909693328903 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.1791907514450867, "acc_stderr": 0.02924251305906329, "acc_norm": 0.1791907514450867, "acc_norm_stderr": 0.02924251305906329 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.22486772486772486, "acc_stderr": 0.021502096078229147, "acc_norm": 0.22486772486772486, "acc_norm_stderr": 0.021502096078229147 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2361111111111111, "acc_stderr": 0.03551446610810826, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.28034682080924855, "acc_stderr": 0.024182427496577615, "acc_norm": 0.28034682080924855, "acc_norm_stderr": 0.024182427496577615 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.03322015795776742, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.03322015795776742 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.23765432098765432, "acc_stderr": 0.023683591837008553, "acc_norm": 0.23765432098765432, "acc_norm_stderr": 0.023683591837008553 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.030516111371476008, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.030516111371476008 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436716, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436716 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.1908256880733945, "acc_stderr": 0.016847676400091115, "acc_norm": 0.1908256880733945, "acc_norm_stderr": 0.016847676400091115 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.1746031746031746, "acc_stderr": 0.0339549002085611, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.0339549002085611 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.24183006535947713, "acc_stderr": 0.024518195641879334, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.024518195641879334 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2644628099173554, "acc_stderr": 0.040261875275912073, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.040261875275912073 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.24183006535947713, "acc_stderr": 0.017322789207784326, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.017322789207784326 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432403, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432403 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2175925925925926, "acc_stderr": 0.028139689444859655, "acc_norm": 0.2175925925925926, "acc_norm_stderr": 0.028139689444859655 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2335195530726257, "acc_stderr": 0.014149575348976266, "acc_norm": 0.2335195530726257, "acc_norm_stderr": 0.014149575348976266 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.19852941176470587, "acc_stderr": 0.02423101337054111, "acc_norm": 0.19852941176470587, "acc_norm_stderr": 0.02423101337054111 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.17959183673469387, "acc_stderr": 0.024573293589585637, "acc_norm": 0.17959183673469387, "acc_norm_stderr": 0.024573293589585637 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.23533246414602346, "acc_stderr": 0.010834432543912224, "acc_norm": 0.23533246414602346, "acc_norm_stderr": 0.010834432543912224 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.03077855467869326, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.03077855467869326 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.24242424242424243, "acc_stderr": 0.03346409881055953, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.03346409881055953 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3108935128518972, "mc1_stderr": 0.016203316673559693, "mc2": 0.5030438206753587, "mc2_stderr": 0.016137949960889377 }, "harness|ko_commongen_v2|2": { "acc": 0.24203069657615112, "acc_stderr": 0.014725696750525331, "acc_norm": 0.3105076741440378, "acc_norm_stderr": 0.01590800452876203 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T", "model_sha": "314e0f65d90384e224ac8d7c0b228a661a06673f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }