{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.35494880546075086, "acc_stderr": 0.013983036904094089, "acc_norm": 0.4308873720136519, "acc_norm_stderr": 0.014471133392642475 }, "harness|ko_hellaswag|10": { "acc": 0.39852619000199163, "acc_stderr": 0.004885942040894558, "acc_norm": 0.5380402310296754, "acc_norm_stderr": 0.004975319435777093 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5087719298245614, "acc_stderr": 0.038342347441649924, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.038342347441649924 }, "harness|ko_mmlu_management|5": { "acc": 0.5631067961165048, "acc_stderr": 0.04911147107365777, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.04911147107365777 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.558109833971903, "acc_stderr": 0.017758800534214414, "acc_norm": 0.558109833971903, "acc_norm_stderr": 0.017758800534214414 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.043192236258113303, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.043192236258113303 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4340425531914894, "acc_stderr": 0.03240038086792747, "acc_norm": 0.4340425531914894, "acc_norm_stderr": 0.03240038086792747 }, "harness|ko_mmlu_virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.037998574544796354, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.037998574544796354 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5016077170418006, "acc_stderr": 0.02839794490780661, "acc_norm": 0.5016077170418006, "acc_norm_stderr": 0.02839794490780661 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.484304932735426, "acc_stderr": 0.0335412657542081, "acc_norm": 0.484304932735426, "acc_norm_stderr": 0.0335412657542081 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5190839694656488, "acc_stderr": 0.043820947055509867, "acc_norm": 0.5190839694656488, "acc_norm_stderr": 0.043820947055509867 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6313131313131313, "acc_stderr": 0.034373055019806184, "acc_norm": 0.6313131313131313, "acc_norm_stderr": 0.034373055019806184 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207762, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207762 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03242225027115006, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03242225027115006 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412202, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412202 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331, "acc_norm": 0.5185185185185185, 
"acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.43349753694581283, "acc_stderr": 0.03486731727419872, "acc_norm": 0.43349753694581283, "acc_norm_stderr": 0.03486731727419872 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.532258064516129, "acc_stderr": 0.028384747788813336, "acc_norm": 0.532258064516129, "acc_norm_stderr": 0.028384747788813336 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7435897435897436, "acc_stderr": 0.02860595370200425, "acc_norm": 0.7435897435897436, "acc_norm_stderr": 0.02860595370200425 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4830188679245283, "acc_stderr": 0.030755120364119905, "acc_norm": 0.4830188679245283, "acc_norm_stderr": 0.030755120364119905 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4909090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.4909090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815635, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815635 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242741, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242741 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6417910447761194, "acc_stderr": 0.03390393042268814, "acc_norm": 0.6417910447761194, "acc_norm_stderr": 0.03390393042268814 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.03784271932887467, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.03784271932887467 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425082, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425082 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4236111111111111, "acc_stderr": 0.041321250197233685, "acc_norm": 0.4236111111111111, "acc_norm_stderr": 0.041321250197233685 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.47109826589595377, "acc_stderr": 0.026874085883518348, "acc_norm": 0.47109826589595377, "acc_norm_stderr": 0.026874085883518348 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4723926380368098, "acc_stderr": 0.039223782906109894, "acc_norm": 0.4723926380368098, "acc_norm_stderr": 0.039223782906109894 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5246913580246914, "acc_stderr": 0.027786800931427443, "acc_norm": 0.5246913580246914, "acc_norm_stderr": 0.027786800931427443 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5958549222797928, "acc_stderr": 0.0354150857888402, "acc_norm": 0.5958549222797928, "acc_norm_stderr": 0.0354150857888402 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04434600701584925, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04434600701584925 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.618348623853211, "acc_stderr": 0.020828148517022603, "acc_norm": 0.618348623853211, "acc_norm_stderr": 0.020828148517022603 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5228758169934641, "acc_stderr": 0.028599936776089775, "acc_norm": 0.5228758169934641, "acc_norm_stderr": 0.028599936776089775 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.043207678075366705, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.043207678075366705 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4934210526315789, "acc_stderr": 0.040685900502249704, "acc_norm": 0.4934210526315789, "acc_norm_stderr": 0.040685900502249704 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4019607843137255, "acc_stderr": 0.019835176484375383, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.019835176484375383 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.33687943262411346, "acc_stderr": 0.02819553487396673, "acc_norm": 0.33687943262411346, "acc_norm_stderr": 0.02819553487396673 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402543, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402543 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961441, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961441 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.41911764705882354, "acc_stderr": 0.029972807170464626, "acc_norm": 0.41911764705882354, "acc_norm_stderr": 0.029972807170464626 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.42857142857142855, "acc_stderr": 0.031680911612338825, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.031680911612338825 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6413502109704642, "acc_stderr": 0.031219569445301847, "acc_norm": 0.6413502109704642, "acc_norm_stderr": 0.031219569445301847 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32659713168187743, "acc_stderr": 0.011977676704715997, "acc_norm": 0.32659713168187743, "acc_norm_stderr": 0.011977676704715997 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.553921568627451, "acc_stderr": 0.034888454513049734, "acc_norm": 0.553921568627451, "acc_norm_stderr": 0.034888454513049734 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5636363636363636, "acc_stderr": 0.03872592983524754, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.03872592983524754 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2692778457772338, "mc1_stderr": 0.015528566637087298, "mc2": 0.41566543261161853, "mc2_stderr": 0.014814793696578963 }, "harness|ko_commongen_v2|2": { "acc": 0.5631641086186541, "acc_stderr": 0.01705263355985607, "acc_norm": 0.5914994096812278, "acc_norm_stderr": 0.016900062879427122 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "GAI-LLM/Yi-Ko-6B_mixed_v10", "model_sha": "5914204925b61986dfc048dcb78ccf5dc88d013e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }