{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2901023890784983, "acc_stderr": 0.013261573677520769, "acc_norm": 0.3412969283276451, "acc_norm_stderr": 0.01385583128749772 }, "harness|ko_hellaswag|10": { "acc": 0.33210515833499304, "acc_stderr": 0.00470005967137463, "acc_norm": 0.41585341565425216, "acc_norm_stderr": 0.004918612098944034 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.38011695906432746, "acc_stderr": 0.03722965741385539, "acc_norm": 0.38011695906432746, "acc_norm_stderr": 0.03722965741385539 }, "harness|ko_mmlu_management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128921, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128921 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3997445721583653, "acc_stderr": 0.01751684790705327, "acc_norm": 0.3997445721583653, "acc_norm_stderr": 0.01751684790705327 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.04024778401977111, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.04024778401977111 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.32340425531914896, "acc_stderr": 0.030579442773610334, "acc_norm": 0.32340425531914896, "acc_norm_stderr": 0.030579442773610334 }, "harness|ko_mmlu_virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.03740059382029319, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.03740059382029319 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4180064308681672, "acc_stderr": 0.02801365189199507, "acc_norm": 0.4180064308681672, "acc_norm_stderr": 0.02801365189199507 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.34080717488789236, "acc_stderr": 0.03181149747055359, "acc_norm": 0.34080717488789236, "acc_norm_stderr": 0.03181149747055359 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.043564472026650695, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.043564472026650695 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5252525252525253, "acc_stderr": 0.03557806245087314, "acc_norm": 0.5252525252525253, "acc_norm_stderr": 0.03557806245087314 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.041042692118062316, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.041042692118062316 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182087, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182087 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.38974358974358975, "acc_stderr": 0.024726967886647074, "acc_norm": 0.38974358974358975, "acc_norm_stderr": 0.024726967886647074 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04803752235190192, "acc_norm": 0.4444444444444444, 
"acc_norm_stderr": 0.04803752235190192 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.0338640574606209, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.0338640574606209 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.38387096774193546, "acc_stderr": 0.027666182075539635, "acc_norm": 0.38387096774193546, "acc_norm_stderr": 0.027666182075539635 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6196581196581197, "acc_stderr": 0.03180425204384099, "acc_norm": 0.6196581196581197, "acc_norm_stderr": 0.03180425204384099 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.37735849056603776, "acc_stderr": 0.029832808114796005, "acc_norm": 0.37735849056603776, "acc_norm_stderr": 0.029832808114796005 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.047245774057315705, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.047245774057315705 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815642, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815642 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|ko_mmlu_sociology|5": { "acc": 0.527363184079602, "acc_stderr": 0.035302355173346824, "acc_norm": 0.527363184079602, "acc_norm_stderr": 0.035302355173346824 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.34104046242774566, "acc_stderr": 0.036146654241808254, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.036146654241808254 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3492063492063492, "acc_stderr": 0.02455229220934266, "acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.02455229220934266 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3872832369942196, "acc_stderr": 0.026226158605124655, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.026226158605124655 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3619631901840491, "acc_stderr": 0.037757007291414416, "acc_norm": 0.3619631901840491, "acc_norm_stderr": 0.037757007291414416 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.36419753086419754, "acc_stderr": 0.026774929899722327, "acc_norm": 0.36419753086419754, "acc_norm_stderr": 0.026774929899722327 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.40414507772020725, "acc_stderr": 0.0354150857888402, "acc_norm": 0.40414507772020725, "acc_norm_stderr": 0.0354150857888402 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159395, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159395 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4055045871559633, "acc_stderr": 0.02105099799189684, "acc_norm": 0.4055045871559633, "acc_norm_stderr": 0.02105099799189684 }, "harness|ko_mmlu_formal_logic|5": 
{ "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3790849673202614, "acc_stderr": 0.02778014120702333, "acc_norm": 0.3790849673202614, "acc_norm_stderr": 0.02778014120702333 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5619834710743802, "acc_stderr": 0.045291468044357915, "acc_norm": 0.5619834710743802, "acc_norm_stderr": 0.045291468044357915 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.03988903703336285, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.03988903703336285 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3202614379084967, "acc_stderr": 0.018875682938069443, "acc_norm": 0.3202614379084967, "acc_norm_stderr": 0.018875682938069443 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3262411347517731, "acc_stderr": 0.02796845304356317, "acc_norm": 0.3262411347517731, "acc_norm_stderr": 0.02796845304356317 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321617, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321617 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.3106145251396648, "acc_stderr": 0.015476515438005566, "acc_norm": 0.3106145251396648, "acc_norm_stderr": 0.015476515438005566 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4338235294117647, "acc_stderr": 0.030105636570016636, "acc_norm": 0.4338235294117647, "acc_norm_stderr": 0.030105636570016636 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.43673469387755104, "acc_stderr": 0.03175195237583322, "acc_norm": 0.43673469387755104, "acc_norm_stderr": 0.03175195237583322 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4767932489451477, "acc_stderr": 0.032512152011410174, "acc_norm": 0.4767932489451477, "acc_norm_stderr": 0.032512152011410174 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.28683181225554105, "acc_stderr": 0.011551504781176933, "acc_norm": 0.28683181225554105, "acc_norm_stderr": 0.011551504781176933 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.39705882352941174, "acc_stderr": 0.034341311647191286, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.034341311647191286 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.34545454545454546, "acc_stderr": 0.03713158067481913, "acc_norm": 0.34545454545454546, "acc_norm_stderr": 0.03713158067481913 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.32313341493268055, "mc1_stderr": 0.016371836286454614, "mc2": 0.4992370707389853, "mc2_stderr": 0.01568220201461622 }, "harness|ko_commongen_v2|2": { "acc": 0.3293978748524203, "acc_stderr": 0.016158746868147143, "acc_norm": 0.43211334120425027, "acc_norm_stderr": 0.017031170198851753 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "maywell/synatra_V0.01", "model_sha": "c27df4dbc7624ea0bcbf0b0ff149d49b58713a4e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }