{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.19539249146757678, "acc_stderr": 0.01158690718995291, "acc_norm": 0.2619453924914676, "acc_norm_stderr": 0.012849054826858112 }, "harness|ko_hellaswag|10": { "acc": 0.2642899820752838, "acc_stderr": 0.00440053218855021, "acc_norm": 0.27763393746265685, "acc_norm_stderr": 0.00446916572860033 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0312678171466318, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0312678171466318 }, "harness|ko_mmlu_management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.04582124160161549, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.04582124160161549 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2681992337164751, "acc_stderr": 0.015842430835269438, "acc_norm": 0.2681992337164751, "acc_norm_stderr": 0.015842430835269438 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2074074074074074, "acc_stderr": 0.03502553170678316, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.03502553170678316 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2553191489361702, "acc_stderr": 0.028504856470514203, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.028504856470514203 }, "harness|ko_mmlu_virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.0357160923005348, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.0357160923005348 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2797427652733119, "acc_stderr": 0.02549425935069491, "acc_norm": 0.2797427652733119, "acc_norm_stderr": 0.02549425935069491 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.23318385650224216, "acc_stderr": 0.028380391147094716, "acc_norm": 0.23318385650224216, "acc_norm_stderr": 0.028380391147094716 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2366412213740458, "acc_stderr": 0.037276735755969195, "acc_norm": 0.2366412213740458, "acc_norm_stderr": 0.037276735755969195 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.24242424242424243, "acc_stderr": 0.030532892233932032, "acc_norm": 0.24242424242424243, "acc_norm_stderr": 0.030532892233932032 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307811, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307811 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3487394957983193, "acc_stderr": 0.030956636328566545, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.030956636328566545 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3282051282051282, "acc_stderr": 0.02380763319865727, "acc_norm": 0.3282051282051282, "acc_norm_stderr": 0.02380763319865727 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.15, "acc_stderr": 0.03588702812826371, "acc_norm": 0.15, "acc_norm_stderr": 0.03588702812826371 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.040191074725573483, "acc_norm": 0.2222222222222222, 
"acc_norm_stderr": 0.040191074725573483 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3103448275862069, "acc_stderr": 0.03255086769970103, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.03255086769970103 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3064516129032258, "acc_stderr": 0.026226485652553873, "acc_norm": 0.3064516129032258, "acc_norm_stderr": 0.026226485652553873 }, "harness|ko_mmlu_marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.23018867924528302, "acc_stderr": 0.025907897122408173, "acc_norm": 0.23018867924528302, "acc_norm_stderr": 0.025907897122408173 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.03895091015724138, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.03895091015724138 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02671924078371217, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02671924078371217 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658754, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658754 }, "harness|ko_mmlu_sociology|5": { "acc": 0.22388059701492538, "acc_stderr": 0.0294752502360172, "acc_norm": 0.22388059701492538, "acc_norm_stderr": 0.0294752502360172 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2138728323699422, "acc_stderr": 0.03126511206173043, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.03126511206173043 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.22916666666666666, "acc_stderr": 0.03514697467862388, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.03514697467862388 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.23699421965317918, "acc_stderr": 0.022894082489925992, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.022894082489925992 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.27607361963190186, "acc_stderr": 0.0351238528370505, "acc_norm": 0.27607361963190186, "acc_norm_stderr": 0.0351238528370505 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02438366553103545, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02438366553103545 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.27979274611398963, "acc_stderr": 0.032396370467357015, "acc_norm": 0.27979274611398963, "acc_norm_stderr": 0.032396370467357015 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.24036697247706423, "acc_stderr": 0.01832060732096407, "acc_norm": 0.24036697247706423, "acc_norm_stderr": 0.01832060732096407 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.032684540130117436, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.032684540130117436 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.25163398692810457, "acc_stderr": 0.0248480182638752, "acc_norm": 0.25163398692810457, "acc_norm_stderr": 0.0248480182638752 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2892561983471074, "acc_stderr": 0.04139112727635464, "acc_norm": 0.2892561983471074, "acc_norm_stderr": 0.04139112727635464 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.25, "acc_stderr": 0.03523807393012047, "acc_norm": 0.25, "acc_norm_stderr": 0.03523807393012047 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2238562091503268, "acc_stderr": 0.016863008585416617, "acc_norm": 0.2238562091503268, "acc_norm_stderr": 0.016863008585416617 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290396, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290396 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 0.03362277436608043, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608043 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.014508979453553983, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553983 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.030161911930767102, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.030161911930767102 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2612244897959184, "acc_stderr": 0.02812342933514279, "acc_norm": 0.2612244897959184, "acc_norm_stderr": 0.02812342933514279 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.22362869198312235, "acc_stderr": 0.027123298205229972, "acc_norm": 0.22362869198312235, "acc_norm_stderr": 0.027123298205229972 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2333767926988266, "acc_stderr": 0.010803108481179088, "acc_norm": 0.2333767926988266, "acc_norm_stderr": 0.010803108481179088 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604243, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604243 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.23030303030303031, "acc_stderr": 0.03287666758603489, "acc_norm": 0.23030303030303031, "acc_norm_stderr": 0.03287666758603489 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2741738066095471, "mc1_stderr": 0.015616518497219385, "mc2": 0.5382255654218452, "mc2_stderr": 0.01636582464762524 }, "harness|ko_commongen_v2|2": { "acc": 0.1487603305785124, "acc_stderr": 0.012234446131035059, "acc_norm": 0.3789846517119244, "acc_norm_stderr": 0.016679260684229286 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "FINDA-FIT/llama-m", "model_sha": "7c06c7acb6bd18e1cf52846483e430def93686f2", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }