{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3532423208191126,
            "acc_stderr": 0.013967822714840055,
            "acc_norm": 0.4206484641638225,
            "acc_norm_stderr": 0.014426211252508397
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3968333001394145,
            "acc_stderr": 0.0048824100299354415,
            "acc_norm": 0.5318661621190998,
            "acc_norm_stderr": 0.004979637330230312
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5825242718446602,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.5825242718446602,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5440613026819924,
            "acc_stderr": 0.017810403925435356,
            "acc_norm": 0.5440613026819924,
            "acc_norm_stderr": 0.017810403925435356
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.45925925925925926,
            "acc_stderr": 0.04304979692464244,
            "acc_norm": 0.45925925925925926,
            "acc_norm_stderr": 0.04304979692464244
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.032081157507886836,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.032081157507886836
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.37349397590361444,
            "acc_stderr": 0.03765845117168863,
            "acc_norm": 0.37349397590361444,
            "acc_norm_stderr": 0.03765845117168863
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5209003215434084,
            "acc_stderr": 0.028373270961069414,
            "acc_norm": 0.5209003215434084,
            "acc_norm_stderr": 0.028373270961069414
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4439461883408072,
            "acc_stderr": 0.03334625674242728,
            "acc_norm": 0.4439461883408072,
            "acc_norm_stderr": 0.03334625674242728
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.6515151515151515,
            "acc_stderr": 0.033948539651564025,
            "acc_norm": 0.6515151515151515,
            "acc_norm_stderr": 0.033948539651564025
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.496551724137931,
            "acc_stderr": 0.041665675771015785,
            "acc_norm": 0.496551724137931,
            "acc_norm_stderr": 0.041665675771015785
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.04158307533083286,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.04158307533083286
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.46638655462184875,
            "acc_stderr": 0.03240501447690071,
            "acc_norm": 0.46638655462184875,
            "acc_norm_stderr": 0.03240501447690071
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4846153846153846,
            "acc_stderr": 0.025339003010106522,
            "acc_norm": 0.4846153846153846,
            "acc_norm_stderr": 0.025339003010106522
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.04812917324536823,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.04812917324536823
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3645320197044335,
            "acc_stderr": 0.0338640574606209,
            "acc_norm": 0.3645320197044335,
            "acc_norm_stderr": 0.0338640574606209
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.47096774193548385,
            "acc_stderr": 0.028396016402761008,
            "acc_norm": 0.47096774193548385,
            "acc_norm_stderr": 0.028396016402761008
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.688034188034188,
            "acc_stderr": 0.030351527323344948,
            "acc_norm": 0.688034188034188,
            "acc_norm_stderr": 0.030351527323344948
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.49433962264150944,
            "acc_stderr": 0.03077090076385131,
            "acc_norm": 0.49433962264150944,
            "acc_norm_stderr": 0.03077090076385131
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5545454545454546,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.5545454545454546,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.02763490726417854,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.02763490726417854
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2980132450331126,
            "acc_stderr": 0.037345356767871984,
            "acc_norm": 0.2980132450331126,
            "acc_norm_stderr": 0.037345356767871984
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.582089552238806,
            "acc_stderr": 0.034875586404620636,
            "acc_norm": 0.582089552238806,
            "acc_norm_stderr": 0.034875586404620636
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4046242774566474,
            "acc_stderr": 0.0374246119388725,
            "acc_norm": 0.4046242774566474,
            "acc_norm_stderr": 0.0374246119388725
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3544973544973545,
            "acc_stderr": 0.024636830602842,
            "acc_norm": 0.3544973544973545,
            "acc_norm_stderr": 0.024636830602842
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.038990736873573344,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.038990736873573344
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562427,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562427
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.48265895953757226,
            "acc_stderr": 0.026902900458666647,
            "acc_norm": 0.48265895953757226,
            "acc_norm_stderr": 0.026902900458666647
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4723926380368098,
            "acc_stderr": 0.039223782906109894,
            "acc_norm": 0.4723926380368098,
            "acc_norm_stderr": 0.039223782906109894
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4783950617283951,
            "acc_stderr": 0.027794760105008746,
            "acc_norm": 0.4783950617283951,
            "acc_norm_stderr": 0.027794760105008746
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5958549222797928,
            "acc_stderr": 0.0354150857888402,
            "acc_norm": 0.5958549222797928,
            "acc_norm_stderr": 0.0354150857888402
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.30701754385964913,
            "acc_stderr": 0.0433913832257986,
            "acc_norm": 0.30701754385964913,
            "acc_norm_stderr": 0.0433913832257986
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6,
            "acc_stderr": 0.02100420126042008,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.02100420126042008
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.04104947269903394,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.04104947269903394
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.477124183006536,
            "acc_stderr": 0.028599936776089782,
            "acc_norm": 0.477124183006536,
            "acc_norm_stderr": 0.028599936776089782
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6776859504132231,
            "acc_stderr": 0.04266416363352168,
            "acc_norm": 0.6776859504132231,
            "acc_norm_stderr": 0.04266416363352168
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4407894736842105,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.4407894736842105,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.4068627450980392,
            "acc_stderr": 0.019873802005061177,
            "acc_norm": 0.4068627450980392,
            "acc_norm_stderr": 0.019873802005061177
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32269503546099293,
            "acc_stderr": 0.02788913930053478,
            "acc_norm": 0.32269503546099293,
            "acc_norm_stderr": 0.02788913930053478
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.042878587513404544,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.042878587513404544
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.33796296296296297,
            "acc_stderr": 0.03225941352631295,
            "acc_norm": 0.33796296296296297,
            "acc_norm_stderr": 0.03225941352631295
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27039106145251396,
            "acc_stderr": 0.01485499393801008,
            "acc_norm": 0.27039106145251396,
            "acc_norm_stderr": 0.01485499393801008
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.029520095697687754,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.029520095697687754
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.35918367346938773,
            "acc_stderr": 0.030713560455108493,
            "acc_norm": 0.35918367346938773,
            "acc_norm_stderr": 0.030713560455108493
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6118143459915611,
            "acc_stderr": 0.031722950043323296,
            "acc_norm": 0.6118143459915611,
            "acc_norm_stderr": 0.031722950043323296
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.33116036505867014,
            "acc_stderr": 0.01202012819598577,
            "acc_norm": 0.33116036505867014,
            "acc_norm_stderr": 0.01202012819598577
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5441176470588235,
            "acc_stderr": 0.03495624522015476,
            "acc_norm": 0.5441176470588235,
            "acc_norm_stderr": 0.03495624522015476
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5757575757575758,
            "acc_stderr": 0.03859268142070265,
            "acc_norm": 0.5757575757575758,
            "acc_norm_stderr": 0.03859268142070265
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.42521496532720787,
            "mc2_stderr": 0.014980622040261423
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5726092089728453,
            "acc_stderr": 0.017008129844823153,
            "acc_norm": 0.6375442739079102,
            "acc_norm_stderr": 0.016527131240453696
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1-dpo",
        "model_sha": "d580b50b1a7f8afd838340f8c27e0c5e9f48b7b5",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}