{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2713310580204778,
            "acc_stderr": 0.012993807727545787,
            "acc_norm": 0.31399317406143346,
            "acc_norm_stderr": 0.013562691224726293
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36656044612626965,
            "acc_stderr": 0.004808802114592829,
            "acc_norm": 0.46564429396534557,
            "acc_norm_stderr": 0.0049779884525026396
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.031885780176863984,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.031885780176863984
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.24271844660194175,
            "acc_stderr": 0.04245022486384493,
            "acc_norm": 0.24271844660194175,
            "acc_norm_stderr": 0.04245022486384493
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24648786717752236,
            "acc_stderr": 0.015411308769686938,
            "acc_norm": 0.24648786717752236,
            "acc_norm_stderr": 0.015411308769686938
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.03785714465066652,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.03785714465066652
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816508,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816508
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.33191489361702126,
            "acc_stderr": 0.03078373675774566,
            "acc_norm": 0.33191489361702126,
            "acc_norm_stderr": 0.03078373675774566
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2891566265060241,
            "acc_stderr": 0.03529486801511115,
            "acc_norm": 0.2891566265060241,
            "acc_norm_stderr": 0.03529486801511115
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.26688102893890675,
            "acc_stderr": 0.025122637608816622,
            "acc_norm": 0.26688102893890675,
            "acc_norm_stderr": 0.025122637608816622
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.33183856502242154,
            "acc_stderr": 0.03160295143776679,
            "acc_norm": 0.33183856502242154,
            "acc_norm_stderr": 0.03160295143776679
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.26717557251908397,
            "acc_stderr": 0.038808483010823944,
            "acc_norm": 0.26717557251908397,
            "acc_norm_stderr": 0.038808483010823944
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29797979797979796,
            "acc_stderr": 0.03258630383836556,
            "acc_norm": 0.29797979797979796,
            "acc_norm_stderr": 0.03258630383836556
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2482758620689655,
            "acc_stderr": 0.03600105692727772,
            "acc_norm": 0.2482758620689655,
            "acc_norm_stderr": 0.03600105692727772
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23949579831932774,
            "acc_stderr": 0.027722065493361283,
            "acc_norm": 0.23949579831932774,
            "acc_norm_stderr": 0.027722065493361283
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2076923076923077,
            "acc_stderr": 0.020567539567246804,
            "acc_norm": 0.2076923076923077,
            "acc_norm_stderr": 0.020567539567246804
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.04414343666854932,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.04414343666854932
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2561576354679803,
            "acc_stderr": 0.030712730070982592,
            "acc_norm": 0.2561576354679803,
            "acc_norm_stderr": 0.030712730070982592
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.24193548387096775,
            "acc_stderr": 0.024362599693031083,
            "acc_norm": 0.24193548387096775,
            "acc_norm_stderr": 0.024362599693031083
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2264957264957265,
            "acc_stderr": 0.027421007295392923,
            "acc_norm": 0.2264957264957265,
            "acc_norm_stderr": 0.027421007295392923
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.25660377358490566,
            "acc_stderr": 0.026880647889052,
            "acc_norm": 0.25660377358490566,
            "acc_norm_stderr": 0.026880647889052
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.3,
            "acc_stderr": 0.04389311454644286,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.04389311454644286
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844054,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844054
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2052980132450331,
            "acc_stderr": 0.03297986648473835,
            "acc_norm": 0.2052980132450331,
            "acc_norm_stderr": 0.03297986648473835
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24378109452736318,
            "acc_stderr": 0.03036049015401465,
            "acc_norm": 0.24378109452736318,
            "acc_norm_stderr": 0.03036049015401465
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2138728323699422,
            "acc_stderr": 0.03126511206173041,
            "acc_norm": 0.2138728323699422,
            "acc_norm_stderr": 0.03126511206173041
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.24603174603174602,
            "acc_stderr": 0.022182037202948368,
            "acc_norm": 0.24603174603174602,
            "acc_norm_stderr": 0.022182037202948368
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252604,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252604
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2514450867052023,
            "acc_stderr": 0.023357365785874037,
            "acc_norm": 0.2514450867052023,
            "acc_norm_stderr": 0.023357365785874037
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.02438366553103545,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.02438366553103545
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.23316062176165803,
            "acc_stderr": 0.030516111371476005,
            "acc_norm": 0.23316062176165803,
            "acc_norm_stderr": 0.030516111371476005
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.038351539543994194,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.038351539543994194
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.25688073394495414,
            "acc_stderr": 0.018732492928342472,
            "acc_norm": 0.25688073394495414,
            "acc_norm_stderr": 0.018732492928342472
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.29365079365079366,
            "acc_stderr": 0.04073524322147127,
            "acc_norm": 0.29365079365079366,
            "acc_norm_stderr": 0.04073524322147127
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.24836601307189543,
            "acc_stderr": 0.02473998135511359,
            "acc_norm": 0.24836601307189543,
            "acc_norm_stderr": 0.02473998135511359
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036624,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036624
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.23140495867768596,
            "acc_stderr": 0.03849856098794088,
            "acc_norm": 0.23140495867768596,
            "acc_norm_stderr": 0.03849856098794088
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.24342105263157895,
            "acc_stderr": 0.034923496688842384,
            "acc_norm": 0.24342105263157895,
            "acc_norm_stderr": 0.034923496688842384
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.23039215686274508,
            "acc_stderr": 0.01703522925803404,
            "acc_norm": 0.23039215686274508,
            "acc_norm_stderr": 0.01703522925803404
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.22695035460992907,
            "acc_stderr": 0.02498710636564297,
            "acc_norm": 0.22695035460992907,
            "acc_norm_stderr": 0.02498710636564297
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.32142857142857145,
            "acc_stderr": 0.04432804055291519,
            "acc_norm": 0.32142857142857145,
            "acc_norm_stderr": 0.04432804055291519
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.030546745264953167,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.030546745264953167
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2681564245810056,
            "acc_stderr": 0.014816119635316994,
            "acc_norm": 0.2681564245810056,
            "acc_norm_stderr": 0.014816119635316994
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.17,
            "acc_stderr": 0.037752516806863715,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.037752516806863715
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3014705882352941,
            "acc_stderr": 0.027875982114273168,
            "acc_norm": 0.3014705882352941,
            "acc_norm_stderr": 0.027875982114273168
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.23673469387755103,
            "acc_stderr": 0.027212835884073163,
            "acc_norm": 0.23673469387755103,
            "acc_norm_stderr": 0.027212835884073163
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.22362869198312235,
            "acc_stderr": 0.027123298205229972,
            "acc_norm": 0.22362869198312235,
            "acc_norm_stderr": 0.027123298205229972
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24185136897001303,
            "acc_stderr": 0.010936550813827054,
            "acc_norm": 0.24185136897001303,
            "acc_norm_stderr": 0.010936550813827054
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.031660096793998116,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.031660096793998116
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2545454545454545,
            "acc_stderr": 0.03401506715249039,
            "acc_norm": 0.2545454545454545,
            "acc_norm_stderr": 0.03401506715249039
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.01552856663708731,
            "mc2": 0.42575853795337826,
            "mc2_stderr": 0.016210145327267837
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.1983568075117371,
            "acc_stderr": 0.013669396132574575,
            "acc_norm": 0.22535211267605634,
            "acc_norm_stderr": 0.014322479434188889
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "sue3489/test2_kullm-polyglot-5.8b-v2-koalpaca-v1.1b",
        "model_sha": "ab9bbba26729005519ac0cc01b349be5e2ad95fe",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}