{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.28754266211604096,
            "acc_stderr": 0.013226719056266132,
            "acc_norm": 0.3319112627986348,
            "acc_norm_stderr": 0.013760988200880543
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.344353714399522,
            "acc_stderr": 0.004741859753178417,
            "acc_norm": 0.4291973710416252,
            "acc_norm_stderr": 0.004939500404882185
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.23976608187134502,
            "acc_stderr": 0.03274485211946956,
            "acc_norm": 0.23976608187134502,
            "acc_norm_stderr": 0.03274485211946956
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.24271844660194175,
            "acc_stderr": 0.04245022486384495,
            "acc_norm": 0.24271844660194175,
            "acc_norm_stderr": 0.04245022486384495
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3065134099616858,
            "acc_stderr": 0.016486952893041515,
            "acc_norm": 0.3065134099616858,
            "acc_norm_stderr": 0.016486952893041515
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2962962962962963,
            "acc_stderr": 0.03944624162501116,
            "acc_norm": 0.2962962962962963,
            "acc_norm_stderr": 0.03944624162501116
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.31063829787234043,
            "acc_stderr": 0.030251237579213167,
            "acc_norm": 0.31063829787234043,
            "acc_norm_stderr": 0.030251237579213167
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.37349397590361444,
            "acc_stderr": 0.037658451171688624,
            "acc_norm": 0.37349397590361444,
            "acc_norm_stderr": 0.037658451171688624
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.29260450160771706,
            "acc_stderr": 0.025839898334877983,
            "acc_norm": 0.29260450160771706,
            "acc_norm_stderr": 0.025839898334877983
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.36771300448430494,
            "acc_stderr": 0.03236198350928275,
            "acc_norm": 0.36771300448430494,
            "acc_norm_stderr": 0.03236198350928275
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.35877862595419846,
            "acc_stderr": 0.04206739313864908,
            "acc_norm": 0.35877862595419846,
            "acc_norm_stderr": 0.04206739313864908
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03191178226713546,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03191178226713546
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.296551724137931,
            "acc_stderr": 0.038061426873099935,
            "acc_norm": 0.296551724137931,
            "acc_norm_stderr": 0.038061426873099935
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.23109243697478993,
            "acc_stderr": 0.027381406927868973,
            "acc_norm": 0.23109243697478993,
            "acc_norm_stderr": 0.027381406927868973
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.022421273612923707,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.022421273612923707
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.32407407407407407,
            "acc_stderr": 0.04524596007030048,
            "acc_norm": 0.32407407407407407,
            "acc_norm_stderr": 0.04524596007030048
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.032550867699701024,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.032550867699701024
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3,
            "acc_stderr": 0.026069362295335137,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.026069362295335137
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.32051282051282054,
            "acc_stderr": 0.030572811310299607,
            "acc_norm": 0.32051282051282054,
            "acc_norm_stderr": 0.030572811310299607
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3169811320754717,
            "acc_stderr": 0.02863723563980092,
            "acc_norm": 0.3169811320754717,
            "acc_norm_stderr": 0.02863723563980092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.3181818181818182,
            "acc_stderr": 0.04461272175910507,
            "acc_norm": 0.3181818181818182,
            "acc_norm_stderr": 0.04461272175910507
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844075,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844075
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23178807947019867,
            "acc_stderr": 0.034454062719870546,
            "acc_norm": 0.23178807947019867,
            "acc_norm_stderr": 0.034454062719870546
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2935323383084577,
            "acc_stderr": 0.03220024104534205,
            "acc_norm": 0.2935323383084577,
            "acc_norm_stderr": 0.03220024104534205
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.033450369167889925,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.033450369167889925
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.26455026455026454,
            "acc_stderr": 0.022717467897708617,
            "acc_norm": 0.26455026455026454,
            "acc_norm_stderr": 0.022717467897708617
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2152777777777778,
            "acc_stderr": 0.034370793441061316,
            "acc_norm": 0.2152777777777778,
            "acc_norm_stderr": 0.034370793441061316
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3208092485549133,
            "acc_stderr": 0.025131000233647907,
            "acc_norm": 0.3208092485549133,
            "acc_norm_stderr": 0.025131000233647907
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.25766871165644173,
            "acc_stderr": 0.03436150827846917,
            "acc_norm": 0.25766871165644173,
            "acc_norm_stderr": 0.03436150827846917
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.33024691358024694,
            "acc_stderr": 0.026168298456732846,
            "acc_norm": 0.33024691358024694,
            "acc_norm_stderr": 0.026168298456732846
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.24870466321243523,
            "acc_stderr": 0.03119584087770031,
            "acc_norm": 0.24870466321243523,
            "acc_norm_stderr": 0.03119584087770031
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.04142439719489362,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.04142439719489362
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3100917431192661,
            "acc_stderr": 0.019830849684439752,
            "acc_norm": 0.3100917431192661,
            "acc_norm_stderr": 0.019830849684439752
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235173,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235173
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.027057974624494382,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.027057974624494382
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.371900826446281,
            "acc_stderr": 0.04412015806624504,
            "acc_norm": 0.371900826446281,
            "acc_norm_stderr": 0.04412015806624504
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.033176727875331574,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.033176727875331574
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.01815287105153881,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.01815287105153881
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.28368794326241137,
            "acc_stderr": 0.02689170942834396,
            "acc_norm": 0.28368794326241137,
            "acc_norm_stderr": 0.02689170942834396
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.25462962962962965,
            "acc_stderr": 0.029711275860005344,
            "acc_norm": 0.25462962962962965,
            "acc_norm_stderr": 0.029711275860005344
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33455882352941174,
            "acc_stderr": 0.02866199620233531,
            "acc_norm": 0.33455882352941174,
            "acc_norm_stderr": 0.02866199620233531
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2693877551020408,
            "acc_stderr": 0.02840125202902294,
            "acc_norm": 0.2693877551020408,
            "acc_norm_stderr": 0.02840125202902294
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.31645569620253167,
            "acc_stderr": 0.030274974880218974,
            "acc_norm": 0.31645569620253167,
            "acc_norm_stderr": 0.030274974880218974
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28552803129074317,
            "acc_stderr": 0.01153575158666565,
            "acc_norm": 0.28552803129074317,
            "acc_norm_stderr": 0.01153575158666565
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.30392156862745096,
            "acc_stderr": 0.032282103870378935,
            "acc_norm": 0.30392156862745096,
            "acc_norm_stderr": 0.032282103870378935
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3090909090909091,
            "acc_stderr": 0.03608541011573967,
            "acc_norm": 0.3090909090909091,
            "acc_norm_stderr": 0.03608541011573967
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2729498164014688,
            "mc1_stderr": 0.015594753632006514,
            "mc2": 0.44588495304167214,
            "mc2_stderr": 0.015458963700699168
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.27744982290436837,
            "acc_stderr": 0.015393630236605975,
            "acc_norm": 0.3105076741440378,
            "acc_norm_stderr": 0.015908004528762024
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Changgil/K2S3-SOLAR-11b-v1.0",
        "model_sha": "3c5ff9c8a00dfb6cf8619ce08c2f06a22e650e0c",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}