{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.25170648464163825,
"acc_stderr": 0.012682496334042963,
"acc_norm": 0.30887372013651876,
"acc_norm_stderr": 0.013501770929344003
},
"harness|ko_hellaswag|10": {
"acc": 0.34096793467436765,
"acc_stderr": 0.004730658073041557,
"acc_norm": 0.4206333399721171,
"acc_norm_stderr": 0.004926518439372268
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.23976608187134502,
"acc_stderr": 0.03274485211946956,
"acc_norm": 0.23976608187134502,
"acc_norm_stderr": 0.03274485211946956
},
"harness|ko_mmlu_management|5": {
"acc": 0.2815533980582524,
"acc_stderr": 0.04453254836326467,
"acc_norm": 0.2815533980582524,
"acc_norm_stderr": 0.04453254836326467
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.27586206896551724,
"acc_stderr": 0.01598281477469563,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.01598281477469563
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.03633384414073462,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.03633384414073462
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.15,
"acc_stderr": 0.03588702812826373,
"acc_norm": 0.15,
"acc_norm_stderr": 0.03588702812826373
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.23829787234042554,
"acc_stderr": 0.027851252973889802,
"acc_norm": 0.23829787234042554,
"acc_norm_stderr": 0.027851252973889802
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3072289156626506,
"acc_stderr": 0.03591566797824665,
"acc_norm": 0.3072289156626506,
"acc_norm_stderr": 0.03591566797824665
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2508038585209003,
"acc_stderr": 0.024619771956697165,
"acc_norm": 0.2508038585209003,
"acc_norm_stderr": 0.024619771956697165
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2062780269058296,
"acc_stderr": 0.02715715047956382,
"acc_norm": 0.2062780269058296,
"acc_norm_stderr": 0.02715715047956382
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.033042050878136525,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.033042050878136525
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808779,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808779
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.36134453781512604,
"acc_stderr": 0.031204691225150006,
"acc_norm": 0.36134453781512604,
"acc_norm_stderr": 0.031204691225150006
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.28974358974358977,
"acc_stderr": 0.023000628243687968,
"acc_norm": 0.28974358974358977,
"acc_norm_stderr": 0.023000628243687968
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094633,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094633
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.28078817733990147,
"acc_stderr": 0.03161856335358608,
"acc_norm": 0.28078817733990147,
"acc_norm_stderr": 0.03161856335358608
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2806451612903226,
"acc_stderr": 0.025560604721022884,
"acc_norm": 0.2806451612903226,
"acc_norm_stderr": 0.025560604721022884
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.2094017094017094,
"acc_stderr": 0.026655699653922737,
"acc_norm": 0.2094017094017094,
"acc_norm_stderr": 0.026655699653922737
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2792452830188679,
"acc_stderr": 0.027611163402399715,
"acc_norm": 0.2792452830188679,
"acc_norm_stderr": 0.027611163402399715
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2818181818181818,
"acc_stderr": 0.04309118709946459,
"acc_norm": 0.2818181818181818,
"acc_norm_stderr": 0.04309118709946459
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.026842057873833706,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.026842057873833706
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2980132450331126,
"acc_stderr": 0.037345356767871984,
"acc_norm": 0.2980132450331126,
"acc_norm_stderr": 0.037345356767871984
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.18407960199004975,
"acc_stderr": 0.02740385941078684,
"acc_norm": 0.18407960199004975,
"acc_norm_stderr": 0.02740385941078684
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.20809248554913296,
"acc_stderr": 0.030952890217749884,
"acc_norm": 0.20809248554913296,
"acc_norm_stderr": 0.030952890217749884
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24867724867724866,
"acc_stderr": 0.02226181769240018,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.02226181769240018
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.24305555555555555,
"acc_stderr": 0.03586879280080342,
"acc_norm": 0.24305555555555555,
"acc_norm_stderr": 0.03586879280080342
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036843,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036843
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2630057803468208,
"acc_stderr": 0.023703099525258172,
"acc_norm": 0.2630057803468208,
"acc_norm_stderr": 0.023703099525258172
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3067484662576687,
"acc_stderr": 0.03623089915724148,
"acc_norm": 0.3067484662576687,
"acc_norm_stderr": 0.03623089915724148
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25617283950617287,
"acc_stderr": 0.024288533637726095,
"acc_norm": 0.25617283950617287,
"acc_norm_stderr": 0.024288533637726095
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.25906735751295334,
"acc_stderr": 0.03161877917935411,
"acc_norm": 0.25906735751295334,
"acc_norm_stderr": 0.03161877917935411
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.043036840335373173,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.043036840335373173
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.24770642201834864,
"acc_stderr": 0.018508143602547805,
"acc_norm": 0.24770642201834864,
"acc_norm_stderr": 0.018508143602547805
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235172,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2679738562091503,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.2679738562091503,
"acc_norm_stderr": 0.025360603796242557
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036844,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036844
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.35537190082644626,
"acc_stderr": 0.04369236326573982,
"acc_norm": 0.35537190082644626,
"acc_norm_stderr": 0.04369236326573982
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.19078947368421054,
"acc_stderr": 0.031975658210325004,
"acc_norm": 0.19078947368421054,
"acc_norm_stderr": 0.031975658210325004
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24183006535947713,
"acc_stderr": 0.017322789207784326,
"acc_norm": 0.24183006535947713,
"acc_norm_stderr": 0.017322789207784326
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.25177304964539005,
"acc_stderr": 0.0258921511567094,
"acc_norm": 0.25177304964539005,
"acc_norm_stderr": 0.0258921511567094
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.25892857142857145,
"acc_stderr": 0.04157751539865629,
"acc_norm": 0.25892857142857145,
"acc_norm_stderr": 0.04157751539865629
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2361111111111111,
"acc_stderr": 0.028963702570791047,
"acc_norm": 0.2361111111111111,
"acc_norm_stderr": 0.028963702570791047
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2636871508379888,
"acc_stderr": 0.014736926383761987,
"acc_norm": 0.2636871508379888,
"acc_norm_stderr": 0.014736926383761987
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.16,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.16,
"acc_norm_stderr": 0.03684529491774709
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.19117647058823528,
"acc_stderr": 0.023886881922440362,
"acc_norm": 0.19117647058823528,
"acc_norm_stderr": 0.023886881922440362
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2816326530612245,
"acc_stderr": 0.028795185574291282,
"acc_norm": 0.2816326530612245,
"acc_norm_stderr": 0.028795185574291282
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.24472573839662448,
"acc_stderr": 0.02798569938703642,
"acc_norm": 0.24472573839662448,
"acc_norm_stderr": 0.02798569938703642
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.23272490221642764,
"acc_stderr": 0.0107925955538885,
"acc_norm": 0.23272490221642764,
"acc_norm_stderr": 0.0107925955538885
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.029983733055913623,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.029983733055913623
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.03401506715249039,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.03401506715249039
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.23623011015911874,
"mc1_stderr": 0.014869755015871096,
"mc2": 0.414131633910044,
"mc2_stderr": 0.015365810716919849
},
"harness|ko_commongen_v2|2": {
"acc": 0.3105076741440378,
"acc_stderr": 0.015908004528762003,
"acc_norm": 0.3742621015348288,
"acc_norm_stderr": 0.016637917789798742
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "BM-K/polyglot-ko-1.3b-it-v1.4",
"model_sha": "acbd40970c01a4b40debc0d9a9ac096a74673d74",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}