results/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2363481228668942,
"acc_stderr": 0.012414960524301823,
"acc_norm": 0.28924914675767915,
"acc_norm_stderr": 0.013250012579393443
},
"harness|ko_hellaswag|10": {
"acc": 0.3379804819757021,
"acc_stderr": 0.004720551323547122,
"acc_norm": 0.4183429595698068,
"acc_norm_stderr": 0.004922789247319879
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.0312678171466318,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.0312678171466318
},
"harness|ko_mmlu_management|5": {
"acc": 0.23300970873786409,
"acc_stderr": 0.041858325989283136,
"acc_norm": 0.23300970873786409,
"acc_norm_stderr": 0.041858325989283136
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2681992337164751,
"acc_stderr": 0.01584243083526944,
"acc_norm": 0.2681992337164751,
"acc_norm_stderr": 0.01584243083526944
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.035914440841969694,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.035914440841969694
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2425531914893617,
"acc_stderr": 0.028020226271200217,
"acc_norm": 0.2425531914893617,
"acc_norm_stderr": 0.028020226271200217
},
"harness|ko_mmlu_virology|5": {
"acc": 0.21084337349397592,
"acc_stderr": 0.0317555478662992,
"acc_norm": 0.21084337349397592,
"acc_norm_stderr": 0.0317555478662992
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2572347266881029,
"acc_stderr": 0.024826171289250888,
"acc_norm": 0.2572347266881029,
"acc_norm_stderr": 0.024826171289250888
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3273542600896861,
"acc_stderr": 0.03149384670994131,
"acc_norm": 0.3273542600896861,
"acc_norm_stderr": 0.03149384670994131
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.22137404580152673,
"acc_stderr": 0.0364129708131373,
"acc_norm": 0.22137404580152673,
"acc_norm_stderr": 0.0364129708131373
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768077,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768077
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2474747474747475,
"acc_stderr": 0.030746300742124498,
"acc_norm": 0.2474747474747475,
"acc_norm_stderr": 0.030746300742124498
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2482758620689655,
"acc_stderr": 0.036001056927277716,
"acc_norm": 0.2482758620689655,
"acc_norm_stderr": 0.036001056927277716
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3487394957983193,
"acc_stderr": 0.03095663632856654,
"acc_norm": 0.3487394957983193,
"acc_norm_stderr": 0.03095663632856654
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2717948717948718,
"acc_stderr": 0.022556551010132354,
"acc_norm": 0.2717948717948718,
"acc_norm_stderr": 0.022556551010132354
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720684
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04186091791394607
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.27586206896551724,
"acc_stderr": 0.03144712581678243,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03144712581678243
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2838709677419355,
"acc_stderr": 0.025649381063029258,
"acc_norm": 0.2838709677419355,
"acc_norm_stderr": 0.025649381063029258
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.21367521367521367,
"acc_stderr": 0.02685345037700916,
"acc_norm": 0.21367521367521367,
"acc_norm_stderr": 0.02685345037700916
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27169811320754716,
"acc_stderr": 0.027377706624670713,
"acc_norm": 0.27169811320754716,
"acc_norm_stderr": 0.027377706624670713
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2818181818181818,
"acc_stderr": 0.0430911870994646,
"acc_norm": 0.2818181818181818,
"acc_norm_stderr": 0.0430911870994646
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.29259259259259257,
"acc_stderr": 0.027738969632176088,
"acc_norm": 0.29259259259259257,
"acc_norm_stderr": 0.027738969632176088
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.23178807947019867,
"acc_stderr": 0.034454062719870546,
"acc_norm": 0.23178807947019867,
"acc_norm_stderr": 0.034454062719870546
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.22885572139303484,
"acc_stderr": 0.029705284056772432,
"acc_norm": 0.22885572139303484,
"acc_norm_stderr": 0.029705284056772432
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25132275132275134,
"acc_stderr": 0.022340482339643898,
"acc_norm": 0.25132275132275134,
"acc_norm_stderr": 0.022340482339643898
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2638888888888889,
"acc_stderr": 0.03685651095897532,
"acc_norm": 0.2638888888888889,
"acc_norm_stderr": 0.03685651095897532
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.023267528432100174,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.023267528432100174
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3128834355828221,
"acc_stderr": 0.03642914578292404,
"acc_norm": 0.3128834355828221,
"acc_norm_stderr": 0.03642914578292404
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2623456790123457,
"acc_stderr": 0.02447722285613511,
"acc_norm": 0.2623456790123457,
"acc_norm_stderr": 0.02447722285613511
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.33678756476683935,
"acc_stderr": 0.03410780251836184,
"acc_norm": 0.33678756476683935,
"acc_norm_stderr": 0.03410780251836184
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.038351539543994194,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.038351539543994194
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.21834862385321102,
"acc_stderr": 0.017712600528722734,
"acc_norm": 0.21834862385321102,
"acc_norm_stderr": 0.017712600528722734
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.18253968253968253,
"acc_stderr": 0.03455071019102149,
"acc_norm": 0.18253968253968253,
"acc_norm_stderr": 0.03455071019102149
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.21895424836601307,
"acc_stderr": 0.02367908986180772,
"acc_norm": 0.21895424836601307,
"acc_norm_stderr": 0.02367908986180772
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.18,
"acc_stderr": 0.03861229196653694,
"acc_norm": 0.18,
"acc_norm_stderr": 0.03861229196653694
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2644628099173554,
"acc_stderr": 0.04026187527591206,
"acc_norm": 0.2644628099173554,
"acc_norm_stderr": 0.04026187527591206
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03459777606810537,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03459777606810537
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2107843137254902,
"acc_stderr": 0.01650047297902479,
"acc_norm": 0.2107843137254902,
"acc_norm_stderr": 0.01650047297902479
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.02624492034984301,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.02624492034984301
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.042878587513404565,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.042878587513404565
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.03400603625538272,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.03400603625538272
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.24581005586592178,
"acc_stderr": 0.014400296429225608,
"acc_norm": 0.24581005586592178,
"acc_norm_stderr": 0.014400296429225608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.36764705882352944,
"acc_stderr": 0.029289413409403192,
"acc_norm": 0.36764705882352944,
"acc_norm_stderr": 0.029289413409403192
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.3673469387755102,
"acc_stderr": 0.03086214492108755,
"acc_norm": 0.3673469387755102,
"acc_norm_stderr": 0.03086214492108755
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.24472573839662448,
"acc_stderr": 0.027985699387036423,
"acc_norm": 0.24472573839662448,
"acc_norm_stderr": 0.027985699387036423
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2438070404172099,
"acc_stderr": 0.010966507972178472,
"acc_norm": 0.2438070404172099,
"acc_norm_stderr": 0.010966507972178472
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25980392156862747,
"acc_stderr": 0.030778554678693257,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.030778554678693257
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.20606060606060606,
"acc_stderr": 0.0315841532404771,
"acc_norm": 0.20606060606060606,
"acc_norm_stderr": 0.0315841532404771
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2386780905752754,
"mc1_stderr": 0.014922629695456418,
"mc2": 0.41348688566296676,
"mc2_stderr": 0.015238831556708764
},
"harness|ko_commongen_v2|2": {
"acc": 0.3293978748524203,
"acc_stderr": 0.016158746868147143,
"acc_norm": 0.40731995277449823,
"acc_norm_stderr": 0.01689245669519127
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "DILAB-HYU/koquality-polyglot-1.3b",
"model_sha": "ca9ba27cccf4065cf447f9fdd7d5aec1715a3175",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
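
A minimal usage sketch, assuming Python 3 with only the standard library and that the JSON above has been saved locally as "result_2023-10-30 14:22:39.json"; the filename and the mean-of-acc_norm summary below are illustrative choices, not part of the leaderboard tooling:

# Load the leaderboard result file and summarize the ko_mmlu subtasks.
# Assumption: the file path matches the name of this results file.
import json

with open("result_2023-10-30 14:22:39.json", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm for every ko_mmlu subtask (keys look like "harness|ko_mmlu_<topic>|5").
mmlu_scores = [
    metrics["acc_norm"]
    for task, metrics in data["results"].items()
    if task.startswith("harness|ko_mmlu_")
]

print(f"model: {data['config_general']['model_name']}")
print(f"ko_mmlu subtasks: {len(mmlu_scores)}")
print(f"mean acc_norm: {sum(mmlu_scores) / len(mmlu_scores):.4f}")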