{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.27474402730375425,
"acc_stderr": 0.013044617212771227,
"acc_norm": 0.3191126279863481,
"acc_norm_stderr": 0.013621696119173307
},
"harness|ko_hellaswag|10": {
"acc": 0.37950607448715395,
"acc_stderr": 0.004842723234022034,
"acc_norm": 0.4827723561043617,
"acc_norm_stderr": 0.004986818680313436
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.15789473684210525,
"acc_stderr": 0.027966785859160893,
"acc_norm": 0.15789473684210525,
"acc_norm_stderr": 0.027966785859160893
},
"harness|ko_mmlu_management|5": {
"acc": 0.30097087378640774,
"acc_stderr": 0.04541609446503948,
"acc_norm": 0.30097087378640774,
"acc_norm_stderr": 0.04541609446503948
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.21966794380587484,
"acc_stderr": 0.014805384478371162,
"acc_norm": 0.21966794380587484,
"acc_norm_stderr": 0.014805384478371162
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.036333844140734636,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.036333844140734636
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.14042553191489363,
"acc_stderr": 0.022712077616627864,
"acc_norm": 0.14042553191489363,
"acc_norm_stderr": 0.022712077616627864
},
"harness|ko_mmlu_virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.03329394119073528,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.03329394119073528
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.26688102893890675,
"acc_stderr": 0.025122637608816646,
"acc_norm": 0.26688102893890675,
"acc_norm_stderr": 0.025122637608816646
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.14349775784753363,
"acc_stderr": 0.02352937126961819,
"acc_norm": 0.14349775784753363,
"acc_norm_stderr": 0.02352937126961819
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2748091603053435,
"acc_stderr": 0.039153454088478354,
"acc_norm": 0.2748091603053435,
"acc_norm_stderr": 0.039153454088478354
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252606,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252606
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3434343434343434,
"acc_stderr": 0.03383201223244442,
"acc_norm": 0.3434343434343434,
"acc_norm_stderr": 0.03383201223244442
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006718,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006718
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3445378151260504,
"acc_stderr": 0.030868682604121622,
"acc_norm": 0.3445378151260504,
"acc_norm_stderr": 0.030868682604121622
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.35128205128205126,
"acc_stderr": 0.024203665177902796,
"acc_norm": 0.35128205128205126,
"acc_norm_stderr": 0.024203665177902796
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.17,
"acc_stderr": 0.03775251680686371,
"acc_norm": 0.17,
"acc_norm_stderr": 0.03775251680686371
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.040191074725573483,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.040191074725573483
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.030108330718011625,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.030108330718011625
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2967741935483871,
"acc_stderr": 0.0259885007924119,
"acc_norm": 0.2967741935483871,
"acc_norm_stderr": 0.0259885007924119
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.19658119658119658,
"acc_stderr": 0.02603538609895129,
"acc_norm": 0.19658119658119658,
"acc_norm_stderr": 0.02603538609895129
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.27547169811320754,
"acc_stderr": 0.027495663683724067,
"acc_norm": 0.27547169811320754,
"acc_norm_stderr": 0.027495663683724067
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721377,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721377
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526733,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2537313432835821,
"acc_stderr": 0.030769444967296018,
"acc_norm": 0.2537313432835821,
"acc_norm_stderr": 0.030769444967296018
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.32947976878612717,
"acc_stderr": 0.035839017547364134,
"acc_norm": 0.32947976878612717,
"acc_norm_stderr": 0.035839017547364134
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23544973544973544,
"acc_stderr": 0.02185150982203172,
"acc_norm": 0.23544973544973544,
"acc_norm_stderr": 0.02185150982203172
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2708333333333333,
"acc_stderr": 0.03716177437566016,
"acc_norm": 0.2708333333333333,
"acc_norm_stderr": 0.03716177437566016
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.22254335260115607,
"acc_stderr": 0.02239421566194282,
"acc_norm": 0.22254335260115607,
"acc_norm_stderr": 0.02239421566194282
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.2392638036809816,
"acc_stderr": 0.0335195387952127,
"acc_norm": 0.2392638036809816,
"acc_norm_stderr": 0.0335195387952127
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2345679012345679,
"acc_stderr": 0.023576881744005723,
"acc_norm": 0.2345679012345679,
"acc_norm_stderr": 0.023576881744005723
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.36787564766839376,
"acc_stderr": 0.034801756684660366,
"acc_norm": 0.36787564766839376,
"acc_norm_stderr": 0.034801756684660366
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.04142439719489361,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.04142439719489361
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3229357798165138,
"acc_stderr": 0.020048115923415318,
"acc_norm": 0.3229357798165138,
"acc_norm_stderr": 0.020048115923415318
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488774,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488774
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.28104575163398693,
"acc_stderr": 0.025738854797818733,
"acc_norm": 0.28104575163398693,
"acc_norm_stderr": 0.025738854797818733
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2396694214876033,
"acc_stderr": 0.03896878985070417,
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070417
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.23026315789473684,
"acc_stderr": 0.03426059424403165,
"acc_norm": 0.23026315789473684,
"acc_norm_stderr": 0.03426059424403165
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.016819028375736386,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.016819028375736386
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.22340425531914893,
"acc_stderr": 0.02484792135806396,
"acc_norm": 0.22340425531914893,
"acc_norm_stderr": 0.02484792135806396
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952687,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952687
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4675925925925926,
"acc_stderr": 0.03402801581358966,
"acc_norm": 0.4675925925925926,
"acc_norm_stderr": 0.03402801581358966
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.27262569832402234,
"acc_stderr": 0.014893391735249608,
"acc_norm": 0.27262569832402234,
"acc_norm_stderr": 0.014893391735249608
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.16,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.16,
"acc_norm_stderr": 0.03684529491774708
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.030161911930767102,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.030161911930767102
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4,
"acc_stderr": 0.03136250240935892,
"acc_norm": 0.4,
"acc_norm_stderr": 0.03136250240935892
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.20675105485232068,
"acc_stderr": 0.026361651668389094,
"acc_norm": 0.20675105485232068,
"acc_norm_stderr": 0.026361651668389094
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.23663624511082137,
"acc_stderr": 0.010855137351572746,
"acc_norm": 0.23663624511082137,
"acc_norm_stderr": 0.010855137351572746
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604246,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.24242424242424243,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.24242424242424243,
"acc_norm_stderr": 0.03346409881055953
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24112607099143207,
"mc1_stderr": 0.014974827279752329,
"mc2": 0.40162480294038216,
"mc2_stderr": 0.015030387645461886
},
"harness|ko_commongen_v2|2": {
"acc": 0.33412042502951594,
"acc_stderr": 0.016216763304239688,
"acc_norm": 0.4179456906729634,
"acc_norm_stderr": 0.016957292005279713
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "nayohan/polyglot-ko-5.8b-Inst",
"model_sha": "f2d30b16043455a6303d11f28cfd012c46edc4cf",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}