{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.31569965870307165,
"acc_stderr": 0.013582571095815291,
"acc_norm": 0.3438566552901024,
"acc_norm_stderr": 0.013880644570156208
},
"harness|ko_hellaswag|10": {
"acc": 0.3873730332603067,
"acc_stderr": 0.004861544478451863,
"acc_norm": 0.4980083648675563,
"acc_norm_stderr": 0.004989741826250387
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2807017543859649,
"acc_stderr": 0.034462962170884265,
"acc_norm": 0.2807017543859649,
"acc_norm_stderr": 0.034462962170884265
},
"harness|ko_mmlu_management|5": {
"acc": 0.22330097087378642,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.22330097087378642,
"acc_norm_stderr": 0.04123553189891431
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.26947637292464877,
"acc_stderr": 0.01586624307321505,
"acc_norm": 0.26947637292464877,
"acc_norm_stderr": 0.01586624307321505
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.24444444444444444,
"acc_stderr": 0.03712537833614867,
"acc_norm": 0.24444444444444444,
"acc_norm_stderr": 0.03712537833614867
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.029241883869628827,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.029241883869628827
},
"harness|ko_mmlu_virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.033293941190735296,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.033293941190735296
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3022508038585209,
"acc_stderr": 0.02608270069539966,
"acc_norm": 0.3022508038585209,
"acc_norm_stderr": 0.02608270069539966
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.20179372197309417,
"acc_stderr": 0.026936111912802277,
"acc_norm": 0.20179372197309417,
"acc_norm_stderr": 0.026936111912802277
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.25252525252525254,
"acc_stderr": 0.030954055470365897,
"acc_norm": 0.25252525252525254,
"acc_norm_stderr": 0.030954055470365897
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.21379310344827587,
"acc_stderr": 0.03416520447747549,
"acc_norm": 0.21379310344827587,
"acc_norm_stderr": 0.03416520447747549
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.1568627450980392,
"acc_stderr": 0.036186648199362445,
"acc_norm": 0.1568627450980392,
"acc_norm_stderr": 0.036186648199362445
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.22268907563025211,
"acc_stderr": 0.027025433498882374,
"acc_norm": 0.22268907563025211,
"acc_norm_stderr": 0.027025433498882374
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2,
"acc_stderr": 0.020280805062535722,
"acc_norm": 0.2,
"acc_norm_stderr": 0.020280805062535722
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2037037037037037,
"acc_stderr": 0.03893542518824847,
"acc_norm": 0.2037037037037037,
"acc_norm_stderr": 0.03893542518824847
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.27586206896551724,
"acc_stderr": 0.03144712581678243,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03144712581678243
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.24838709677419354,
"acc_stderr": 0.02458002892148101,
"acc_norm": 0.24838709677419354,
"acc_norm_stderr": 0.02458002892148101
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.3034188034188034,
"acc_stderr": 0.030118210106942645,
"acc_norm": 0.3034188034188034,
"acc_norm_stderr": 0.030118210106942645
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2,
"acc_stderr": 0.02461829819586651,
"acc_norm": 0.2,
"acc_norm_stderr": 0.02461829819586651
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.041723430387053825,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.041723430387053825
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.03631329803969653,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.23880597014925373,
"acc_stderr": 0.03014777593540922,
"acc_norm": 0.23880597014925373,
"acc_norm_stderr": 0.03014777593540922
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24867724867724866,
"acc_stderr": 0.022261817692400168,
"acc_norm": 0.24867724867724866,
"acc_norm_stderr": 0.022261817692400168
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2708333333333333,
"acc_stderr": 0.037161774375660164,
"acc_norm": 0.2708333333333333,
"acc_norm_stderr": 0.037161774375660164
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.17,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.17,
"acc_norm_stderr": 0.0377525168068637
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.26878612716763006,
"acc_stderr": 0.02386800326250011,
"acc_norm": 0.26878612716763006,
"acc_norm_stderr": 0.02386800326250011
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.27607361963190186,
"acc_stderr": 0.0351238528370505,
"acc_norm": 0.27607361963190186,
"acc_norm_stderr": 0.0351238528370505
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.28703703703703703,
"acc_stderr": 0.02517104191530968,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.02517104191530968
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.24870466321243523,
"acc_stderr": 0.031195840877700286,
"acc_norm": 0.24870466321243523,
"acc_norm_stderr": 0.031195840877700286
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.038351539543994194,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.038351539543994194
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.23669724770642203,
"acc_stderr": 0.01822407811729908,
"acc_norm": 0.23669724770642203,
"acc_norm_stderr": 0.01822407811729908
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.18253968253968253,
"acc_stderr": 0.034550710191021496,
"acc_norm": 0.18253968253968253,
"acc_norm_stderr": 0.034550710191021496
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.0249541843248799,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.0249541843248799
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2809917355371901,
"acc_stderr": 0.04103203830514512,
"acc_norm": 0.2809917355371901,
"acc_norm_stderr": 0.04103203830514512
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.03459777606810535,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.03459777606810535
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.26633986928104575,
"acc_stderr": 0.017883188134667192,
"acc_norm": 0.26633986928104575,
"acc_norm_stderr": 0.017883188134667192
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2553191489361702,
"acc_stderr": 0.02601199293090201,
"acc_norm": 0.2553191489361702,
"acc_norm_stderr": 0.02601199293090201
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.3125,
"acc_stderr": 0.043994650575715215,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.20833333333333334,
"acc_stderr": 0.027696910713093936,
"acc_norm": 0.20833333333333334,
"acc_norm_stderr": 0.027696910713093936
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2748603351955307,
"acc_stderr": 0.014931316703220513,
"acc_norm": 0.2748603351955307,
"acc_norm_stderr": 0.014931316703220513
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.17647058823529413,
"acc_stderr": 0.02315746830855938,
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.02315746830855938
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.23265306122448978,
"acc_stderr": 0.02704925791589618,
"acc_norm": 0.23265306122448978,
"acc_norm_stderr": 0.02704925791589618
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.29957805907172996,
"acc_stderr": 0.029818024749753102,
"acc_norm": 0.29957805907172996,
"acc_norm_stderr": 0.029818024749753102
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.25684485006518903,
"acc_stderr": 0.011158455853098862,
"acc_norm": 0.25684485006518903,
"acc_norm_stderr": 0.011158455853098862
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604246,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.0340150671524904,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.0340150671524904
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24724602203182375,
"mc1_stderr": 0.015102404797359649,
"mc2": 0.4196185756093357,
"mc2_stderr": 0.01602551288494906
},
"harness|ko_commongen_v2|2": {
"acc": 0.2987012987012987,
"acc_stderr": 0.015735657391438295,
"acc_norm": 0.3482880755608028,
"acc_norm_stderr": 0.016379926739148037
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "beomi/KoAlpaca-Polyglot-12.8B",
"model_sha": "5f225e9c5ae6c7238fc2316da0b8a9922019674d",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}