results/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.31143344709897613,
"acc_stderr": 0.013532472099850947,
"acc_norm": 0.3464163822525597,
"acc_norm_stderr": 0.013905011180063247
},
"harness|ko_hellaswag|10": {
"acc": 0.4026090420235013,
"acc_stderr": 0.0048942100113032105,
"acc_norm": 0.5198167695678152,
"acc_norm_stderr": 0.004985860853427639
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.29239766081871343,
"acc_stderr": 0.03488647713457922,
"acc_norm": 0.29239766081871343,
"acc_norm_stderr": 0.03488647713457922
},
"harness|ko_mmlu_management|5": {
"acc": 0.21359223300970873,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.21359223300970873,
"acc_norm_stderr": 0.040580420156460344
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.2822477650063857,
"acc_stderr": 0.01609530296987857,
"acc_norm": 0.2822477650063857,
"acc_norm_stderr": 0.01609530296987857
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.03820169914517905,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03820169914517905
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.23,
"acc_stderr": 0.042295258468165065,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165065
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.28085106382978725,
"acc_stderr": 0.02937917046412483,
"acc_norm": 0.28085106382978725,
"acc_norm_stderr": 0.02937917046412483
},
"harness|ko_mmlu_virology|5": {
"acc": 0.2289156626506024,
"acc_stderr": 0.03270745277352477,
"acc_norm": 0.2289156626506024,
"acc_norm_stderr": 0.03270745277352477
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.21864951768488747,
"acc_stderr": 0.023475581417861113,
"acc_norm": 0.21864951768488747,
"acc_norm_stderr": 0.023475581417861113
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.26905829596412556,
"acc_stderr": 0.02976377940687498,
"acc_norm": 0.26905829596412556,
"acc_norm_stderr": 0.02976377940687498
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.24242424242424243,
"acc_stderr": 0.030532892233932036,
"acc_norm": 0.24242424242424243,
"acc_norm_stderr": 0.030532892233932036
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3103448275862069,
"acc_stderr": 0.03855289616378947,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.03855289616378947
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868956,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868956
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.23846153846153847,
"acc_stderr": 0.021606294494647727,
"acc_norm": 0.23846153846153847,
"acc_norm_stderr": 0.021606294494647727
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094632,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094632
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2315270935960591,
"acc_stderr": 0.029678333141444465,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.029678333141444465
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.25483870967741934,
"acc_stderr": 0.024790118459332208,
"acc_norm": 0.25483870967741934,
"acc_norm_stderr": 0.024790118459332208
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.29914529914529914,
"acc_stderr": 0.029996951858349476,
"acc_norm": 0.29914529914529914,
"acc_norm_stderr": 0.029996951858349476
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2792452830188679,
"acc_stderr": 0.02761116340239972,
"acc_norm": 0.2792452830188679,
"acc_norm_stderr": 0.02761116340239972
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.041723430387053825,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.041723430387053825
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.026067159222275794,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.026067159222275794
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.24503311258278146,
"acc_stderr": 0.03511807571804723,
"acc_norm": 0.24503311258278146,
"acc_norm_stderr": 0.03511807571804723
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.22885572139303484,
"acc_stderr": 0.02970528405677244,
"acc_norm": 0.22885572139303484,
"acc_norm_stderr": 0.02970528405677244
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2543352601156069,
"acc_stderr": 0.0332055644308557,
"acc_norm": 0.2543352601156069,
"acc_norm_stderr": 0.0332055644308557
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2804232804232804,
"acc_stderr": 0.02313528797432563,
"acc_norm": 0.2804232804232804,
"acc_norm_stderr": 0.02313528797432563
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.25,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.3092485549132948,
"acc_stderr": 0.02488314057007176,
"acc_norm": 0.3092485549132948,
"acc_norm_stderr": 0.02488314057007176
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.25766871165644173,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.25766871165644173,
"acc_norm_stderr": 0.03436150827846917
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.29012345679012347,
"acc_stderr": 0.025251173936495026,
"acc_norm": 0.29012345679012347,
"acc_norm_stderr": 0.025251173936495026
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.21761658031088082,
"acc_stderr": 0.02977866303775296,
"acc_norm": 0.21761658031088082,
"acc_norm_stderr": 0.02977866303775296
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.21052631578947367,
"acc_stderr": 0.038351539543994194,
"acc_norm": 0.21052631578947367,
"acc_norm_stderr": 0.038351539543994194
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.20917431192660552,
"acc_stderr": 0.01743793717334323,
"acc_norm": 0.20917431192660552,
"acc_norm_stderr": 0.01743793717334323
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.04104947269903394,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.04104947269903394
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2908496732026144,
"acc_stderr": 0.026004800363952113,
"acc_norm": 0.2908496732026144,
"acc_norm_stderr": 0.026004800363952113
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2644628099173554,
"acc_stderr": 0.040261875275912046,
"acc_norm": 0.2644628099173554,
"acc_norm_stderr": 0.040261875275912046
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.35526315789473684,
"acc_stderr": 0.03894734487013317,
"acc_norm": 0.35526315789473684,
"acc_norm_stderr": 0.03894734487013317
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.2679738562091503,
"acc_stderr": 0.017917974069594726,
"acc_norm": 0.2679738562091503,
"acc_norm_stderr": 0.017917974069594726
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.23049645390070922,
"acc_stderr": 0.025123739226872416,
"acc_norm": 0.23049645390070922,
"acc_norm_stderr": 0.025123739226872416
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.29464285714285715,
"acc_stderr": 0.043270409325787296,
"acc_norm": 0.29464285714285715,
"acc_norm_stderr": 0.043270409325787296
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.03246887243637649,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.03246887243637649
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23575418994413408,
"acc_stderr": 0.014196375686290804,
"acc_norm": 0.23575418994413408,
"acc_norm_stderr": 0.014196375686290804
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411021,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411021
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.28308823529411764,
"acc_stderr": 0.02736586113151381,
"acc_norm": 0.28308823529411764,
"acc_norm_stderr": 0.02736586113151381
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.19591836734693877,
"acc_stderr": 0.025409301953225678,
"acc_norm": 0.19591836734693877,
"acc_norm_stderr": 0.025409301953225678
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.22362869198312235,
"acc_stderr": 0.027123298205229972,
"acc_norm": 0.22362869198312235,
"acc_norm_stderr": 0.027123298205229972
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.26401564537157757,
"acc_stderr": 0.011258435537723812,
"acc_norm": 0.26401564537157757,
"acc_norm_stderr": 0.011258435537723812
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.03132179803083291,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.03132179803083291
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.03453131801885416,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03453131801885416
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156472,
"mc2": 0.4202272328082401,
"mc2_stderr": 0.016142378134497877
},
"harness|ko_commongen_v2|2": {
"acc": 0.3474178403755869,
"acc_stderr": 0.016322206819108932,
"acc_norm": 0.3744131455399061,
"acc_norm_stderr": 0.016590312676984496
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "etri-xainlp/polyglot-ko-12.8b-instruct",
"model_sha": "ec0113994052a77ef4741cf14d7a9af887b2e1d5",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}