{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2986348122866894,
"acc_stderr": 0.013374078615068754,
"acc_norm": 0.34982935153583616,
"acc_norm_stderr": 0.013936809212158296
},
"harness|ko_hellaswag|10": {
"acc": 0.39294961163114916,
"acc_stderr": 0.004874076250521577,
"acc_norm": 0.4985062736506672,
"acc_norm_stderr": 0.004989759144812286
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.0330140594698725,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.0330140594698725
},
"harness|ko_mmlu_management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266196
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.26053639846743293,
"acc_stderr": 0.015696008563807096,
"acc_norm": 0.26053639846743293,
"acc_norm_stderr": 0.015696008563807096
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.25957446808510637,
"acc_stderr": 0.028659179374292326,
"acc_norm": 0.25957446808510637,
"acc_norm_stderr": 0.028659179374292326
},
"harness|ko_mmlu_virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.03329394119073529,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.03329394119073529
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.21864951768488747,
"acc_stderr": 0.02347558141786111,
"acc_norm": 0.21864951768488747,
"acc_norm_stderr": 0.02347558141786111
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.2556053811659193,
"acc_stderr": 0.029275891003969927,
"acc_norm": 0.2556053811659193,
"acc_norm_stderr": 0.029275891003969927
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.1984732824427481,
"acc_stderr": 0.0349814938546247,
"acc_norm": 0.1984732824427481,
"acc_norm_stderr": 0.0349814938546247
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.2474747474747475,
"acc_stderr": 0.0307463007421245,
"acc_norm": 0.2474747474747475,
"acc_norm_stderr": 0.0307463007421245
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135303,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135303
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.25630252100840334,
"acc_stderr": 0.02835962087053395,
"acc_norm": 0.25630252100840334,
"acc_norm_stderr": 0.02835962087053395
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.20512820512820512,
"acc_stderr": 0.020473233173551986,
"acc_norm": 0.20512820512820512,
"acc_norm_stderr": 0.020473233173551986
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094633,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094633
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2315270935960591,
"acc_stderr": 0.029678333141444455,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.029678333141444455
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.23548387096774193,
"acc_stderr": 0.02413763242933771,
"acc_norm": 0.23548387096774193,
"acc_norm_stderr": 0.02413763242933771
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.23931623931623933,
"acc_stderr": 0.02795182680892433,
"acc_norm": 0.23931623931623933,
"acc_norm_stderr": 0.02795182680892433
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3283018867924528,
"acc_stderr": 0.02890159361241178,
"acc_norm": 0.3283018867924528,
"acc_norm_stderr": 0.02890159361241178
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2909090909090909,
"acc_stderr": 0.04350271442923243,
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.04350271442923243
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.22592592592592592,
"acc_stderr": 0.02549753263960954,
"acc_norm": 0.22592592592592592,
"acc_norm_stderr": 0.02549753263960954
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.271523178807947,
"acc_stderr": 0.03631329803969654,
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969654
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.24378109452736318,
"acc_stderr": 0.030360490154014652,
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.030360490154014652
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2832369942196532,
"acc_stderr": 0.03435568056047875,
"acc_norm": 0.2832369942196532,
"acc_norm_stderr": 0.03435568056047875
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.02193587808118476,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.02193587808118476
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.25,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03621034121889507
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.24277456647398843,
"acc_stderr": 0.023083658586984204,
"acc_norm": 0.24277456647398843,
"acc_norm_stderr": 0.023083658586984204
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.2085889570552147,
"acc_stderr": 0.03192193448934725,
"acc_norm": 0.2085889570552147,
"acc_norm_stderr": 0.03192193448934725
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.2345679012345679,
"acc_stderr": 0.02357688174400572,
"acc_norm": 0.2345679012345679,
"acc_norm_stderr": 0.02357688174400572
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384739
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.22279792746113988,
"acc_stderr": 0.030031147977641545,
"acc_norm": 0.22279792746113988,
"acc_norm_stderr": 0.030031147977641545
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022057,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022057
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.22568807339449543,
"acc_stderr": 0.017923087667803053,
"acc_norm": 0.22568807339449543,
"acc_norm_stderr": 0.017923087667803053
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.03893259610604672,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.03893259610604672
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.20588235294117646,
"acc_stderr": 0.023152722439402303,
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.023152722439402303
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.23140495867768596,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.23140495867768596,
"acc_norm_stderr": 0.03849856098794088
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.28289473684210525,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.28289473684210525,
"acc_norm_stderr": 0.03665349695640767
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25980392156862747,
"acc_stderr": 0.01774089950917779,
"acc_norm": 0.25980392156862747,
"acc_norm_stderr": 0.01774089950917779
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24822695035460993,
"acc_stderr": 0.025770015644290392,
"acc_norm": 0.24822695035460993,
"acc_norm_stderr": 0.025770015644290392
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.042878587513404544,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.042878587513404544
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.32407407407407407,
"acc_stderr": 0.03191923445686185,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.03191923445686185
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.26927374301675977,
"acc_stderr": 0.014835616582882603,
"acc_norm": 0.26927374301675977,
"acc_norm_stderr": 0.014835616582882603
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.19,
"acc_stderr": 0.03942772444036624,
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036624
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.2977941176470588,
"acc_stderr": 0.02777829870154544,
"acc_norm": 0.2977941176470588,
"acc_norm_stderr": 0.02777829870154544
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.23673469387755103,
"acc_stderr": 0.027212835884073142,
"acc_norm": 0.23673469387755103,
"acc_norm_stderr": 0.027212835884073142
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.27848101265822783,
"acc_stderr": 0.029178682304842538,
"acc_norm": 0.27848101265822783,
"acc_norm_stderr": 0.029178682304842538
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24315514993481094,
"acc_stderr": 0.010956556654417355,
"acc_norm": 0.24315514993481094,
"acc_norm_stderr": 0.010956556654417355
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604246,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.21212121212121213,
"acc_stderr": 0.03192271569548299,
"acc_norm": 0.21212121212121213,
"acc_norm_stderr": 0.03192271569548299
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26438188494492043,
"mc1_stderr": 0.015438211119522512,
"mc2": 0.4146123621762204,
"mc2_stderr": 0.015628722266635826
},
"harness|ko_commongen_v2|2": {
"acc": 0.27390791027154665,
"acc_stderr": 0.01533249947479102,
"acc_norm": 0.29988193624557263,
"acc_norm_stderr": 0.01575344761542946
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "MrBananaHuman/polyglot-ko-5.8b",
"model_sha": "05f8800a617b483b1c502d8c965ff434e63d387c",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}