results/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2525597269624573,
"acc_stderr": 0.01269672898020771,
"acc_norm": 0.3046075085324232,
"acc_norm_stderr": 0.013449522109932494
},
"harness|ko_hellaswag|10": {
"acc": 0.3511252738498307,
"acc_stderr": 0.004763465139038552,
"acc_norm": 0.4420434176458873,
"acc_norm_stderr": 0.004956147046108961
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03188578017686398,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03188578017686398
},
"harness|ko_mmlu_management|5": {
"acc": 0.30097087378640774,
"acc_stderr": 0.045416094465039476,
"acc_norm": 0.30097087378640774,
"acc_norm_stderr": 0.045416094465039476
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.23627075351213284,
"acc_stderr": 0.015190473717037497,
"acc_norm": 0.23627075351213284,
"acc_norm_stderr": 0.015190473717037497
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.03673731683969506,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2,
"acc_stderr": 0.026148818018424502,
"acc_norm": 0.2,
"acc_norm_stderr": 0.026148818018424502
},
"harness|ko_mmlu_virology|5": {
"acc": 0.25301204819277107,
"acc_stderr": 0.03384429155233135,
"acc_norm": 0.25301204819277107,
"acc_norm_stderr": 0.03384429155233135
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.24437299035369775,
"acc_stderr": 0.024406162094668886,
"acc_norm": 0.24437299035369775,
"acc_norm_stderr": 0.024406162094668886
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.21524663677130046,
"acc_stderr": 0.027584066602208263,
"acc_norm": 0.21524663677130046,
"acc_norm_stderr": 0.027584066602208263
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.19083969465648856,
"acc_stderr": 0.034465133507525954,
"acc_norm": 0.19083969465648856,
"acc_norm_stderr": 0.034465133507525954
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.033042050878136525,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.033042050878136525
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179962,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179962
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3487394957983193,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.3487394957983193,
"acc_norm_stderr": 0.030956636328566548
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3564102564102564,
"acc_stderr": 0.024283140529467295,
"acc_norm": 0.3564102564102564,
"acc_norm_stderr": 0.024283140529467295
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.16,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.16,
"acc_norm_stderr": 0.03684529491774709
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.21296296296296297,
"acc_stderr": 0.03957835471980981,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.03957835471980981
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.29064039408866993,
"acc_stderr": 0.03194740072265541,
"acc_norm": 0.29064039408866993,
"acc_norm_stderr": 0.03194740072265541
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3032258064516129,
"acc_stderr": 0.026148685930671742,
"acc_norm": 0.3032258064516129,
"acc_norm_stderr": 0.026148685930671742
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.19658119658119658,
"acc_stderr": 0.02603538609895129,
"acc_norm": 0.19658119658119658,
"acc_norm_stderr": 0.02603538609895129
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3283018867924528,
"acc_stderr": 0.02890159361241178,
"acc_norm": 0.3283018867924528,
"acc_norm_stderr": 0.02890159361241178
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721376,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721376
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02671924078371216,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02671924078371216
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.03822746937658753,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.03822746937658753
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.26865671641791045,
"acc_stderr": 0.03134328358208954,
"acc_norm": 0.26865671641791045,
"acc_norm_stderr": 0.03134328358208954
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.32947976878612717,
"acc_stderr": 0.03583901754736412,
"acc_norm": 0.32947976878612717,
"acc_norm_stderr": 0.03583901754736412
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.022182037202948368,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.022182037202948368
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2398843930635838,
"acc_stderr": 0.022989592543123567,
"acc_norm": 0.2398843930635838,
"acc_norm_stderr": 0.022989592543123567
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3312883435582822,
"acc_stderr": 0.03697983910025588,
"acc_norm": 0.3312883435582822,
"acc_norm_stderr": 0.03697983910025588
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.024659685185967277,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.024659685185967277
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.3626943005181347,
"acc_stderr": 0.034697137917043715,
"acc_norm": 0.3626943005181347,
"acc_norm_stderr": 0.034697137917043715
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03947152782669415
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.25688073394495414,
"acc_stderr": 0.018732492928342448,
"acc_norm": 0.25688073394495414,
"acc_norm_stderr": 0.018732492928342448
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.3492063492063492,
"acc_stderr": 0.04263906892795132,
"acc_norm": 0.3492063492063492,
"acc_norm_stderr": 0.04263906892795132
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.024954184324879905,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.024954184324879905
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.1652892561983471,
"acc_stderr": 0.03390780612972776,
"acc_norm": 0.1652892561983471,
"acc_norm_stderr": 0.03390780612972776
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.29605263157894735,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.29605263157894735,
"acc_norm_stderr": 0.03715062154998904
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.21895424836601307,
"acc_stderr": 0.016729937565537537,
"acc_norm": 0.21895424836601307,
"acc_norm_stderr": 0.016729937565537537
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.24822695035460993,
"acc_stderr": 0.025770015644290396,
"acc_norm": 0.24822695035460993,
"acc_norm_stderr": 0.025770015644290396
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.15178571428571427,
"acc_stderr": 0.03405702838185692,
"acc_norm": 0.15178571428571427,
"acc_norm_stderr": 0.03405702838185692
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.0340470532865388
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.26145251396648045,
"acc_stderr": 0.014696599650364546,
"acc_norm": 0.26145251396648045,
"acc_norm_stderr": 0.014696599650364546
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.44485294117647056,
"acc_stderr": 0.030187532060329383,
"acc_norm": 0.44485294117647056,
"acc_norm_stderr": 0.030187532060329383
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.39591836734693875,
"acc_stderr": 0.03130802899065685,
"acc_norm": 0.39591836734693875,
"acc_norm_stderr": 0.03130802899065685
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.23628691983122363,
"acc_stderr": 0.02765215314415926,
"acc_norm": 0.23628691983122363,
"acc_norm_stderr": 0.02765215314415926
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.24445893089960888,
"acc_stderr": 0.010976425013113912,
"acc_norm": 0.24445893089960888,
"acc_norm_stderr": 0.010976425013113912
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.03096451792692341,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.03096451792692341
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.296969696969697,
"acc_stderr": 0.03567969772268046,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.03567969772268046
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24969400244798043,
"mc1_stderr": 0.015152286907148125,
"mc2": 0.40454723614569765,
"mc2_stderr": 0.014981033793701278
},
"harness|ko_commongen_v2|2": {
"acc": 0.282172373081464,
"acc_stderr": 0.015473271583988433,
"acc_norm": 0.3707201889020071,
"acc_norm_stderr": 0.016605801289212605
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "EleutherAI/polyglot-ko-3.8b",
"model_sha": "3c696a71c16b4a4622b7cabf6c5da4ba5a73b548",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}
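
For reference, a minimal sketch of how a result file in this format could be loaded and summarized. This is not part of the leaderboard tooling, and the local filename is an assumption for illustration; each key under "results" follows the pattern "harness|<task>|<num_fewshot>", with most tasks reporting acc / acc_norm plus standard errors, while ko_truthfulqa_mc reports mc1 / mc2 instead.

```python
# Illustrative sketch only: load a result JSON like the one above and summarize it.
import json
from statistics import mean

RESULT_PATH = "result_2023-09-26 09:54:58.json"  # assumed local copy of this file

with open(RESULT_PATH, encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

# Collect normalized accuracy over the ko_mmlu subtasks.
mmlu_scores = [
    v["acc_norm"]
    for k, v in results.items()
    if k.startswith("harness|ko_mmlu_")
]

print("model:", data["config_general"]["model_name"])
print("ko_mmlu tasks:", len(mmlu_scores))
print("mean ko_mmlu acc_norm: {:.4f}".format(mean(mmlu_scores)))
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
```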