results/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.21075085324232082,
"acc_stderr": 0.011918271754852189,
"acc_norm": 0.2687713310580205,
"acc_norm_stderr": 0.01295506596371068
},
"harness|ko_hellaswag|10": {
"acc": 0.3009360685122486,
"acc_stderr": 0.004577275844432453,
"acc_norm": 0.3458474407488548,
"acc_norm_stderr": 0.004746716805735747
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03188578017686398,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03188578017686398
},
"harness|ko_mmlu_management|5": {
"acc": 0.3786407766990291,
"acc_stderr": 0.048026946982589726,
"acc_norm": 0.3786407766990291,
"acc_norm_stderr": 0.048026946982589726
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.27330779054916987,
"acc_stderr": 0.015936681062628556,
"acc_norm": 0.27330779054916987,
"acc_norm_stderr": 0.015936681062628556
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.21481481481481482,
"acc_stderr": 0.03547854198560826,
"acc_norm": 0.21481481481481482,
"acc_norm_stderr": 0.03547854198560826
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.34893617021276596,
"acc_stderr": 0.03115852213135778,
"acc_norm": 0.34893617021276596,
"acc_norm_stderr": 0.03115852213135778
},
"harness|ko_mmlu_virology|5": {
"acc": 0.25301204819277107,
"acc_stderr": 0.03384429155233137,
"acc_norm": 0.25301204819277107,
"acc_norm_stderr": 0.03384429155233137
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.26366559485530544,
"acc_stderr": 0.025025538500532338,
"acc_norm": 0.26366559485530544,
"acc_norm_stderr": 0.025025538500532338
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.19730941704035873,
"acc_stderr": 0.02670985334496796,
"acc_norm": 0.19730941704035873,
"acc_norm_stderr": 0.02670985334496796
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.22900763358778625,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.22900763358778625,
"acc_norm_stderr": 0.036853466317118506
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.25757575757575757,
"acc_stderr": 0.031156269519646836,
"acc_norm": 0.25757575757575757,
"acc_norm_stderr": 0.031156269519646836
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.20689655172413793,
"acc_stderr": 0.03375672449560554,
"acc_norm": 0.20689655172413793,
"acc_norm_stderr": 0.03375672449560554
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.23529411764705882,
"acc_stderr": 0.04220773659171453,
"acc_norm": 0.23529411764705882,
"acc_norm_stderr": 0.04220773659171453
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.031041941304059288,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.031041941304059288
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.2923076923076923,
"acc_stderr": 0.023060438380857726,
"acc_norm": 0.2923076923076923,
"acc_norm_stderr": 0.023060438380857726
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.041331194402438376,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.041331194402438376
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.29064039408866993,
"acc_stderr": 0.0319474007226554,
"acc_norm": 0.29064039408866993,
"acc_norm_stderr": 0.0319474007226554
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3161290322580645,
"acc_stderr": 0.026450874489042764,
"acc_norm": 0.3161290322580645,
"acc_norm_stderr": 0.026450874489042764
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.28205128205128205,
"acc_stderr": 0.02948036054954119,
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.02948036054954119
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.24528301886792453,
"acc_stderr": 0.0264803571798957,
"acc_norm": 0.24528301886792453,
"acc_norm_stderr": 0.0264803571798957
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03955932861795833,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03955932861795833
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.028226446749683515,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.028226446749683515
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.03822746937658754,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.03822746937658754
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.22885572139303484,
"acc_stderr": 0.029705284056772436,
"acc_norm": 0.22885572139303484,
"acc_norm_stderr": 0.029705284056772436
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.20809248554913296,
"acc_stderr": 0.030952890217749884,
"acc_norm": 0.20809248554913296,
"acc_norm_stderr": 0.030952890217749884
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2275132275132275,
"acc_stderr": 0.021591269407823764,
"acc_norm": 0.2275132275132275,
"acc_norm_stderr": 0.021591269407823764
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03476590104304134
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.23699421965317918,
"acc_stderr": 0.02289408248992599,
"acc_norm": 0.23699421965317918,
"acc_norm_stderr": 0.02289408248992599
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.25153374233128833,
"acc_stderr": 0.034089978868575295,
"acc_norm": 0.25153374233128833,
"acc_norm_stderr": 0.034089978868575295
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25617283950617287,
"acc_stderr": 0.0242885336377261,
"acc_norm": 0.25617283950617287,
"acc_norm_stderr": 0.0242885336377261
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.18652849740932642,
"acc_stderr": 0.02811209121011746,
"acc_norm": 0.18652849740932642,
"acc_norm_stderr": 0.02811209121011746
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022056,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.21467889908256882,
"acc_stderr": 0.017604304149256483,
"acc_norm": 0.21467889908256882,
"acc_norm_stderr": 0.017604304149256483
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.037184890068181146,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.037184890068181146
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2973856209150327,
"acc_stderr": 0.02617390850671858,
"acc_norm": 0.2973856209150327,
"acc_norm_stderr": 0.02617390850671858
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2809917355371901,
"acc_stderr": 0.04103203830514511,
"acc_norm": 0.2809917355371901,
"acc_norm_stderr": 0.04103203830514511
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.16447368421052633,
"acc_stderr": 0.030167533468632726,
"acc_norm": 0.16447368421052633,
"acc_norm_stderr": 0.030167533468632726
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.017401816711427653,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.017401816711427653
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.22695035460992907,
"acc_stderr": 0.024987106365642973,
"acc_norm": 0.22695035460992907,
"acc_norm_stderr": 0.024987106365642973
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697625,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697625
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.033981108902946366,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.033981108902946366
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574892,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574892
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4522058823529412,
"acc_stderr": 0.030233758551596452,
"acc_norm": 0.4522058823529412,
"acc_norm_stderr": 0.030233758551596452
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.1836734693877551,
"acc_stderr": 0.024789071332007633,
"acc_norm": 0.1836734693877551,
"acc_norm_stderr": 0.024789071332007633
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.2616033755274262,
"acc_stderr": 0.028609516716994934,
"acc_norm": 0.2616033755274262,
"acc_norm_stderr": 0.028609516716994934
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.242503259452412,
"acc_stderr": 0.01094657096634878,
"acc_norm": 0.242503259452412,
"acc_norm_stderr": 0.01094657096634878
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.23623011015911874,
"mc1_stderr": 0.014869755015871098,
"mc2": 0.428122521678851,
"mc2_stderr": 0.015366900048399064
},
"harness|ko_commongen_v2|2": {
"acc": 0.358913813459268,
"acc_stderr": 0.01649180210299904,
"acc_norm": 0.43565525383707204,
"acc_norm_stderr": 0.01704741522947634
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "heegyu/koalpaca-355m",
"model_sha": "a1f4b5022e95bd808e2375dd3ed4c9bfbb64df32",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}