{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.27559726962457337,
"acc_stderr": 0.013057169655761836,
"acc_norm": 0.310580204778157,
"acc_norm_stderr": 0.013522292098053057
},
"harness|ko_hellaswag|10": {
"acc": 0.3202549292969528,
"acc_stderr": 0.004656208951541443,
"acc_norm": 0.37582154949213303,
"acc_norm_stderr": 0.004833444556338622
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.391812865497076,
"acc_stderr": 0.03743979825926399,
"acc_norm": 0.391812865497076,
"acc_norm_stderr": 0.03743979825926399
},
"harness|ko_mmlu_management|5": {
"acc": 0.22330097087378642,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.22330097087378642,
"acc_norm_stderr": 0.04123553189891431
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3001277139208174,
"acc_stderr": 0.016389249691317425,
"acc_norm": 0.3001277139208174,
"acc_norm_stderr": 0.016389249691317425
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.03999262876617723,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.03999262876617723
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.24,
"acc_norm_stderr": 0.042923469599092816
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.2425531914893617,
"acc_stderr": 0.028020226271200217,
"acc_norm": 0.2425531914893617,
"acc_norm_stderr": 0.028020226271200217
},
"harness|ko_mmlu_virology|5": {
"acc": 0.24096385542168675,
"acc_stderr": 0.0332939411907353,
"acc_norm": 0.24096385542168675,
"acc_norm_stderr": 0.0332939411907353
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.2765273311897106,
"acc_stderr": 0.025403832978179604,
"acc_norm": 0.2765273311897106,
"acc_norm_stderr": 0.025403832978179604
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.24663677130044842,
"acc_stderr": 0.028930413120910874,
"acc_norm": 0.24663677130044842,
"acc_norm_stderr": 0.028930413120910874
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.03807387116306086,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306086
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.25757575757575757,
"acc_stderr": 0.031156269519646836,
"acc_norm": 0.25757575757575757,
"acc_norm_stderr": 0.031156269519646836
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.3103448275862069,
"acc_stderr": 0.038552896163789485,
"acc_norm": 0.3103448275862069,
"acc_norm_stderr": 0.038552896163789485
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237654,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237654
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.27310924369747897,
"acc_stderr": 0.02894200404099817,
"acc_norm": 0.27310924369747897,
"acc_norm_stderr": 0.02894200404099817
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.23846153846153847,
"acc_stderr": 0.02160629449464773,
"acc_norm": 0.23846153846153847,
"acc_norm_stderr": 0.02160629449464773
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.19,
"acc_stderr": 0.039427724440366234,
"acc_norm": 0.19,
"acc_norm_stderr": 0.039427724440366234
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.28703703703703703,
"acc_stderr": 0.04373313040914761,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.04373313040914761
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.2315270935960591,
"acc_stderr": 0.02967833314144444,
"acc_norm": 0.2315270935960591,
"acc_norm_stderr": 0.02967833314144444
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.2870967741935484,
"acc_stderr": 0.025736542745594525,
"acc_norm": 0.2870967741935484,
"acc_norm_stderr": 0.025736542745594525
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.33760683760683763,
"acc_stderr": 0.030980296992618558,
"acc_norm": 0.33760683760683763,
"acc_norm_stderr": 0.030980296992618558
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.2641509433962264,
"acc_stderr": 0.027134291628741706,
"acc_norm": 0.2641509433962264,
"acc_norm_stderr": 0.027134291628741706
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.041723430387053825,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.041723430387053825
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.24074074074074073,
"acc_stderr": 0.02606715922227579,
"acc_norm": 0.24074074074074073,
"acc_norm_stderr": 0.02606715922227579
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.25165562913907286,
"acc_stderr": 0.035433042343899844,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.035433042343899844
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.2885572139303483,
"acc_stderr": 0.03203841040213321,
"acc_norm": 0.2885572139303483,
"acc_norm_stderr": 0.03203841040213321
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.033687629322594316,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.033687629322594316
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.23015873015873015,
"acc_stderr": 0.02167921966369317,
"acc_norm": 0.23015873015873015,
"acc_norm_stderr": 0.02167921966369317
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.2832369942196532,
"acc_stderr": 0.024257901705323378,
"acc_norm": 0.2832369942196532,
"acc_norm_stderr": 0.024257901705323378
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.26993865030674846,
"acc_stderr": 0.034878251684978906,
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.034878251684978906
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.024383665531035454,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.024383665531035454
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.24870466321243523,
"acc_stderr": 0.031195840877700304,
"acc_norm": 0.24870466321243523,
"acc_norm_stderr": 0.031195840877700304
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.040969851398436716,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.040969851398436716
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.27155963302752295,
"acc_stderr": 0.019069098363191452,
"acc_norm": 0.27155963302752295,
"acc_norm_stderr": 0.019069098363191452
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235172,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235172
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.2908496732026144,
"acc_stderr": 0.02600480036395211,
"acc_norm": 0.2908496732026144,
"acc_norm_stderr": 0.02600480036395211
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.2231404958677686,
"acc_stderr": 0.03800754475228732,
"acc_norm": 0.2231404958677686,
"acc_norm_stderr": 0.03800754475228732
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3092105263157895,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.3092105263157895,
"acc_norm_stderr": 0.037610708698674805
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.25326797385620914,
"acc_stderr": 0.017593486895366835,
"acc_norm": 0.25326797385620914,
"acc_norm_stderr": 0.017593486895366835
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.26595744680851063,
"acc_stderr": 0.026358065698880592,
"acc_norm": 0.26595744680851063,
"acc_norm_stderr": 0.026358065698880592
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04287858751340456,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340456
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.20833333333333334,
"acc_stderr": 0.02769691071309394,
"acc_norm": 0.20833333333333334,
"acc_norm_stderr": 0.02769691071309394
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23575418994413408,
"acc_stderr": 0.014196375686290804,
"acc_norm": 0.23575418994413408,
"acc_norm_stderr": 0.014196375686290804
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.29,
"acc_stderr": 0.04560480215720683,
"acc_norm": 0.29,
"acc_norm_stderr": 0.04560480215720683
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.3382352941176471,
"acc_stderr": 0.028739328513983566,
"acc_norm": 0.3382352941176471,
"acc_norm_stderr": 0.028739328513983566
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.33877551020408164,
"acc_stderr": 0.030299506562154178,
"acc_norm": 0.33877551020408164,
"acc_norm_stderr": 0.030299506562154178
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.19831223628691982,
"acc_stderr": 0.025955020841621112,
"acc_norm": 0.19831223628691982,
"acc_norm_stderr": 0.025955020841621112
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.2620599739243807,
"acc_stderr": 0.011231552795890392,
"acc_norm": 0.2620599739243807,
"acc_norm_stderr": 0.011231552795890392
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.03166009679399812,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.03166009679399812
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.03317505930009181,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.03317505930009181
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.26805385556915545,
"mc1_stderr": 0.015506204722834564,
"mc2": 0.42970330311039423,
"mc2_stderr": 0.01625558814144742
},
"harness|ko_commongen_v2|2": {
"acc": 0.2514757969303424,
"acc_stderr": 0.014916462437232256,
"acc_norm": 0.29043683589138136,
"acc_norm_stderr": 0.01560760256981463
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "cepiloth/ko-llama2-finetune-ex4",
"model_sha": "c368a2162df72c2310144879432d508736a16e90",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}