{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3037542662116041,
"acc_stderr": 0.013438909184778759,
"acc_norm": 0.3464163822525597,
"acc_norm_stderr": 0.013905011180063251
},
"harness|ko_hellaswag|10": {
"acc": 0.350726946823342,
"acc_stderr": 0.0047622234924352535,
"acc_norm": 0.45429197371041624,
"acc_norm_stderr": 0.004968888130290068
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4678362573099415,
"acc_stderr": 0.03826882417660369,
"acc_norm": 0.4678362573099415,
"acc_norm_stderr": 0.03826882417660369
},
"harness|ko_mmlu_management|5": {
"acc": 0.4077669902912621,
"acc_stderr": 0.048657775704107696,
"acc_norm": 0.4077669902912621,
"acc_norm_stderr": 0.048657775704107696
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4610472541507024,
"acc_stderr": 0.01782562179323902,
"acc_norm": 0.4610472541507024,
"acc_norm_stderr": 0.01782562179323902
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.04244633238353228,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04244633238353228
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3446808510638298,
"acc_stderr": 0.03106898596312215,
"acc_norm": 0.3446808510638298,
"acc_norm_stderr": 0.03106898596312215
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.03680783690727581
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4180064308681672,
"acc_stderr": 0.02801365189199507,
"acc_norm": 0.4180064308681672,
"acc_norm_stderr": 0.02801365189199507
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.36771300448430494,
"acc_stderr": 0.03236198350928276,
"acc_norm": 0.36771300448430494,
"acc_norm_stderr": 0.03236198350928276
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.44274809160305345,
"acc_stderr": 0.043564472026650695,
"acc_norm": 0.44274809160305345,
"acc_norm_stderr": 0.043564472026650695
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.41414141414141414,
"acc_stderr": 0.03509438348879629,
"acc_norm": 0.41414141414141414,
"acc_norm_stderr": 0.03509438348879629
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4068965517241379,
"acc_stderr": 0.040937939812662374,
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.040937939812662374
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.043364327079931785,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.41596638655462187,
"acc_stderr": 0.03201650100739615,
"acc_norm": 0.41596638655462187,
"acc_norm_stderr": 0.03201650100739615
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.4025641025641026,
"acc_stderr": 0.024864995159767762,
"acc_norm": 0.4025641025641026,
"acc_norm_stderr": 0.024864995159767762
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952344,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952344
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.04832853553437055,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.04832853553437055
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3497536945812808,
"acc_stderr": 0.03355400904969565,
"acc_norm": 0.3497536945812808,
"acc_norm_stderr": 0.03355400904969565
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3870967741935484,
"acc_stderr": 0.027709359675032488,
"acc_norm": 0.3870967741935484,
"acc_norm_stderr": 0.027709359675032488
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5384615384615384,
"acc_stderr": 0.03265903381186194,
"acc_norm": 0.5384615384615384,
"acc_norm_stderr": 0.03265903381186194
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3584905660377358,
"acc_stderr": 0.029514703583981765,
"acc_norm": 0.3584905660377358,
"acc_norm_stderr": 0.029514703583981765
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4636363636363636,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.4636363636363636,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.02813325257881564,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.02813325257881564
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526733,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.4925373134328358,
"acc_stderr": 0.035351400842767194,
"acc_norm": 0.4925373134328358,
"acc_norm_stderr": 0.035351400842767194
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3468208092485549,
"acc_stderr": 0.036291466701596636,
"acc_norm": 0.3468208092485549,
"acc_norm_stderr": 0.036291466701596636
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.28835978835978837,
"acc_stderr": 0.023330654054535903,
"acc_norm": 0.28835978835978837,
"acc_norm_stderr": 0.023330654054535903
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3055555555555556,
"acc_stderr": 0.03852084696008534,
"acc_norm": 0.3055555555555556,
"acc_norm_stderr": 0.03852084696008534
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956911
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.40173410404624277,
"acc_stderr": 0.026394104177643634,
"acc_norm": 0.40173410404624277,
"acc_norm_stderr": 0.026394104177643634
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.38650306748466257,
"acc_stderr": 0.038258255488486076,
"acc_norm": 0.38650306748466257,
"acc_norm_stderr": 0.038258255488486076
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.027125115513166865,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.027125115513166865
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.40932642487046633,
"acc_stderr": 0.03548608168860806,
"acc_norm": 0.40932642487046633,
"acc_norm_stderr": 0.03548608168860806
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537318,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537318
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3798165137614679,
"acc_stderr": 0.020808825617866244,
"acc_norm": 0.3798165137614679,
"acc_norm_stderr": 0.020808825617866244
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.038932596106046734,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.038932596106046734
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.02768418188330289,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.02768418188330289
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5785123966942148,
"acc_stderr": 0.04507732278775088,
"acc_norm": 0.5785123966942148,
"acc_norm_stderr": 0.04507732278775088
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.04040311062490436,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.04040311062490436
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3104575163398693,
"acc_stderr": 0.018718067052623227,
"acc_norm": 0.3104575163398693,
"acc_norm_stderr": 0.018718067052623227
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2978723404255319,
"acc_stderr": 0.027281608344469414,
"acc_norm": 0.2978723404255319,
"acc_norm_stderr": 0.027281608344469414
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.24107142857142858,
"acc_stderr": 0.04059867246952687,
"acc_norm": 0.24107142857142858,
"acc_norm_stderr": 0.04059867246952687
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.030546745264953202,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.030546745264953202
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.02679956202488769,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.02679956202488769
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.49387755102040815,
"acc_stderr": 0.032006820201639086,
"acc_norm": 0.49387755102040815,
"acc_norm_stderr": 0.032006820201639086
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4092827004219409,
"acc_stderr": 0.032007041833595914,
"acc_norm": 0.4092827004219409,
"acc_norm_stderr": 0.032007041833595914
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.31290743155149936,
"acc_stderr": 0.011842529823062999,
"acc_norm": 0.31290743155149936,
"acc_norm_stderr": 0.011842529823062999
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.29901960784313725,
"acc_stderr": 0.03213325717373616,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.03213325717373616
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3878787878787879,
"acc_stderr": 0.038049136539710114,
"acc_norm": 0.3878787878787879,
"acc_norm_stderr": 0.038049136539710114
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2631578947368421,
"mc1_stderr": 0.015415241740237035,
"mc2": 0.42145051773986575,
"mc2_stderr": 0.015233960921162444
},
"harness|ko_commongen_v2|2": {
"acc": 0.31759149940968123,
"acc_stderr": 0.0160055818762293,
"acc_norm": 0.40613931523022434,
"acc_norm_stderr": 0.016884749503191392
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Chang-Su/llama-2-13b-chat-ko",
"model_sha": "3a82a33f61584cbe72dc32c15d55bfd182cefd8b",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}