results/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4308873720136519,
"acc_stderr": 0.01447113339264246,
"acc_norm": 0.4803754266211604,
"acc_norm_stderr": 0.0146001320759471
},
"harness|ko_hellaswag|10": {
"acc": 0.43328022306313485,
"acc_stderr": 0.004945157565218188,
"acc_norm": 0.5933081059549891,
"acc_norm_stderr": 0.004902125388002201
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5146198830409356,
"acc_stderr": 0.038331852752130254,
"acc_norm": 0.5146198830409356,
"acc_norm_stderr": 0.038331852752130254
},
"harness|ko_mmlu_management|5": {
"acc": 0.5339805825242718,
"acc_stderr": 0.0493929144727348,
"acc_norm": 0.5339805825242718,
"acc_norm_stderr": 0.0493929144727348
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.5593869731800766,
"acc_stderr": 0.017753396973908493,
"acc_norm": 0.5593869731800766,
"acc_norm_stderr": 0.017753396973908493
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.45925925925925926,
"acc_stderr": 0.04304979692464244,
"acc_norm": 0.45925925925925926,
"acc_norm_stderr": 0.04304979692464244
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3872340425531915,
"acc_stderr": 0.03184389265339526,
"acc_norm": 0.3872340425531915,
"acc_norm_stderr": 0.03184389265339526
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4036144578313253,
"acc_stderr": 0.03819486140758398,
"acc_norm": 0.4036144578313253,
"acc_norm_stderr": 0.03819486140758398
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5080385852090032,
"acc_stderr": 0.028394421370984538,
"acc_norm": 0.5080385852090032,
"acc_norm_stderr": 0.028394421370984538
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5291479820627802,
"acc_stderr": 0.03350073248773404,
"acc_norm": 0.5291479820627802,
"acc_norm_stderr": 0.03350073248773404
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.48091603053435117,
"acc_stderr": 0.04382094705550989,
"acc_norm": 0.48091603053435117,
"acc_norm_stderr": 0.04382094705550989
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562429,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562429
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5505050505050505,
"acc_stderr": 0.03544132491947969,
"acc_norm": 0.5505050505050505,
"acc_norm_stderr": 0.03544132491947969
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482757,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482757
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237655,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237655
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.4579831932773109,
"acc_stderr": 0.032363611119519416,
"acc_norm": 0.4579831932773109,
"acc_norm_stderr": 0.032363611119519416
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.43846153846153846,
"acc_stderr": 0.025158266016868547,
"acc_norm": 0.43846153846153846,
"acc_norm_stderr": 0.025158266016868547
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.04832853553437055,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.04832853553437055
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39901477832512317,
"acc_stderr": 0.03445487686264715,
"acc_norm": 0.39901477832512317,
"acc_norm_stderr": 0.03445487686264715
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.47419354838709676,
"acc_stderr": 0.02840609505765332,
"acc_norm": 0.47419354838709676,
"acc_norm_stderr": 0.02840609505765332
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6324786324786325,
"acc_stderr": 0.031585391577456365,
"acc_norm": 0.6324786324786325,
"acc_norm_stderr": 0.031585391577456365
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4716981132075472,
"acc_stderr": 0.030723535249006107,
"acc_norm": 0.4716981132075472,
"acc_norm_stderr": 0.030723535249006107
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.5,
"acc_stderr": 0.04789131426105757,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04789131426105757
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.26296296296296295,
"acc_stderr": 0.02684205787383371,
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.037101857261199946,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.037101857261199946
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6169154228855721,
"acc_stderr": 0.034375193373382504,
"acc_norm": 0.6169154228855721,
"acc_norm_stderr": 0.034375193373382504
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.43352601156069365,
"acc_stderr": 0.03778621079092055,
"acc_norm": 0.43352601156069365,
"acc_norm_stderr": 0.03778621079092055
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2830687830687831,
"acc_stderr": 0.023201392938194978,
"acc_norm": 0.2830687830687831,
"acc_norm_stderr": 0.023201392938194978
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.040166600304512336,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.040166600304512336
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5173410404624278,
"acc_stderr": 0.026902900458666647,
"acc_norm": 0.5173410404624278,
"acc_norm_stderr": 0.026902900458666647
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.03926522378708843,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.03926522378708843
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.027801656212323667,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.027801656212323667
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.5544041450777202,
"acc_stderr": 0.035870149860756595,
"acc_norm": 0.5544041450777202,
"acc_norm_stderr": 0.035870149860756595
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.04096985139843671,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.04096985139843671
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.5944954128440367,
"acc_stderr": 0.02105099799189684,
"acc_norm": 0.5944954128440367,
"acc_norm_stderr": 0.02105099799189684
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.43790849673202614,
"acc_stderr": 0.02840830202033269,
"acc_norm": 0.43790849673202614,
"acc_norm_stderr": 0.02840830202033269
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.43,
"acc_stderr": 0.04975698519562428,
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6446280991735537,
"acc_stderr": 0.0436923632657398,
"acc_norm": 0.6446280991735537,
"acc_norm_stderr": 0.0436923632657398
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3881578947368421,
"acc_stderr": 0.03965842097512744,
"acc_norm": 0.3881578947368421,
"acc_norm_stderr": 0.03965842097512744
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.01965992249362333,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.01965992249362333
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.35815602836879434,
"acc_stderr": 0.02860208586275942,
"acc_norm": 0.35815602836879434,
"acc_norm_stderr": 0.02860208586275942
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.22321428571428573,
"acc_stderr": 0.039523019677025116,
"acc_norm": 0.22321428571428573,
"acc_norm_stderr": 0.039523019677025116
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.032259413526312945,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.032259413526312945
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4485294117647059,
"acc_stderr": 0.0302114796091216,
"acc_norm": 0.4485294117647059,
"acc_norm_stderr": 0.0302114796091216
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5061224489795918,
"acc_stderr": 0.03200682020163907,
"acc_norm": 0.5061224489795918,
"acc_norm_stderr": 0.03200682020163907
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.6329113924050633,
"acc_stderr": 0.03137624072561619,
"acc_norm": 0.6329113924050633,
"acc_norm_stderr": 0.03137624072561619
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3435462842242503,
"acc_stderr": 0.012128961174190154,
"acc_norm": 0.3435462842242503,
"acc_norm_stderr": 0.012128961174190154
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.4950980392156863,
"acc_stderr": 0.035091433756067866,
"acc_norm": 0.4950980392156863,
"acc_norm_stderr": 0.035091433756067866
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.5212121212121212,
"acc_stderr": 0.03900828913737301,
"acc_norm": 0.5212121212121212,
"acc_norm_stderr": 0.03900828913737301
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.32802937576499386,
"mc1_stderr": 0.01643563293281504,
"mc2": 0.46940366768411657,
"mc2_stderr": 0.016167620517601608
},
"harness|ko_commongen_v2|2": {
"acc": 0.41912632821723733,
"acc_stderr": 0.016963995010862792,
"acc_norm": 0.4805194805194805,
"acc_norm_stderr": 0.01717730199234256
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6",
"model_sha": "8ca05731176451a126cf07e06a97f08e735e21b4",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}