{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.43686006825938567,
"acc_stderr": 0.014494421584256517,
"acc_norm": 0.49573378839590443,
"acc_norm_stderr": 0.014610858923956959
},
"harness|ko_hellaswag|10": {
"acc": 0.4444333798048198,
"acc_stderr": 0.004958872288442145,
"acc_norm": 0.6016729735112527,
"acc_norm_stderr": 0.004885529674958332
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.6374269005847953,
"acc_stderr": 0.0368713061556206,
"acc_norm": 0.6374269005847953,
"acc_norm_stderr": 0.0368713061556206
},
"harness|ko_mmlu_management|5": {
"acc": 0.6796116504854369,
"acc_stderr": 0.04620284082280041,
"acc_norm": 0.6796116504854369,
"acc_norm_stderr": 0.04620284082280041
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.6743295019157088,
"acc_stderr": 0.016757989458549682,
"acc_norm": 0.6743295019157088,
"acc_norm_stderr": 0.016757989458549682
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.043163785995113245,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.043163785995113245
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.451063829787234,
"acc_stderr": 0.032529096196131965,
"acc_norm": 0.451063829787234,
"acc_norm_stderr": 0.032529096196131965
},
"harness|ko_mmlu_virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685515,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685515
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.5691318327974276,
"acc_stderr": 0.028125340983972714,
"acc_norm": 0.5691318327974276,
"acc_norm_stderr": 0.028125340983972714
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5605381165919282,
"acc_stderr": 0.03331092511038179,
"acc_norm": 0.5605381165919282,
"acc_norm_stderr": 0.03331092511038179
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.5801526717557252,
"acc_stderr": 0.04328577215262973,
"acc_norm": 0.5801526717557252,
"acc_norm_stderr": 0.04328577215262973
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956913
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.6212121212121212,
"acc_stderr": 0.03456088731993747,
"acc_norm": 0.6212121212121212,
"acc_norm_stderr": 0.03456088731993747
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4827586206896552,
"acc_stderr": 0.041641887201693775,
"acc_norm": 0.4827586206896552,
"acc_norm_stderr": 0.041641887201693775
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.0438986995680878,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.0438986995680878
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.46638655462184875,
"acc_stderr": 0.03240501447690071,
"acc_norm": 0.46638655462184875,
"acc_norm_stderr": 0.03240501447690071
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5333333333333333,
"acc_stderr": 0.025294608023986462,
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.025294608023986462
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939098,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939098
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5185185185185185,
"acc_stderr": 0.04830366024635331,
"acc_norm": 0.5185185185185185,
"acc_norm_stderr": 0.04830366024635331
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3694581280788177,
"acc_stderr": 0.03395970381998573,
"acc_norm": 0.3694581280788177,
"acc_norm_stderr": 0.03395970381998573
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5548387096774193,
"acc_stderr": 0.028272410186214906,
"acc_norm": 0.5548387096774193,
"acc_norm_stderr": 0.028272410186214906
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7606837606837606,
"acc_stderr": 0.027951826808924336,
"acc_norm": 0.7606837606837606,
"acc_norm_stderr": 0.027951826808924336
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5471698113207547,
"acc_stderr": 0.03063562795796182,
"acc_norm": 0.5471698113207547,
"acc_norm_stderr": 0.03063562795796182
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4909090909090909,
"acc_stderr": 0.0478833976870286,
"acc_norm": 0.4909090909090909,
"acc_norm_stderr": 0.0478833976870286
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.027840811495871916,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.027840811495871916
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3509933774834437,
"acc_stderr": 0.03896981964257375,
"acc_norm": 0.3509933774834437,
"acc_norm_stderr": 0.03896981964257375
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.6467661691542289,
"acc_stderr": 0.03379790611796777,
"acc_norm": 0.6467661691542289,
"acc_norm_stderr": 0.03379790611796777
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.4277456647398844,
"acc_stderr": 0.037724468575180276,
"acc_norm": 0.4277456647398844,
"acc_norm_stderr": 0.037724468575180276
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.02397386199899207,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.02397386199899207
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.5069444444444444,
"acc_stderr": 0.04180806750294938,
"acc_norm": 0.5069444444444444,
"acc_norm_stderr": 0.04180806750294938
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.74,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.74,
"acc_norm_stderr": 0.0440844002276808
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.5115606936416185,
"acc_stderr": 0.026911898686377927,
"acc_norm": 0.5115606936416185,
"acc_norm_stderr": 0.026911898686377927
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.48466257668711654,
"acc_stderr": 0.039265223787088445,
"acc_norm": 0.48466257668711654,
"acc_norm_stderr": 0.039265223787088445
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.47530864197530864,
"acc_stderr": 0.02778680093142745,
"acc_norm": 0.47530864197530864,
"acc_norm_stderr": 0.02778680093142745
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.6113989637305699,
"acc_stderr": 0.035177397963731316,
"acc_norm": 0.6113989637305699,
"acc_norm_stderr": 0.035177397963731316
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.3157894736842105,
"acc_stderr": 0.04372748290278007,
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.04372748290278007
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6146788990825688,
"acc_stderr": 0.020865850852794108,
"acc_norm": 0.6146788990825688,
"acc_norm_stderr": 0.020865850852794108
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.0404061017820884,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.0404061017820884
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5,
"acc_stderr": 0.028629916715693413,
"acc_norm": 0.5,
"acc_norm_stderr": 0.028629916715693413
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.71900826446281,
"acc_stderr": 0.04103203830514511,
"acc_norm": 0.71900826446281,
"acc_norm_stderr": 0.04103203830514511
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5263157894736842,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.5263157894736842,
"acc_norm_stderr": 0.04063302731486671
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.020036393768352638,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.020036393768352638
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.34397163120567376,
"acc_stderr": 0.02833801742861132,
"acc_norm": 0.34397163120567376,
"acc_norm_stderr": 0.02833801742861132
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04287858751340455,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340455
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2759776536312849,
"acc_stderr": 0.014950103002475347,
"acc_norm": 0.2759776536312849,
"acc_norm_stderr": 0.014950103002475347
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4007352941176471,
"acc_stderr": 0.029768263528933095,
"acc_norm": 0.4007352941176471,
"acc_norm_stderr": 0.029768263528933095
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5795918367346938,
"acc_stderr": 0.031601069934496004,
"acc_norm": 0.5795918367346938,
"acc_norm_stderr": 0.031601069934496004
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.679324894514768,
"acc_stderr": 0.030381931949990407,
"acc_norm": 0.679324894514768,
"acc_norm_stderr": 0.030381931949990407
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3500651890482399,
"acc_stderr": 0.01218255231321517,
"acc_norm": 0.3500651890482399,
"acc_norm_stderr": 0.01218255231321517
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.5882352941176471,
"acc_stderr": 0.03454236585380609,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.03454236585380609
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.6,
"acc_stderr": 0.03825460278380025,
"acc_norm": 0.6,
"acc_norm_stderr": 0.03825460278380025
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2460220318237454,
"mc1_stderr": 0.015077219200662581,
"mc2": 0.39548731719111496,
"mc2_stderr": 0.015045880520942254
},
"harness|ko_commongen_v2|2": {
"acc": 0.5041322314049587,
"acc_stderr": 0.017189767032130817,
"acc_norm": 0.5218417945690673,
"acc_norm_stderr": 0.017173944474294385
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Deepnoid/OPEN-SOLAR-KO-10.7B-v14",
"model_sha": "0d67dec530f606541a40f8705caf78e2bcc9caa8",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}