{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3037542662116041,
"acc_stderr": 0.013438909184778755,
"acc_norm": 0.3575085324232082,
"acc_norm_stderr": 0.014005494275916573
},
"harness|ko_hellaswag|10": {
"acc": 0.3509261103365863,
"acc_stderr": 0.00476284477090985,
"acc_norm": 0.44921330412268473,
"acc_norm_stderr": 0.004963974504003033
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.4619883040935672,
"acc_stderr": 0.03823727092882307,
"acc_norm": 0.4619883040935672,
"acc_norm_stderr": 0.03823727092882307
},
"harness|ko_mmlu_management|5": {
"acc": 0.5339805825242718,
"acc_stderr": 0.04939291447273481,
"acc_norm": 0.5339805825242718,
"acc_norm_stderr": 0.04939291447273481
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4674329501915709,
"acc_stderr": 0.01784199575052086,
"acc_norm": 0.4674329501915709,
"acc_norm_stderr": 0.01784199575052086
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.040943762699967946,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.040943762699967946
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3574468085106383,
"acc_stderr": 0.03132941789476425,
"acc_norm": 0.3574468085106383,
"acc_norm_stderr": 0.03132941789476425
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.03680783690727581
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.43729903536977494,
"acc_stderr": 0.02817391776176287,
"acc_norm": 0.43729903536977494,
"acc_norm_stderr": 0.02817391776176287
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4170403587443946,
"acc_stderr": 0.03309266936071721,
"acc_norm": 0.4170403587443946,
"acc_norm_stderr": 0.03309266936071721
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4122137404580153,
"acc_stderr": 0.04317171194870255,
"acc_norm": 0.4122137404580153,
"acc_norm_stderr": 0.04317171194870255
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.48484848484848486,
"acc_stderr": 0.03560716516531061,
"acc_norm": 0.48484848484848486,
"acc_norm_stderr": 0.03560716516531061
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.36551724137931035,
"acc_stderr": 0.04013124195424385,
"acc_norm": 0.36551724137931035,
"acc_norm_stderr": 0.04013124195424385
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.044405219061793254,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.044405219061793254
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.5210084033613446,
"acc_stderr": 0.032449808499900284,
"acc_norm": 0.5210084033613446,
"acc_norm_stderr": 0.032449808499900284
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.38974358974358975,
"acc_stderr": 0.024726967886647074,
"acc_norm": 0.38974358974358975,
"acc_norm_stderr": 0.024726967886647074
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.04832853553437056,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.04832853553437056
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.37438423645320196,
"acc_stderr": 0.03405155380561952,
"acc_norm": 0.37438423645320196,
"acc_norm_stderr": 0.03405155380561952
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.4129032258064516,
"acc_stderr": 0.028009138125400387,
"acc_norm": 0.4129032258064516,
"acc_norm_stderr": 0.028009138125400387
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6794871794871795,
"acc_stderr": 0.030572811310299607,
"acc_norm": 0.6794871794871795,
"acc_norm_stderr": 0.030572811310299607
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4528301886792453,
"acc_stderr": 0.030635627957961823,
"acc_norm": 0.4528301886792453,
"acc_norm_stderr": 0.030635627957961823
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4636363636363636,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.4636363636363636,
"acc_norm_stderr": 0.04776449162396197
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.02794045713622841,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02794045713622841
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.038615575462551684,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.038615575462551684
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5223880597014925,
"acc_stderr": 0.035319879302087305,
"acc_norm": 0.5223880597014925,
"acc_norm_stderr": 0.035319879302087305
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.37572254335260113,
"acc_stderr": 0.036928207672648664,
"acc_norm": 0.37572254335260113,
"acc_norm_stderr": 0.036928207672648664
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.335978835978836,
"acc_stderr": 0.024326310529149135,
"acc_norm": 0.335978835978836,
"acc_norm_stderr": 0.024326310529149135
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3263888888888889,
"acc_stderr": 0.03921067198982266,
"acc_norm": 0.3263888888888889,
"acc_norm_stderr": 0.03921067198982266
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.4653179190751445,
"acc_stderr": 0.026854257928258882,
"acc_norm": 0.4653179190751445,
"acc_norm_stderr": 0.026854257928258882
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.39263803680981596,
"acc_stderr": 0.03836740907831029,
"acc_norm": 0.39263803680981596,
"acc_norm_stderr": 0.03836740907831029
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.4166666666666667,
"acc_stderr": 0.027431623722415012,
"acc_norm": 0.4166666666666667,
"acc_norm_stderr": 0.027431623722415012
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.46632124352331605,
"acc_stderr": 0.03600244069867178,
"acc_norm": 0.46632124352331605,
"acc_norm_stderr": 0.03600244069867178
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.041424397194893596,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.041424397194893596
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.45871559633027525,
"acc_stderr": 0.02136412253388169,
"acc_norm": 0.45871559633027525,
"acc_norm_stderr": 0.02136412253388169
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.04163453031302859
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.4477124183006536,
"acc_stderr": 0.028472938478033526,
"acc_norm": 0.4477124183006536,
"acc_norm_stderr": 0.028472938478033526
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.043913262867240704,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.043913262867240704
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.3881578947368421,
"acc_stderr": 0.03965842097512744,
"acc_norm": 0.3881578947368421,
"acc_norm_stderr": 0.03965842097512744
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.019206606848825365,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.019206606848825365
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.3262411347517731,
"acc_stderr": 0.027968453043563168,
"acc_norm": 0.3262411347517731,
"acc_norm_stderr": 0.027968453043563168
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.03350991604696043,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.03350991604696043
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2994413407821229,
"acc_stderr": 0.015318257745976708,
"acc_norm": 0.2994413407821229,
"acc_norm_stderr": 0.015318257745976708
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.4227941176470588,
"acc_stderr": 0.030008562845003476,
"acc_norm": 0.4227941176470588,
"acc_norm_stderr": 0.030008562845003476
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.4163265306122449,
"acc_stderr": 0.031557828165561644,
"acc_norm": 0.4163265306122449,
"acc_norm_stderr": 0.031557828165561644
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.5907172995780591,
"acc_stderr": 0.032007041833595914,
"acc_norm": 0.5907172995780591,
"acc_norm_stderr": 0.032007041833595914
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.3318122555410691,
"acc_stderr": 0.012026088259897625,
"acc_norm": 0.3318122555410691,
"acc_norm_stderr": 0.012026088259897625
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.49019607843137253,
"acc_stderr": 0.03508637358630573,
"acc_norm": 0.49019607843137253,
"acc_norm_stderr": 0.03508637358630573
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.45454545454545453,
"acc_stderr": 0.03888176921674099,
"acc_norm": 0.45454545454545453,
"acc_norm_stderr": 0.03888176921674099
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2839657282741738,
"mc1_stderr": 0.01578537085839671,
"mc2": 0.47210945803385745,
"mc2_stderr": 0.015515090553210324
},
"harness|ko_commongen_v2|2": {
"acc": 0.4757969303423849,
"acc_stderr": 0.017170202466520748,
"acc_norm": 0.5726092089728453,
"acc_norm_stderr": 0.017008129844823153
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Herry443/Mistral-7B-KNUT-ref",
"model_sha": "90de3478ca2a99cda0999bcb915fffb695359898",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}