{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.2738907849829352,
"acc_stderr": 0.013032004972989503,
"acc_norm": 0.3054607508532423,
"acc_norm_stderr": 0.013460080478002505
},
"harness|ko_hellaswag|10": {
"acc": 0.3260306711810396,
"acc_stderr": 0.004678006403691725,
"acc_norm": 0.40021907986456884,
"acc_norm_stderr": 0.004889413126208774
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.32748538011695905,
"acc_stderr": 0.035993357714560276,
"acc_norm": 0.32748538011695905,
"acc_norm_stderr": 0.035993357714560276
},
"harness|ko_mmlu_management|5": {
"acc": 0.32038834951456313,
"acc_stderr": 0.0462028408228004,
"acc_norm": 0.32038834951456313,
"acc_norm_stderr": 0.0462028408228004
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.3282247765006386,
"acc_stderr": 0.01679168564019289,
"acc_norm": 0.3282247765006386,
"acc_norm_stderr": 0.01679168564019289
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.26666666666666666,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.038201699145179055
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.33617021276595743,
"acc_stderr": 0.030881618520676942,
"acc_norm": 0.33617021276595743,
"acc_norm_stderr": 0.030881618520676942
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3433734939759036,
"acc_stderr": 0.03696584317010601,
"acc_norm": 0.3433734939759036,
"acc_norm_stderr": 0.03696584317010601
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.3440514469453376,
"acc_stderr": 0.026981478043648026,
"acc_norm": 0.3440514469453376,
"acc_norm_stderr": 0.026981478043648026
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.3632286995515695,
"acc_stderr": 0.03227790442850499,
"acc_norm": 0.3632286995515695,
"acc_norm_stderr": 0.03227790442850499
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.35877862595419846,
"acc_stderr": 0.04206739313864908,
"acc_norm": 0.35877862595419846,
"acc_norm_stderr": 0.04206739313864908
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3434343434343434,
"acc_stderr": 0.03383201223244442,
"acc_norm": 0.3434343434343434,
"acc_norm_stderr": 0.03383201223244442
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.33793103448275863,
"acc_stderr": 0.03941707632064889,
"acc_norm": 0.33793103448275863,
"acc_norm_stderr": 0.03941707632064889
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.3487394957983193,
"acc_stderr": 0.030956636328566548,
"acc_norm": 0.3487394957983193,
"acc_norm_stderr": 0.030956636328566548
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.31025641025641026,
"acc_stderr": 0.0234546748894043,
"acc_norm": 0.31025641025641026,
"acc_norm_stderr": 0.0234546748894043
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932268,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.044143436668549335,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.044143436668549335
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.28078817733990147,
"acc_stderr": 0.03161856335358611,
"acc_norm": 0.28078817733990147,
"acc_norm_stderr": 0.03161856335358611
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.3709677419354839,
"acc_stderr": 0.027480541887953593,
"acc_norm": 0.3709677419354839,
"acc_norm_stderr": 0.027480541887953593
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5170940170940171,
"acc_stderr": 0.032736940493481824,
"acc_norm": 0.5170940170940171,
"acc_norm_stderr": 0.032736940493481824
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.3018867924528302,
"acc_stderr": 0.02825420034443867,
"acc_norm": 0.3018867924528302,
"acc_norm_stderr": 0.02825420034443867
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.044942908662520875,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.044942908662520875
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.27037037037037037,
"acc_stderr": 0.02708037281514566,
"acc_norm": 0.27037037037037037,
"acc_norm_stderr": 0.02708037281514566
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.48258706467661694,
"acc_stderr": 0.03533389234739245,
"acc_norm": 0.48258706467661694,
"acc_norm_stderr": 0.03533389234739245
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.30057803468208094,
"acc_stderr": 0.03496101481191181,
"acc_norm": 0.30057803468208094,
"acc_norm_stderr": 0.03496101481191181
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02256989707491841,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02256989707491841
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.2916666666666667,
"acc_stderr": 0.03800968060554859,
"acc_norm": 0.2916666666666667,
"acc_norm_stderr": 0.03800968060554859
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.3179190751445087,
"acc_stderr": 0.025070713719153172,
"acc_norm": 0.3179190751445087,
"acc_norm_stderr": 0.025070713719153172
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.34355828220858897,
"acc_stderr": 0.037311335196738925,
"acc_norm": 0.34355828220858897,
"acc_norm_stderr": 0.037311335196738925
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.02622964917882116,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.02622964917882116
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.33678756476683935,
"acc_stderr": 0.034107802518361846,
"acc_norm": 0.33678756476683935,
"acc_norm_stderr": 0.034107802518361846
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.0433913832257986,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.0433913832257986
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3211009174311927,
"acc_stderr": 0.020018149772733747,
"acc_norm": 0.3211009174311927,
"acc_norm_stderr": 0.020018149772733747
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.041634530313028585,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.041634530313028585
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3758169934640523,
"acc_stderr": 0.027732834353363947,
"acc_norm": 0.3758169934640523,
"acc_norm_stderr": 0.027732834353363947
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.44,
"acc_stderr": 0.049888765156985884,
"acc_norm": 0.44,
"acc_norm_stderr": 0.049888765156985884
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.4049586776859504,
"acc_stderr": 0.04481137755942469,
"acc_norm": 0.4049586776859504,
"acc_norm_stderr": 0.04481137755942469
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.2565789473684211,
"acc_stderr": 0.0355418036802569,
"acc_norm": 0.2565789473684211,
"acc_norm_stderr": 0.0355418036802569
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.018607552131279834,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.018607552131279834
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.2765957446808511,
"acc_stderr": 0.026684564340460994,
"acc_norm": 0.2765957446808511,
"acc_norm_stderr": 0.026684564340460994
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467763,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467763
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.41203703703703703,
"acc_stderr": 0.03356787758160834,
"acc_norm": 0.41203703703703703,
"acc_norm_stderr": 0.03356787758160834
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574892,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574892
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.33088235294117646,
"acc_stderr": 0.02858270975389844,
"acc_norm": 0.33088235294117646,
"acc_norm_stderr": 0.02858270975389844
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.2938775510204082,
"acc_stderr": 0.02916273841024978,
"acc_norm": 0.2938775510204082,
"acc_norm_stderr": 0.02916273841024978
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.4177215189873418,
"acc_stderr": 0.032103530322412685,
"acc_norm": 0.4177215189873418,
"acc_norm_stderr": 0.032103530322412685
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.26401564537157757,
"acc_stderr": 0.011258435537723816,
"acc_norm": 0.26401564537157757,
"acc_norm_stderr": 0.011258435537723816
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.03166009679399812,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.03166009679399812
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.033175059300091805,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.033175059300091805
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156467,
"mc2": 0.4346601144729828,
"mc2_stderr": 0.015485642516678326
},
"harness|ko_commongen_v2|2": {
"acc": 0.2727272727272727,
"acc_stderr": 0.01531185311030035,
"acc_norm": 0.34946871310507677,
"acc_norm_stderr": 0.016392797085769843
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Herry443/Mistral-7B-KNUT-v0.3",
"model_sha": "089a962c7ef124af537742bd25034c601f264fae",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}