{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.295221843003413,
"acc_stderr": 0.013329750293382316,
"acc_norm": 0.3378839590443686,
"acc_norm_stderr": 0.013822047922283516
},
"harness|ko_hellaswag|10": {
"acc": 0.3476399123680542,
"acc_stderr": 0.004752476997887829,
"acc_norm": 0.434973112925712,
"acc_norm_stderr": 0.004947402907996247
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.49707602339181284,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.49707602339181284,
"acc_norm_stderr": 0.03834759370936839
},
"harness|ko_mmlu_management|5": {
"acc": 0.39805825242718446,
"acc_stderr": 0.0484674825397724,
"acc_norm": 0.39805825242718446,
"acc_norm_stderr": 0.0484674825397724
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4112388250319285,
"acc_stderr": 0.017595971908056573,
"acc_norm": 0.4112388250319285,
"acc_norm_stderr": 0.017595971908056573
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.041153246103369526
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.3617021276595745,
"acc_stderr": 0.03141082197596241,
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.03141082197596241
},
"harness|ko_mmlu_virology|5": {
"acc": 0.30120481927710846,
"acc_stderr": 0.03571609230053481,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.03571609230053481
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.44694533762057875,
"acc_stderr": 0.028237769422085335,
"acc_norm": 0.44694533762057875,
"acc_norm_stderr": 0.028237769422085335
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.40358744394618834,
"acc_stderr": 0.03292802819330313,
"acc_norm": 0.40358744394618834,
"acc_norm_stderr": 0.03292802819330313
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.3282442748091603,
"acc_stderr": 0.041184385658062976,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.041184385658062976
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.3181818181818182,
"acc_stderr": 0.03318477333845331,
"acc_norm": 0.3181818181818182,
"acc_norm_stderr": 0.03318477333845331
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.35172413793103446,
"acc_stderr": 0.03979236637497411,
"acc_norm": 0.35172413793103446,
"acc_norm_stderr": 0.03979236637497411
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.041583075330832865,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.36554621848739494,
"acc_stderr": 0.03128217706368461,
"acc_norm": 0.36554621848739494,
"acc_norm_stderr": 0.03128217706368461
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3564102564102564,
"acc_stderr": 0.0242831405294673,
"acc_norm": 0.3564102564102564,
"acc_norm_stderr": 0.0242831405294673
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.3611111111111111,
"acc_stderr": 0.04643454608906275,
"acc_norm": 0.3611111111111111,
"acc_norm_stderr": 0.04643454608906275
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3448275862068966,
"acc_stderr": 0.03344283744280457,
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03344283744280457
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.36451612903225805,
"acc_stderr": 0.02737987122994325,
"acc_norm": 0.36451612903225805,
"acc_norm_stderr": 0.02737987122994325
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.5854700854700855,
"acc_stderr": 0.0322739656762378,
"acc_norm": 0.5854700854700855,
"acc_norm_stderr": 0.0322739656762378
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.32452830188679244,
"acc_stderr": 0.028815615713432118,
"acc_norm": 0.32452830188679244,
"acc_norm_stderr": 0.028815615713432118
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4818181818181818,
"acc_stderr": 0.04785964010794916,
"acc_norm": 0.4818181818181818,
"acc_norm_stderr": 0.04785964010794916
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.02831753349606648,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.02831753349606648
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.46766169154228854,
"acc_stderr": 0.035281314729336065,
"acc_norm": 0.46766169154228854,
"acc_norm_stderr": 0.035281314729336065
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.31213872832369943,
"acc_stderr": 0.035331333893236574,
"acc_norm": 0.31213872832369943,
"acc_norm_stderr": 0.035331333893236574
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.024130158299762606,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.024130158299762606
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3125,
"acc_stderr": 0.038760854559127644,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.038760854559127644
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.23,
"acc_stderr": 0.04229525846816505,
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816505
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.41329479768786126,
"acc_stderr": 0.02651126136940924,
"acc_norm": 0.41329479768786126,
"acc_norm_stderr": 0.02651126136940924
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.3496932515337423,
"acc_stderr": 0.03746668325470021,
"acc_norm": 0.3496932515337423,
"acc_norm_stderr": 0.03746668325470021
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.39197530864197533,
"acc_stderr": 0.027163686038271233,
"acc_norm": 0.39197530864197533,
"acc_norm_stderr": 0.027163686038271233
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.41968911917098445,
"acc_stderr": 0.035615873276858855,
"acc_norm": 0.41968911917098445,
"acc_norm_stderr": 0.035615873276858855
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.2543859649122807,
"acc_stderr": 0.0409698513984367,
"acc_norm": 0.2543859649122807,
"acc_norm_stderr": 0.0409698513984367
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.3871559633027523,
"acc_stderr": 0.02088423199264345,
"acc_norm": 0.3871559633027523,
"acc_norm_stderr": 0.02088423199264345
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.042857142857142816,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.042857142857142816
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.027914055510468,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.027914055510468
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.5702479338842975,
"acc_stderr": 0.04519082021319772,
"acc_norm": 0.5702479338842975,
"acc_norm_stderr": 0.04519082021319772
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.2565789473684211,
"acc_stderr": 0.0355418036802569,
"acc_norm": 0.2565789473684211,
"acc_norm_stderr": 0.0355418036802569
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.018771683893528183,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.018771683893528183
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.31560283687943264,
"acc_stderr": 0.027724989449509314,
"acc_norm": 0.31560283687943264,
"acc_norm_stderr": 0.027724989449509314
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.031141447823536048,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.031141447823536048
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.22793296089385476,
"acc_stderr": 0.014030149950805097,
"acc_norm": 0.22793296089385476,
"acc_norm_stderr": 0.014030149950805097
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.28308823529411764,
"acc_stderr": 0.02736586113151381,
"acc_norm": 0.28308823529411764,
"acc_norm_stderr": 0.02736586113151381
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.24081632653061225,
"acc_stderr": 0.027372942201788153,
"acc_norm": 0.24081632653061225,
"acc_norm_stderr": 0.027372942201788153
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.48945147679324896,
"acc_stderr": 0.032539983791662855,
"acc_norm": 0.48945147679324896,
"acc_norm_stderr": 0.032539983791662855
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.31877444589308995,
"acc_stderr": 0.011901895635786088,
"acc_norm": 0.31877444589308995,
"acc_norm_stderr": 0.011901895635786088
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.31862745098039214,
"acc_stderr": 0.0327028718148208,
"acc_norm": 0.31862745098039214,
"acc_norm_stderr": 0.0327028718148208
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.3939393939393939,
"acc_stderr": 0.0381549430868893,
"acc_norm": 0.3939393939393939,
"acc_norm_stderr": 0.0381549430868893
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.2741738066095471,
"mc1_stderr": 0.015616518497219374,
"mc2": 0.4600089007139919,
"mc2_stderr": 0.015856276729730875
},
"harness|ko_commongen_v2|2": {
"acc": 0.24321133412042503,
"acc_stderr": 0.014750068360453263,
"acc_norm": 0.2798110979929162,
"acc_norm_stderr": 0.015433715795427778
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined",
"model_sha": "a9340fcc369bba2e0200a3a378078fa14f4075b3",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}