{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.3267918088737201,
"acc_stderr": 0.013706665975587336,
"acc_norm": 0.38139931740614336,
"acc_norm_stderr": 0.01419438908668526
},
"harness|ko_hellaswag|10": {
"acc": 0.364070902210715,
"acc_stderr": 0.0048018528813297484,
"acc_norm": 0.46415056761601275,
"acc_norm_stderr": 0.004976939333240076
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.42105263157894735,
"acc_stderr": 0.037867207062342145,
"acc_norm": 0.42105263157894735,
"acc_norm_stderr": 0.037867207062342145
},
"harness|ko_mmlu_management|5": {
"acc": 0.49514563106796117,
"acc_stderr": 0.049505043821289195,
"acc_norm": 0.49514563106796117,
"acc_norm_stderr": 0.049505043821289195
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.4661558109833972,
"acc_stderr": 0.0178389560091368,
"acc_norm": 0.4661558109833972,
"acc_norm_stderr": 0.0178389560091368
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.04188307537595853,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.04188307537595853
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.37446808510638296,
"acc_stderr": 0.03163910665367291,
"acc_norm": 0.37446808510638296,
"acc_norm_stderr": 0.03163910665367291
},
"harness|ko_mmlu_virology|5": {
"acc": 0.3795180722891566,
"acc_stderr": 0.037777988227480165,
"acc_norm": 0.3795180722891566,
"acc_norm_stderr": 0.037777988227480165
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.4565916398713826,
"acc_stderr": 0.028290869054197598,
"acc_norm": 0.4565916398713826,
"acc_norm_stderr": 0.028290869054197598
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.4439461883408072,
"acc_stderr": 0.03334625674242728,
"acc_norm": 0.4439461883408072,
"acc_norm_stderr": 0.03334625674242728
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.4351145038167939,
"acc_stderr": 0.04348208051644858,
"acc_norm": 0.4351145038167939,
"acc_norm_stderr": 0.04348208051644858
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.5,
"acc_stderr": 0.035623524993954825,
"acc_norm": 0.5,
"acc_norm_stderr": 0.035623524993954825
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4068965517241379,
"acc_stderr": 0.04093793981266237,
"acc_norm": 0.4068965517241379,
"acc_norm_stderr": 0.04093793981266237
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.04158307533083286,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.04158307533083286
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.36554621848739494,
"acc_stderr": 0.03128217706368461,
"acc_norm": 0.36554621848739494,
"acc_norm_stderr": 0.03128217706368461
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.3974358974358974,
"acc_stderr": 0.024811920017903836,
"acc_norm": 0.3974358974358974,
"acc_norm_stderr": 0.024811920017903836
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.03430462416103872,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.03430462416103872
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.45161290322580644,
"acc_stderr": 0.02831050034856839,
"acc_norm": 0.45161290322580644,
"acc_norm_stderr": 0.02831050034856839
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.6025641025641025,
"acc_stderr": 0.03205953453789293,
"acc_norm": 0.6025641025641025,
"acc_norm_stderr": 0.03205953453789293
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.4,
"acc_stderr": 0.030151134457776296,
"acc_norm": 0.4,
"acc_norm_stderr": 0.030151134457776296
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.4909090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.4909090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.25555555555555554,
"acc_stderr": 0.02659393910184407,
"acc_norm": 0.25555555555555554,
"acc_norm_stderr": 0.02659393910184407
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.2582781456953642,
"acc_stderr": 0.035737053147634576,
"acc_norm": 0.2582781456953642,
"acc_norm_stderr": 0.035737053147634576
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.5223880597014925,
"acc_stderr": 0.03531987930208731,
"acc_norm": 0.5223880597014925,
"acc_norm_stderr": 0.03531987930208731
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.3352601156069364,
"acc_stderr": 0.035995863012470784,
"acc_norm": 0.3352601156069364,
"acc_norm_stderr": 0.035995863012470784
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.2830687830687831,
"acc_stderr": 0.023201392938194978,
"acc_norm": 0.2830687830687831,
"acc_norm_stderr": 0.023201392938194978
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.039420826399272135,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.039420826399272135
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.43352601156069365,
"acc_stderr": 0.026680134761679214,
"acc_norm": 0.43352601156069365,
"acc_norm_stderr": 0.026680134761679214
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.4110429447852761,
"acc_stderr": 0.038656978537853624,
"acc_norm": 0.4110429447852761,
"acc_norm_stderr": 0.038656978537853624
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.3950617283950617,
"acc_stderr": 0.027201117666925657,
"acc_norm": 0.3950617283950617,
"acc_norm_stderr": 0.027201117666925657
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542129,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542129
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.41450777202072536,
"acc_stderr": 0.03555300319557673,
"acc_norm": 0.41450777202072536,
"acc_norm_stderr": 0.03555300319557673
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.04339138322579858,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.04339138322579858
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.42201834862385323,
"acc_stderr": 0.02117499140776317,
"acc_norm": 0.42201834862385323,
"acc_norm_stderr": 0.02117499140776317
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.2777777777777778,
"acc_stderr": 0.040061680838488774,
"acc_norm": 0.2777777777777778,
"acc_norm_stderr": 0.040061680838488774
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.028358956313423545,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.028358956313423545
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.628099173553719,
"acc_stderr": 0.04412015806624504,
"acc_norm": 0.628099173553719,
"acc_norm_stderr": 0.04412015806624504
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.04040311062490436,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.04040311062490436
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.3415032679738562,
"acc_stderr": 0.019184639328092487,
"acc_norm": 0.3415032679738562,
"acc_norm_stderr": 0.019184639328092487
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.29432624113475175,
"acc_stderr": 0.02718712701150381,
"acc_norm": 0.29432624113475175,
"acc_norm_stderr": 0.02718712701150381
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.22321428571428573,
"acc_stderr": 0.039523019677025116,
"acc_norm": 0.22321428571428573,
"acc_norm_stderr": 0.039523019677025116
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02988691054762697,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02988691054762697
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.34558823529411764,
"acc_stderr": 0.02888819310398866,
"acc_norm": 0.34558823529411764,
"acc_norm_stderr": 0.02888819310398866
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.363265306122449,
"acc_stderr": 0.030789051139030802,
"acc_norm": 0.363265306122449,
"acc_norm_stderr": 0.030789051139030802
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.45147679324894513,
"acc_stderr": 0.032393600173974704,
"acc_norm": 0.45147679324894513,
"acc_norm_stderr": 0.032393600173974704
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.29335071707953064,
"acc_stderr": 0.011628520449582075,
"acc_norm": 0.29335071707953064,
"acc_norm_stderr": 0.011628520449582075
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.3872549019607843,
"acc_stderr": 0.03418931233833343,
"acc_norm": 0.3872549019607843,
"acc_norm_stderr": 0.03418931233833343
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.03851716319398395,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.03851716319398395
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.24357405140758873,
"mc1_stderr": 0.015026354824910782,
"mc2": 0.408852370253922,
"mc2_stderr": 0.015158662984848508
},
"harness|ko_commongen_v2|2": {
"acc": 0.43211334120425027,
"acc_stderr": 0.017031170198851746,
"acc_norm": 0.5041322314049587,
"acc_norm_stderr": 0.017189767032130817
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "Ja3ck/llama-2-13b-instruct-Y24-v1",
"model_sha": "edce003ff6a63c6a225564d7763a89ade6eaa15d",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}