results/JaeyeonKang/CCK-v1.0.0-DPOM/result_2024-01-04 09:58:29.json
{
"results": {
"harness|ko_arc_challenge|25": {
"acc": 0.4138225255972696,
"acc_stderr": 0.014392730009221007,
"acc_norm": 0.4863481228668942,
"acc_norm_stderr": 0.014605943429860957
},
"harness|ko_hellaswag|10": {
"acc": 0.4230233021310496,
"acc_stderr": 0.004930293787545614,
"acc_norm": 0.5758812985461064,
"acc_norm_stderr": 0.004931984642695337
},
"harness|ko_mmlu_world_religions|5": {
"acc": 0.5730994152046783,
"acc_stderr": 0.03793620616529916,
"acc_norm": 0.5730994152046783,
"acc_norm_stderr": 0.03793620616529916
},
"harness|ko_mmlu_management|5": {
"acc": 0.6407766990291263,
"acc_stderr": 0.04750458399041697,
"acc_norm": 0.6407766990291263,
"acc_norm_stderr": 0.04750458399041697
},
"harness|ko_mmlu_miscellaneous|5": {
"acc": 0.648786717752235,
"acc_stderr": 0.017069982051499427,
"acc_norm": 0.648786717752235,
"acc_norm_stderr": 0.017069982051499427
},
"harness|ko_mmlu_anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750573,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750573
},
"harness|ko_mmlu_abstract_algebra|5": {
"acc": 0.19,
"acc_stderr": 0.03942772444036623,
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036623
},
"harness|ko_mmlu_conceptual_physics|5": {
"acc": 0.4723404255319149,
"acc_stderr": 0.03263597118409769,
"acc_norm": 0.4723404255319149,
"acc_norm_stderr": 0.03263597118409769
},
"harness|ko_mmlu_virology|5": {
"acc": 0.4759036144578313,
"acc_stderr": 0.03887971849597264,
"acc_norm": 0.4759036144578313,
"acc_norm_stderr": 0.03887971849597264
},
"harness|ko_mmlu_philosophy|5": {
"acc": 0.6205787781350482,
"acc_stderr": 0.02755994980234782,
"acc_norm": 0.6205787781350482,
"acc_norm_stderr": 0.02755994980234782
},
"harness|ko_mmlu_human_aging|5": {
"acc": 0.5829596412556054,
"acc_stderr": 0.03309266936071721,
"acc_norm": 0.5829596412556054,
"acc_norm_stderr": 0.03309266936071721
},
"harness|ko_mmlu_human_sexuality|5": {
"acc": 0.6030534351145038,
"acc_stderr": 0.04291135671009224,
"acc_norm": 0.6030534351145038,
"acc_norm_stderr": 0.04291135671009224
},
"harness|ko_mmlu_medical_genetics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|ko_mmlu_high_school_geography|5": {
"acc": 0.702020202020202,
"acc_stderr": 0.032586303838365555,
"acc_norm": 0.702020202020202,
"acc_norm_stderr": 0.032586303838365555
},
"harness|ko_mmlu_electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482758
},
"harness|ko_mmlu_college_physics|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.047240073523838876,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.047240073523838876
},
"harness|ko_mmlu_high_school_microeconomics|5": {
"acc": 0.6092436974789915,
"acc_stderr": 0.031693802357129965,
"acc_norm": 0.6092436974789915,
"acc_norm_stderr": 0.031693802357129965
},
"harness|ko_mmlu_high_school_macroeconomics|5": {
"acc": 0.5487179487179488,
"acc_stderr": 0.02523038123893484,
"acc_norm": 0.5487179487179488,
"acc_norm_stderr": 0.02523038123893484
},
"harness|ko_mmlu_computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|ko_mmlu_global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|ko_mmlu_jurisprudence|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.04643454608906275,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.04643454608906275
},
"harness|ko_mmlu_high_school_chemistry|5": {
"acc": 0.39901477832512317,
"acc_stderr": 0.03445487686264715,
"acc_norm": 0.39901477832512317,
"acc_norm_stderr": 0.03445487686264715
},
"harness|ko_mmlu_high_school_biology|5": {
"acc": 0.5838709677419355,
"acc_stderr": 0.02804098138076153,
"acc_norm": 0.5838709677419355,
"acc_norm_stderr": 0.02804098138076153
},
"harness|ko_mmlu_marketing|5": {
"acc": 0.7649572649572649,
"acc_stderr": 0.027778835904935427,
"acc_norm": 0.7649572649572649,
"acc_norm_stderr": 0.027778835904935427
},
"harness|ko_mmlu_clinical_knowledge|5": {
"acc": 0.5358490566037736,
"acc_stderr": 0.030693675018458006,
"acc_norm": 0.5358490566037736,
"acc_norm_stderr": 0.030693675018458006
},
"harness|ko_mmlu_public_relations|5": {
"acc": 0.6,
"acc_stderr": 0.0469237132203465,
"acc_norm": 0.6,
"acc_norm_stderr": 0.0469237132203465
},
"harness|ko_mmlu_high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815642,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815642
},
"harness|ko_mmlu_high_school_physics|5": {
"acc": 0.3973509933774834,
"acc_stderr": 0.03995524007681682,
"acc_norm": 0.3973509933774834,
"acc_norm_stderr": 0.03995524007681682
},
"harness|ko_mmlu_sociology|5": {
"acc": 0.7313432835820896,
"acc_stderr": 0.031343283582089536,
"acc_norm": 0.7313432835820896,
"acc_norm_stderr": 0.031343283582089536
},
"harness|ko_mmlu_college_medicine|5": {
"acc": 0.5028901734104047,
"acc_stderr": 0.038124005659748335,
"acc_norm": 0.5028901734104047,
"acc_norm_stderr": 0.038124005659748335
},
"harness|ko_mmlu_elementary_mathematics|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.02559185776138219,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.02559185776138219
},
"harness|ko_mmlu_college_biology|5": {
"acc": 0.4861111111111111,
"acc_stderr": 0.041795966175810016,
"acc_norm": 0.4861111111111111,
"acc_norm_stderr": 0.041795966175810016
},
"harness|ko_mmlu_college_chemistry|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|ko_mmlu_us_foreign_policy|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|ko_mmlu_moral_disputes|5": {
"acc": 0.569364161849711,
"acc_stderr": 0.02665880027367238,
"acc_norm": 0.569364161849711,
"acc_norm_stderr": 0.02665880027367238
},
"harness|ko_mmlu_logical_fallacies|5": {
"acc": 0.5153374233128835,
"acc_stderr": 0.039265223787088445,
"acc_norm": 0.5153374233128835,
"acc_norm_stderr": 0.039265223787088445
},
"harness|ko_mmlu_prehistory|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.027237415094592488,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.027237415094592488
},
"harness|ko_mmlu_college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|ko_mmlu_high_school_government_and_politics|5": {
"acc": 0.694300518134715,
"acc_stderr": 0.033248379397581594,
"acc_norm": 0.694300518134715,
"acc_norm_stderr": 0.033248379397581594
},
"harness|ko_mmlu_econometrics|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.046774730044912005,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.046774730044912005
},
"harness|ko_mmlu_high_school_psychology|5": {
"acc": 0.6605504587155964,
"acc_stderr": 0.02030210934266235,
"acc_norm": 0.6605504587155964,
"acc_norm_stderr": 0.02030210934266235
},
"harness|ko_mmlu_formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|ko_mmlu_nutrition|5": {
"acc": 0.5686274509803921,
"acc_stderr": 0.02835895631342355,
"acc_norm": 0.5686274509803921,
"acc_norm_stderr": 0.02835895631342355
},
"harness|ko_mmlu_business_ethics|5": {
"acc": 0.58,
"acc_stderr": 0.04960449637488583,
"acc_norm": 0.58,
"acc_norm_stderr": 0.04960449637488583
},
"harness|ko_mmlu_international_law|5": {
"acc": 0.7355371900826446,
"acc_stderr": 0.04026187527591206,
"acc_norm": 0.7355371900826446,
"acc_norm_stderr": 0.04026187527591206
},
"harness|ko_mmlu_astronomy|5": {
"acc": 0.5328947368421053,
"acc_stderr": 0.040601270352363966,
"acc_norm": 0.5328947368421053,
"acc_norm_stderr": 0.040601270352363966
},
"harness|ko_mmlu_professional_psychology|5": {
"acc": 0.5032679738562091,
"acc_stderr": 0.020227402794434864,
"acc_norm": 0.5032679738562091,
"acc_norm_stderr": 0.020227402794434864
},
"harness|ko_mmlu_professional_accounting|5": {
"acc": 0.41843971631205673,
"acc_stderr": 0.02942799403941999,
"acc_norm": 0.41843971631205673,
"acc_norm_stderr": 0.02942799403941999
},
"harness|ko_mmlu_machine_learning|5": {
"acc": 0.4017857142857143,
"acc_stderr": 0.04653333146973646,
"acc_norm": 0.4017857142857143,
"acc_norm_stderr": 0.04653333146973646
},
"harness|ko_mmlu_high_school_statistics|5": {
"acc": 0.5509259259259259,
"acc_stderr": 0.033922384053216154,
"acc_norm": 0.5509259259259259,
"acc_norm_stderr": 0.033922384053216154
},
"harness|ko_mmlu_moral_scenarios|5": {
"acc": 0.3016759776536313,
"acc_stderr": 0.015350767572220285,
"acc_norm": 0.3016759776536313,
"acc_norm_stderr": 0.015350767572220285
},
"harness|ko_mmlu_college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|ko_mmlu_high_school_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.65,
"acc_norm_stderr": 0.047937248544110196
},
"harness|ko_mmlu_professional_medicine|5": {
"acc": 0.5183823529411765,
"acc_stderr": 0.03035230339535196,
"acc_norm": 0.5183823529411765,
"acc_norm_stderr": 0.03035230339535196
},
"harness|ko_mmlu_security_studies|5": {
"acc": 0.5959183673469388,
"acc_stderr": 0.031414708025865885,
"acc_norm": 0.5959183673469388,
"acc_norm_stderr": 0.031414708025865885
},
"harness|ko_mmlu_high_school_world_history|5": {
"acc": 0.7510548523206751,
"acc_stderr": 0.028146970599422647,
"acc_norm": 0.7510548523206751,
"acc_norm_stderr": 0.028146970599422647
},
"harness|ko_mmlu_professional_law|5": {
"acc": 0.4198174706649283,
"acc_stderr": 0.01260496081608737,
"acc_norm": 0.4198174706649283,
"acc_norm_stderr": 0.01260496081608737
},
"harness|ko_mmlu_high_school_us_history|5": {
"acc": 0.6715686274509803,
"acc_stderr": 0.03296245110172229,
"acc_norm": 0.6715686274509803,
"acc_norm_stderr": 0.03296245110172229
},
"harness|ko_mmlu_high_school_european_history|5": {
"acc": 0.703030303030303,
"acc_stderr": 0.035679697722680474,
"acc_norm": 0.703030303030303,
"acc_norm_stderr": 0.035679697722680474
},
"harness|ko_truthfulqa_mc|0": {
"mc1": 0.4222766217870257,
"mc1_stderr": 0.017290733254248167,
"mc2": 0.6005885460566108,
"mc2_stderr": 0.016071484830998732
},
"harness|ko_commongen_v2|2": {
"acc": 0.6257378984651711,
"acc_stderr": 0.016637917789798746,
"acc_norm": 0.6375442739079102,
"acc_norm_stderr": 0.016527131240453696
}
},
"versions": {
"all": 0,
"harness|ko_arc_challenge|25": 0,
"harness|ko_hellaswag|10": 0,
"harness|ko_mmlu_world_religions|5": 1,
"harness|ko_mmlu_management|5": 1,
"harness|ko_mmlu_miscellaneous|5": 1,
"harness|ko_mmlu_anatomy|5": 1,
"harness|ko_mmlu_abstract_algebra|5": 1,
"harness|ko_mmlu_conceptual_physics|5": 1,
"harness|ko_mmlu_virology|5": 1,
"harness|ko_mmlu_philosophy|5": 1,
"harness|ko_mmlu_human_aging|5": 1,
"harness|ko_mmlu_human_sexuality|5": 1,
"harness|ko_mmlu_medical_genetics|5": 1,
"harness|ko_mmlu_high_school_geography|5": 1,
"harness|ko_mmlu_electrical_engineering|5": 1,
"harness|ko_mmlu_college_physics|5": 1,
"harness|ko_mmlu_high_school_microeconomics|5": 1,
"harness|ko_mmlu_high_school_macroeconomics|5": 1,
"harness|ko_mmlu_computer_security|5": 1,
"harness|ko_mmlu_global_facts|5": 1,
"harness|ko_mmlu_jurisprudence|5": 1,
"harness|ko_mmlu_high_school_chemistry|5": 1,
"harness|ko_mmlu_high_school_biology|5": 1,
"harness|ko_mmlu_marketing|5": 1,
"harness|ko_mmlu_clinical_knowledge|5": 1,
"harness|ko_mmlu_public_relations|5": 1,
"harness|ko_mmlu_high_school_mathematics|5": 1,
"harness|ko_mmlu_high_school_physics|5": 1,
"harness|ko_mmlu_sociology|5": 1,
"harness|ko_mmlu_college_medicine|5": 1,
"harness|ko_mmlu_elementary_mathematics|5": 1,
"harness|ko_mmlu_college_biology|5": 1,
"harness|ko_mmlu_college_chemistry|5": 1,
"harness|ko_mmlu_us_foreign_policy|5": 1,
"harness|ko_mmlu_moral_disputes|5": 1,
"harness|ko_mmlu_logical_fallacies|5": 1,
"harness|ko_mmlu_prehistory|5": 1,
"harness|ko_mmlu_college_mathematics|5": 1,
"harness|ko_mmlu_high_school_government_and_politics|5": 1,
"harness|ko_mmlu_econometrics|5": 1,
"harness|ko_mmlu_high_school_psychology|5": 1,
"harness|ko_mmlu_formal_logic|5": 1,
"harness|ko_mmlu_nutrition|5": 1,
"harness|ko_mmlu_business_ethics|5": 1,
"harness|ko_mmlu_international_law|5": 1,
"harness|ko_mmlu_astronomy|5": 1,
"harness|ko_mmlu_professional_psychology|5": 1,
"harness|ko_mmlu_professional_accounting|5": 1,
"harness|ko_mmlu_machine_learning|5": 1,
"harness|ko_mmlu_high_school_statistics|5": 1,
"harness|ko_mmlu_moral_scenarios|5": 1,
"harness|ko_mmlu_college_computer_science|5": 1,
"harness|ko_mmlu_high_school_computer_science|5": 1,
"harness|ko_mmlu_professional_medicine|5": 1,
"harness|ko_mmlu_security_studies|5": 1,
"harness|ko_mmlu_high_school_world_history|5": 1,
"harness|ko_mmlu_professional_law|5": 1,
"harness|ko_mmlu_high_school_us_history|5": 1,
"harness|ko_mmlu_high_school_european_history|5": 1,
"harness|ko_truthfulqa_mc|0": 0,
"harness|ko_commongen_v2|2": 1
},
"config_general": {
"model_name": "JaeyeonKang/CCK-v1.0.0-DPOM",
"model_sha": "f9c1e428a37fd392a37abc4461fd286978aedc99",
"model_dtype": "torch.float16",
"lighteval_sha": "",
"num_few_shot_default": 0,
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null
}
}